Compare commits


3 Commits

Author SHA1 Message Date
e832fccacb wip 2025-05-03 22:12:15 -07:00
8ab910e18f wip: hash_str 2025-05-03 22:12:15 -07:00
b46fb04a5d directly use rbx-dom by path 2025-05-03 22:12:15 -07:00
82 changed files with 2189 additions and 3240 deletions

.gitignore (vendored, 1 changed line)
View File

@@ -1,2 +1 @@
/target
.zed

Cargo.lock (generated, 1989 changed lines)

File diff suppressed because it is too large.

View File

@@ -25,13 +25,6 @@ resolver = "2"
strip = true
codegen-units = 1
[workspace.lints.rust]
# unsafe_code = "forbid"
# missing_docs = "warn"
# missing_debug_implementations = "warn"
single_use_lifetimes = "warn"
trivial_casts = "warn"
unused_lifetimes = "warn"
unused_qualifications = "warn"
# variant_size_differences = "warn"
unexpected_cfgs = "warn"
[profile.dev]
strip = false
opt-level = 3

View File

@@ -11,7 +11,4 @@ id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_session = { path = "../session", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
wgpu = "27.0.0"
[lints]
workspace = true
wgpu = "25.0.0"

View File

@@ -94,7 +94,7 @@ impl GraphicsCamera{
raw
}
}
impl Default for GraphicsCamera{
impl std::default::Default for GraphicsCamera{
fn default()->Self{
Self{
screen_size:glam::UVec2::ONE,
@@ -167,7 +167,7 @@ impl GraphicsState{
}
pub fn generate_models(&mut self,device:&wgpu::Device,queue:&wgpu::Queue,map:&map::CompleteMap){
//generate texture view per texture
let texture_views:HashMap<model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_views:HashMap<strafesnet_common::model::TextureId,wgpu::TextureView>=map.textures.iter().enumerate().filter_map(|(texture_id,texture_data)|{
let texture_id=model::TextureId::new(texture_id as u32);
let image=match ddsfile::Dds::read(std::io::Cursor::new(texture_data)){
Ok(image)=>image,
@@ -803,7 +803,7 @@ impl GraphicsState{
module:&shader,
entry_point:Some("vs_entity_texture"),
buffers:&[wgpu::VertexBufferLayout{
array_stride:size_of::<GraphicsVertex>() as wgpu::BufferAddress,
array_stride:std::mem::size_of::<GraphicsVertex>() as wgpu::BufferAddress,
step_mode:wgpu::VertexStepMode::Vertex,
attributes:&wgpu::vertex_attr_array![0=>Float32x3,1=>Float32x2,2=>Float32x3,3=>Float32x4],
}],
@@ -953,7 +953,6 @@ impl GraphicsState{
}),
store:wgpu::StoreOp::Store,
},
depth_slice:None,
})],
depth_stencil_attachment:Some(wgpu::RenderPassDepthStencilAttachment{
view:&self.depth_view,

View File

@@ -8,6 +8,3 @@ arrayvec = "0.7.6"
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -18,17 +18,6 @@ impl<T> std::ops::Neg for Body<T>{
}
}
}
impl<T:Copy> std::ops::Neg for &Body<T>{
type Output=Body<T>;
fn neg(self)->Self::Output{
Body{
position:self.position,
velocity:-self.velocity,
acceleration:self.acceleration,
time:-self.time,
}
}
}
impl<T> Body<T>
where Time<T>:Copy,
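For context, the Neg impl for &Body in this hunk negates velocity and time while leaving position and acceleration untouched, which is exactly the substitution that reverses a parabolic trajectory. A minimal float sketch of that identity (illustrative only, not the project's fixed-point types; the values are chosen so the arithmetic is exact):

// p + v*t + a*t^2/2 is unchanged under (v,t) -> (-v,-t), so a body with negated
// velocity and time traces the same positions backwards in time.
fn pos(p: f64, v: f64, a: f64, t: f64) -> f64 {
    p + v * t + 0.5 * a * t * t
}

fn main() {
    let (p, v, a, t) = (1.0, 2.0, -3.0, 0.5);
    assert_eq!(pos(p, v, a, t), pos(p, -v, a, -t));
}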

View File

@@ -1,9 +1,7 @@
use crate::model::{into_giga_time,GigaTime,FEV,MeshQuery,DirectedEdge};
use crate::model::{GigaTime,FEV,MeshQuery,DirectedEdge};
use strafesnet_common::integer::{Fixed,Ratio,vec3::Vector3};
use crate::physics::{Time,Body};
use core::ops::Bound;
enum Transition<M:MeshQuery>{
Miss,
Next(FEV<M>,GigaTime),
@@ -29,47 +27,6 @@ impl<M:MeshQuery> CrawlResult<M>{
}
}
// TODO: move predict_collision_face_out algorithm in here or something
/// check_lower_bound
pub fn low<LhsNum,LhsDen,RhsNum,RhsDen,T>(lower_bound:&Bound<Ratio<LhsNum,LhsDen>>,dt:&Ratio<RhsNum,RhsDen>)->bool
where
RhsNum:Copy,
RhsDen:Copy,
LhsNum:Copy,
LhsDen:Copy,
LhsDen:strafesnet_common::integer::Parity,
RhsDen:strafesnet_common::integer::Parity,
LhsNum:core::ops::Mul<RhsDen,Output=T>,
LhsDen:core::ops::Mul<RhsNum,Output=T>,
T:Ord+Copy,
{
match lower_bound{
Bound::Included(time)=>time.le_ratio(*dt),
Bound::Excluded(time)=>time.lt_ratio(*dt),
Bound::Unbounded=>true,
}
}
/// check_upper_bound
pub fn upp<LhsNum,LhsDen,RhsNum,RhsDen,T>(dt:&Ratio<LhsNum,LhsDen>,upper_bound:&Bound<Ratio<RhsNum,RhsDen>>)->bool
where
RhsNum:Copy,
RhsDen:Copy,
LhsNum:Copy,
LhsDen:Copy,
LhsDen:strafesnet_common::integer::Parity,
RhsDen:strafesnet_common::integer::Parity,
LhsNum:core::ops::Mul<RhsDen,Output=T>,
LhsDen:core::ops::Mul<RhsNum,Output=T>,
T:Ord+Copy,
{
match upper_bound{
Bound::Included(time)=>dt.le_ratio(*time),
Bound::Excluded(time)=>dt.lt_ratio(*time),
Bound::Unbounded=>true,
}
}
impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
where
// This is hardcoded for MinkowskiMesh lol
@@ -78,9 +35,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
M::Vert:Copy,
F:core::ops::Mul<Fixed<1,32>,Output=Fixed<4,128>>,
<F as core::ops::Mul<Fixed<1,32>>>::Output:core::iter::Sum,
M::Offset:core::ops::Sub<<F as std::ops::Mul<Fixed<1,32>>>::Output>,
<M as MeshQuery>::Offset:core::ops::Sub<<F as std::ops::Mul<Fixed<1,32>>>::Output>,
{
fn next_transition(&self,mesh:&M,body:&Body,lower_bound:Bound<GigaTime>,mut upper_bound:Bound<GigaTime>)->Transition<M>{
fn next_transition(&self,body_time:GigaTime,mesh:&M,body:&Body,mut best_time:GigaTime)->Transition<M>{
//conflicting derivative means it crosses in the wrong direction.
//if the transition time is equal to an already tested transition, do not replace the current best.
let mut best_transition=Transition::Miss;
@@ -93,8 +50,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//TODO: use higher precision d value?
//use the mesh transform translation instead of baking it into the d value.
for dt in Fixed::<4,128>::zeroes2((n.dot(body.position)-d)*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_transition=Transition::Hit(face_id,dt);
break;
}
@@ -108,8 +65,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//WARNING: precision is swept under the rug!
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(n.dot(body.position*2-(mesh.vert(v0)+mesh.vert(v1))).wrap_4(),n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_transition=Transition::Next(FEV::Edge(directed_edge_id.as_undirected()),dt);
break;
}
@@ -119,11 +76,10 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
},
&FEV::Edge(edge_id)=>{
//test each face collision time, ignoring roots with zero or conflicting derivative
let edge_n=mesh.edge_n(edge_id);
let edge_verts=mesh.edge_verts(edge_id);
let &[ev0,ev1]=edge_verts.as_ref();
let (v0,v1)=(mesh.vert(ev0),mesh.vert(ev1));
let edge_n=v1-v0;
let delta_pos=body.position*2-(v0+v1);
let delta_pos=body.position*2-(mesh.vert(ev0)+mesh.vert(ev1));
for (i,&edge_face_id) in mesh.edge_faces(edge_id).as_ref().iter().enumerate(){
let face_n=mesh.face_nd(edge_face_id).0;
//edge_n gets parity from the order of edge_faces
@@ -131,8 +87,8 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//WARNING yada yada d *2
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(n.dot(delta_pos).wrap_4(),n.dot(body.velocity).wrap_4()*2,n.dot(body.acceleration).wrap_4()){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
upper_bound=Bound::Included(dt);
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_transition=Transition::Next(FEV::Face(edge_face_id),dt);
break;
}
@@ -143,9 +99,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//vertex normal gets parity from vert index
let n=edge_n*(1-2*(i as i64));
for dt in Fixed::<2,64>::zeroes2((n.dot(body.position-mesh.vert(vert_id)))*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
let dt=Ratio::new(dt.num.widen_4(),dt.den.widen_4());
upper_bound=Bound::Included(dt);
best_time=dt;
best_transition=Transition::Next(FEV::Vert(vert_id),dt);
break;
}
@@ -159,9 +115,9 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
//edge is directed away from vertex, but we want the dot product to turn out negative
let n=-mesh.directed_edge_n(directed_edge_id);
for dt in Fixed::<2,64>::zeroes2((n.dot(body.position-mesh.vert(vert_id)))*2,n.dot(body.velocity)*2,n.dot(body.acceleration)){
if low(&lower_bound,&dt)&&upp(&dt,&upper_bound)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
if body_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
let dt=Ratio::new(dt.num.widen_4(),dt.den.widen_4());
upper_bound=Bound::Included(dt);
best_time=dt;
best_transition=Transition::Next(FEV::Edge(directed_edge_id.as_undirected()),dt);
break;
}
@@ -172,13 +128,19 @@ impl<F:Copy,M:MeshQuery<Normal=Vector3<F>,Offset=Fixed<4,128>>> FEV<M>
}
best_transition
}
pub fn crawl(mut self,mesh:&M,relative_body:&Body,lower_bound:Bound<&Time>,upper_bound:Bound<&Time>)->CrawlResult<M>{
let mut lower_bound=lower_bound.map(|&t|into_giga_time(t,relative_body.time));
let upper_bound=upper_bound.map(|&t|into_giga_time(t,relative_body.time));
pub fn crawl(mut self,mesh:&M,relative_body:&Body,start_time:Time,time_limit:Time)->CrawlResult<M>{
let mut body_time={
let r=(start_time-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
let time_limit={
let r=(time_limit-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
for _ in 0..20{
match self.next_transition(mesh,relative_body,lower_bound,upper_bound){
match self.next_transition(body_time,mesh,relative_body,time_limit){
Transition::Miss=>return CrawlResult::Miss(self),
Transition::Next(next_fev,next_time)=>(self,lower_bound)=(next_fev,Bound::Included(next_time)),
Transition::Next(next_fev,next_time)=>(self,body_time)=(next_fev,next_time),
Transition::Hit(face,time)=>return CrawlResult::Hit(face,time),
}
}
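For reference, the low/upp bound helpers in this hunk are ordinary interval checks over core::ops::Bound, with Included admitting an exact hit and Excluded rejecting it. A standalone sketch using plain i64 in place of the project's Ratio type (names kept, everything else simplified):

use core::ops::Bound;

// check_lower_bound: is dt at or past the lower bound?
fn low(lower_bound: &Bound<i64>, dt: i64) -> bool {
    match lower_bound {
        Bound::Included(t) => *t <= dt,
        Bound::Excluded(t) => *t < dt,
        Bound::Unbounded => true,
    }
}

// check_upper_bound: is dt at or before the upper bound?
fn upp(dt: i64, upper_bound: &Bound<i64>) -> bool {
    match upper_bound {
        Bound::Included(t) => dt <= *t,
        Bound::Excluded(t) => dt < *t,
        Bound::Unbounded => true,
    }
}

fn main() {
    // dt=5 is inside (-inf, 7] but not inside (-inf, 5).
    assert!(low(&Bound::Unbounded, 5) && upp(5, &Bound::Included(7)));
    assert!(!upp(5, &Bound::Excluded(5)));
}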

View File

@@ -1,5 +1,5 @@
use std::collections::{HashSet,HashMap};
use core::ops::{Bound,RangeBounds};
use core::ops::Range;
use strafesnet_common::integer::vec3::Vector3;
use strafesnet_common::model::{self,MeshId,PolygonIter};
use strafesnet_common::integer::{self,vec3,Fixed,Planar64,Planar64Vec3,Ratio};
@@ -68,12 +68,11 @@ pub enum FEV<M:MeshQuery>{
}
//use Unit32 #[repr(C)] for map files
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
#[derive(Clone,Hash,Eq,PartialEq)]
struct Face{
normal:Planar64Vec3,
dot:Planar64,
}
#[derive(Debug)]
struct Vert(Planar64Vec3);
pub trait MeshQuery{
type Face:Copy;
@@ -98,22 +97,18 @@ pub trait MeshQuery{
fn vert_edges(&self,vert_id:Self::Vert)->impl AsRef<[Self::Edge]>;
fn vert_faces(&self,vert_id:Self::Vert)->impl AsRef<[Self::Face]>;
}
#[derive(Debug)]
struct FaceRefs{
edges:Vec<SubmeshDirectedEdgeId>,
//verts:Vec<VertId>,
}
#[derive(Debug)]
struct EdgeRefs{
faces:[SubmeshFaceId;2],//left, right
verts:[SubmeshVertId;2],//bottom, top
}
#[derive(Debug)]
struct VertRefs{
faces:Vec<SubmeshFaceId>,
edges:Vec<SubmeshDirectedEdgeId>,
}
#[derive(Debug)]
pub struct PhysicsMeshData{
//this contains all real and virtual faces used in both the complete mesh and convex submeshes
//faces are sorted such that all faces that belong to the complete mesh appear first, and then
@@ -123,7 +118,6 @@ pub struct PhysicsMeshData{
faces:Vec<Face>,//MeshFaceId indexes this list
verts:Vec<Vert>,//MeshVertId indexes this list
}
#[derive(Debug)]
pub struct PhysicsMeshTopology{
//mapping of local ids to PhysicsMeshData ids
faces:Vec<MeshFaceId>,//SubmeshFaceId indexes this list
@@ -149,12 +143,10 @@ impl From<MeshId> for PhysicsMeshId{
pub struct PhysicsSubmeshId(u32);
pub struct PhysicsMesh{
data:PhysicsMeshData,
// The complete mesh is unused at this time.
// complete_mesh:PhysicsMeshTopology,
// Submeshes are guaranteed to be convex and may contain
// "virtual" faces which are not part of the complete mesh.
// Physics calculations should never resolve to hitting
// a virtual face.
complete_mesh:PhysicsMeshTopology,
//Most objects in roblox maps are already convex, so the list length is 0
//as soon as the mesh is divided into 2 submeshes, the list length jumps to 2.
//length 1 is unnecessary since the complete mesh would be a duplicate of the only submesh, but would still function properly
submeshes:Vec<PhysicsMeshTopology>,
}
impl PhysicsMesh{
@@ -218,24 +210,19 @@ impl PhysicsMesh{
};
Self{
data,
// complete_mesh:mesh_topology.clone(),
submeshes:vec![mesh_topology],
complete_mesh:mesh_topology,
submeshes:Vec::new(),
}
}
pub fn unit_cylinder()->Self{
Self::unit_cube()
}
#[inline]
pub fn complete_mesh(&self)->&PhysicsMeshTopology{
// If there is exactly one submesh, then the complete mesh is identical to it.
if self.submeshes.len()==1{
self.submeshes.first().unwrap()
}else{
panic!("PhysicsMesh complete mesh is not known");
}
pub const fn complete_mesh(&self)->&PhysicsMeshTopology{
&self.complete_mesh
}
#[inline]
pub fn complete_mesh_view(&self)->PhysicsMeshView<'_>{
pub const fn complete_mesh_view(&self)->PhysicsMeshView{
PhysicsMeshView{
data:&self.data,
topology:self.complete_mesh(),
@@ -243,16 +230,21 @@ impl PhysicsMesh{
}
#[inline]
pub fn submeshes(&self)->&[PhysicsMeshTopology]{
&self.submeshes
//the complete mesh is already a convex mesh when len()==0, len()==1 is invalid but will still work
if self.submeshes.len()==0{
std::slice::from_ref(&self.complete_mesh)
}else{
&self.submeshes.as_slice()
}
}
#[inline]
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView<'_>{
pub fn submesh_view(&self,submesh_id:PhysicsSubmeshId)->PhysicsMeshView{
PhysicsMeshView{
data:&self.data,
topology:&self.submeshes()[submesh_id.get() as usize],
}
}
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView<'_>>{
pub fn submesh_views(&self)->impl Iterator<Item=PhysicsMeshView>{
self.submeshes().iter().map(|topology|PhysicsMeshView{
data:&self.data,
topology,
@@ -321,20 +313,14 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
if mesh.unique_pos.len()==0{
return Err(PhysicsMeshError::ZeroVertices);
}
// An empty physics mesh is a waste of resources
if mesh.physics_groups.len()==0{
return Err(PhysicsMeshError::NoPhysicsGroups);
}
let verts=mesh.unique_pos.iter().copied().map(Vert).collect();
//TODO: fix submeshes
//flat map mesh.physics_groups[$1].groups.polys()[$2] as face_id
//lower face_id points to upper face_id
//the same face is not allowed to be in multiple polygon groups
// because SubmeshFaceId -> CompleteMeshFaceId -> SubmeshFaceId is ambiguous
// when multiple SubmeshFaceId point to one MeshFaceId
let mut faces=Vec::new();
let mut face_id_from_face=HashMap::new();
let mesh_topologies:Vec<PhysicsMeshTopology>=mesh.physics_groups.iter().map(|physics_group|{
let mut mesh_topologies:Vec<PhysicsMeshTopology>=mesh.physics_groups.iter().map(|physics_group|{
//construct submesh
let mut submesh_faces=Vec::new();//these contain a map from submeshId->meshId
let mut submesh_verts=Vec::new();
@@ -395,11 +381,15 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
normal:(normal/len as i64).divide().narrow_1().unwrap(),
dot:(dot/(len*len) as i64).narrow_1().unwrap(),
};
let face_id=*face_id_from_face.entry(face).or_insert_with(||{
let face_id=MeshFaceId::new(faces.len() as u32);
faces.push(face);
face_id
});
let face_id=match face_id_from_face.get(&face){
Some(&face_id)=>face_id,
None=>{
let face_id=MeshFaceId::new(faces.len() as u32);
face_id_from_face.insert(face.clone(),face_id);
faces.push(face);
face_id
}
};
submesh_faces.push(face_id);
face_ref_guys.push(FaceRefEdges(face_edges));
}
@@ -407,16 +397,16 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
PhysicsMeshTopology{
faces:submesh_faces,
verts:submesh_verts,
face_topology:face_ref_guys.into_iter().map(|FaceRefEdges(edges)|{
FaceRefs{edges}
face_topology:face_ref_guys.into_iter().map(|face_ref_guy|{
FaceRefs{edges:face_ref_guy.0}
}).collect(),
edge_topology:edge_pool.edge_guys.into_iter().map(|(EdgeRefVerts(verts),EdgeRefFaces(faces))|
EdgeRefs{faces,verts}
edge_topology:edge_pool.edge_guys.into_iter().map(|(edge_ref_verts,edge_ref_faces)|
EdgeRefs{faces:edge_ref_faces.0,verts:edge_ref_verts.0}
).collect(),
vert_topology:vert_ref_guys.into_iter().map(|VertRefGuy{edges,faces}|
vert_topology:vert_ref_guys.into_iter().map(|vert_ref_guy|
VertRefs{
edges:edges.into_iter().collect(),
faces:faces.into_iter().collect(),
edges:vert_ref_guy.edges.into_iter().collect(),
faces:vert_ref_guy.faces.into_iter().collect(),
}
).collect(),
}
@@ -426,13 +416,12 @@ impl TryFrom<&model::Mesh> for PhysicsMesh{
faces,
verts,
},
// complete_mesh:None,
complete_mesh:mesh_topologies.pop().ok_or(PhysicsMeshError::NoPhysicsGroups)?,
submeshes:mesh_topologies,
})
}
}
#[derive(Debug)]
pub struct PhysicsMeshView<'a>{
data:&'a PhysicsMeshData,
topology:&'a PhysicsMeshTopology,
@@ -469,7 +458,6 @@ impl MeshQuery for PhysicsMeshView<'_>{
}
}
#[derive(Debug)]
pub struct PhysicsMeshTransform{
pub vertex:integer::Planar64Affine3,
pub normal:integer::mat3::Matrix3<Fixed<2,64>>,
@@ -485,7 +473,6 @@ impl PhysicsMeshTransform{
}
}
#[derive(Debug)]
pub struct TransformedMesh<'a>{
view:PhysicsMeshView<'a>,
transform:&'a PhysicsMeshTransform,
@@ -500,9 +487,12 @@ impl TransformedMesh<'_>{
transform,
}
}
pub fn verts<'a>(&'a self)->impl Iterator<Item=Vector3<Fixed<2,64>>>+'a{
pub fn verts<'a>(&'a self)->impl Iterator<Item=vec3::Vector3<Fixed<2,64>>>+'a{
self.view.data.verts.iter().map(|&Vert(pos)|self.transform.vertex.transform_point3(pos))
}
pub fn faces(&self)->impl Iterator<Item=SubmeshFaceId>{
(0..self.view.topology.faces.len() as u32).map(SubmeshFaceId::new)
}
fn farthest_vert(&self,dir:Planar64Vec3)->SubmeshVertId{
//this happens to be well-defined. there are no virtual virtices
SubmeshVertId::new(
@@ -608,7 +598,6 @@ pub enum MinkowskiFace{
//FaceFace
}
#[derive(Debug)]
pub struct MinkowskiMesh<'a>{
mesh0:TransformedMesh<'a>,
mesh1:TransformedMesh<'a>,
@@ -626,10 +615,6 @@ enum EV{
}
pub type GigaTime=Ratio<Fixed<4,128>,Fixed<4,128>>;
pub fn into_giga_time(time:Time,relative_to:Time)->GigaTime{
let r=(time-relative_to).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
}
impl MinkowskiMesh<'_>{
pub fn minkowski_sum<'a>(mesh0:TransformedMesh<'a>,mesh1:TransformedMesh<'a>)->MinkowskiMesh<'a>{
@@ -700,7 +685,7 @@ impl MinkowskiMesh<'_>{
}
}
/// This function drops a vertex down to an edge or a face if the path from infinity did not cross any vertex-edge boundaries but the point is supposed to have already crossed a boundary down from a vertex
fn infinity_fev(&self,infinity_dir:Planar64Vec3,point:Planar64Vec3)->FEV::<MinkowskiMesh<'_>>{
fn infinity_fev(&self,infinity_dir:Planar64Vec3,point:Planar64Vec3)->FEV::<MinkowskiMesh>{
//start on any vertex
//cross uncrossable vertex-edge boundaries until you find the closest vertex or edge
//cross edge-face boundary if it's uncrossable
@@ -745,56 +730,44 @@ impl MinkowskiMesh<'_>{
//
// Most of the calculation time is just calculating the starting point
// for the "actual" crawling algorithm below (predict_collision_{in|out}).
fn closest_fev_not_inside(&self,mut infinity_body:Body,start_time:Bound<&Time>)->Option<FEV<MinkowskiMesh<'_>>>{
fn closest_fev_not_inside(&self,mut infinity_body:Body,start_time:Time)->Option<FEV<MinkowskiMesh>>{
infinity_body.infinity_dir().and_then(|dir|{
let infinity_fev=self.infinity_fev(-dir,infinity_body.position);
//a line is simpler to solve than a parabola
infinity_body.velocity=dir;
infinity_body.acceleration=vec3::ZERO;
//crawl in from negative infinity along a tangent line to get the closest fev
match infinity_fev.crawl(self,&infinity_body,Bound::Unbounded,start_time){
// This is the expected case.
// We expect to never hit the mesh while setting up for the real crawl
// since the algorithm breaks down on the inside of the mesh.
crate::face_crawler::CrawlResult::Miss(fev)=>Some(fev),
// An exact hit is allowed, it has not crossed the boundary.
crate::face_crawler::CrawlResult::Hit(face,ratio)=>match start_time{
Bound::Included(_)=>ratio.num.is_zero().then(||FEV::Face(face)),
// You are looking for collision events within a range that does not include the start_time.
// The boundary is crossed at exactly start_time, so the range is not met.
// Therefore, the correct return value is None.
Bound::Excluded(_)=>unimplemented!(),
// To infinity and beyond!
Bound::Unbounded=>None,
},
}
// TODO: change crawl_fev args to delta time? Optional values?
infinity_fev.crawl(self,&infinity_body,Time::MIN/4,start_time).miss()
})
}
pub fn predict_collision_in(&self,relative_body:&Body,range:impl RangeBounds<Time>)->Option<(MinkowskiFace,GigaTime)>{
self.closest_fev_not_inside(*relative_body,range.start_bound()).and_then(|fev|{
pub fn predict_collision_in(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>)->Option<(MinkowskiFace,GigaTime)>{
self.closest_fev_not_inside(relative_body.clone(),start_time).and_then(|fev|{
//continue forwards along the body parabola
fev.crawl(self,relative_body,range.start_bound(),range.end_bound()).hit()
fev.crawl(self,relative_body,start_time,time_limit).hit()
})
}
pub fn predict_collision_out(&self,relative_body:&Body,range:impl RangeBounds<Time>)->Option<(MinkowskiFace,GigaTime)>{
let (lower_bound,upper_bound)=(range.start_bound(),range.end_bound());
// swap and negate bounds to do a time inversion
let (lower_bound,upper_bound)=(upper_bound.map(|&t|-t),lower_bound.map(|&t|-t));
let infinity_body=-relative_body;
self.closest_fev_not_inside(infinity_body,lower_bound.as_ref()).and_then(|fev|{
pub fn predict_collision_out(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>)->Option<(MinkowskiFace,GigaTime)>{
//create an extrapolated body at time_limit
let infinity_body=-relative_body.clone();
self.closest_fev_not_inside(infinity_body,-time_limit).and_then(|fev|{
//continue backwards along the body parabola
fev.crawl(self,&infinity_body,lower_bound.as_ref(),upper_bound.as_ref()).hit()
fev.crawl(self,&infinity_body,-time_limit,-start_time).hit()
//no need to test -time<time_limit because of the first step
.map(|(face,time)|(face,-time))
})
}
pub fn predict_collision_face_out(&self,relative_body:&Body,range:impl RangeBounds<Time>,contact_face_id:MinkowskiFace)->Option<(MinkowskiDirectedEdge,GigaTime)>{
// TODO: make better
use crate::face_crawler::{low,upp};
pub fn predict_collision_face_out(&self,relative_body:&Body,Range{start:start_time,end:time_limit}:Range<Time>,contact_face_id:MinkowskiFace)->Option<(MinkowskiEdge,GigaTime)>{
//no algorithm needed, there is only one state and two cases (Edge,None)
//determine when it passes an edge ("sliding off" case)
let start_time=range.start_bound().map(|&t|(t-relative_body.time).to_ratio());
let mut best_time=range.end_bound().map(|&t|into_giga_time(t,relative_body.time));
let start_time={
let r=(start_time-relative_body.time).to_ratio();
Ratio::new(r.num,r.den)
};
let mut best_time={
let r=(time_limit-relative_body.time).to_ratio();
Ratio::new(r.num.widen_4(),r.den.widen_4())
};
let mut best_edge=None;
let face_n=self.face_nd(contact_face_id).0;
for &directed_edge_id in self.face_edges(contact_face_id).as_ref(){
@@ -807,19 +780,18 @@ impl MinkowskiMesh<'_>{
//WARNING: truncated precision
//wrap for speed
for dt in Fixed::<4,128>::zeroes2(((n.dot(relative_body.position))*2-d).wrap_4(),n.dot(relative_body.velocity).wrap_4()*2,n.dot(relative_body.acceleration).wrap_4()){
if low(&start_time,&dt)&&upp(&dt,&best_time)&&n.dot(relative_body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=Bound::Included(dt);
best_edge=Some((directed_edge_id,dt));
if start_time.le_ratio(dt)&&dt.lt_ratio(best_time)&&n.dot(relative_body.extrapolated_velocity_ratio_dt(dt)).is_negative(){
best_time=dt;
best_edge=Some(directed_edge_id);
break;
}
}
}
best_edge
best_edge.map(|e|(e.as_undirected(),best_time))
}
fn infinity_in(&self,infinity_body:Body)->Option<(MinkowskiFace,GigaTime)>{
let infinity_fev=self.infinity_fev(-infinity_body.velocity,infinity_body.position);
// Bound::Included means that the surface of the mesh is included in the mesh
infinity_fev.crawl(self,&infinity_body,Bound::Unbounded,Bound::Included(&infinity_body.time)).hit()
infinity_fev.crawl(self,&infinity_body,Time::MIN/4,infinity_body.time).hit()
}
pub fn is_point_in_mesh(&self,point:Planar64Vec3)->bool{
let infinity_body=Body::new(point,vec3::Y,vec3::ZERO,Time::ZERO);
@@ -957,10 +929,10 @@ impl MeshQuery for MinkowskiMesh<'_>{
}
fn edge_verts(&self,edge_id:MinkowskiEdge)->impl AsRef<[MinkowskiVert;2]>{
AsRefHelper(match edge_id{
MinkowskiEdge::VertEdge(v0,e1)=>self.mesh1.edge_verts(e1).as_ref().map(|vert_id1|
MinkowskiEdge::VertEdge(v0,e1)=>(*self.mesh1.edge_verts(e1).as_ref()).map(|vert_id1|
MinkowskiVert::VertVert(v0,vert_id1)
),
MinkowskiEdge::EdgeVert(e0,v1)=>self.mesh0.edge_verts(e0).as_ref().map(|vert_id0|
MinkowskiEdge::EdgeVert(e0,v1)=>(*self.mesh0.edge_verts(e0).as_ref()).map(|vert_id0|
MinkowskiVert::VertVert(vert_id0,v1)
),
})
@@ -970,43 +942,38 @@ impl MeshQuery for MinkowskiMesh<'_>{
MinkowskiVert::VertVert(v0,v1)=>{
let mut edges=Vec::new();
//detect shared volume when the other mesh is mirrored along a test edge dir
let v0f_thing=self.mesh0.vert_faces(v0);
let v1f_thing=self.mesh1.vert_faces(v1);
let v0f=v0f_thing.as_ref();
let v1f=v1f_thing.as_ref();
let v0f_n:Vec<_>=v0f.iter().map(|&face_id|self.mesh0.face_nd(face_id).0).collect();
let v1f_n:Vec<_>=v1f.iter().map(|&face_id|self.mesh1.face_nd(face_id).0).collect();
// scratch vector
let mut face_normals=Vec::with_capacity(v0f.len()+v1f.len());
face_normals.clone_from(&v0f_n);
let v0f=self.mesh0.vert_faces(v0);
let v1f=self.mesh1.vert_faces(v1);
let v0f_n:Vec<_>=v0f.as_ref().iter().map(|&face_id|self.mesh0.face_nd(face_id).0).collect();
let v1f_n:Vec<_>=v1f.as_ref().iter().map(|&face_id|self.mesh1.face_nd(face_id).0).collect();
let the_len=v0f.as_ref().len()+v1f.as_ref().len();
for &directed_edge_id in self.mesh0.vert_edges(v0).as_ref(){
let n=self.mesh0.directed_edge_n(directed_edge_id);
let nn=n.dot(n);
// TODO: there's gotta be a better way to do this
// drop faces beyond v0f_n
face_normals.truncate(v0f.len());
// make a set of faces from mesh0's perspective
//make a set of faces
let mut face_normals=Vec::with_capacity(the_len);
//add mesh0 faces as-is
face_normals.clone_from(&v0f_n);
for face_n in &v1f_n{
//add reflected mesh1 faces
//wrap for speed
face_normals.push(*face_n-(n*face_n.dot(n)*2/nn).divide().wrap_3());
}
if is_empty_volume(&face_normals){
if is_empty_volume(face_normals){
edges.push(MinkowskiDirectedEdge::EdgeVert(directed_edge_id,v1));
}
}
face_normals.clone_from(&v1f_n);
for &directed_edge_id in self.mesh1.vert_edges(v1).as_ref(){
let n=self.mesh1.directed_edge_n(directed_edge_id);
let nn=n.dot(n);
// drop faces beyond v1f_n
face_normals.truncate(v1f.len());
// make a set of faces from mesh1's perspective
let mut face_normals=Vec::with_capacity(the_len);
face_normals.clone_from(&v1f_n);
for face_n in &v0f_n{
//wrap for speed
face_normals.push(*face_n-(n*face_n.dot(n)*2/nn).divide().wrap_3());
}
if is_empty_volume(&face_normals){
if is_empty_volume(face_normals){
edges.push(MinkowskiDirectedEdge::VertEdge(v0,directed_edge_id));
}
}
@@ -1016,12 +983,11 @@ impl MeshQuery for MinkowskiMesh<'_>{
}
fn vert_faces(&self,_vert_id:MinkowskiVert)->impl AsRef<[MinkowskiFace]>{
unimplemented!();
#[expect(unreachable_code)]
Vec::new()
vec![]
}
}
fn is_empty_volume(normals:&[Vector3<Fixed<3,96>>])->bool{
fn is_empty_volume(normals:Vec<Vector3<Fixed<3,96>>>)->bool{
let len=normals.len();
for i in 0..len-1{
for j in i+1..len{
@@ -1047,6 +1013,6 @@ fn is_empty_volume(normals:&[Vector3<Fixed<3,96>>])->bool{
#[test]
fn test_is_empty_volume(){
assert!(!is_empty_volume(&[vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3()]));
assert!(is_empty_volume(&[vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3(),vec3::NEG_X.widen_3()]));
assert!(!is_empty_volume([vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3()].to_vec()));
assert!(is_empty_volume([vec3::X.widen_3(),vec3::Y.widen_3(),vec3::Z.widen_3(),vec3::NEG_X.widen_3()].to_vec()));
}

File diff suppressed because it is too large.

View File

@@ -192,7 +192,7 @@ fn get_push_ray_3(point:Planar64Vec3,c0:&Contact,c1:&Contact,c2:&Contact)->Optio
const fn get_best_push_ray_and_conts_0<'a>(point:Planar64Vec3)->(Ray,Conts<'a>){
(get_push_ray_0(point),Conts::new_const())
}
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts<'_>)>{
fn get_best_push_ray_and_conts_1(point:Planar64Vec3,c0:&Contact)->Option<(Ray,Conts)>{
get_push_ray_1(point,c0)
.map(|ray|(ray,Conts::from_iter([c0])))
}

View File

@@ -10,6 +10,3 @@ strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../physics", registry = "strafesnet" }
strafesnet_settings = { path = "../settings", registry = "strafesnet" }
strafesnet_snf = { path = "../../lib/snf", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -31,7 +31,7 @@ pub enum SessionInputInstruction{
Mouse(glam::IVec2),
SetControl(strafesnet_common::physics::SetControlInstruction),
Mode(ImplicitModeInstruction),
Misc(MiscInstruction),
Misc(strafesnet_common::physics::MiscInstruction),
}
/// Implicit mode instruction are fed separately to session.
/// Session generates the explicit mode instructions interlaced with a SetSensitivity instruction
@@ -152,10 +152,10 @@ enum ViewState{
pub struct Session{
directories:Directories,
user_settings:UserSettings,
mouse_interpolator:MouseInterpolator,
mouse_interpolator:crate::mouse_interpolator::MouseInterpolator,
view_state:ViewState,
//gui:GuiState
geometry_shared:PhysicsData,
geometry_shared:physics::PhysicsData,
simulation:Simulation,
// below fields not included in lite session
recording:Recording,
@@ -172,7 +172,7 @@ impl Session{
user_settings,
directories,
mouse_interpolator:MouseInterpolator::new(),
geometry_shared:PhysicsData::empty(),
geometry_shared:Default::default(),
simulation,
view_state:ViewState::Play,
recording:Default::default(),
@@ -184,7 +184,7 @@ impl Session{
}
fn change_map(&mut self,map:&strafesnet_common::map::CompleteMap){
self.simulation.physics.clear();
self.geometry_shared=PhysicsData::new(map);
self.geometry_shared.generate_models(map);
}
pub fn get_frame_state(&self,time:SessionTime)->Option<FrameState>{
match &self.view_state{

View File

@@ -8,6 +8,3 @@ configparser = "3.0.2"
directories = "6.0.0"
glam = "0.30.0"
strafesnet_common = { path = "../../lib/common", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -1 +0,0 @@
/test_files

View File

@@ -4,12 +4,6 @@ version = "0.1.0"
edition = "2024"
[dependencies]
glam = "0.30.0"
strafesnet_common = { path = "../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../engine/physics", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet" }
# this is just for the primitive constructor
strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -1,28 +0,0 @@
#[expect(dead_code)]
#[derive(Debug)]
pub enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}

View File

@@ -1,15 +1,7 @@
mod error;
mod util;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_scenes;
use std::io::Cursor;
use std::path::Path;
use std::time::Instant;
use error::ReplayError;
use util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
fn main(){
@@ -21,6 +13,40 @@ fn main(){
}
}
#[allow(unused)]
#[derive(Debug)]
enum ReplayError{
IO(std::io::Error),
SNF(strafesnet_snf::Error),
SNFM(strafesnet_snf::map::Error),
SNFB(strafesnet_snf::bot::Error),
}
impl From<std::io::Error> for ReplayError{
fn from(value:std::io::Error)->Self{
Self::IO(value)
}
}
impl From<strafesnet_snf::Error> for ReplayError{
fn from(value:strafesnet_snf::Error)->Self{
Self::SNF(value)
}
}
impl From<strafesnet_snf::map::Error> for ReplayError{
fn from(value:strafesnet_snf::map::Error)->Self{
Self::SNFM(value)
}
}
impl From<strafesnet_snf::bot::Error> for ReplayError{
fn from(value:strafesnet_snf::bot::Error)->Self{
Self::SNFB(value)
}
}
fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}
fn run_replay()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
@@ -31,8 +57,9 @@ fn run_replay()->Result<(),ReplayError>{
let bot=strafesnet_snf::read_bot(data)?.read_all()?;
// create recording
let mut physics_data=PhysicsData::default();
println!("generating models..");
let physics_data=PhysicsData::new(&map);
physics_data.generate_models(&map);
println!("simulating...");
let mut physics=PhysicsState::default();
for ins in bot.instructions{
@@ -140,8 +167,9 @@ fn test_determinism()->Result<(),ReplayError>{
let data=read_entire_file("../tools/bhop_maps/5692113331.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
let mut physics_data=PhysicsData::default();
println!("generating models..");
let physics_data=PhysicsData::new(&map);
physics_data.generate_models(&map);
let (send,recv)=std::sync::mpsc::channel();

View File

@@ -1,119 +0,0 @@
use strafesnet_physics::physics::{InternalInstruction,PhysicsData,PhysicsState,PhysicsContext};
use strafesnet_common::gameplay_modes::NormalizedModes;
use strafesnet_common::gameplay_attributes::{CollisionAttributes,CollisionAttributesId};
use strafesnet_common::integer::{vec3,mat3,Planar64Affine3,Time};
use strafesnet_common::model::{Mesh,Model,MeshId,ModelId,RenderConfigId};
use strafesnet_common::map::CompleteMap;
use strafesnet_rbx_loader::primitives::{unit_cube,CubeFaceDescription};
struct TestSceneBuilder{
meshes:Vec<Mesh>,
models:Vec<Model>,
}
impl TestSceneBuilder{
fn new()->Self{
Self{
meshes:Vec::new(),
models:Vec::new(),
}
}
fn push_mesh(&mut self,mesh:Mesh)->MeshId{
let mesh_id=self.meshes.len();
self.meshes.push(mesh);
MeshId::new(mesh_id as u32)
}
fn push_mesh_instance(&mut self,mesh:MeshId,transform:Planar64Affine3)->ModelId{
let model=Model{
mesh,
attributes:CollisionAttributesId::new(0),
color:glam::Vec4::ONE,
transform,
};
let model_id=self.models.len();
self.models.push(model);
ModelId::new(model_id as u32)
}
fn build(self)->PhysicsData{
let modes=NormalizedModes::new(Vec::new());
let attributes=vec![CollisionAttributes::contact_default()];
let meshes=self.meshes;
let models=self.models;
let textures=Vec::new();
let render_configs=Vec::new();
PhysicsData::new(&CompleteMap{
modes,
attributes,
meshes,
models,
textures,
render_configs,
})
}
}
fn test_scene()->PhysicsData{
let mut builder=TestSceneBuilder::new();
let cube_face_description=CubeFaceDescription::new(Default::default(),RenderConfigId::new(0));
let mesh=builder.push_mesh(unit_cube(cube_face_description));
// place two 5x5x5 cubes.
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(0,0,0)
));
builder.push_mesh_instance(mesh,Planar64Affine3::new(
mat3::from_diagonal(vec3::int(5,5,5)>>1),
vec3::int(5,-5,0)
));
builder.build()
}
#[test]
fn simultaneous_collision(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,1,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::ZERO;
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(2))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// the order that they hit does matter, but we aren't currently worrying about that.
// See multi-collision branch
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5,0,0));
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(1));
}
#[test]
fn bug_3(){
let physics_data=test_scene();
let body=strafesnet_physics::physics::Body::new(
(vec3::int(5+2,0,0)>>1)+vec3::int(1,2,0),
vec3::int(-1,-1,0),
vec3::int(0,0,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
physics.style_mut().gravity=vec3::ZERO;
let mut phys_iter=PhysicsContext::iter_internal(&mut physics,&physics_data,Time::from_secs(3))
.filter(|ins|!matches!(ins.instruction,InternalInstruction::StrafeTick));
// touch side of part at 0,0,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(1));
// EXPECTED: touch top of part at 5,-5,0
// OBSERVED: CollisionEnd part at 0,0,0; clip through part at 5,-5,0
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(2));
assert_eq!(phys_iter.next().unwrap().time,Time::from_secs(2));
assert!(phys_iter.next().is_none());
let body=physics.body();
assert_eq!(body.position,vec3::int(5+2,0,0)>>1);
assert_eq!(body.velocity,vec3::int(0,0,0));
assert_eq!(body.acceleration,vec3::int(0,0,0));
assert_eq!(body.time,Time::from_secs(2));
}

View File

@@ -1,76 +0,0 @@
use crate::error::ReplayError;
use crate::util::read_entire_file;
use strafesnet_physics::physics::{PhysicsData,PhysicsState,PhysicsContext};
#[test]
#[ignore]
fn physics_bug_2()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("test_files/bhop_monster_jam.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
// body pos = Vector { array: [Fixed { bits: 554895163352 }, Fixed { bits: 1485633089990 }, Fixed { bits: 1279601007173 }] }
// after the fix it's still happening, possibly for a different reason, new position to evince:
// body pos = Vector { array: [Fixed { bits: 555690659654 }, Fixed { bits: 1490485868773 }, Fixed { bits: 1277783839382 }] }
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
vec3::raw_xyz(555690659654,1490485868773,1277783839382),
vec3::int(0,0,0),
vec3::int(0,-100,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
// hit=Some(ModelId(2262))
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}
#[test]
#[ignore]
fn physics_bug_3()->Result<(),ReplayError>{
println!("loading map file..");
let data=read_entire_file("../tools/bhop_maps/5692152916.snfm")?;
let map=strafesnet_snf::read_map(data)?.into_complete_map()?;
// create recording
println!("generating models..");
let physics_data=PhysicsData::new(&map);
println!("simulating...");
//teleport to bug
use strafesnet_common::integer::{vec3,Time};
let body=strafesnet_physics::physics::Body::new(
// bhop_toc corner position after wall hits
// vec3::raw_xyz(-1401734815424,3315081280280,-2466057177493),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// alternate room center position
// vec3::raw_xyz(-1129043783837,3324870327882,-2014012350212),
// vec3::raw_xyz(0,-96915585363,1265),
// vec3::raw_xyz(0,-429496729600,0),
// corner setup before wall hits
vec3::raw_xyz(-1392580080675,3325402529458,-2444727738679),
vec3::raw_xyz(-30259028820,-22950929553,-71141663007),
vec3::raw_xyz(0,-429496729600,0),
Time::ZERO,
);
let mut physics=PhysicsState::new_with_body(body);
// wait one second to activate the bug
PhysicsContext::run_input_instruction(&mut physics,&physics_data,strafesnet_common::instruction::TimedInstruction{
time:Time::from_millis(500),
instruction:strafesnet_common::physics::Instruction::Idle,
});
Ok(())
}

View File

@@ -1,7 +0,0 @@
use std::io::Cursor;
use std::path::Path;
pub fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let data=std::fs::read(path)?;
Ok(Cursor::new(data))
}

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_bsp_loader"
version = "0.3.1"
version = "0.3.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -11,12 +11,9 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
glam = "0.30.0"
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
vbsp = "0.9.1"
vbsp-entities-css = "0.6.0"
vmdl = "0.2.0"
vpk = "0.3.0"
[lints]
workspace = true

View File

@@ -7,7 +7,7 @@ use crate::{valve_transform_normal,valve_transform_dist};
#[derive(Hash,Eq,PartialEq)]
struct Face{
normal:integer::Planar64Vec3,
dot:Planar64,
dot:integer::Planar64,
}
#[derive(Debug)]
@@ -187,7 +187,7 @@ fn planes_to_faces(face_list:std::collections::HashSet<Face>)->Result<Faces,Plan
}
}
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum BrushToMeshError{
SliceBrushSides,

View File

@@ -347,9 +347,9 @@ pub struct PartialMap1{
modes:NormalizedModes,
}
impl PartialMap1{
pub fn add_prop_meshes(
pub fn add_prop_meshes<'a>(
self,
prop_meshes:Meshes<model::Mesh>,
prop_meshes:Meshes,
)->PartialMap2{
PartialMap2{
attributes:self.attributes,

View File

@@ -5,6 +5,7 @@ use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
use crate::{Bsp,Vpk};
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -31,7 +32,7 @@ impl Loader for TextureLoader{
type Error=TextureError;
type Index<'a>=Cow<'a,str>;
type Resource=Texture;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
let file_name=format!("textures/{}.dds",index);
let mut file=std::fs::File::open(file_name)?;
let mut data=Vec::new();
@@ -40,6 +41,7 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -111,7 +113,7 @@ impl ModelLoader<'_,'_>{
}
}
}
impl Loader for ModelLoader<'_,'_>{
impl<'bsp,'vpk> Loader for ModelLoader<'bsp,'vpk>{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Resource=vmdl::Model;
@@ -151,7 +153,7 @@ impl MeshLoader<'_,'_,'_,'_>{
}
}
}
impl Loader for MeshLoader<'_,'_,'_,'_>{
impl<'str,'bsp,'vpk,'load> Loader for MeshLoader<'bsp,'vpk,'load,'str>{
type Error=MeshError;
type Index<'a>=&'a str where Self:'a;
type Resource=Mesh;

View File

@@ -61,7 +61,7 @@ pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredL
_=>None,
}
})
}).filter_map(|[v1,v2,v3]|{
}).flat_map(|[v1,v2,v3]|{
// this should probably be a fatal error :D
let v1=model_vertices.get(v1)?;
let v2=model_vertices.get(v2)?;

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_common"
version = "0.7.0"
version = "0.6.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -17,6 +17,3 @@ linear_ops = { version = "0.1.1", path = "../linear_ops", registry = "strafesnet
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet" }
glam = "0.30.0"
id = { version = "0.1.0", registry = "strafesnet" }
[lints]
workspace = true

View File

@@ -2,9 +2,7 @@ use crate::integer::{vec3,Planar64Vec3};
#[derive(Clone)]
pub struct Aabb{
// min is inclusive
min:Planar64Vec3,
// max is not inclusive
max:Planar64Vec3,
}
@@ -45,7 +43,7 @@ impl Aabb{
}
#[inline]
pub fn contains(&self,point:Planar64Vec3)->bool{
let bvec=self.min.le(point)&point.lt(self.max);
let bvec=self.min.lt(point)&point.lt(self.max);
bvec.all()
}
#[inline]
@@ -61,11 +59,11 @@ impl Aabb{
pub fn center(&self)->Planar64Vec3{
self.min.map_zip(self.max,|(min,max)|min.midpoint(max))
}
#[inline]
pub fn area_weight(&self)->fixed_wide::fixed::Fixed<2,64>{
let d=self.max-self.min;
d.x*d.y+d.y*d.z+d.z*d.x
}
//probably use floats for area & volume because we don't care about precision
// pub fn area_weight(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y+d.y*d.z+d.z*d.x
// }
// pub fn volume(&self)->f32{
// let d=self.max-self.min;
// d.x*d.y*d.z
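The contains hunk above switches the lower test between le (min inclusive) and lt (min exclusive); one side's comments spell out the half-open convention [min, max). A plain-float sketch of that convention on a single axis (illustrative, not the fixed-point Planar64Vec3):

// Half-open AABB test on one axis: min is inclusive, max is not.
fn contains(min: f64, max: f64, p: f64) -> bool {
    min <= p && p < max
}

fn main() {
    assert!(contains(0.0, 1.0, 0.0));  // on the min face: inside
    assert!(!contains(0.0, 1.0, 1.0)); // on the max face: outside
}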

View File

@@ -245,19 +245,18 @@ pub fn generate_bvh<T>(boxen:Vec<(T,Aabb)>)->BvhNode<T>{
fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
let n=boxen.len();
const MAX_TERMINAL_BRANCH_LEAF_NODES:usize=20;
if force||n<MAX_TERMINAL_BRANCH_LEAF_NODES{
let mut aabb_outer=Aabb::default();
let nodes=boxen.into_iter().map(|(data,aabb)|{
aabb_outer.join(&aabb);
if force||n<20{
let mut aabb=Aabb::default();
let nodes=boxen.into_iter().map(|b|{
aabb.join(&b.1);
BvhNode{
content:RecursiveContent::Leaf(data),
aabb,
content:RecursiveContent::Leaf(b.0),
aabb:b.1,
}
}).collect();
BvhNode{
content:RecursiveContent::Branch(nodes),
aabb:aabb_outer,
aabb,
}
}else{
let mut sort_x=Vec::with_capacity(n);
@@ -273,9 +272,9 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
sort_y.sort_by_key(|&(_,c)|c);
sort_z.sort_by_key(|&(_,c)|c);
let h=n/2;
let (_,median_x)=sort_x[h];
let (_,median_y)=sort_y[h];
let (_,median_z)=sort_z[h];
let median_x=sort_x[h].1;
let median_y=sort_y[h].1;
let median_z=sort_z[h].1;
//locate a run of values equal to the median
//partition point gives the first index for which the predicate evaluates to false
let first_index_eq_median_x=sort_x.partition_point(|&(_,x)|x<median_x);
@@ -314,10 +313,10 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
};
list_list[list_id].push((data,aabb));
}
let mut aabb=Aabb::default();
if list_list.len()==1{
generate_bvh_node(list_list.remove(0),true)
}else{
let mut aabb=Aabb::default();
BvhNode{
content:RecursiveContent::Branch(
list_list.into_iter().map(|b|{
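The BVH hunk above relies on slice::partition_point to locate the run of centroids equal to the median along each axis; as the context comment notes, it returns the first index for which the predicate evaluates to false. A tiny standalone illustration:

fn main() {
    // Sorted centroids along one axis; the median value 3 occupies indices 1..4.
    let sorted = [1, 3, 3, 3, 7, 9];
    let median = 3;
    let first_eq_median = sorted.partition_point(|&x| x < median);  // 1
    let first_gt_median = sorted.partition_point(|&x| x <= median); // 4
    assert_eq!((first_eq_median, first_gt_median), (1, 4));
}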

View File

@@ -140,15 +140,6 @@ impl ModeId{
pub const MAIN:Self=Self(0);
pub const BONUS:Self=Self(1);
}
impl core::fmt::Display for ModeId{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->core::fmt::Result{
match self{
&Self::MAIN=>write!(f,"Main"),
&Self::BONUS=>write!(f,"Bonus"),
&Self(mode_id)=>write!(f,"Bonus{mode_id}"),
}
}
}
#[derive(Clone)]
pub struct Mode{
style:gameplay_style::StyleModifiers,

View File

@@ -34,7 +34,7 @@ pub struct StyleModifiers{
//unused
pub mass:Planar64,
}
impl Default for StyleModifiers{
impl std::default::Default for StyleModifiers{
fn default()->Self{
Self::roblox_bhop()
}

View File

@@ -34,41 +34,12 @@ pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsu
self.process_instruction(instruction);
}
}
#[inline]
fn into_iter(self,time_limit:T)->InstructionIter<I,T,Self>
where
Self:Sized
{
InstructionIter{
time_limit,
feedback:self,
_phantom:core::marker::PhantomData,
}
}
}
impl<I,T,F> InstructionFeedback<I,T> for F
impl<I,T,X> InstructionFeedback<I,T> for X
where
T:Copy,
F:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
{}
pub struct InstructionIter<I,T:Copy,F:InstructionFeedback<I,T>>{
time_limit:T,
feedback:F,
_phantom:core::marker::PhantomData<I>,
}
impl<I,T,F> Iterator for InstructionIter<I,T,F>
where
I:Clone,
T:Clone+Copy,
F:InstructionFeedback<I,T>,
{
type Item=TimedInstruction<I,T>;
fn next(&mut self)->Option<Self::Item>{
let instruction=self.feedback.next_instruction(self.time_limit)?;
self.feedback.process_instruction(instruction.clone());
Some(instruction)
}
}
//PROPER PRIVATE FIELDS!!!
pub struct InstructionCollector<I,T>{

View File

@@ -1,5 +1,5 @@
pub use fixed_wide::fixed::*;
pub use ratio_ops::ratio::{Ratio,Divide,Parity};
pub use ratio_ops::ratio::{Ratio,Divide};
//integer units
@@ -86,7 +86,7 @@ impl<T> std::fmt::Display for Time<T>{
write!(f,"{}s+{:09}ns",self.0/Self::ONE_SECOND.0,self.0%Self::ONE_SECOND.0)
}
}
impl<T> Default for Time<T>{
impl<T> std::default::Default for Time<T>{
fn default()->Self{
Self::raw(0)
}
@@ -126,27 +126,26 @@ impl_time_additive_assign_operator!(core::ops::AddAssign,add_assign);
impl_time_additive_assign_operator!(core::ops::SubAssign,sub_assign);
impl_time_additive_assign_operator!(core::ops::RemAssign,rem_assign);
impl<T> std::ops::Mul for Time<T>{
type Output=Ratio<Fixed<2,64>,Fixed<2,64>>;
type Output=Ratio<fixed_wide::fixed::Fixed<2,64>,fixed_wide::fixed::Fixed<2,64>>;
#[inline]
fn mul(self,rhs:Self)->Self::Output{
Ratio::new(Fixed::raw(self.0)*Fixed::raw(rhs.0),Fixed::raw_digit(1_000_000_000i64.pow(2)))
}
}
macro_rules! impl_time_i64_rhs_operator {
($op:ident,$method:ident)=>{
impl<T> core::ops::$op<i64> for Time<T>{
type Output=Self;
#[inline]
fn $method(self,rhs:i64)->Self::Output{
Self::raw(self.0.$method(rhs))
}
}
impl<T> std::ops::Div<i64> for Time<T>{
type Output=Self;
#[inline]
fn div(self,rhs:i64)->Self::Output{
Self::raw(self.0/rhs)
}
}
impl<T> std::ops::Mul<i64> for Time<T>{
type Output=Self;
#[inline]
fn mul(self,rhs:i64)->Self::Output{
Self::raw(self.0*rhs)
}
}
impl_time_i64_rhs_operator!(Div,div);
impl_time_i64_rhs_operator!(Mul,mul);
impl_time_i64_rhs_operator!(Shr,shr);
impl_time_i64_rhs_operator!(Shl,shl);
impl<T> core::ops::Mul<Time<T>> for Planar64{
type Output=Ratio<Fixed<2,64>,Planar64>;
fn mul(self,rhs:Time<T>)->Self::Output{
@@ -156,7 +155,7 @@ impl<T> core::ops::Mul<Time<T>> for Planar64{
#[cfg(test)]
mod test_time{
use super::*;
type Time=AbsoluteTime;
type Time=super::AbsoluteTime;
#[test]
fn time_from_planar64(){
let a:Time=Planar64::from(1).into();
@@ -552,7 +551,7 @@ impl TryFrom<[f32;3]> for Unit32Vec3{
}
*/
pub type Planar64TryFromFloatError=FixedFromFloatError;
pub type Planar64TryFromFloatError=fixed_wide::fixed::FixedFromFloatError;
pub type Planar64=fixed_wide::types::I32F32;
pub type Planar64Vec3=linear_ops::types::Vector3<Planar64>;
pub type Planar64Mat3=linear_ops::types::Matrix3<Planar64>;
@@ -562,11 +561,11 @@ pub mod vec3{
pub const MIN:Planar64Vec3=Planar64Vec3::new([Planar64::MIN;3]);
pub const MAX:Planar64Vec3=Planar64Vec3::new([Planar64::MAX;3]);
pub const ZERO:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO;3]);
pub const ZERO_2:Vector3<Fixed::<2,64>>=Vector3::new([Fixed::<2,64>::ZERO;3]);
pub const ZERO_3:Vector3<Fixed::<3,96>>=Vector3::new([Fixed::<3,96>::ZERO;3]);
pub const ZERO_4:Vector3<Fixed::<4,128>>=Vector3::new([Fixed::<4,128>::ZERO;3]);
pub const ZERO_5:Vector3<Fixed::<5,160>>=Vector3::new([Fixed::<5,160>::ZERO;3]);
pub const ZERO_6:Vector3<Fixed::<6,192>>=Vector3::new([Fixed::<6,192>::ZERO;3]);
pub const ZERO_2:linear_ops::types::Vector3<Fixed::<2,64>>=linear_ops::types::Vector3::new([Fixed::<2,64>::ZERO;3]);
pub const ZERO_3:linear_ops::types::Vector3<Fixed::<3,96>>=linear_ops::types::Vector3::new([Fixed::<3,96>::ZERO;3]);
pub const ZERO_4:linear_ops::types::Vector3<Fixed::<4,128>>=linear_ops::types::Vector3::new([Fixed::<4,128>::ZERO;3]);
pub const ZERO_5:linear_ops::types::Vector3<Fixed::<5,160>>=linear_ops::types::Vector3::new([Fixed::<5,160>::ZERO;3]);
pub const ZERO_6:linear_ops::types::Vector3<Fixed::<6,192>>=linear_ops::types::Vector3::new([Fixed::<6,192>::ZERO;3]);
pub const X:Planar64Vec3=Planar64Vec3::new([Planar64::ONE,Planar64::ZERO,Planar64::ZERO]);
pub const Y:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ONE,Planar64::ZERO]);
pub const Z:Planar64Vec3=Planar64Vec3::new([Planar64::ZERO,Planar64::ZERO,Planar64::ONE]);
@@ -657,7 +656,7 @@ pub mod mat3{
}
}
#[derive(Clone,Copy,Debug,Default,Hash,Eq,PartialEq)]
#[derive(Clone,Copy,Default,Hash,Eq,PartialEq)]
pub struct Planar64Affine3{
pub matrix3:Planar64Mat3,//includes scale above 1
pub translation:Planar64Vec3,

View File

@@ -1,6 +1,6 @@
use std::collections::HashMap;
use crate::integer::{Planar64,Planar64Vec3,Planar64Affine3};
use crate::integer::{Planar64Vec3,Planar64Affine3};
use crate::gameplay_attributes;
pub type TextureCoordinate=glam::Vec2;
@@ -168,11 +168,6 @@ impl MeshBuilder{
}
}
pub fn acquire_pos_id(&mut self,pos:Planar64Vec3)->PositionId{
// Truncate the 16 most precise bits of the vertex positions.
// This allows the normal vectors to exactly represent the face.
// Remove this in Mesh V2
const MASK:Planar64=Planar64::raw(!((1<<16)-1));
let pos=pos.map(|c|c&MASK);
*self.pos_id_from.entry(pos).or_insert_with(||{
let pos_id=PositionId::new(self.unique_pos.len() as u32);
self.unique_pos.push(pos);
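The masking lines in this hunk snap vertex positions to a coarser grid by clearing the 16 least-significant fractional bits of each raw fixed-point coordinate, so that face normals can represent the face exactly. A sketch of that masking on raw i64 bits (the Planar64 wrapper is omitted):

// Clear the low 16 bits of a raw fixed-point coordinate.
const MASK: i64 = !((1i64 << 16) - 1);

fn truncate(raw: i64) -> i64 {
    raw & MASK
}

fn main() {
    assert_eq!(MASK as u64, 0xffff_ffff_ffff_0000);
    assert_eq!(truncate(0x1234_5678_9abc), 0x1234_5678_0000);
}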

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_deferred_loader"
version = "0.5.1"
version = "0.5.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -10,7 +10,4 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }

View File

@@ -2,7 +2,7 @@ use std::collections::HashMap;
use crate::loader::Loader;
use crate::mesh::Meshes;
use crate::texture::{RenderConfigs,Texture};
use strafesnet_common::model::{MeshId,RenderConfig,RenderConfigId,TextureId};
use strafesnet_common::model::{Mesh,MeshId,RenderConfig,RenderConfigId,TextureId};
#[derive(Clone,Copy,Debug)]
pub enum LoadFailureMode{
@@ -93,7 +93,7 @@ impl<H:core::hash::Hash+Eq> MeshDeferredLoader<H>{
pub fn into_indices(self)->impl Iterator<Item=H>{
self.mesh_id_from_asset_id.into_keys()
}
pub fn into_meshes<'a,M:Clone,L:Loader<Resource=M,Index<'a>=H>+'a>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes<M>,L::Error>{
pub fn into_meshes<'a,L:Loader<Resource=Mesh,Index<'a>=H>+'a>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes,L::Error>{
let mut mesh_list=vec![None;self.mesh_id_from_asset_id.len()];
for (index,mesh_id) in self.mesh_id_from_asset_id{
let resource_result=loader.load(index);

View File

@@ -4,5 +4,5 @@ pub trait Loader{
type Error:Error;
type Index<'a> where Self:'a;
type Resource;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>;
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>;
}

View File

@@ -1,15 +1,15 @@
use strafesnet_common::model::MeshId;
use strafesnet_common::model::{Mesh,MeshId};
pub struct Meshes<M>{
meshes:Vec<Option<M>>,
pub struct Meshes{
meshes:Vec<Option<Mesh>>,
}
impl<M> Meshes<M>{
pub(crate) const fn new(meshes:Vec<Option<M>>)->Self{
impl Meshes{
pub(crate) const fn new(meshes:Vec<Option<Mesh>>)->Self{
Self{
meshes,
}
}
pub fn consume(self)->impl Iterator<Item=(MeshId,M)>{
pub fn consume(self)->impl Iterator<Item=(MeshId,Mesh)>{
self.meshes.into_iter().enumerate().filter_map(|(mesh_id,maybe_mesh)|
maybe_mesh.map(|mesh|(MeshId::new(mesh_id as u32),mesh))
)

View File

@@ -1,6 +1,6 @@
[package]
name = "fixed_wide"
version = "0.2.1"
version = "0.2.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -18,6 +18,3 @@ bnum = "0.13.0"
arrayvec = { version = "0.7.6", optional = true }
paste = "1.0.15"
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
[lints]
workspace = true

View File

@@ -1,8 +1,6 @@
use bnum::{BInt,cast::As};
const BNUM_DIGIT_WIDTH:usize=64;
#[derive(Clone,Copy,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord)]
/// A Fixed point number for which multiply operations widen the bits in the output. (when the wide-mul feature is enabled)
/// N is the number of u64s to use
/// F is the number of fractional bits (always N*32 lol)
@@ -101,6 +99,28 @@ impl_from!(
i8,i16,i32,i64,i128,isize
);
impl<const N:usize,const F:usize,T> PartialEq<T> for Fixed<N,F>
where
T:Copy,
BInt::<N>:From<T>,
{
#[inline]
fn eq(&self,&other:&T)->bool{
self.bits.eq(&other.into())
}
}
impl<const N:usize,const F:usize,T> PartialOrd<T> for Fixed<N,F>
where
T:Copy,
BInt::<N>:From<T>,
{
#[inline]
fn partial_cmp(&self,&other:&T)->Option<std::cmp::Ordering>{
self.bits.partial_cmp(&other.into())
}
}
impl<const N:usize,const F:usize> std::ops::Neg for Fixed<N,F>{
type Output=Self;
#[inline]
@@ -213,11 +233,6 @@ impl FixedFromFloatError{
}
}
}
impl core::fmt::Display for FixedFromFloatError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
macro_rules! impl_from_float {
( $decode:ident, $input: ty, $mantissa_bits:expr ) => {
impl<const N:usize,const F:usize> TryFrom<$input> for Fixed<N,F>{
@@ -266,23 +281,6 @@ macro_rules! impl_from_float {
impl_from_float!(integer_decode_f32,f32,24);
impl_from_float!(integer_decode_f64,f64,53);
impl<const N:usize,const F:usize> core::fmt::Debug for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{
let integral=self.as_bits().unsigned_abs()>>F;
let fractional=self.as_bits().unsigned_abs()&((bnum::BUint::<N>::ONE<<F)-bnum::BUint::<N>::ONE);
let leading_zeroes=(fractional.leading_zeros() as usize).saturating_sub(N*BNUM_DIGIT_WIDTH-F)>>2;
if self.is_negative(){
core::write!(f,"-")?;
}
if fractional.is_zero(){
core::write!(f,"{integral:x}.{}","0".repeat(leading_zeroes))
}else{
core::write!(f,"{integral:x}.{}{fractional:x}","0".repeat(leading_zeroes))
}
}
}
impl<const N:usize,const F:usize> core::fmt::Display for Fixed<N,F>{
#[inline]
fn fmt(&self,f:&mut core::fmt::Formatter)->Result<(),core::fmt::Error>{
@@ -306,6 +304,16 @@ macro_rules! impl_additive_operator {
self.$method(other)
}
}
impl<const N:usize,const F:usize,U> core::ops::$trait<U> for $struct<N,F>
where
BInt::<N>:From<U>,
{
type Output = $output;
#[inline]
fn $method(self, other: U) -> Self::Output {
Self::from_bits(self.bits.$method(BInt::<N>::from(other).shl(F as u32)))
}
}
};
}
macro_rules! impl_additive_assign_operator {
@@ -316,6 +324,15 @@ macro_rules! impl_additive_assign_operator {
self.bits.$method(other.bits);
}
}
impl<const N:usize,const F:usize,U> core::ops::$trait<U> for $struct<N,F>
where
BInt::<N>:From<U>,
{
#[inline]
fn $method(&mut self, other: U) {
self.bits.$method(BInt::<N>::from(other).shl(F as u32));
}
}
};
}
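A hedged worked example (plain i64 arithmetic, no crate types) of why these macros shift the integer operand left by F before adding, using a 32.32 layout:

fn main(){
	const F:u32=32;
	let one_point_five:i64=3<<(F-1); // 1.5 in 32.32 fixed point
	let rhs:i64=2;
	// The macro converts the plain integer into fixed representation by shifting,
	// then adds the raw bits.
	let sum=one_point_five+(rhs<<F);
	assert_eq!(sum,7i64<<(F-1));     // 3.5
}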

View File

@@ -229,16 +229,3 @@ fn test_zeroes_deferred_division(){
])
);
}
#[test]
fn test_debug(){
assert_eq!(format!("{:?}",I32F32::EPSILON),"0.00000001");
assert_eq!(format!("{:?}",I32F32::ONE),"1.00000000");
assert_eq!(format!("{:?}",I32F32::TWO),"2.00000000");
assert_eq!(format!("{:?}",I32F32::MAX),"7fffffff.ffffffff");
assert_eq!(format!("{:?}",I32F32::try_from(core::f64::consts::PI).unwrap()),"3.243f6a88");
assert_eq!(format!("{:?}",I32F32::NEG_EPSILON),"-0.00000001");
assert_eq!(format!("{:?}",I32F32::NEG_ONE),"-1.00000000");
assert_eq!(format!("{:?}",I32F32::NEG_TWO),"-2.00000000");
assert_eq!(format!("{:?}",I32F32::MIN),"-80000000.00000000");
}

View File

@@ -20,6 +20,3 @@ paste = { version = "1.0.15", optional = true }
[dev-dependencies]
fixed_wide = { path = "../fixed_wide", registry = "strafesnet", features = ["wide-mul"] }
[lints]
workspace = true

View File

@@ -205,8 +205,7 @@ macro_rules! impl_matrix_named_fields_shape {
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr:*const [[T;$size_inner];$size_outer]=&self.array;
let ptr=ptr as *const Self::Target;
let ptr=&self.array as *const [[T;$size_inner];$size_outer] as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
}
@@ -215,8 +214,7 @@ macro_rules! impl_matrix_named_fields_shape {
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Matrix has #[repr(transparent)]
let ptr:*mut [[T;$size_inner];$size_outer]=&mut self.array;
let ptr=ptr as *mut Self::Target;
let ptr=&mut self.array as *mut [[T;$size_inner];$size_outer] as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
}

View File

@@ -331,8 +331,7 @@ macro_rules! impl_vector_named_fields {
#[inline]
fn deref(&self)->&Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr:*const [T;$size]=&self.array;
let ptr=ptr as *const Self::Target;
let ptr=&self.array as *const [T;$size] as *const Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&*ptr}
}
@@ -341,8 +340,7 @@ macro_rules! impl_vector_named_fields {
#[inline]
fn deref_mut(&mut self)->&mut Self::Target{
// This cast is valid because Vector has #[repr(transparent)]
let ptr:*mut [T;$size]=&mut self.array;
let ptr=ptr as *mut Self::Target;
let ptr=&mut self.array as *mut [T;$size] as *mut Self::Target;
// SAFETY: this pointer is non-null because it comes from a reference
unsafe{&mut*ptr}
}
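For context, a self-contained sketch of the cast pattern on hypothetical Wrapper/Fields types; only the two-step pointer cast mirrors the change above, everything else is assumed.

#[repr(transparent)]
struct Wrapper{
	array:[f32;3],
}
#[repr(C)]
struct Fields{
	x:f32,
	y:f32,
	z:f32,
}
impl core::ops::Deref for Wrapper{
	type Target=Fields;
	fn deref(&self)->&Fields{
		// This cast is valid because Fields is #[repr(C)] with the same layout as [f32;3].
		let ptr:*const [f32;3]=&self.array;
		let ptr=ptr as *const Fields;
		// SAFETY: this pointer is non-null because it comes from a reference
		unsafe{&*ptr}
	}
}

fn main(){
	let w=Wrapper{array:[1.0,2.0,3.0]};
	assert_eq!(w.y,2.0);
}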

View File

@@ -8,6 +8,3 @@ description = "Ratio operations using trait bounds for avoiding division like th
authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "strafesnet_rbx_loader"
version = "0.7.0"
version = "0.6.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -12,17 +12,13 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
bytemuck = "1.14.3"
glam = "0.30.0"
regex = { version = "1.11.3", default-features = false }
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_mesh = "0.5.0"
rbx_reflection = "5.0.0"
rbx_reflection_database = "1.0.0"
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
lazy-regex = "3.1.0"
rbx_mesh = "0.3.1"
rbx_binary = { path = "../../../../git/rbx-dom/rbx_binary", registry = "strafesnet" }
rbx_dom_weak = { path = "../../../../git/rbx-dom/rbx_dom_weak", registry = "strafesnet" }
rbx_reflection_database = { path = "../../../../git/rbx-dom/rbx_reflection_database"}
rbx_xml = { path = "../../../../git/rbx-dom/rbx_xml", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
roblox_emulator = { version = "0.5.1", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../deferred_loader", registry = "strafesnet" }
[lints]
workspace = true
roblox_emulator = { version = "0.5.0", path = "../roblox_emulator", default-features = false, registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }

View File

@@ -1,245 +0,0 @@
use std::collections::HashSet;
use std::num::ParseIntError;
use strafesnet_common::gameplay_modes::{StageId,ModeId};
use strafesnet_common::integer::{FixedFromFloatError,Planar64TryFromFloatError};
/// A collection of errors which can be ignored at your peril
#[derive(Debug,Default)]
pub struct RecoverableErrors{
/// A basepart has an invalid / missing property.
pub basepart_property:Vec<InstancePath>,
/// A part has an unconvertible CFrame.
pub basepart_cframe:Vec<CFrameError>,
/// A part has an unconvertible Velocity.
pub basepart_velocity:Vec<Planar64ConvertError>,
/// A part has an invalid / missing property.
pub part_property:Vec<InstancePath>,
/// A part has an invalid shape.
pub part_shape:Vec<ShapeError>,
/// A meshpart has an invalid / missing property.
pub meshpart_property:Vec<InstancePath>,
/// A meshpart has no mesh.
pub meshpart_content:Vec<InstancePath>,
/// A basepart has an unsupported subclass.
pub unsupported_class:HashSet<String>,
/// A decal has an invalid / missing property.
pub decal_property:Vec<InstancePath>,
/// A decal has an invalid normal_id.
pub normal_id:Vec<NormalIdError>,
/// A texture has an invalid / missing property.
pub texture_property:Vec<InstancePath>,
/// A mode_id failed to parse.
pub mode_id_parse_int:Vec<ParseIntContext>,
/// There is a duplicate mode.
pub duplicate_mode:HashSet<ModeId>,
/// A mode_id failed to parse.
pub stage_id_parse_int:Vec<ParseIntContext>,
/// A Stage was duplicated leading to undefined behaviour.
pub duplicate_stage:HashSet<DuplicateStageError>,
/// A WormholeOut id failed to parse.
pub wormhole_out_id_parse_int:Vec<ParseIntContext>,
/// A WormholeOut was duplicated leading to undefined behaviour.
pub duplicate_wormhole_out:HashSet<u32>,
/// A WormholeIn id failed to parse.
pub wormhole_in_id_parse_int:Vec<ParseIntContext>,
/// A jump limit failed to parse.
pub jump_limit_parse_int:Vec<ParseIntContext>,
}
impl RecoverableErrors{
pub fn count(&self)->usize{
self.basepart_property.len()+
self.basepart_cframe.len()+
self.basepart_velocity.len()+
self.part_property.len()+
self.part_shape.len()+
self.meshpart_property.len()+
self.meshpart_content.len()+
self.unsupported_class.len()+
self.decal_property.len()+
self.normal_id.len()+
self.texture_property.len()+
self.mode_id_parse_int.len()+
self.duplicate_mode.len()+
self.stage_id_parse_int.len()+
self.duplicate_stage.len()+
self.wormhole_out_id_parse_int.len()+
self.duplicate_wormhole_out.len()+
self.wormhole_in_id_parse_int.len()+
self.jump_limit_parse_int.len()
}
}
fn write_comma_separated<T>(
f:&mut std::fmt::Formatter<'_>,
mut it:impl Iterator<Item=T>,
custom_write:impl Fn(&mut std::fmt::Formatter<'_>,T)->std::fmt::Result
)->std::fmt::Result{
if let Some(t)=it.next(){
custom_write(f,t)?;
for t in it{
write!(f,", ")?;
custom_write(f,t)?;
}
}
Ok(())
}
macro_rules! write_instance_path_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,InstancePath(path)|
write!(f,"{path}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_duplicate_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} duplicates: ")?;
write_comma_separated($f,$self.$field.iter(),|f,id|
write!(f,"{id}")
)?;
writeln!($f)?;
}
};
}
macro_rules! write_bespoke_error{
($f:ident,$self:ident,$field:ident,$class:literal,$class_plural:literal,$problem:literal,$path_field:ident,$error_field:ident)=>{
let len=$self.$field.len();
if len!=0{
let plural=if len==1{$class}else{$class_plural};
write!($f,"The following {plural} {}: ",$problem)?;
write_comma_separated($f,$self.$field.iter(),|f,context|
write!(f,"{} ({})",context.$path_field,context.$error_field)
)?;
writeln!($f)?;
}
};
}
impl core::fmt::Display for RecoverableErrors{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write_instance_path_error!(f,self,basepart_property,"BasePart is","BaseParts are","missing a property");
write_bespoke_error!(f,self,basepart_cframe,"BasePart","BaseParts","CFrame float convert failed",path,error);
write_bespoke_error!(f,self,basepart_velocity,"BasePart","BaseParts","Velocity float convert failed",path,error);
write_instance_path_error!(f,self,part_property,"Part is","Parts are","missing a property");
write_bespoke_error!(f,self,part_shape,"Part","Parts","Shape is invalid",path,shape);
write_instance_path_error!(f,self,meshpart_property,"MeshPart is","MeshParts are","missing a property");
write_instance_path_error!(f,self,meshpart_content,"MeshPart has","MeshParts have","no mesh");
{
let len=self.unsupported_class.len();
if len!=0{
let plural=if len==1{"Class is"}else{"Classes are"};
write!(f,"The following {plural} not supported: ")?;
write_comma_separated(f,self.unsupported_class.iter(),|f,classname|write!(f,"{classname}"))?;
writeln!(f)?;
}
}
write_instance_path_error!(f,self,decal_property,"Decal is","Decals are","missing a property");
write_bespoke_error!(f,self,normal_id,"Decal","Decals","NormalId is invalid",path,normal_id);
write_instance_path_error!(f,self,texture_property,"Texture is","Textures are","missing a property");
write_bespoke_error!(f,self,mode_id_parse_int,"ModeId","ModeIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_mode,"ModeId has","ModeIds have");
write_bespoke_error!(f,self,stage_id_parse_int,"StageId","StageIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_stage,"StageId has","StageIds have");
write_bespoke_error!(f,self,wormhole_out_id_parse_int,"WormholeOutId","WormholeOutIds","failed to parse",context,error);
write_duplicate_error!(f,self,duplicate_wormhole_out,"WormholeOutId has","WormholeOutIds have");
write_bespoke_error!(f,self,wormhole_in_id_parse_int,"WormholeInId","WormholeInIds","failed to parse",context,error);
write_bespoke_error!(f,self,jump_limit_parse_int,"jump limit","jump limits","failed to parse",context,error);
Ok(())
}
}
/// A Decal was missing required properties
#[derive(Debug)]
pub struct InstancePath(pub String);
impl core::fmt::Display for InstancePath{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
self.0.fmt(f)
}
}
impl InstancePath{
pub fn new(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->InstancePath{
let mut names:Vec<_>=core::iter::successors(
Some(instance),
|i|dom.get_by_ref(i.parent())
).map(
|i|i.name.as_str()
).collect();
// discard the name of the root object
names.pop();
names.reverse();
InstancePath(names.join("."))
}
}
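A hedged stand-alone sketch of the same successors-based ancestor walk on a plain parent-index table (all names here are hypothetical):

fn path_from(names:&[&str],parents:&[Option<usize>],start:usize)->String{
	let mut chain:Vec<&str>=core::iter::successors(
		Some(start),
		|&i|parents[i]
	).map(|i|names[i]).collect();
	// discard the name of the root object
	chain.pop();
	chain.reverse();
	chain.join(".")
}

fn main(){
	let names=["game","Workspace","Spawn1"];
	let parents=[None,Some(0),Some(1)];
	assert_eq!(path_from(&names,&parents,2),"Workspace.Spawn1");
}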
#[derive(Debug)]
pub struct ParseIntContext{
pub context:String,
pub error:ParseIntError,
}
impl ParseIntContext{
pub fn parse<T:core::str::FromStr<Err=ParseIntError>>(input:&str)->Result<T,Self>{
input.parse().map_err(|error|ParseIntContext{
context:input.to_owned(),
error,
})
}
}
#[derive(Debug)]
pub struct NormalIdError{
pub path:InstancePath,
pub normal_id:u32,
}
#[derive(Debug)]
pub struct ShapeError{
pub path:InstancePath,
pub shape:u32,
}
#[derive(Debug)]
pub enum CFrameErrorType{
ZeroDeterminant,
Convert(FixedFromFloatError),
}
impl core::fmt::Display for CFrameErrorType{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
#[derive(Debug)]
pub struct CFrameError{
pub path:InstancePath,
pub error:CFrameErrorType,
}
#[derive(Debug)]
pub struct Planar64ConvertError{
pub path:InstancePath,
pub error:Planar64TryFromFloatError,
}
#[derive(Debug,Hash,Eq,PartialEq)]
pub struct DuplicateStageError{
pub mode_id:ModeId,
pub stage_id:StageId,
}
impl core::fmt::Display for DuplicateStageError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{}-Spawn{}",self.mode_id,self.stage_id.get())
}
}

View File

@@ -1,18 +1,13 @@
use std::io::Read;
use rbx_dom_weak::WeakDom;
use roblox_emulator::context::Context;
use strafesnet_common::map::CompleteMap;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
pub use error::RecoverableErrors;
pub use roblox_emulator::runner::Error as RunnerError;
mod rbx;
mod mesh;
mod error;
mod union;
pub mod loader;
pub mod primitives;
mod primitives;
pub mod data{
pub struct RobloxMeshBytes(Vec<u8>);
@@ -26,19 +21,19 @@ pub mod data{
}
}
pub struct Model{
dom:WeakDom,
pub struct Model<'a>{
dom:WeakDom<'a>,
}
impl Model{
fn new(dom:WeakDom)->Self{
impl<'a> Model<'a>{
fn new(dom:WeakDom<'a>)->Self{
Self{dom}
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
to_snf(self,failure_mode)
}
}
impl AsRef<WeakDom> for Model{
fn as_ref(&self)->&WeakDom{
impl<'a> AsRef<WeakDom<'a>> for Model<'a>{
fn as_ref(&self)->&WeakDom<'a>{
&self.dom
}
}
@@ -53,20 +48,18 @@ impl Place{
context,
})
}
pub fn run_scripts(&mut self)->Result<Vec<RunnerError>,RunnerError>{
pub fn run_scripts(&mut self){
let Place{context}=self;
let runner=roblox_emulator::runner::Runner::new()?;
let runner=roblox_emulator::runner::Runner::new().unwrap();
let scripts=context.scripts();
let runnable=runner.runnable_context(context)?;
let mut errors=Vec::new();
let runnable=runner.runnable_context(context).unwrap();
for script in scripts{
if let Err(e)=runnable.run_script(script){
errors.push(e);
println!("runner error: {e}");
}
}
Ok(errors)
}
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
to_snf(self,failure_mode)
}
}
@@ -102,7 +95,7 @@ pub fn read<R:Read>(input:R)->Result<Model,ReadError>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf).map_err(ReadError::Io)?;
match peek.get(0..8){
Some(b"<roblox!")=>rbx_binary::from_reader(buf).map(Model::new).map_err(ReadError::RbxBinary),
Some(b"<roblox!")=>rbx_binary::from_reader_default(buf).map(Model::new).map_err(ReadError::RbxBinary),
Some(b"<roblox ")=>rbx_xml::from_reader_default(buf).map(Model::new).map_err(ReadError::RbxXml),
_=>Err(ReadError::UnknownFileFormat),
}
@@ -130,7 +123,7 @@ impl From<loader::MeshError> for LoadError{
}
}
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<(CompleteMap,RecoverableErrors),LoadError>{
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
let dom=dom.as_ref();
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
@@ -150,5 +143,7 @@ fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<(Complet
let mut texture_loader=loader::TextureLoader::new();
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;
Ok(map_step2.add_render_configs_and_textures(render_configs))
let map=map_step2.add_render_configs_and_textures(render_configs);
Ok(map)
}

View File

@@ -6,10 +6,7 @@ use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
use crate::data::RobloxMeshBytes;
use crate::rbx::RobloxPartDescription;
// disallow non-static lifetimes
fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
}
use rbx_dom_weak::hstr;
fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::Error>{
let mut file=std::fs::File::open(path)?;
@@ -18,6 +15,7 @@ fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::E
Ok(data)
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum TextureError{
Io(std::io::Error),
@@ -50,7 +48,7 @@ impl Loader for TextureLoader{
type Error=TextureError;
type Index<'a>=&'a str;
type Resource=Texture;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
let RobloxAssetId(asset_id)=index.parse()?;
let file_name=format!("textures/{}.dds",asset_id);
let data=read_entire_file(file_name)?;
@@ -58,6 +56,7 @@ impl Loader for TextureLoader{
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum MeshError{
Io(std::io::Error),
@@ -116,7 +115,7 @@ pub struct MeshIndex<'a>{
content:&'a str,
}
impl MeshIndex<'_>{
pub fn file_mesh(content:&str)->MeshIndex<'_>{
pub fn file_mesh(content:&str)->MeshIndex{
MeshIndex{
mesh_type:MeshType::FileMesh,
content,
@@ -141,12 +140,6 @@ impl MeshIndex<'_>{
}
}
#[derive(Clone)]
pub struct MeshWithSize{
pub(crate) mesh:Mesh,
pub(crate) size:strafesnet_common::integer::Planar64Vec3,
}
pub struct MeshLoader;
impl MeshLoader{
pub fn new()->Self{
@@ -156,8 +149,8 @@ impl MeshLoader{
impl Loader for MeshLoader{
type Error=MeshError;
type Index<'a>=MeshIndex<'a>;
type Resource=MeshWithSize;
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
type Resource=Mesh;
fn load<'a>(&mut self,index:Self::Index<'a>)->Result<Self::Resource,Self::Error>{
let mesh=match index.mesh_type{
MeshType::FileMesh=>{
let RobloxAssetId(asset_id)=index.content.parse()?;
@@ -172,7 +165,7 @@ impl Loader for MeshLoader{
let RobloxAssetId(asset_id)=index.content.parse()?;
let file_name=format!("unions/{}",asset_id);
let data=read_entire_file(file_name)?;
let dom=rbx_binary::from_reader(std::io::Cursor::new(data))?;
let dom=rbx_binary::from_reader_default(data.as_slice())?;
let &[referent]=dom.root().children()else{
return Err(MeshError::OneChildPolicy);
};
@@ -180,12 +173,12 @@ impl Loader for MeshLoader{
return Err(MeshError::MissingInstance);
};
if physics_data.is_empty(){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(&static_ustr("PhysicsData")){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(hstr!("PhysicsData")){
physics_data=data.as_ref();
}
}
if mesh_data.is_empty(){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(&static_ustr("MeshData")){
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(hstr!("MeshData")){
mesh_data=data.as_ref();
}
}

View File

@@ -1,14 +1,12 @@
use std::collections::HashMap;
use rbx_mesh::mesh::{Vertex2,Vertex2Truncated};
use strafesnet_common::aabb::Aabb;
use strafesnet_common::integer::vec3;
use strafesnet_common::model::{self,ColorId,IndexedVertex,PolygonGroup,PolygonList,RenderConfigId,VertexId};
use crate::loader::MeshWithSize;
use strafesnet_common::{integer::vec3,model::{self,ColorId,IndexedVertex,NormalId,PolygonGroup,PolygonList,PositionId,RenderConfigId,TextureCoordinateId,VertexId}};
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
RbxMesh(rbx_mesh::mesh::Error)
}
impl std::fmt::Display for Error{
@@ -18,46 +16,69 @@ impl std::fmt::Display for Error{
}
impl std::error::Error for Error{}
fn ingest_vertices2(
fn ingest_vertices2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireColorId,
AcquireVertexId,
>(
vertices:Vec<Vertex2>,
mb:&mut model::MeshBuilder,
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
acquire_color_id:&mut AcquireColorId,
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireColorId:FnMut([f32;4])->ColorId,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
//while also inserting unique entries into the lists simultaneously
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32))),
};
mb.acquire_vertex_id(vertex)
}
))).collect()
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:acquire_color_id(vertex.color.map(|f|f as f32/255.0f32))
}),
))).collect::<Result<_,_>>()?)
}
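The "old index -> unique index" bookkeeping described in the comment above is the usual HashMap entry dedup pattern; a hedged, self-contained sketch:

use std::collections::HashMap;

// Deduplicate values while recording, for every input position,
// the index of its unique entry.
fn dedup_with_map(input:Vec<u32>)->(Vec<u32>,Vec<usize>){
	let mut unique=Vec::new();
	let mut id_from=HashMap::new();
	let map=input.into_iter().map(|v|
		*id_from.entry(v).or_insert_with(||{
			let id=unique.len();
			unique.push(v);
			id
		})
	).collect();
	(unique,map)
}

fn main(){
	let (unique,map)=dedup_with_map(vec![5,7,5]);
	assert_eq!(unique,vec![5,7]);
	assert_eq!(map,vec![0,1,0]);
}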
fn ingest_vertices_truncated2(
fn ingest_vertices_truncated2<
AcquirePosId,
AcquireTexId,
AcquireNormalId,
AcquireVertexId,
>(
vertices:Vec<Vertex2Truncated>,
mb:&mut model::MeshBuilder,
acquire_pos_id:&mut AcquirePosId,
acquire_tex_id:&mut AcquireTexId,
acquire_normal_id:&mut AcquireNormalId,
static_color_id:ColorId,//pick one color and fill everything with it
)->HashMap<rbx_mesh::mesh::VertexId2,VertexId>{
acquire_vertex_id:&mut AcquireVertexId,
)->Result<HashMap<rbx_mesh::mesh::VertexId2,VertexId>,Error>
where
AcquirePosId:FnMut([f32;3])->Result<PositionId,Error>,
AcquireTexId:FnMut([f32;2])->TextureCoordinateId,
AcquireNormalId:FnMut([f32;3])->Result<NormalId,Error>,
AcquireVertexId:FnMut(IndexedVertex)->VertexId,
{
//this monster is collecting a map of old_vertices_index -> unique_vertices_index
//while also inserting unique entries into the lists simultaneously
// vertex positions that fail to convert are DROPPED
vertices.into_iter().enumerate().filter_map(|(vertex_id,vertex)|Some((
Ok(vertices.into_iter().enumerate().map(|(vertex_id,vertex)|Ok((
rbx_mesh::mesh::VertexId2(vertex_id as u32),
{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::Vec2::from_array(vertex.tex)),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:static_color_id,
};
mb.acquire_vertex_id(vertex)
}
))).collect()
acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id(vertex.tex),
normal:acquire_normal_id(vertex.norm)?,
color:static_color_id
}),
))).collect::<Result<_,_>>()?)
}
fn ingest_faces2_lods3(
@@ -68,80 +89,129 @@ fn ingest_faces2_lods3(
){
//faces have to be split into polygon groups based on lod
polygon_groups.extend(lods.windows(2).map(|lod_pair|
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().filter_map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
Some(vec![*vertex_id_map.get(&v0)?,*vertex_id_map.get(&v1)?,*vertex_id_map.get(&v2)?])
PolygonGroup::PolygonList(PolygonList::new(faces[lod_pair[0].0 as usize..lod_pair[1].0 as usize].iter().map(|rbx_mesh::mesh::Face2(v0,v1,v2)|
vec![vertex_id_map[&v0],vertex_id_map[&v1],vertex_id_map[&v2]]
).collect()))
))
}
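A hedged sketch of the windows(2) trick used above: consecutive LOD offsets delimit the half-open face range that belongs to each level of detail (the data below is made up):

fn split_by_lods<'a>(faces:&'a [u32],lod_offsets:&[usize])->Vec<&'a [u32]>{
	// Each adjacent pair of offsets is the [start,end) range of one LOD.
	lod_offsets.windows(2).map(|pair|&faces[pair[0]..pair[1]]).collect()
}

fn main(){
	let faces=[10,11,12,13,14,15];
	// Offsets: LOD0 covers faces 0..4, LOD1 covers 4..6.
	let lods=[0,4,6];
	let split=split_by_lods(&faces,&lods);
	assert_eq!(split,vec![&faces[0..4],&faces[4..6]]);
}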
pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<MeshWithSize,Error>{
pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<model::Mesh,Error>{
//generate that mesh boi
let mut unique_pos=Vec::new();
let mut pos_id_from=HashMap::new();
let mut unique_tex=Vec::new();
let mut tex_id_from=HashMap::new();
let mut unique_normal=Vec::new();
let mut normal_id_from=HashMap::new();
let mut unique_color=Vec::new();
let mut color_id_from=HashMap::new();
let mut unique_vertices=Vec::new();
let mut vertex_id_from=HashMap::new();
let mut polygon_groups=Vec::new();
let mut mb=model::MeshBuilder::new();
let mut acquire_pos_id=|pos|{
let p=vec3::try_from_f32_array(pos).map_err(Error::Planar64Vec3)?;
Ok(PositionId::new(*pos_id_from.entry(p).or_insert_with(||{
let pos_id=unique_pos.len();
unique_pos.push(p);
pos_id
}) as u32))
};
let mut acquire_tex_id=|tex|{
let h=bytemuck::cast::<[f32;2],[u32;2]>(tex);
TextureCoordinateId::new(*tex_id_from.entry(h).or_insert_with(||{
let tex_id=unique_tex.len();
unique_tex.push(glam::Vec2::from_array(tex));
tex_id
}) as u32)
};
let mut acquire_normal_id=|normal|{
let n=vec3::try_from_f32_array(normal).map_err(Error::Planar64Vec3)?;
Ok(NormalId::new(*normal_id_from.entry(n).or_insert_with(||{
let normal_id=unique_normal.len();
unique_normal.push(n);
normal_id
}) as u32))
};
let mut acquire_color_id=|color|{
let h=bytemuck::cast::<[f32;4],[u32;4]>(color);
ColorId::new(*color_id_from.entry(h).or_insert_with(||{
let color_id=unique_color.len();
unique_color.push(glam::Vec4::from_array(color));
color_id
}) as u32)
};
let mut acquire_vertex_id=|vertex:IndexedVertex|{
VertexId::new(*vertex_id_from.entry(vertex.clone()).or_insert_with(||{
let vertex_id=unique_vertices.len();
unique_vertices.push(vertex);
vertex_id
}) as u32)
};
match rbx_mesh::read_versioned(roblox_mesh_bytes.cursor()).map_err(Error::RbxMesh)?{
rbx_mesh::mesh::Mesh::V1(mesh)=>{
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).filter_map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|{
let vertex=IndexedVertex{
pos:mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos).ok()?),
tex:mb.acquire_tex_id(glam::vec2(vertex.tex[0],vertex.tex[1])),
normal:mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm).ok()?),
color:color_id,
};
Some(mb.acquire_vertex_id(vertex))
};
Some(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect())));
rbx_mesh::mesh::VersionedMesh::Version1(mesh)=>{
let color_id=acquire_color_id([1.0f32;4]);
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.vertices.chunks_exact(3).map(|trip|{
let mut ingest_vertex1=|vertex:&rbx_mesh::mesh::Vertex1|Ok(acquire_vertex_id(IndexedVertex{
pos:acquire_pos_id(vertex.pos)?,
tex:acquire_tex_id([vertex.tex[0],vertex.tex[1]]),
normal:acquire_normal_id(vertex.norm)?,
color:color_id,
}));
Ok(vec![ingest_vertex1(&trip[0])?,ingest_vertex1(&trip[1])?,ingest_vertex1(&trip[2])?])
}).collect::<Result<_,_>>()?)));
},
rbx_mesh::mesh::Mesh::V2(mesh)=>{
rbx_mesh::mesh::VersionedMesh::Version2(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
//pick white and make all the vertices white
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
//one big happy group for all the faces
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().filter_map(|face|
Some(vec![*vertex_id_map.get(&face.0)?,*vertex_id_map.get(&face.1)?,*vertex_id_map.get(&face.2)?])
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|face|
vec![vertex_id_map[&face.0],vertex_id_map[&face.1],vertex_id_map[&face.2]]
).collect())));
},
rbx_mesh::mesh::Mesh::V3(mesh)=>{
rbx_mesh::mesh::VersionedMesh::Version3(mesh)=>{
let vertex_id_map=match mesh.header.sizeof_vertex{
rbx_mesh::mesh::SizeOfVertex2::Truncated=>{
let color_id=mb.acquire_color_id(glam::Vec4::ONE);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut mb,color_id)
let color_id=acquire_color_id([1.0f32;4]);
ingest_vertices_truncated2(mesh.vertices_truncated,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,color_id,&mut acquire_vertex_id)
},
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut mb),
};
rbx_mesh::mesh::SizeOfVertex2::Full=>ingest_vertices2(mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id),
}?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::Mesh::V4(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
rbx_mesh::mesh::VersionedMesh::Version4(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
rbx_mesh::mesh::Mesh::V5(mesh)=>{
let vertex_id_map=ingest_vertices2(mesh.vertices,&mut mb);
rbx_mesh::mesh::VersionedMesh::Version5(mesh)=>{
let vertex_id_map=ingest_vertices2(
mesh.vertices,&mut acquire_pos_id,&mut acquire_tex_id,&mut acquire_normal_id,&mut acquire_color_id,&mut acquire_vertex_id
)?;
ingest_faces2_lods3(&mut polygon_groups,&vertex_id_map,&mesh.faces,&mesh.lods);
},
}
let mesh=mb.build(
Ok(model::Mesh{
unique_pos,
unique_normal,
unique_tex,
unique_color,
unique_vertices,
polygon_groups,
//these should probably be moved to the model...
//but what if models want to use the same texture
vec![model::IndexedGraphicsGroup{
graphics_groups:vec![model::IndexedGraphicsGroup{
render:RenderConfigId::new(0),
//the lowest lod is highest quality
groups:vec![model::PolygonGroupId::new(0)]
}],
//disable physics
Vec::new(),
);
let mut aabb=Aabb::default();
for &point in &mesh.unique_pos{
aabb.grow(point);
}
Ok(MeshWithSize{mesh,size:aabb.size()})
physics_groups:Vec::new(),
})
}

View File

@@ -56,7 +56,7 @@ const CUBE_DEFAULT_TEXTURE_COORDS:[TextureCoordinate;4]=[
TextureCoordinate::new(1.0,1.0),
TextureCoordinate::new(0.0,1.0),
];
pub const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
vec3::int(-1,-1, 1),//0 left bottom back
vec3::int( 1,-1, 1),//1 right bottom back
vec3::int( 1, 1, 1),//2 right top back
@@ -66,7 +66,7 @@ pub const CUBE_DEFAULT_VERTICES:[Planar64Vec3;8]=[
vec3::int( 1,-1,-1),//6 right bottom front
vec3::int(-1,-1,-1),//7 left bottom front
];
pub const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
vec3::int( 1, 0, 0),//CubeFace::Right
vec3::int( 0, 1, 0),//CubeFace::Top
vec3::int( 0, 0, 1),//CubeFace::Back
@@ -121,7 +121,8 @@ impl FaceDescription{
}
}
}
pub const CUBE_DEFAULT_POLYS:[[[u32;2];4];6]=[
pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Mesh{
const CUBE_DEFAULT_POLYS:[[[u32;2];4];6]=[
// right (1, 0, 0)
[
[6,2],//[vertex,tex]
@@ -165,7 +166,6 @@ pub const CUBE_DEFAULT_POLYS:[[[u32;2];4];6]=[
[7,2],
],
];
pub fn unit_cube(CubeFaceDescription(face_descriptions):CubeFaceDescription)->Mesh{
let mut generated_pos=Vec::new();
let mut generated_tex=Vec::new();
let mut generated_normal=Vec::new();

View File

@@ -1,76 +1,68 @@
use std::collections::HashMap;
use crate::error::{RecoverableErrors,CFrameError,CFrameErrorType,DuplicateStageError,InstancePath,NormalIdError,Planar64ConvertError,ParseIntContext,ShapeError};
use crate::loader::{MeshWithSize,MeshIndex};
use crate::loader::MeshIndex;
use crate::primitives::{self,CubeFace,CubeFaceDescription,WedgeFaceDescription,CornerWedgeFaceDescription,FaceDescription,Primitives};
use strafesnet_common::aabb::Aabb;
use strafesnet_common::map;
use strafesnet_common::model;
use strafesnet_common::gameplay_modes::{NormalizedModes,Mode,ModeId,ModeUpdate,ModesBuilder,Stage,StageElement,StageElementBehaviour,StageId,Zone};
use strafesnet_common::gameplay_style;
use strafesnet_common::gameplay_attributes as attr;
use strafesnet_common::integer::{self,vec3,Planar64TryFromFloatError,Planar64,Planar64Vec3,Planar64Mat3,Planar64Affine3};
use strafesnet_common::integer::{self,vec3,Planar64,Planar64Vec3,Planar64Mat3,Planar64Affine3};
use strafesnet_common::model::RenderConfigId;
use strafesnet_deferred_loader::deferred_loader::{RenderConfigDeferredLoader,MeshDeferredLoader};
use strafesnet_deferred_loader::mesh::Meshes;
use strafesnet_deferred_loader::texture::{RenderConfigs,Texture};
// disallow non-static lifetimes
fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
use rbx_dom_weak::{hstr,HashStr};
fn recursive_collect_superclass(
objects:&mut std::vec::Vec<rbx_dom_weak::types::Ref>,
dom:&rbx_dom_weak::WeakDom,
instance:&rbx_dom_weak::Instance,
superclass:&HashStr,
){
let instance=instance;
let db=rbx_reflection_database::get();
let Some(superclass)=db.classes.get(superclass)else{
return;
};
objects.extend(
dom.descendants_of(instance.referent()).filter_map(|instance|{
let class=db.classes.get(instance.class)?;
db.has_superclass(class,superclass).then(||instance.referent())
})
);
}
macro_rules! lazy_regex{
($r:literal)=>{{
use regex::Regex;
use std::sync::LazyLock;
static RE:LazyLock<Regex>=LazyLock::new(||Regex::new($r).unwrap());
&RE
}};
}
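A hedged usage sketch of the pattern this macro expands to: one statically stored Regex per call site, compiled on first use (the pattern string here is only an example):

use regex::Regex;
use std::sync::LazyLock;

fn is_bonus_start(name:&str)->bool{
	// Compiled on the first call, reused afterwards.
	static RE:LazyLock<Regex>=LazyLock::new(||Regex::new(r"^BonusStart(\d+)$").unwrap());
	RE.is_match(name)
}

fn main(){
	assert!(is_bonus_start("BonusStart2"));
	assert!(!is_bonus_start("Spawn1"));
}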
fn planar64_affine3_from_roblox(cf:&rbx_dom_weak::types::CFrame,size:&rbx_dom_weak::types::Vector3)->Result<Planar64Affine3,Planar64TryFromFloatError>{
Ok(Planar64Affine3::new(
fn planar64_affine3_from_roblox(cf:&rbx_dom_weak::types::CFrame,size:&rbx_dom_weak::types::Vector3)->Planar64Affine3{
Planar64Affine3::new(
Planar64Mat3::from_cols([
(vec3::try_from_f32_array([cf.orientation.x.x,cf.orientation.y.x,cf.orientation.z.x])?
*integer::try_from_f32(size.x/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
(vec3::try_from_f32_array([cf.orientation.x.y,cf.orientation.y.y,cf.orientation.z.y])?
*integer::try_from_f32(size.y/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
(vec3::try_from_f32_array([cf.orientation.x.z,cf.orientation.y.z,cf.orientation.z.z])?
*integer::try_from_f32(size.z/2.0)?).narrow_1().unwrap(),//.map_err(Planar64ConvertError::Narrow)?
]),
vec3::try_from_f32_array([cf.position.x,cf.position.y,cf.position.z])?
))
vec3::try_from_f32_array([cf.orientation.x.x,cf.orientation.y.x,cf.orientation.z.x]).unwrap()
*integer::try_from_f32(size.x/2.0).unwrap(),
vec3::try_from_f32_array([cf.orientation.x.y,cf.orientation.y.y,cf.orientation.z.y]).unwrap()
*integer::try_from_f32(size.y/2.0).unwrap(),
vec3::try_from_f32_array([cf.orientation.x.z,cf.orientation.y.z,cf.orientation.z.z]).unwrap()
*integer::try_from_f32(size.z/2.0).unwrap(),
].map(|t|t.narrow_1().unwrap())),
vec3::try_from_f32_array([cf.position.x,cf.position.y,cf.position.z]).unwrap()
)
}
enum GetAttributesError{
ModeIdParseInt(ParseIntContext),
DuplicateMode(ModeId),
StageIdParseInt(ParseIntContext),
DuplicateStage(DuplicateStageError),
WormholeOutIdParseInt(ParseIntContext),
DuplicateWormholeOut(u32),
WormholeInIdParseInt(ParseIntContext),
JumpLimitParseInt(ParseIntContext),
}
fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:model::ModelId,modes_builder:&mut ModesBuilder,wormhole_in_model_to_id:&mut HashMap<model::ModelId,u32>,wormhole_id_to_out_model:&mut HashMap<u32,model::ModelId>)->Result<attr::CollisionAttributes,GetAttributesError>{
fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:model::ModelId,modes_builder:&mut ModesBuilder,wormhole_in_model_to_id:&mut HashMap<model::ModelId,u32>,wormhole_id_to_out_model:&mut HashMap<u32,model::ModelId>)->attr::CollisionAttributes{
let mut general=attr::GeneralAttributes::default();
let mut intersecting=attr::IntersectingAttributes::default();
let mut contacting=attr::ContactingAttributes::default();
let mut force_can_collide=can_collide;
let mut force_intersecting=false;
let mut allow_booster=true;
match name{
"Water"=>{
force_can_collide=false;
allow_booster=false;
//TODO: read stupid CustomPhysicalProperties
intersecting.water=Some(attr::IntersectingWater{density:Planar64::ONE,viscosity:Planar64::ONE/10,velocity});
},
"Accelerator"=>{
//although the new game supports collidable accelerators, this is a roblox compatibility map loader
force_can_collide=false;
// Accelerator is not allowed to be booster in roblox
allow_booster=false;
general.accelerator=Some(attr::Accelerator{acceleration:velocity});
},
// "UnorderedCheckpoint"=>general.teleport_behaviour=Some(model::TeleportBehaviour::StageElement(attr::StageElement{
@@ -79,21 +71,17 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
// force:false,
// behaviour:model::StageElementBehaviour::Unordered
// })),
"SetVelocity"=>{
allow_booster=false;
general.trajectory=Some(attr::SetTrajectory::Velocity(velocity));
},
"SetVelocity"=>general.trajectory=Some(attr::SetTrajectory::Velocity(velocity)),
"MapStart"=>{
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::MAIN;
modes_builder.insert_mode(
mode_id,
ModeId::MAIN,
Mode::empty(
gameplay_style::StyleModifiers::roblox_bhop(),
model_id
)
).map_err(|_|GetAttributesError::DuplicateMode(mode_id))?;
).unwrap();
},
"MapFinish"=>{
force_can_collide=false;
@@ -127,36 +115,32 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
);
},
other=>{
let regman=lazy_regex!(r"^(BonusStart|WormholeOut)(\d+)$");
let regman=lazy_regex::regex!(r"^(BonusStart|WormholeOut)(\d+)$");
if let Some(captures)=regman.captures(other){
match &captures[1]{
"BonusStart"=>{
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::new(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::ModeIdParseInt)?);
modes_builder.insert_mode(
mode_id,
ModeId::new(captures[2].parse::<u32>().unwrap()),
Mode::empty(
gameplay_style::StyleModifiers::roblox_bhop(),
model_id
)
).map_err(|_|GetAttributesError::DuplicateMode(mode_id))?;
).unwrap();
},
"WormholeOut"=>{
//the PhysicsModelId has to exist for it to be teleported to!
force_intersecting=true;
//this object is not special in strafe client, but the roblox mapping needs to be converted to model id
let wormhole_id=ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::WormholeOutIdParseInt)?;
if wormhole_id_to_out_model.insert(wormhole_id,model_id).is_some(){
return Err(GetAttributesError::DuplicateWormholeOut(wormhole_id));
}
assert!(wormhole_id_to_out_model.insert(captures[2].parse::<u32>().unwrap(),model_id).is_none(),"Cannot have multiple WormholeOut with same id");
},
_=>(),
}
}else if let Some(captures)=lazy_regex!(r"^(Force)?(Spawn|SpawnAt|Trigger|Teleport|Platform)(\d+)$")
}else if let Some(captures)=lazy_regex::regex!(r"^(Force)?(Spawn|SpawnAt|Trigger|Teleport|Platform)(\d+)$")
.captures(other){
force_intersecting=true;
let stage_id=StageId::new(ParseIntContext::parse(&captures[3]).map_err(GetAttributesError::StageIdParseInt)?);
let stage_id=StageId::new(captures[3].parse::<u32>().unwrap());
let stage_element=StageElement::new(
//stage_id:
stage_id,
@@ -168,12 +152,11 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
//behaviour:
match &captures[2]{
"Spawn"=>{
let mode_id=ModeId::MAIN;
modes_builder.insert_stage(
mode_id,
ModeId::MAIN,
stage_id,
Stage::empty(model_id),
).map_err(|_|GetAttributesError::DuplicateStage(DuplicateStageError{mode_id,stage_id}))?;
).unwrap();
//TODO: let denormalize handle this
StageElementBehaviour::SpawnAt
},
@@ -183,7 +166,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
"Trigger"=>{force_can_collide=false;StageElementBehaviour::Trigger},
"Teleport"=>{force_can_collide=false;StageElementBehaviour::Teleport},
"Platform"=>StageElementBehaviour::Platform,
_=>unreachable!("regex1[2] messed up bad"),
_=>panic!("regex1[2] messed up bad"),
},
None
);
@@ -194,7 +177,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
stage_element,
),
);
}else if let Some(captures)=lazy_regex!(r"^(Jump|WormholeIn)(\d+)$")
}else if let Some(captures)=lazy_regex::regex!(r"^(Jump|WormholeIn)(\d+)$")
.captures(other){
match &captures[1]{
"Jump"=>modes_builder.push_mode_update(
@@ -206,33 +189,30 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
StageId::FIRST,
false,
StageElementBehaviour::Check,
Some(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::JumpLimitParseInt)?)
Some(captures[2].parse::<u8>().unwrap())
)
),
),
"WormholeIn"=>{
force_can_collide=false;
force_intersecting=true;
let wormhole_id=ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::WormholeInIdParseInt)?;
// It is impossible for two different objects to have the same model id
assert!(wormhole_in_model_to_id.insert(model_id,wormhole_id).is_none(),"Impossible");
assert!(wormhole_in_model_to_id.insert(model_id,captures[2].parse::<u32>().unwrap()).is_none(),"Impossible");
},
_=>unreachable!("regex2[1] messed up bad"),
_=>panic!("regex2[1] messed up bad"),
}
}else if let Some(captures)=lazy_regex!(r"^Bonus(Finish|Anticheat)(\d+)$")
}else if let Some(captures)=lazy_regex::regex!(r"^Bonus(Finish|Anticheat)(\d+)$")
.captures(other){
force_can_collide=false;
force_intersecting=true;
let mode_id=ModeId::new(ParseIntContext::parse(&captures[2]).map_err(GetAttributesError::ModeIdParseInt)?);
modes_builder.push_mode_update(
mode_id,
ModeId::new(captures[2].parse::<u32>().unwrap()),
ModeUpdate::zone(
model_id,
//zone:
match &captures[1]{
"Finish"=>Zone::Finish,
"Anticheat"=>Zone::Anticheat,
_=>unreachable!("regex3[1] messed up bad"),
_=>panic!("regex3[1] messed up bad"),
},
),
);
@@ -251,10 +231,10 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
}
}
//need some way to skip this
if allow_booster&&velocity!=vec3::ZERO{
if velocity!=vec3::ZERO{
general.booster=Some(attr::Booster::Velocity(velocity));
}
Ok(match force_can_collide{
match force_can_collide{
true=>{
match name{
"Bounce"=>contacting.contact_behaviour=Some(attr::ContactingBehaviour::Elastic(u32::MAX)),
@@ -272,7 +252,7 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
}else{
attr::CollisionAttributes::Decoration
},
})
}
}
#[derive(Clone,Copy)]
@@ -344,12 +324,12 @@ pub struct RobloxFaceTextureDescription{
pub color:glam::Vec4,
pub transform:RobloxTextureTransform,
}
impl PartialEq for RobloxFaceTextureDescription{
impl core::cmp::PartialEq for RobloxFaceTextureDescription{
fn eq(&self,other:&Self)->bool{
self.to_bits().eq(&other.to_bits())
}
}
impl Eq for RobloxFaceTextureDescription{}
impl core::cmp::Eq for RobloxFaceTextureDescription{}
impl core::hash::Hash for RobloxFaceTextureDescription{
fn hash<H:core::hash::Hasher>(&self,state:&mut H){
self.to_bits().hash(state);
@@ -418,46 +398,42 @@ fn get_content_url(content:&rbx_dom_weak::types::Content)->Option<&str>{
}
fn get_texture_description<'a>(
temp_objects:&mut Vec<rbx_dom_weak::types::Ref>,
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
recoverable_errors:&mut RecoverableErrors,
db:&rbx_reflection::ReflectionDatabase,
dom:&'a rbx_dom_weak::WeakDom,
object:&rbx_dom_weak::Instance,
size:&rbx_dom_weak::types::Vector3,
)->RobloxPartDescription{
//use the biggest one and cut it down later...
let mut part_texture_description=RobloxPartDescription::default();
let decal=&db.classes["Decal"];
let decals=object.children().iter().filter_map(|&referent|{
let instance=dom.get_by_ref(referent)?;
db.classes.get(instance.class.as_str()).is_some_and(|class|
db.has_superclass(class,decal)
).then_some(instance)
});
for decal in decals{
// decals should always have these properties,
// but it is not guaranteed by the rbx_dom_weak data structure.
temp_objects.clear();
recursive_collect_superclass(temp_objects,&dom,object,hstr!("Decal"));
for &mut decal_ref in temp_objects{
let Some(decal)=dom.get_by_ref(decal_ref) else{
println!("Decal get_by_ref failed");
continue;
};
let (
Some(rbx_dom_weak::types::Variant::Content(content)),
Some(rbx_dom_weak::types::Variant::Enum(normalid)),
Some(rbx_dom_weak::types::Variant::Color3(decal_color3)),
Some(rbx_dom_weak::types::Variant::Float32(decal_transparency)),
)=(
decal.properties.get(&static_ustr("TextureContent")),
decal.properties.get(&static_ustr("Face")),
decal.properties.get(&static_ustr("Color3")),
decal.properties.get(&static_ustr("Transparency")),
decal.properties.get(hstr!("TextureContent")),
decal.properties.get(hstr!("Face")),
decal.properties.get(hstr!("Color3")),
decal.properties.get(hstr!("Transparency")),
)else{
recoverable_errors.decal_property.push(InstancePath::new(dom,decal));
println!("Decal is missing a required property");
continue;
};
let texture_id=get_content_url(content);
let texture_id=match content.value(){
rbx_dom_weak::types::ContentType::Uri(uri)=>Some(uri.as_str()),
_=>None,
};
let render_id=render_config_deferred_loader.acquire_render_config_id(texture_id);
let Ok(cube_face)=normalid.to_u32().try_into()else{
recoverable_errors.normal_id.push(NormalIdError{
path:InstancePath::new(dom,decal),
normal_id:normalid.to_u32(),
});
println!("NormalId is invalid");
continue;
};
let (roblox_texture_color,roblox_texture_transform)=if decal.class=="Texture"{
@@ -468,10 +444,10 @@ fn get_texture_description<'a>(
Some(&rbx_dom_weak::types::Variant::Float32(studs_per_tile_u)),
Some(&rbx_dom_weak::types::Variant::Float32(studs_per_tile_v)),
) = (
decal.properties.get(&static_ustr("OffsetStudsU")),
decal.properties.get(&static_ustr("OffsetStudsV")),
decal.properties.get(&static_ustr("StudsPerTileU")),
decal.properties.get(&static_ustr("StudsPerTileV")),
decal.properties.get(hstr!("OffsetStudsU")),
decal.properties.get(hstr!("OffsetStudsV")),
decal.properties.get(hstr!("StudsPerTileU")),
decal.properties.get(hstr!("StudsPerTileV")),
)
{
let (size_u,size_v)=match cube_face{
@@ -494,7 +470,6 @@ fn get_texture_description<'a>(
}
)
}else{
recoverable_errors.texture_property.push(InstancePath::new(dom,decal));
(glam::Vec4::ONE,RobloxTextureTransform::identity())
}
}else{
@@ -548,8 +523,6 @@ pub fn convert<'a>(
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
mesh_deferred_loader:&mut MeshDeferredLoader<MeshIndex<'a>>,
)->PartialMap1<'a>{
let mut recoverable_errors=RecoverableErrors::default();
let mut deferred_models_deferred_attributes=Vec::new();
let mut deferred_unions_deferred_attributes=Vec::new();
let mut primitive_models_deferred_attributes=Vec::new();
@@ -559,83 +532,63 @@ pub fn convert<'a>(
//just going to leave it like this for now instead of reworking the data structures for this whole thing
let textureless_render_group=render_config_deferred_loader.acquire_render_config_id(None);
let db=rbx_reflection_database::get();
let basepart=&db.classes["BasePart"];
let baseparts=dom.descendants().filter(|&instance|
db.classes.get(instance.class.as_str()).is_some_and(|class|
db.has_superclass(class,basepart)
)
);
for object in baseparts{
let (
Some(rbx_dom_weak::types::Variant::CFrame(cf)),
Some(rbx_dom_weak::types::Variant::Vector3(size)),
Some(rbx_dom_weak::types::Variant::Vector3(velocity)),
Some(rbx_dom_weak::types::Variant::Float32(transparency)),
Some(rbx_dom_weak::types::Variant::Color3uint8(color3)),
Some(&rbx_dom_weak::types::Variant::Bool(can_collide)),
) = (
object.properties.get(&static_ustr("CFrame")),
object.properties.get(&static_ustr("Size")),
object.properties.get(&static_ustr("Velocity")),
object.properties.get(&static_ustr("Transparency")),
object.properties.get(&static_ustr("Color")),
object.properties.get(&static_ustr("CanCollide")),
)else{
recoverable_errors.basepart_property.push(InstancePath::new(dom,object));
continue;
};
let model_transform=match planar64_affine3_from_roblox(cf,size){
Ok(model_transform)=>{
let mut object_refs=Vec::new();
let mut temp_objects=Vec::new();
recursive_collect_superclass(&mut object_refs, &dom, dom.root(),hstr!("BasePart"));
for object_ref in object_refs {
if let Some(object)=dom.get_by_ref(object_ref){
if let (
Some(rbx_dom_weak::types::Variant::CFrame(cf)),
Some(rbx_dom_weak::types::Variant::Vector3(size)),
Some(rbx_dom_weak::types::Variant::Vector3(velocity)),
Some(rbx_dom_weak::types::Variant::Float32(transparency)),
Some(rbx_dom_weak::types::Variant::Color3uint8(color3)),
Some(rbx_dom_weak::types::Variant::Bool(can_collide)),
) = (
object.properties.get(hstr!("CFrame")),
object.properties.get(hstr!("Size")),
object.properties.get(hstr!("Velocity")),
object.properties.get(hstr!("Transparency")),
object.properties.get(hstr!("Color")),
object.properties.get(hstr!("CanCollide")),
)
{
let model_transform=planar64_affine3_from_roblox(cf,size);
if model_transform.matrix3.det().is_zero(){
recoverable_errors.basepart_cframe.push(CFrameError{
path:InstancePath::new(dom,object),
error:CFrameErrorType::ZeroDeterminant,
});
let mut parent_ref=object.parent();
let mut full_path=object.name.clone();
while let Some(parent)=dom.get_by_ref(parent_ref){
full_path=format!("{}.{}",parent.name,full_path);
parent_ref=parent.parent();
}
println!("Zero determinant CFrame at location {}",full_path);
println!("matrix3:{}",model_transform.matrix3);
continue;
}
model_transform
},
Err(e)=>{
recoverable_errors.basepart_cframe.push(CFrameError{
path:InstancePath::new(dom,object),
error:CFrameErrorType::Convert(e),
});
continue;
}
};
//TODO: also detect "CylinderMesh" etc here
let shape=match object.class.as_str(){
"Part"|"Seat"|"SpawnLocation"=>{
let Some(rbx_dom_weak::types::Variant::Enum(shape))=object.properties.get(&static_ustr("Shape"))else{
recoverable_errors.part_property.push(InstancePath::new(dom,object));
continue;
//TODO: also detect "CylinderMesh" etc here
let shape=match object.class.as_str(){
"Part"=>if let Some(rbx_dom_weak::types::Variant::Enum(shape))=object.properties.get(hstr!("Shape")){
Shape::Primitive(shape.to_u32().try_into().expect("Funky roblox PartType"))
}else{
panic!("Part has no Shape!");
},
"TrussPart"=>Shape::Primitive(Primitives::Cube),
"WedgePart"=>Shape::Primitive(Primitives::Wedge),
"CornerWedgePart"=>Shape::Primitive(Primitives::CornerWedge),
"MeshPart"=>Shape::MeshPart,
"UnionOperation"=>Shape::PhysicsData,
_=>{
println!("Unsupported BasePart ClassName={}; defaulting to cube",object.class);
Shape::Primitive(Primitives::Cube)
}
};
let Ok(shape)=shape.to_u32().try_into()else{
recoverable_errors.part_shape.push(ShapeError{
path:InstancePath::new(dom,object),
shape:shape.to_u32(),
});
continue;
};
Shape::Primitive(shape)
},
"TrussPart"|"VehicleSeat"=>Shape::Primitive(Primitives::Cube),
"WedgePart"=>Shape::Primitive(Primitives::Wedge),
"CornerWedgePart"=>Shape::Primitive(Primitives::CornerWedge),
"MeshPart"=>Shape::MeshPart,
"UnionOperation"=>Shape::PhysicsData,
"Terrain"=>continue,
_=>{
recoverable_errors.unsupported_class.insert(object.class.as_str().to_owned());
Shape::Primitive(Primitives::Cube)
}
};
let (availability,mesh_id)=match shape{
Shape::Primitive(primitive_shape)=>{
let part_texture_description=get_texture_description(render_config_deferred_loader,&mut recoverable_errors,db,dom,object,size);
let (availability,mesh_id)=match shape{
Shape::Primitive(primitive_shape)=>{
//TODO: TAB TAB
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
//obscure rust syntax "slice pattern"
let RobloxPartDescription([
f0,//Cube::Right
@@ -684,83 +637,66 @@ pub fn convert<'a>(
mesh_id
};
(MeshAvailability::Immediate,mesh_id)
},
Shape::MeshPart=>{
let (
Some(rbx_dom_weak::types::Variant::Content(mesh_content)),
Some(rbx_dom_weak::types::Variant::Content(texture_content)),
)=(
// mesh must exist
object.properties.get(&static_ustr("MeshContent")),
// texture is allowed to be none
object.properties.get(&static_ustr("TextureContent")),
)else{
recoverable_errors.meshpart_property.push(InstancePath::new(dom,object));
continue;
},
Shape::MeshPart=>if let (
Some(rbx_dom_weak::types::Variant::Content(mesh_content)),
Some(rbx_dom_weak::types::Variant::Content(texture_content)),
)=(
// mesh must exist
object.properties.get(hstr!("MeshContent")),
// texture is allowed to be none
object.properties.get(hstr!("TextureContent")),
){
let mesh_asset_id=get_content_url(mesh_content).expect("MeshPart Mesh is not a Uri");
let texture_asset_id=get_content_url(texture_content);
(
MeshAvailability::DeferredMesh(render_config_deferred_loader.acquire_render_config_id(texture_asset_id)),
mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id)),
)
}else{
panic!("Mesh has no Mesh or Texture");
},
Shape::PhysicsData=>{
let mut content="";
let mut mesh_data:&[u8]=&[];
let mut physics_data:&[u8]=&[];
if let Some(rbx_dom_weak::types::Variant::ContentId(asset_id))=object.properties.get(hstr!("AssetId")){
content=asset_id.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(hstr!("MeshData")){
mesh_data=data.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(hstr!("PhysicsData")){
physics_data=data.as_ref();
}
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
},
};
let mesh_asset_id=match get_content_url(mesh_content){
Some(mesh_asset_id)=>mesh_asset_id,
None=>{
recoverable_errors.meshpart_content.push(InstancePath::new(dom,object));
// Return an empty string which will fail to parse as an asset id
""
}
let model_deferred_attributes=ModelDeferredAttributes{
mesh:mesh_id,
transform:model_transform,
color:glam::vec4(color3.r as f32/255f32, color3.g as f32/255f32, color3.b as f32/255f32, 1.0-*transparency),
deferred_attributes:GetAttributesArgs{
name:object.name.as_str(),
can_collide:*can_collide,
velocity:vec3::try_from_f32_array([velocity.x,velocity.y,velocity.z]).unwrap(),
},
};
let texture_asset_id=get_content_url(texture_content);
(
MeshAvailability::DeferredMesh(render_config_deferred_loader.acquire_render_config_id(texture_asset_id)),
mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id)),
)
},
Shape::PhysicsData=>{
let mut content="";
let mut mesh_data:&[u8]=&[];
let mut physics_data:&[u8]=&[];
if let Some(rbx_dom_weak::types::Variant::ContentId(asset_id))=object.properties.get(&static_ustr("AssetId")){
content=asset_id.as_ref();
match availability{
MeshAvailability::Immediate=>primitive_models_deferred_attributes.push(model_deferred_attributes),
MeshAvailability::DeferredMesh(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
render,
model:model_deferred_attributes
}),
MeshAvailability::DeferredUnion(part_texture_description)=>deferred_unions_deferred_attributes.push(DeferredUnionDeferredAttributes{
render:part_texture_description,
model:model_deferred_attributes,
}),
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("MeshData")){
mesh_data=data.as_ref();
}
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get(&static_ustr("PhysicsData")){
physics_data=data.as_ref();
}
let part_texture_description=get_texture_description(render_config_deferred_loader,&mut recoverable_errors,db,dom,object,size);
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
},
};
let velocity=match vec3::try_from_f32_array([velocity.x,velocity.y,velocity.z]){
Ok(velocity)=>velocity,
Err(e)=>{
recoverable_errors.basepart_velocity.push(Planar64ConvertError{
path:InstancePath::new(dom,object),
error:e,
});
continue;
}
};
let model_deferred_attributes=ModelDeferredAttributes{
mesh:mesh_id,
transform:model_transform,
color:glam::vec4(color3.r as f32/255f32,color3.g as f32/255f32,color3.b as f32/255f32,1.0-*transparency),
deferred_attributes:GetAttributesArgs{
name:object.name.as_str(),
can_collide,
velocity,
},
};
match availability{
MeshAvailability::Immediate=>primitive_models_deferred_attributes.push(model_deferred_attributes),
MeshAvailability::DeferredMesh(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
render,
model:model_deferred_attributes
}),
MeshAvailability::DeferredUnion(part_texture_description)=>deferred_unions_deferred_attributes.push(DeferredUnionDeferredAttributes{
render:part_texture_description,
model:model_deferred_attributes,
}),
}
}
PartialMap1{
@@ -768,26 +704,25 @@ pub fn convert<'a>(
primitive_models_deferred_attributes,
deferred_models_deferred_attributes,
deferred_unions_deferred_attributes,
recoverable_errors,
}
}
struct MeshIdWithSize{
mesh:model::MeshId,
size:Planar64Vec3,
struct MeshWithAabb{
mesh:model::Mesh,
aabb:Aabb,
}
fn acquire_mesh_id_from_render_config_id(
fn acquire_mesh_id_from_render_config_id<'a>(
primitive_meshes:&mut Vec<model::Mesh>,
mesh_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RenderConfigId,model::MeshId>>,
loaded_meshes:&HashMap<model::MeshId,MeshWithSize>,
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
old_mesh_id:model::MeshId,
render:RenderConfigId,
)->Option<MeshIdWithSize>{
)->Option<(model::MeshId,&'a Aabb)>{
//ignore meshes that fail to load completely for now
loaded_meshes.get(&old_mesh_id).map(|&MeshWithSize{ref mesh,size}|MeshIdWithSize{
mesh:*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
loaded_meshes.get(&old_mesh_id).map(|mesh_with_aabb|(
*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
.entry(render).or_insert_with(||{
let mesh_id=model::MeshId::new(primitive_meshes.len() as u32);
let mut mesh_clone=mesh.clone();
let mut mesh_clone=mesh_with_aabb.mesh.clone();
//set the render group lool
if let Some(graphics_group)=mesh_clone.graphics_groups.first_mut(){
graphics_group.render=render;
@@ -795,22 +730,22 @@ fn acquire_mesh_id_from_render_config_id(
primitive_meshes.push(mesh_clone);
mesh_id
}),
size,
})
&mesh_with_aabb.aabb,
))
}
fn acquire_union_id_from_render_config_id(
fn acquire_union_id_from_render_config_id<'a>(
primitive_meshes:&mut Vec<model::Mesh>,
union_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RobloxPartDescription,model::MeshId>>,
loaded_meshes:&HashMap<model::MeshId,MeshWithSize>,
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
old_union_id:model::MeshId,
part_texture_description:RobloxPartDescription,
)->Option<MeshIdWithSize>{
)->Option<(model::MeshId,&'a Aabb)>{
//ignore unions that fail to load completely for now
loaded_meshes.get(&old_union_id).map(|&MeshWithSize{ref mesh,size}|MeshIdWithSize{
mesh:*union_id_from_render_config_id.entry(old_union_id).or_insert_with(||HashMap::new())
loaded_meshes.get(&old_union_id).map(|union_with_aabb|(
*union_id_from_render_config_id.entry(old_union_id).or_insert_with(||HashMap::new())
.entry(part_texture_description.clone()).or_insert_with(||{
let union_id=model::MeshId::new(primitive_meshes.len() as u32);
let mut union_clone=mesh.clone();
let mut union_clone=union_with_aabb.mesh.clone();
//set the render groups
for (graphics_group,maybe_face_texture_description) in union_clone.graphics_groups.iter_mut().zip(part_texture_description.0){
if let Some(face_texture_description)=maybe_face_texture_description{
@@ -820,20 +755,19 @@ fn acquire_union_id_from_render_config_id(
primitive_meshes.push(union_clone);
union_id
}),
size,
})
&union_with_aabb.aabb,
))
}
pub struct PartialMap1<'a>{
primitive_meshes:Vec<model::Mesh>,
primitive_models_deferred_attributes:Vec<ModelDeferredAttributes<'a>>,
deferred_models_deferred_attributes:Vec<DeferredModelDeferredAttributes<'a>>,
deferred_unions_deferred_attributes:Vec<DeferredUnionDeferredAttributes<'a>>,
recoverable_errors:RecoverableErrors,
}
impl PartialMap1<'_>{
pub fn add_meshpart_meshes_and_calculate_attributes(
mut self,
meshpart_meshes:Meshes<MeshWithSize>,
meshpart_meshes:Meshes,
)->PartialMap2{
//calculate attributes
let mut modes_builder=ModesBuilder::default();
@@ -845,14 +779,22 @@ impl PartialMap1<'_>{
//decode roblox meshes
//generate mesh_id_map based on meshes that failed to load
let loaded_meshes:HashMap<model::MeshId,MeshWithSize>=
meshpart_meshes.consume().collect();
let loaded_meshes:HashMap<model::MeshId,MeshWithAabb>=
meshpart_meshes.consume().map(|(old_mesh_id,mesh)|{
let mut aabb=strafesnet_common::aabb::Aabb::default();
for &pos in &mesh.unique_pos{
aabb.grow(pos);
}
(old_mesh_id,MeshWithAabb{
mesh,
aabb,
})
}).collect();
// SAFETY: I have no idea what I'm doing and this is definitely unsound in some subtle way
// I just want to chain iterators together man
let aint_no_way=core::cell::UnsafeCell::new(&mut self.primitive_meshes);
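// (Context: the two flat_map closures below each need `&mut self.primitive_meshes`
// at the same time, which the borrow checker rejects; the UnsafeCell hands out that
// second mutable borrow unchecked. Collecting the first chain eagerly before building
// the second would be a borrow-checker-friendly alternative.)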
let mut model_counter=0;
let mut mesh_id_from_render_config_id=HashMap::new();
let mut union_id_from_render_config_id=HashMap::new();
//now that the meshes are loaded, these models can be generated
@@ -860,22 +802,23 @@ impl PartialMap1<'_>{
self.deferred_models_deferred_attributes.into_iter().flat_map(|deferred_model_deferred_attributes|{
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
//insert into primitive_meshes
let MeshIdWithSize{mesh,size:mesh_size}=acquire_mesh_id_from_render_config_id(
let (mesh,aabb)=acquire_mesh_id_from_render_config_id(
unsafe{*aint_no_way.get()},
&mut mesh_id_from_render_config_id,
&loaded_meshes,
deferred_model_deferred_attributes.model.mesh,
deferred_model_deferred_attributes.render
)?;
let size=aabb.size();
Some(ModelDeferredAttributes{
mesh,
deferred_attributes:deferred_model_deferred_attributes.model.deferred_attributes,
color:deferred_model_deferred_attributes.model.color,
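// divide each basis vector by half the mesh's bounding extent so the final
// transform does not depend on the loaded mesh's native scale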
transform:Planar64Affine3::new(
Planar64Mat3::from_cols([
(deferred_model_deferred_attributes.model.transform.matrix3.x_axis*2/mesh_size.x).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.y_axis*2/mesh_size.y).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.z_axis*2/mesh_size.z).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.x_axis*2/size.x).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.y_axis*2/size.y).divide().narrow_1().unwrap(),
(deferred_model_deferred_attributes.model.transform.matrix3.z_axis*2/size.z).divide().narrow_1().unwrap(),
]),
deferred_model_deferred_attributes.model.transform.translation
),
@@ -883,13 +826,14 @@ impl PartialMap1<'_>{
}).chain(self.deferred_unions_deferred_attributes.into_iter().flat_map(|deferred_union_deferred_attributes|{
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
//insert into primitive_meshes
let MeshIdWithSize{mesh,size}=acquire_union_id_from_render_config_id(
let (mesh,aabb)=acquire_union_id_from_render_config_id(
unsafe{*aint_no_way.get()},
&mut union_id_from_render_config_id,
&loaded_meshes,
deferred_union_deferred_attributes.model.mesh,
deferred_union_deferred_attributes.render
)?;
let size=aabb.size();
Some(ModelDeferredAttributes{
mesh,
deferred_attributes:deferred_union_deferred_attributes.model.deferred_attributes,
@@ -905,78 +849,61 @@ impl PartialMap1<'_>{
})
}))
.chain(self.primitive_models_deferred_attributes.into_iter())
.filter_map(|model_deferred_attributes|{
let model_id=model::ModelId::new(model_counter);
let attributes=match get_attributes(
&model_deferred_attributes.deferred_attributes.name,
model_deferred_attributes.deferred_attributes.can_collide,
model_deferred_attributes.deferred_attributes.velocity,
model_id,
&mut modes_builder,
&mut wormhole_in_model_to_id,
&mut wormhole_id_to_out_model,
){
Ok(attributes)=>attributes,
Err(e)=>{
match e{
GetAttributesError::ModeIdParseInt(e)=>self.recoverable_errors.mode_id_parse_int.push(e),
GetAttributesError::DuplicateMode(mode_id)=>{self.recoverable_errors.duplicate_mode.insert(mode_id);},
GetAttributesError::StageIdParseInt(e)=>self.recoverable_errors.stage_id_parse_int.push(e),
GetAttributesError::DuplicateStage(duplicate_stage)=>{self.recoverable_errors.duplicate_stage.insert(duplicate_stage);},
GetAttributesError::WormholeOutIdParseInt(e)=>self.recoverable_errors.wormhole_out_id_parse_int.push(e),
GetAttributesError::DuplicateWormholeOut(wormhole_id)=>{self.recoverable_errors.duplicate_wormhole_out.insert(wormhole_id);},
GetAttributesError::WormholeInIdParseInt(e)=>self.recoverable_errors.wormhole_in_id_parse_int.push(e),
GetAttributesError::JumpLimitParseInt(e)=>self.recoverable_errors.jump_limit_parse_int.push(e),
}
return None;
}
};
model_counter+=1;
Some(ModelOwnedAttributes{
.enumerate().map(|(model_id,model_deferred_attributes)|{
let model_id=model::ModelId::new(model_id as u32);
ModelOwnedAttributes{
mesh:model_deferred_attributes.mesh,
attributes,
attributes:get_attributes(
&model_deferred_attributes.deferred_attributes.name,
model_deferred_attributes.deferred_attributes.can_collide,
model_deferred_attributes.deferred_attributes.velocity,
model_id,
&mut modes_builder,
&mut wormhole_in_model_to_id,
&mut wormhole_id_to_out_model,
),
color:model_deferred_attributes.color,
transform:model_deferred_attributes.transform,
})
}
}).collect();
let models=models_owned_attributes.into_iter().enumerate().map(|(model_id,mut model_owned_attributes)|{
let model_id=model::ModelId::new(model_id as u32);
//update attributes with wormhole id
//TODO: errors/prints
if let Some(wormhole_id)=wormhole_in_model_to_id.get(&model_id){
if let Some(&wormhole_out_model_id)=wormhole_id_to_out_model.get(wormhole_id){
match &mut model_owned_attributes.attributes{
attr::CollisionAttributes::Contact(attr::ContactAttributes{contacting:_,general})
|attr::CollisionAttributes::Intersect(attr::IntersectAttributes{intersecting:_,general})
=>general.wormhole=Some(attr::Wormhole{destination_model:wormhole_out_model_id}),
attr::CollisionAttributes::Decoration=>println!("Not a wormhole"),
}
//TODO: TAB
let model_id=model::ModelId::new(model_id as u32);
//update attributes with wormhole id
//TODO: errors/prints
if let Some(wormhole_id)=wormhole_in_model_to_id.get(&model_id){
if let Some(&wormhole_out_model_id)=wormhole_id_to_out_model.get(wormhole_id){
match &mut model_owned_attributes.attributes{
attr::CollisionAttributes::Contact(attr::ContactAttributes{contacting:_,general})
|attr::CollisionAttributes::Intersect(attr::IntersectAttributes{intersecting:_,general})
=>general.wormhole=Some(attr::Wormhole{destination_model:wormhole_out_model_id}),
attr::CollisionAttributes::Decoration=>println!("Not a wormhole"),
}
}
//index the attributes
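// identical attribute sets are deduplicated: reuse the existing id if this exact
// CollisionAttributes value was seen before, otherwise append it to unique_attributes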
let attributes_id=if let Some(&attributes_id)=attributes_id_from_attributes.get(&model_owned_attributes.attributes){
attributes_id
}else{
let attributes_id=attr::CollisionAttributesId::new(unique_attributes.len() as u32);
attributes_id_from_attributes.insert(model_owned_attributes.attributes.clone(),attributes_id);
unique_attributes.push(model_owned_attributes.attributes);
attributes_id
};
model::Model{
mesh:model_owned_attributes.mesh,
transform:model_owned_attributes.transform,
color:model_owned_attributes.color,
attributes:attributes_id,
}
}).collect();
PartialMap2{
meshes:self.primitive_meshes,
models,
modes:modes_builder.build_normalized(),
attributes:unique_attributes,
recoverable_errors:self.recoverable_errors,
}
//index the attributes
let attributes_id=if let Some(&attributes_id)=attributes_id_from_attributes.get(&model_owned_attributes.attributes){
attributes_id
}else{
let attributes_id=attr::CollisionAttributesId::new(unique_attributes.len() as u32);
attributes_id_from_attributes.insert(model_owned_attributes.attributes.clone(),attributes_id);
unique_attributes.push(model_owned_attributes.attributes);
attributes_id
};
model::Model{
mesh:model_owned_attributes.mesh,
transform:model_owned_attributes.transform,
color:model_owned_attributes.color,
attributes:attributes_id,
}
}).collect();
PartialMap2{
meshes:self.primitive_meshes,
models,
modes:modes_builder.build_normalized(),
attributes:unique_attributes,
}
}
}
@@ -985,13 +912,12 @@ pub struct PartialMap2{
models:Vec<model::Model>,
modes:NormalizedModes,
attributes:Vec<strafesnet_common::gameplay_attributes::CollisionAttributes>,
recoverable_errors:RecoverableErrors,
}
impl PartialMap2{
pub fn add_render_configs_and_textures(
self,
render_configs:RenderConfigs,
)->(map::CompleteMap,RecoverableErrors){
)->map::CompleteMap{
let (textures,render_configs)=render_configs.consume();
let (textures,texture_id_map):(Vec<Vec<u8>>,HashMap<model::TextureId,model::TextureId>)
=textures.into_iter().enumerate().map(|(new_texture_id,(old_texture_id,Texture::ImageDDS(texture)))|{
@@ -1010,17 +936,14 @@ impl PartialMap2{
);
render_config
}).collect();
(
map::CompleteMap{
modes:self.modes,
attributes:self.attributes,
meshes:self.meshes,
models:self.models,
//the roblox legacy texture thing always works
textures,
render_configs,
},
self.recoverable_errors,
)
map::CompleteMap{
modes:self.modes,
attributes:self.attributes,
meshes:self.meshes,
models:self.models,
//the roblox legacy texture thing always works
textures,
render_configs,
}
}
}
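Read as a whole, the conversion runs in stages: `convert` gathers deferred model attributes into a `PartialMap1`, the decoded MeshPart/union meshes turn it into a `PartialMap2`, and the render configs and textures produce the final `map::CompleteMap` (together with `RecoverableErrors` on the error-collecting side of this diff). A minimal sketch of the chaining; how the loaders produce `partial1`, `meshpart_meshes` and `render_configs` is an assumption, not taken from this changeset:
// hypothetical driver code, not part of this changeset; `partial1`, `meshpart_meshes`
// and `render_configs` are assumed to come from the surrounding loader crates
let partial2 = partial1.add_meshpart_meshes_and_calculate_attributes(meshpart_meshes);
let complete_map = partial2.add_render_configs_and_textures(render_configs);
// or, on the error-collecting side:
// let (complete_map, recoverable_errors) = partial2.add_render_configs_and_textures(render_configs);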

View File

@@ -1,12 +1,9 @@
use crate::loader::MeshWithSize;
use crate::rbx::RobloxPartDescription;
use crate::primitives::{CUBE_DEFAULT_VERTICES,CUBE_DEFAULT_POLYS,FaceDescription};
use rbx_mesh::mesh_data::{VertexId as MeshDataVertexId,NormalId as MeshDataNormalId,NormalId2 as MeshDataNormalId2,NormalId5 as MeshDataNormalId5};
use rbx_mesh::mesh_data::{NormalId2 as MeshDataNormalId2,VertexId as MeshDataVertexId};
use rbx_mesh::physics_data::VertexId as PhysicsDataVertexId;
use strafesnet_common::model::{self,IndexedVertex,MeshBuilder,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId,VertexId};
use strafesnet_common::model::{self,IndexedVertex,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId};
use strafesnet_common::integer::vec3;
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Block,
@@ -24,7 +21,7 @@ impl std::fmt::Display for Error{
// wacky state machine to make sure all vertices in a face agree upon what NormalId to use.
// Roblox duplicates this information per vertex when it should only exist per-face.
enum MeshDataNormalStatus{
Agree(MeshDataNormalId),
Agree(MeshDataNormalId2),
Conflicting,
}
struct MeshDataNormalChecker{
@@ -34,7 +31,7 @@ impl MeshDataNormalChecker{
fn new()->Self{
Self{status:None}
}
fn check(&mut self,normal:MeshDataNormalId){
fn check(&mut self,normal:MeshDataNormalId2){
self.status=match self.status.take(){
None=>Some(MeshDataNormalStatus::Agree(normal)),
Some(MeshDataNormalStatus::Agree(old_normal))=>{
@@ -47,7 +44,7 @@ impl MeshDataNormalChecker{
Some(MeshDataNormalStatus::Conflicting)=>Some(MeshDataNormalStatus::Conflicting),
};
}
fn into_agreed_normal(self)->Option<MeshDataNormalId>{
fn into_agreed_normal(self)->Option<MeshDataNormalId2>{
self.status.and_then(|status|match status{
MeshDataNormalStatus::Agree(normal)=>Some(normal),
MeshDataNormalStatus::Conflicting=>None,
@@ -55,127 +52,18 @@ impl MeshDataNormalChecker{
}
}
fn build_mesh2(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::Mesh2,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for vertex in &mesh.vertices{
let p=vertex.pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for [MeshDataVertexId(vertex_id0),MeshDataVertexId(vertex_id1),MeshDataVertexId(vertex_id2)] in mesh.faces{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
let MeshDataNormalId2(normal_id)=vertex.normal_id;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(vertex.pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array())?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
fn build_mesh5(
mb:&mut MeshBuilder,
polygon_groups_normal_id:&mut [Vec<Vec<VertexId>>;NORMAL_FACES],
cube_face_description:&[Option<FaceDescription>;NORMAL_FACES],
mesh:rbx_mesh::mesh_data::CSGMDL5,
)->Result<(),Error>{
//autoscale to size, idk what roblox is doing with the graphics mesh size
let mut pos_min=glam::Vec3::MAX;
let mut pos_max=glam::Vec3::MIN;
for &pos in &mesh.positions{
let p=pos.into();
pos_min=pos_min.min(p);
pos_max=pos_max.max(p);
}
let graphics_size=pos_max-pos_min;
for face in mesh.faces.indices.chunks_exact(3){
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|&vertex_id|{
let vertex_index=vertex_id as usize;
let &pos=mesh.positions.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &MeshDataNormalId5(normal_id)=mesh.normal_ids.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &norm=mesh.normals.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &tex=mesh.tex.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
let &color=mesh.colors.get(vertex_index).ok_or(Error::MissingVertexId(vertex_id))?;
normal_agreement_checker.check(normal_id);
let pos=glam::Vec3::from_array(pos)/graphics_size;
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(pos.to_array()).map_err(Error::Planar64Vec3)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(norm).map_err(Error::Planar64Vec3)?);
let tex_coord=glam::Vec2::from_array(tex);
let maybe_face_description=&cube_face_description[normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>()?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
Ok(())
}
const NORMAL_FACES:usize=6;
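// one graphics polygon group per cube face; the mesh data's NormalId values run 1..=6,
// which is why the indexing below uses `normal_id as usize-1`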
impl std::error::Error for Error{}
pub fn convert(
roblox_physics_data:&[u8],
roblox_mesh_data:&[u8],
size:glam::Vec3,
RobloxPartDescription(part_texture_description):RobloxPartDescription,
)->Result<MeshWithSize,Error>{
let mut polygon_groups_normal_id:[_;NORMAL_FACES]=[vec![],vec![],vec![],vec![],vec![],vec![]];
crate::rbx::RobloxPartDescription(part_texture_description):crate::rbx::RobloxPartDescription,
)->Result<model::Mesh,Error>{
const NORMAL_FACES:usize=6;
let mut polygon_groups_normal_id=vec![Vec::new();NORMAL_FACES];
// build graphics and physics meshes
let mut mb=MeshBuilder::new();
let mut mb=strafesnet_common::model::MeshBuilder::new();
// graphics
let graphics_groups=if !roblox_mesh_data.is_empty(){
// create per-face texture coordinate affine transforms
@@ -187,12 +75,46 @@ pub fn convert(
let mesh_data=rbx_mesh::read_mesh_data_versioned(
std::io::Cursor::new(roblox_mesh_data)
).map_err(Error::RobloxMeshData)?;
match mesh_data{
let graphics_mesh=match mesh_data{
rbx_mesh::mesh_data::MeshData::CSGK(_)=>return Err(Error::Block),
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V2(mesh_data2))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data2.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V4(mesh_data4))=>build_mesh2(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4.mesh)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::V5(mesh_data4))=>build_mesh5(&mut mb,&mut polygon_groups_normal_id,&cube_face_description,mesh_data4)?,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL2(mesh_data2))=>mesh_data2.mesh,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL4(mesh_data4))=>mesh_data4.mesh,
};
for [MeshDataVertexId(vertex_id0),MeshDataVertexId(vertex_id1),MeshDataVertexId(vertex_id2)] in graphics_mesh.faces{
let face=[
graphics_mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
graphics_mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
graphics_mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
normal_agreement_checker.check(vertex.normal_id);
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[vertex.normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
(0..NORMAL_FACES).map(|polygon_group_id|{
model::IndexedGraphicsGroup{
render:cube_face_description[polygon_group_id].as_ref().map_or(RenderConfigId::new(0),|face_description|face_description.render),
@@ -204,11 +126,7 @@ pub fn convert(
};
//physics
let polygon_groups_normal_it=polygon_groups_normal_id.into_iter().map(|faces|
// graphics polygon groups (to be rendered)
Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
);
let polygon_groups:Vec<PolygonGroup>=if !roblox_physics_data.is_empty(){
let physics_convex_meshes=if !roblox_physics_data.is_empty(){
let physics_data=rbx_mesh::read_physics_data_versioned(
std::io::Cursor::new(roblox_physics_data)
).map_err(Error::RobloxPhysicsData)?;
@@ -217,56 +135,44 @@ pub fn convert(
// have not seen this format in practice
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Block)
=>return Err(Error::Block),
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V3(meshes))
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V5(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V6(meshes))
=>vec![meshes.mesh],
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::V7(meshes))
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Meshes(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::PhysicsInfoMesh(pim))
=>vec![pim.mesh],
};
let physics_convex_meshes_it=physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// physics polygon groups (to do physics)
Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[PhysicsDataVertexId(vertex_id0),PhysicsDataVertexId(vertex_id1),PhysicsDataVertexId(vertex_id2)]|{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
].map(|v|glam::Vec3::from_slice(v)/size);
let vertex_norm=(face[1]-face[0])
.cross(face[2]-face[0]);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
face.into_iter().map(|vertex_pos|{
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect()
}).collect::<Result<_,_>>()?)))
});
polygon_groups_normal_it.chain(physics_convex_meshes_it).collect::<Result<_,_>>()?
physics_convex_meshes
}else{
// generate a unit cube as default physics
let pos_list=CUBE_DEFAULT_VERTICES.map(|pos|mb.acquire_pos_id(pos>>1));
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
let normal=mb.acquire_normal_id(vec3::ZERO);
let color=mb.acquire_color_id(glam::Vec4::ONE);
let polygon_group=PolygonGroup::PolygonList(PolygonList::new(CUBE_DEFAULT_POLYS.map(|poly|poly.map(|[pos_id,_]|
mb.acquire_vertex_id(IndexedVertex{pos:pos_list[pos_id as usize],tex,normal,color})
).to_vec()).to_vec()));
polygon_groups_normal_it.chain([Ok(polygon_group)]).collect::<Result<_,_>>()?
Vec::new()
};
let polygon_groups:Vec<PolygonGroup>=polygon_groups_normal_id.into_iter().map(|faces|
// graphics polygon groups (to be rendered)
Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
).chain(physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// physics polygon groups (to do physics)
Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[PhysicsDataVertexId(vertex_id0),PhysicsDataVertexId(vertex_id1),PhysicsDataVertexId(vertex_id2)]|{
let face=[
mesh.vertices.get(vertex_id0 as usize).ok_or(Error::MissingVertexId(vertex_id0))?,
mesh.vertices.get(vertex_id1 as usize).ok_or(Error::MissingVertexId(vertex_id1))?,
mesh.vertices.get(vertex_id2 as usize).ok_or(Error::MissingVertexId(vertex_id2))?,
].map(|v|glam::Vec3::from_slice(v)/size);
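// face normal from the cross product of two edge vectors; the winding order
// in the physics data determines which way it points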
let vertex_norm=(face[1]-face[0])
.cross(face[2]-face[0]);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
face.into_iter().map(|vertex_pos|{
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect()
}).collect::<Result<_,_>>()?)))
})).collect::<Result<_,_>>()?;
let physics_groups=(NORMAL_FACES..polygon_groups.len()).map(|id|model::IndexedPhysicsGroup{
groups:vec![PolygonGroupId::new(id as u32)]
}).collect();
let mesh=mb.build(
Ok(mb.build(
polygon_groups,
graphics_groups,
physics_groups,
);
Ok(MeshWithSize{
mesh,
size:vec3::ONE,
})
))
}

View File

@@ -9,6 +9,3 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
url = "2.5.4"
[lints]
workspace = true

View File

@@ -1,6 +1,6 @@
[package]
name = "roblox_emulator"
version = "0.5.1"
version = "0.5.0"
edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -8,17 +8,14 @@ description = "Run embedded Luau scripts which manipulate the DOM."
authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[features]
default=["run-service"]
default=[]
run-service=[]
[dependencies]
glam = "0.30.0"
mlua = { version = "0.11.3", features = ["luau"] }
phf = { version = "0.13.1", features = ["macros"] }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_reflection = "5.0.0"
rbx_reflection_database = "1.0.0"
rbx_types = "2.0.0"
[lints]
workspace = true
mlua = { version = "0.10.1", features = ["luau"] }
phf = { version = "0.11.2", features = ["macros"] }
rbx_dom_weak = { path = "../../../../git/rbx-dom/rbx_dom_weak", registry = "strafesnet" }
rbx_reflection = { path = "../../../../git/rbx-dom/rbx_reflection" }
rbx_reflection_database = { path = "../../../../git/rbx-dom/rbx_reflection_database" }
rbx_types = { path = "../../../../git/rbx-dom/rbx_types" }

View File

@@ -1,5 +1,4 @@
use crate::util::static_ustr;
use rbx_dom_weak::{types::Ref,InstanceBuilder,WeakDom};
use rbx_dom_weak::{hstr,types::Ref,InstanceBuilder,WeakDom,UnhashedStr};
#[derive(Debug)]
pub enum ServicesError{
@@ -28,36 +27,36 @@ impl Services{
}
}
pub type LuaAppData=&'static mut WeakDom;
pub struct Context{
pub(crate)dom:WeakDom,
pub type LuaAppData=&'static mut WeakDom<'static>;
pub struct Context<'a>{
pub(crate)dom:WeakDom<'a>,
pub(crate)services:Services,
}
impl Context{
pub fn from_place(dom:WeakDom)->Result<Context,ServicesError>{
impl<'a> Context<'a>{
pub fn from_place(dom:WeakDom<'a>)->Result<Context<'a>,ServicesError>{
let services=Services::find_services(&dom)?;
Ok(Self{dom,services})
}
pub fn script_singleton(source:String)->(Context,crate::runner::instance::Instance){
let script=InstanceBuilder::new("Script")
.with_property("Source",rbx_types::Variant::String(source));
pub fn script_singleton(source:String)->(Context<'a>,crate::runner::instance::Instance){
let script=InstanceBuilder::new(hstr!("Script"))
.with_property(hstr!("Source"),rbx_types::Variant::String(source));
let script_ref=script.referent();
let dom=WeakDom::new(
InstanceBuilder::new("DataModel")
InstanceBuilder::new(hstr!("DataModel"))
.with_child(script)
);
let context=Self::from_model(dom);
(context,crate::runner::instance::Instance::new_unchecked(script_ref))
}
/// Creates an iterator over all items of a particular class.
pub fn superclass_iter<'a>(&'a self,superclass:&'a str)->impl Iterator<Item=Ref>+'a{
pub fn superclass_iter<'b>(&'b self,superclass:&'b str)->impl Iterator<Item=Ref>+'b{
let db=rbx_reflection_database::get();
let Some(superclass)=db.classes.get(superclass)else{
let Some(superclass)=db.classes.get(UnhashedStr::from_ref(superclass))else{
panic!("Invalid class");
};
self.dom.descendants().filter_map(|instance|{
let class=db.classes.get(instance.class.as_str())?;
let class=db.classes.get(instance.class)?;
db.has_superclass(class,superclass).then(||instance.referent())
})
}
@@ -65,7 +64,7 @@ impl Context{
self.superclass_iter("Script")
.filter_map(|script_ref|{
let script=self.dom.get_by_ref(script_ref)?;
if let None|Some(rbx_dom_weak::types::Variant::Bool(false))=script.properties.get(&static_ustr("Disabled")){
if let None|Some(rbx_dom_weak::types::Variant::Bool(false))=script.properties.get(hstr!("Disabled")){
return Some(crate::runner::instance::Instance::new_unchecked(script_ref));
}
None
@@ -73,25 +72,17 @@ impl Context{
.collect()
}
pub fn from_model(mut dom:WeakDom)->Context{
pub fn from_model(mut dom:WeakDom<'a>)->Context<'a>{
//snapshot root instances
let children=dom.root().children().to_owned();
//insert services
let game=dom.root_ref();
let terrain_bldr=InstanceBuilder::new("Terrain")
.with_properties([
("CFrame",rbx_dom_weak::types::Variant::CFrame(rbx_dom_weak::types::CFrame::new(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0),rbx_dom_weak::types::Matrix3::identity()))),
("Size",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(1.0,1.0,1.0))),
("Velocity",rbx_dom_weak::types::Variant::Vector3(rbx_dom_weak::types::Vector3::new(0.0,0.0,0.0))),
("Transparency",rbx_dom_weak::types::Variant::Float32(0.0)),
("Color",rbx_dom_weak::types::Variant::Color3uint8(rbx_dom_weak::types::Color3uint8::new(255,255,255))),
("CanCollide",rbx_dom_weak::types::Variant::Bool(true)),
]);
let terrain_bldr=InstanceBuilder::new(hstr!("Terrain"));
let workspace=dom.insert(game,
InstanceBuilder::new("Workspace")
InstanceBuilder::new(hstr!("Workspace"))
//Set Workspace.Terrain property equal to Terrain
.with_property("Terrain",terrain_bldr.referent())
.with_property(hstr!("Terrain"),terrain_bldr.referent())
.with_child(terrain_bldr)
);
@@ -104,18 +95,18 @@ impl Context{
//Lowercase and upper case workspace property!
let game=dom.root_mut();
// TODO: DELETE THIS!
game.properties.insert(static_ustr("workspace"),rbx_types::Variant::Ref(workspace));
game.properties.insert(static_ustr("Workspace"),rbx_types::Variant::Ref(workspace));
game.properties.insert(hstr!("workspace"),rbx_types::Variant::Ref(workspace));
game.properties.insert(hstr!("Workspace"),rbx_types::Variant::Ref(workspace));
}
dom.insert(game,InstanceBuilder::new("Lighting"));
dom.insert(game,InstanceBuilder::new(hstr!("Lighting")));
let services=Services{game,workspace};
Self{dom,services}
}
}
impl AsRef<WeakDom> for Context{
fn as_ref(&self)->&WeakDom{
impl<'a> AsRef<WeakDom<'a>> for Context<'a>{
fn as_ref(&self)->&WeakDom<'a>{
&self.dom
}
}

View File

@@ -1,4 +1,3 @@
mod util;
pub mod runner;
pub mod context;
#[cfg(feature="run-service")]

View File

@@ -1,14 +1,16 @@
use rbx_dom_weak::{HashStr, UnhashedStr};
#[derive(Clone,Copy)]
pub struct EnumItem<'a>{
name:Option<&'a str>,
name:Option<&'a HashStr>,
value:u32,
}
impl<'a> EnumItem<'a>{
fn known_name((name,&value):(&'a std::borrow::Cow<'a,str>,&u32))->Self{
Self{name:Some(name.as_ref()),value}
fn known_name((name,&value):(&&'a HashStr,&u32))->Self{
Self{name:Some(name),value}
}
}
impl From<rbx_types::Enum> for EnumItem<'_>{
impl<'a> From<rbx_types::Enum> for EnumItem<'a>{
fn from(e:rbx_types::Enum)->Self{
EnumItem{
name:None,
@@ -38,7 +40,7 @@ pub struct Enums;
impl Enums{
pub fn get(&self,index:&str)->Option<EnumItems<'static>>{
let db=rbx_reflection_database::get();
db.enums.get(index).map(|ed|EnumItems{ed})
db.enums.get(UnhashedStr::from_ref(index)).map(|ed|EnumItems{ed})
}
}
#[derive(Clone,Copy)]
@@ -51,7 +53,7 @@ impl<'a> EnumItems<'a>{
self.ed.items.iter().find(|&(_,&v)|v==value).map(EnumItem::known_name)
}
pub fn from_name(&self,name:&str)->Option<EnumItem<'a>>{
self.ed.items.get_key_value(name).map(EnumItem::known_name)
self.ed.items.get_key_value(UnhashedStr::from_ref(name)).map(EnumItem::known_name)
}
pub fn from_enum(&self,enum_item:EnumItem)->Option<EnumItem<'a>>{
match enum_item.name{
@@ -65,7 +67,7 @@ impl<'a> EnumItems<'a>{
}
pub enum CoerceEnum<'a>{
Integer(i64),
Integer(i32),
String(mlua::String),
Enum(EnumItem<'a>),
}
@@ -105,7 +107,7 @@ impl mlua::UserData for EnumItems<'static>{
});
methods.add_meta_function(mlua::MetaMethod::Index,|_,(this,val):(EnumItems,mlua::String)|{
let index=&*val.to_str()?;
Ok(this.ed.items.get_key_value(index).map(EnumItem::known_name))
Ok(this.ed.items.get_key_value(UnhashedStr::from_ref(index)).map(EnumItem::known_name))
});
}
}
@@ -124,7 +126,7 @@ type_from_lua_userdata!(Enums);
impl mlua::UserData for EnumItem<'_>{
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Name",|_,this|Ok(this.name));
fields.add_field_method_get("Name",|_,this|Ok(this.name.map(|s|s.as_str())));
fields.add_field_method_get("Value",|_,this|Ok(this.value));
}
fn add_methods<M:mlua::UserDataMethods<Self>>(methods:&mut M){

View File

@@ -2,15 +2,14 @@ use std::collections::{hash_map::Entry,HashMap};
use mlua::{FromLua,FromLuaMulti,IntoLua,IntoLuaMulti};
use rbx_types::Ref;
use rbx_dom_weak::{Ustr,InstanceBuilder,WeakDom};
use rbx_dom_weak::{hstr,HashStr,UnhashedStr,InstanceBuilder,WeakDom};
use crate::util::static_ustr;
use crate::runner::vector3::Vector3;
use crate::runner::number::Number;
pub fn set_globals(lua:&mlua::Lua,globals:&mlua::Table)->Result<(),mlua::Error>{
//class functions store
lua.set_app_data(ClassMethodsStore::default());
lua.set_app_data(InstanceValueStore::default());
let table=lua.create_table()?;
@@ -38,12 +37,12 @@ pub fn dom_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut WeakDom)->mlua::Result<T>
pub fn class_is_a(class:&str,superclass:&str)->bool{
let db=rbx_reflection_database::get();
let (Some(class),Some(superclass))=(db.classes.get(class),db.classes.get(superclass))else{
let (Some(class),Some(superclass))=(db.classes.get(UnhashedStr::from_ref(class)),db.classes.get(UnhashedStr::from_ref(superclass)))else{
return false;
};
db.has_superclass(class,superclass)
}
fn get_full_name(dom:&WeakDom,instance:&rbx_dom_weak::Instance)->String{
fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->String{
let mut full_name=instance.name.clone();
let mut pref=instance.parent();
while let Some(parent)=dom.get_by_ref(pref){
@@ -57,7 +56,7 @@ fn get_full_name(dom:&WeakDom,instance:&rbx_dom_weak::Instance)->String{
pub fn get_name_source(lua:&mlua::Lua,script:Instance)->Result<(String,String),mlua::Error>{
dom_mut(lua,|dom|{
let instance=script.get(dom)?;
let source=match instance.properties.get(&static_ustr("Source")){
let source=match instance.properties.get(hstr!("Source")){
Some(rbx_dom_weak::types::Variant::String(s))=>s.clone(),
_=>Err(mlua::Error::external("Missing script.Source"))?,
};
@@ -65,32 +64,32 @@ pub fn get_name_source(lua:&mlua::Lua,script:Instance)->Result<(String,String),m
})
}
pub fn find_first_child<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child<'a>(dom:&'a rbx_dom_weak::WeakDom<'a>,instance:&rbx_dom_weak::Instance<'a>,name:&str)->Option<&'a rbx_dom_weak::Instance<'a>>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.name==name)
}
pub fn find_first_descendant<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,name:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant<'a>(dom:&'a rbx_dom_weak::WeakDom<'a>,instance:&rbx_dom_weak::Instance<'a>,name:&str)->Option<&'a rbx_dom_weak::Instance<'a>>{
dom.descendants_of(instance.referent()).find(|&inst|inst.name==name)
}
pub fn find_first_child_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom<'a>,instance:&rbx_dom_weak::Instance<'a>,class:&str)->Option<&'a rbx_dom_weak::Instance<'a>>{
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|inst.class==class)
}
pub fn find_first_descendant_of_class<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,class:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant_of_class<'a>(dom:&'a rbx_dom_weak::WeakDom<'a>,instance:&rbx_dom_weak::Instance<'a>,class:&str)->Option<&'a rbx_dom_weak::Instance<'a>>{
dom.descendants_of(instance.referent()).find(|&inst|inst.class==class)
}
pub fn find_first_child_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_child_which_is_a<'a>(dom:&'a rbx_dom_weak::WeakDom<'a>,instance:&rbx_dom_weak::Instance<'a>,superclass:&str)->Option<&'a rbx_dom_weak::Instance<'a>>{
let db=rbx_reflection_database::get();
let superclass_descriptor=db.classes.get(superclass)?;
let superclass_descriptor=db.classes.get(UnhashedStr::from_ref(superclass))?;
instance.children().iter().filter_map(|&r|dom.get_by_ref(r)).find(|inst|{
db.classes.get(inst.class.as_str()).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
db.classes.get(inst.class).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
})
}
pub fn find_first_descendant_which_is_a<'a>(dom:&'a WeakDom,instance:&rbx_dom_weak::Instance,superclass:&str)->Option<&'a rbx_dom_weak::Instance>{
pub fn find_first_descendant_which_is_a<'a>(dom:&'a rbx_dom_weak::WeakDom<'a>,instance:&rbx_dom_weak::Instance<'a>,superclass:&str)->Option<&'a rbx_dom_weak::Instance<'a>>{
let db=rbx_reflection_database::get();
let superclass_descriptor=db.classes.get(superclass)?;
let superclass_descriptor=db.classes.get(UnhashedStr::from_ref(superclass))?;
dom.descendants_of(instance.referent()).find(|inst|{
db.classes.get(inst.class.as_str()).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
db.classes.get(inst.class).is_some_and(|descriptor|db.has_superclass(descriptor,superclass_descriptor))
})
}
@@ -105,10 +104,10 @@ impl Instance{
pub fn new(referent:Ref)->Option<Self>{
referent.is_some().then_some(Self{referent})
}
pub fn get<'a>(&self,dom:&'a WeakDom)->mlua::Result<&'a rbx_dom_weak::Instance>{
pub fn get<'a>(&self,dom:&'a WeakDom<'a>)->mlua::Result<&'a rbx_dom_weak::Instance<'a>>{
dom.get_by_ref(self.referent).ok_or_else(||mlua::Error::runtime("Instance missing"))
}
pub fn get_mut<'a>(&self,dom:&'a mut WeakDom)->mlua::Result<&'a mut rbx_dom_weak::Instance>{
pub fn get_mut<'a,'b>(&self,dom:&'a mut WeakDom<'b>)->mlua::Result<&'a mut rbx_dom_weak::Instance<'b>>{
dom.get_by_ref_mut(self.referent).ok_or_else(||mlua::Error::runtime("Instance missing"))
}
}
@@ -150,7 +149,7 @@ impl mlua::UserData for Instance{
fields.add_field_method_get("ClassName",|lua,this|{
dom_mut(lua,|dom|{
let instance=this.get(dom)?;
Ok(instance.class.to_owned())
Ok(instance.class.as_str().to_owned())
})
});
}
@@ -283,7 +282,7 @@ impl mlua::UserData for Instance{
let instance=this.get(dom)?;
//println!("__index t={} i={index:?}",instance.name);
let db=rbx_reflection_database::get();
let class=db.classes.get(instance.class.as_str()).ok_or_else(||mlua::Error::runtime("Class missing"))?;
let class=db.classes.get(instance.class).ok_or_else(||mlua::Error::runtime("Class missing"))?;
// Find existing property
// Interestingly, ustr can know ahead of time if
// a property does not exist in any runtime instance
@@ -325,16 +324,13 @@ impl mlua::UserData for Instance{
}
//find or create an associated userdata object
if let Some(value)=instance_value_store_mut(lua,|ivs|{
//TODO: walk class tree somehow
match ivs.get_or_create_instance_values(&instance){
Some(mut instance_values)=>instance_values.get_or_create_value(lua,index_str),
None=>Ok(None)
}
})?{
let instance=this.get_mut(dom)?;
if let Some(value)=get_or_create_userdata(instance,lua,index_str)?{
return value.into_lua(lua);
}
// drop mutable borrow
//find a child with a matching name
let instance=this.get(dom)?;
find_first_child(dom,instance,index_str)
.map(|instance|Instance::new_unchecked(instance.referent()))
.into_lua(lua)
@@ -345,7 +341,7 @@ impl mlua::UserData for Instance{
dom_mut(lua,|dom|{
let instance=this.get_mut(dom)?;
let db=rbx_reflection_database::get();
let class=db.classes.get(instance.class.as_str()).ok_or_else(||mlua::Error::runtime("Class missing"))?;
let class=db.classes.get(instance.class).ok_or_else(||mlua::Error::runtime("Class missing"))?;
let property=db.superclasses_iter(class).find_map(|cls|
cls.properties.get(index_str)
).ok_or_else(||
@@ -422,7 +418,7 @@ impl mlua::UserData for Instance{
rbx_types::Variant::CFrame(typed_value.clone().into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::ContentId)=>{
let typed_value=value.as_string().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_str()?.to_owned();
let typed_value=value.as_str().ok_or_else(||mlua::Error::runtime("Expected string"))?.to_owned();
rbx_types::Variant::ContentId(typed_value.into())
},
rbx_reflection::DataType::Value(rbx_types::VariantType::Ref)=>{
@@ -487,8 +483,8 @@ static CLASS_FUNCTION_DATABASE:CFD=phf::phf_map!{
"GetService"=>GET_SERVICE,
},
"Terrain"=>phf::phf_map!{
"FillBall"=>cf!(|_lua,_,_:(crate::runner::vector3::Vector3,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,crate::runner::vector3::Vector3,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBall"=>cf!(|_lua,_,_:(Vector3,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillBlock"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Vector3,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"FillCylinder"=>cf!(|_lua,_,_:(crate::runner::cframe::CFrame,Number,Number,crate::runner::r#enum::CoerceEnum)|mlua::Result::Ok(())),
"SetMaterialColor"=>cf!(|_lua,_,_:(crate::runner::r#enum::CoerceEnum,crate::runner::color3::Color3)|mlua::Result::Ok(())),
},
@@ -522,7 +518,7 @@ struct ClassMethodsStore{
}
impl ClassMethodsStore{
/// return self.classes[class] or create the ClassMethods and then return it
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods<'_>>{
fn get_or_create_class_methods(&mut self,class:&str)->Option<ClassMethods>{
// Use get_entry to get the &'static str keys of the database
// and use it as a key for the classes hashmap
CLASS_FUNCTION_DATABASE.get_entry(class)
@@ -563,7 +559,7 @@ fn class_methods_store_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut ClassMethodsS
/// A virtual property pointer definition shorthand.
type VirtualPropertyFunctionPointer=fn(&rbx_types::Variant)->Option<rbx_types::Variant>;
const fn vpp(
property:&'static str,
property:&'static HashStr,
pointer:VirtualPropertyFunctionPointer,
)->VirtualProperty{
VirtualProperty{
@@ -572,7 +568,7 @@ const fn vpp(
}
}
struct VirtualProperty{
property:&'static str,// Source property name
property:&'static HashStr,// Source property name
pointer:VirtualPropertyFunctionPointer,
}
type VPD=phf::Map<&'static str,// Class name
@@ -580,9 +576,10 @@ type VPD=phf::Map<&'static str,// Class name
VirtualProperty
>
>;
static CFRAME:&HashStr=hstr!("CFrame");
static VIRTUAL_PROPERTY_DATABASE:VPD=phf::phf_map!{
"BasePart"=>phf::phf_map!{
"Position"=>vpp("CFrame",|c:&rbx_types::Variant|{
"Position"=>vpp(CFRAME,|c:&rbx_types::Variant|{
let c=match c{
rbx_types::Variant::CFrame(c)=>c,
_=>return None,//fail silently and ungracefully
@@ -593,7 +590,7 @@ static VIRTUAL_PROPERTY_DATABASE:VPD=phf::phf_map!{
};
fn find_virtual_property(
properties:&rbx_dom_weak::UstrMap<rbx_types::Variant>,
properties:&rbx_dom_weak::HashStrMap<rbx_types::Variant>,
class:&rbx_reflection::ClassDescriptor,
index:&str,
)->Option<rbx_types::Variant>{
@@ -602,15 +599,13 @@ fn find_virtual_property(
let virtual_property=class_virtual_properties.get(index)?;
//Get source property
let variant=properties.get(&static_ustr(virtual_property.property))?;
let variant=properties.get(virtual_property.property)?;
//Transform Source property with provided function
(virtual_property.pointer)(variant)
}
// lazy-loaded per-instance userdata values
// This whole thing is a bad idea and a garbage collection nightmare.
// TODO: recreate rbx_dom_weak with my own instance type that owns this data.
type CreateUserData=fn(&mlua::Lua)->mlua::Result<mlua::AnyUserData>;
type LUD=phf::Map<&'static str,// Class name
phf::Map<&'static str,// Value name
@@ -643,47 +638,21 @@ static LAZY_USER_DATA:LUD=phf::phf_map!{
"MouseClick"=>create_script_signal,
},
};
#[derive(Default)]
pub struct InstanceValueStore{
values:HashMap<Ref,
HashMap<&'static str,
mlua::AnyUserData
>
>,
}
pub struct InstanceValues<'a>{
named_values:&'static phf::Map<&'static str,CreateUserData>,
values:&'a mut HashMap<&'static str,mlua::AnyUserData>,
}
impl InstanceValueStore{
pub fn get_or_create_instance_values(&mut self,instance:&rbx_dom_weak::Instance)->Option<InstanceValues<'_>>{
LAZY_USER_DATA.get(instance.class.as_str())
.map(|named_values|
InstanceValues{
named_values,
values:self.values.entry(instance.referent())
.or_insert_with(||HashMap::new()),
}
)
fn get_or_create_userdata(instance:&mut rbx_dom_weak::Instance,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
use std::collections::hash_map::Entry;
let db=rbx_reflection_database::get();
let Some(class)=db.classes.get(instance.class)else{
return Ok(None)
};
if let Some((hstr,create_userdata))=db.superclasses_iter(class).find_map(|superclass|
// find pair (class,index)
LAZY_USER_DATA.get(&superclass.name)
.and_then(|map|map.get(index)).map(|f|(superclass.name,f))
){
return Ok(Some(match instance.userdata.entry(hstr){
Entry::Occupied(entry)=>entry.get().clone(),
Entry::Vacant(entry)=>entry.insert(create_userdata(lua)?).clone(),
}));
}
}
impl InstanceValues<'_>{
pub fn get_or_create_value(&mut self,lua:&mlua::Lua,index:&str)->mlua::Result<Option<mlua::AnyUserData>>{
Ok(match self.named_values.get_entry(index){
Some((&static_index_str,&function_pointer))=>Some(
match self.values.entry(static_index_str){
Entry::Occupied(entry)=>entry.get().clone(),
Entry::Vacant(entry)=>entry.insert(
function_pointer(lua)?
).clone(),
}
),
None=>None,
})
}
}
pub fn instance_value_store_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut InstanceValueStore)->mlua::Result<T>)->mlua::Result<T>{
let mut cf=lua.app_data_mut::<InstanceValueStore>().ok_or_else(||mlua::Error::runtime("InstanceValueStore missing"))?;
f(&mut *cf)
Ok(None)
}

View File

@@ -4,7 +4,7 @@
#[derive(Clone,Copy)]
pub enum Number{
Integer(i64),
Integer(i32),
Number(f64),
}
macro_rules! impl_ty{

View File

@@ -12,12 +12,14 @@ pub enum Error{
error:mlua::Error
},
RustLua(mlua::Error),
Services(crate::context::ServicesError),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
match self{
Self::Lua{source,error}=>write!(f,"lua error: source:\n{source}\n{error}"),
Self::RustLua(error)=>write!(f,"rust-side lua error: {error}"),
other=>write!(f,"{other:?}"),
}
}
}
@@ -48,11 +50,6 @@ fn init(lua:&mlua::Lua)->mlua::Result<()>{
Ok(())
}
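/// Casts a mutable reference to a caller-chosen lifetime by round-tripping through a raw pointer.
/// Safety: the caller must guarantee the referent outlives `'a` and that no other reference
/// to it is used while the returned reference is alive.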
unsafe fn extend_lifetime_mut<'a,T>(src:&mut T)->&'a mut T{
let ptr:*mut T=src;
unsafe{&mut*ptr}
}
impl Runner{
pub fn new()->Result<Self,Error>{
let runner=Self{
@@ -61,7 +58,7 @@ impl Runner{
init(&runner.lua).map_err(Error::RustLua)?;
Ok(runner)
}
pub fn runnable_context<'a>(self,context:&'a mut Context)->Result<Runnable<'a>,Error>{
pub fn runnable_context<'a>(self,context:&'a mut Context<'a>)->Result<Runnable<'a>,Error>{
{
let globals=self.lua.globals();
globals.set("game",super::instance::Instance::new_unchecked(context.services.game)).map_err(Error::RustLua)?;
@@ -70,7 +67,7 @@ impl Runner{
// SAFETY: This is not a &'static mut WeakDom,
// but as long as Runnable<'a> holds the lifetime of &'a mut Context
// it is a valid unique reference.
self.lua.set_app_data::<crate::context::LuaAppData>(unsafe{extend_lifetime_mut(&mut context.dom)});
self.lua.set_app_data::<crate::context::LuaAppData>(unsafe{core::mem::transmute(&mut context.dom)});
#[cfg(feature="run-service")]
self.lua.set_app_data::<crate::scheduler::Scheduler>(crate::scheduler::Scheduler::default());
Ok(Runnable{
@@ -129,20 +126,15 @@ impl Runnable<'_>{
}
#[cfg(feature="run-service")]
pub fn run_service_step(&self)->Result<(),mlua::Error>{
let render_stepped=super::instance::instance::dom_mut(&self.lua,|dom|{
let render_stepped_signal=super::instance::instance::dom_mut(&self.lua,|dom|{
let run_service=super::instance::instance::find_first_child_of_class(dom,dom.root(),"RunService").ok_or_else(||mlua::Error::runtime("RunService missing"))?;
super::instance::instance::instance_value_store_mut(&self.lua,|instance_value_store|{
//unwrap because I trust my find_first_child_of_class function to
let mut instance_values=instance_value_store.get_or_create_instance_values(run_service).ok_or_else(||mlua::Error::runtime("RunService InstanceValues missing"))?;
let render_stepped=instance_values.get_or_create_value(&self.lua,"RenderStepped")?;
//let stepped=instance_values.get_or_create_value(&self.lua,"Stepped")?;
//let heartbeat=instance_values.get_or_create_value(&self.lua,"Heartbeat")?;
Ok(render_stepped)
Ok(match run_service.userdata.get(&static_ustr("RenderStepped")){
Some(render_stepped)=>Some(render_stepped.borrow::<super::script_signal::ScriptSignal>()?.clone()),
None=>None
})
})?;
if let Some(render_stepped)=render_stepped{
let signal:&super::script_signal::ScriptSignal=&*render_stepped.borrow()?;
signal.fire(&mlua::MultiValue::new());
if let Some(render_stepped_signal)=render_stepped_signal{
render_stepped_signal.fire(&mlua::MultiValue::new());
}
Ok(())
}

View File

@@ -117,7 +117,7 @@ impl mlua::FromLua for ScriptSignal{
}
impl mlua::UserData for ScriptConnection{
fn add_fields<F:UserDataFields<Self>>(fields:&mut F){
fn add_fields<F:mlua::UserDataFields<Self>>(fields:&mut F){
fields.add_field_method_get("Connected",|_,this|{
Ok(this.position().is_some())
});

View File

@@ -1,7 +1,7 @@
use super::instance::Instance;
use super::tween_info::TweenInfo;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Clone)]
pub struct Tween{
instance:Instance,

View File

@@ -1,7 +1,7 @@
use super::number::Number;
use super::r#enum::{CoerceEnum,Enums};
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Clone)]
pub struct TweenInfo{
time:f64,

View File

@@ -46,8 +46,8 @@ impl Scheduler{
}
}
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
pub fn scheduler_mut<T>(lua:&mlua::Lua,mut f:impl FnMut(&mut crate::scheduler::Scheduler)->mlua::Result<T>)->mlua::Result<T>{
let mut scheduler=lua.app_data_mut::<crate::scheduler::Scheduler>().ok_or_else(||mlua::Error::runtime("Scheduler missing"))?;
f(&mut *scheduler)
}

View File

@@ -1,3 +0,0 @@
pub fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s)
}

View File

@@ -1,14 +1,11 @@
[package]
name = "strafesnet_snf"
version = "0.3.1"
version = "0.3.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
binrw = "0.15.0"
binrw = "0.14.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { version = "0.7.0", path = "../common", registry = "strafesnet" }
[lints]
workspace = true
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }

View File

@@ -6,7 +6,7 @@ use strafesnet_common::physics::Time;
const VERSION:u32=0;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,Time>;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
#[derive(Debug)]
pub enum Error{
@@ -85,7 +85,6 @@ pub struct Segment{
#[derive(Clone,Copy,Debug)]
pub struct SegmentInfo{
/// time of the first instruction in this segment.
#[expect(dead_code)]
time:Time,
instruction_count:u32,
/// How many total instructions in segments up to and including this segment
@@ -117,7 +116,6 @@ impl<R:BinReaderExt> StreamableBot<R>{
segment_map,
})
}
#[expect(dead_code)]
fn get_segment_info(&self,segment_id:SegmentId)->Result<SegmentInfo,Error>{
Ok(*self.segment_map.get(segment_id.get() as usize).ok_or(Error::InvalidSegmentId(segment_id))?)
}
@@ -274,7 +272,7 @@ pub fn write_bot<W:BinWriterExt>(mut writer:W,physics_version:u32,instructions:i
//probe header length
let mut bot_header_data=Vec::new();
header.write_le(&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
binrw::BinWrite::write_le(&header,&mut std::io::Cursor::new(&mut bot_header_data)).map_err(Error::InvalidData)?;
// the first block location is the map header
block_location.push(offset);

View File

@@ -53,6 +53,8 @@ pub(crate) enum FourCC{
Map,
#[brw(magic=b"SNFB")]
Bot,
#[brw(magic=b"SNFD")]
Demo,
}
#[binrw]
#[brw(little)]

View File

@@ -5,6 +5,7 @@ mod newtypes;
mod file;
pub mod map;
pub mod bot;
pub mod demo;
#[derive(Debug)]
pub enum Error{
@@ -12,6 +13,7 @@ pub enum Error{
Header(file::Error),
Map(map::Error),
Bot(bot::Error),
Demo(demo::Error),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -23,6 +25,7 @@ impl std::error::Error for Error{}
pub enum SNF<R:BinReaderExt>{
Map(map::StreamableMap<R>),
Bot(bot::StreamableBot<R>),
Demo(demo::StreamableDemo<R>),
}
pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
@@ -30,6 +33,7 @@ pub fn read_snf<R:BinReaderExt>(input:R)->Result<SNF<R>,Error>{
Ok(match file.fourcc(){
file::FourCC::Map=>SNF::Map(map::StreamableMap::new(file).map_err(Error::Map)?),
file::FourCC::Bot=>SNF::Bot(bot::StreamableBot::new(file).map_err(Error::Bot)?),
file::FourCC::Demo=>SNF::Demo(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
})
}
pub fn read_map<R:BinReaderExt>(input:R)->Result<map::StreamableMap<R>,Error>{
@@ -46,6 +50,13 @@ pub fn read_bot<R:BinReaderExt>(input:R)->Result<bot::StreamableBot<R>,Error>{
_=>Err(Error::UnexpectedFourCC)
}
}
pub fn read_demo<R:BinReaderExt>(input:R)->Result<demo::StreamableDemo<R>,Error>{
let file=file::File::new(input).map_err(Error::Header)?;
match file.fourcc(){
file::FourCC::Demo=>Ok(demo::StreamableDemo::new(file).map_err(Error::Demo)?),
_=>Err(Error::UnexpectedFourCC)
}
}
#[cfg(test)]
mod tests {
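
The read_demo entry point added above follows the same FourCC-dispatch pattern as read_map and read_bot, while read_snf covers all three variants at once. A hypothetical usage sketch, not part of this changeset (the open_any name, file handling, and error boxing are assumptions; BufReader<File> satisfies binrw's Read+Seek bound for BinReaderExt):

use strafesnet_snf::{read_snf,SNF};

fn open_any(path:&std::path::Path)->Result<(),Box<dyn std::error::Error>>{
	// BufReader<File> implements Read+Seek, which is all BinReaderExt requires
	let reader=std::io::BufReader::new(std::fs::File::open(path)?);
	// read_snf inspects the FourCC in the file header and returns the matching variant
	match read_snf(reader)?{
		SNF::Map(_map)=>{/* stream map blocks */},
		SNF::Bot(_bot)=>{/* replay bot instructions */},
		SNF::Demo(_demo)=>{/* play back the demo */},
	}
	Ok(())
}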

View File

@@ -97,8 +97,8 @@ enum ResourceType{
}
struct ResourceMap<T>{
meshes:HashMap<model::MeshId,T>,
textures:HashMap<model::TextureId,T>,
meshes:HashMap<strafesnet_common::model::MeshId,T>,
textures:HashMap<strafesnet_common::model::TextureId,T>,
}
impl<T> Default for ResourceMap<T>{
fn default()->Self{
@@ -185,7 +185,7 @@ pub struct StreamableMap<R:BinReaderExt>{
//this is every possible attribute... need some sort of streaming system
attributes:Vec<strafesnet_common::gameplay_attributes::CollisionAttributes>,
//this is every possible render configuration... shaders and such... need streaming
render_configs:Vec<model::RenderConfig>,
render_configs:Vec<strafesnet_common::model::RenderConfig>,
//this makes sense to keep in memory for streaming, a map of which blocks occupy what space
bvh:BvhNode<BlockId>,
//something something resources hashmaps
@@ -223,7 +223,7 @@ impl<R:BinReaderExt> StreamableMap<R>{
}
Ok(Self{
file,
modes:gameplay_modes::NormalizedModes::new(modes),
modes:strafesnet_common::gameplay_modes::NormalizedModes::new(modes),
attributes,
render_configs,
bvh:strafesnet_common::bvh::generate_bvh(bvh),
@@ -366,12 +366,12 @@ fn collect_spacial_blocks(
block_location.push(sequential_block_data.position());
}else{
match bvh_node.into_content(){
RecursiveContent::Branch(bvh_node_list)=>{
strafesnet_common::bvh::RecursiveContent::Branch(bvh_node_list)=>{
for bvh_node in bvh_node_list{
collect_spacial_blocks(block_location,block_headers,sequential_block_data,bvh_node)?;
}
},
RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
strafesnet_common::bvh::RecursiveContent::Leaf(_)=>panic!(),//bvh branches are 20 leaves minimum
}
}
Ok(())
@@ -384,13 +384,13 @@ pub fn write_map<W:BinWriterExt>(mut writer:W,map:strafesnet_common::map::Comple
let boxen=map.models.into_iter().enumerate().map(|(model_id,model)|{
//grow your own aabb
let mesh=map.meshes.get(model.mesh.get() as usize).ok_or(Error::InvalidMeshId(model.mesh))?;
let mut aabb=Aabb::default();
let mut aabb=strafesnet_common::aabb::Aabb::default();
for &pos in &mesh.unique_pos{
aabb.grow(model.transform.transform_point3(pos).narrow_1().unwrap());
}
Ok(((model::ModelId::new(model_id as u32),model.into()),aabb))
}).collect::<Result<Vec<_>,_>>()?;
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|size_of::<newtypes::model::Model>());
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|std::mem::size_of::<newtypes::model::Model>());
//build blocks
//block location is initialized with two values
//the first value represents the location of the first byte after the file header

View File

@@ -104,7 +104,6 @@ impl From<strafesnet_common::gameplay_style::StyleModifiers> for StyleModifiers{
#[binrw::binrw]
#[brw(little,repr=u8)]
#[expect(dead_code)]
pub enum JumpCalculation{
Max,
BoostThenJump,
@@ -129,7 +128,6 @@ impl From<strafesnet_common::gameplay_style::JumpCalculation> for JumpCalculatio
}
}
#[expect(dead_code)]
pub enum JumpImpulse{
Time(Time),
Height(Planar64),

View File

@@ -20,7 +20,7 @@ impl TryInto<TimedPhysicsInstruction> for TimedInstruction{
}
}
impl TryFrom<TimedPhysicsInstruction> for TimedInstruction{
type Error=InstructionConvert;
type Error=super::physics::InstructionConvert;
fn try_from(value:TimedPhysicsInstruction)->Result<Self,Self::Error>{
Ok(Self{
time:value.time.get(),

View File

@@ -1,6 +1,6 @@
[package]
name = "map-tool"
version = "1.7.2"
version = "1.7.0"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,16 +12,17 @@ flate2 = "1.0.27"
futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
rbx_asset = { version = "0.5.0", registry = "strafesnet" }
rbx_binary = { version = "1.0.1-sn5", registry = "strafesnet" }
rbx_dom_weak = { version = "3.0.1-sn5", registry = "strafesnet" }
rbx_reflection_database = "1.0.0"
rbx_xml = { version = "1.0.1-sn5", registry = "strafesnet" }
lazy-regex = "3.1.0"
rbx_asset = { version = "0.4.4", registry = "strafesnet" }
rbx_binary = { path = "../../../git/rbx-dom/rbx_binary", registry = "strafesnet" }
rbx_dom_weak = { path = "../../../git/rbx-dom/rbx_dom_weak", registry = "strafesnet" }
rbx_reflection_database = { path = "../../../git/rbx-dom/rbx_reflection_database"}
rbx_xml = { path = "../../../git/rbx-dom/rbx_xml", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.1", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.7.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.1", path = "../lib/snf", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.0", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.6.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.0", path = "../lib/snf", registry = "strafesnet" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
vbsp = "0.9.1"
@@ -35,6 +36,3 @@ vtf = "0.3.0"
#lto = true
#strip = true
#codegen-units = 1
[lints]
workspace = true

View File

@@ -32,12 +32,8 @@ pub struct RobloxToSNFSubcommand {
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
// #[arg(long)]
// cookie_file:Option<String>,
}
impl Commands{
@@ -46,27 +42,13 @@ impl Commands{
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
Commands::DownloadAssets(subcommand)=>download_assets(
subcommand.roblox_files,
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
rbx_asset::cookie::Cookie::new("".to_string()),
).await,
}
}
}
async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<rbx_asset::cookie::Cookie>{
let cookie=match (literal,environment,file){
(Some(cookie_literal),None,None)=>cookie_literal,
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
};
Ok(rbx_asset::cookie::Cookie::new(cookie))
}
#[expect(dead_code)]
#[allow(unused)]
#[derive(Debug)]
enum LoadDomError{
IO(std::io::Error),
@@ -192,7 +174,7 @@ impl UniqueAssets{
}
}
#[expect(dead_code)]
#[allow(unused)]
#[derive(Debug)]
enum UniqueAssetError{
IO(std::io::Error),
@@ -267,8 +249,8 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
tokio::fs::write(path,&data).await?;
break Ok(DownloadResult::Data(data));
},
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::Details{status_code,url_and_body}))=>{
if status_code.as_u16()==429{
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
if scwuab.status_code.as_u16()==429{
if retry==12{
println!("Giving up asset download {asset_id}");
stats.timed_out_downloads+=1;
@@ -280,7 +262,7 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
retry+=1;
}else{
stats.failed_downloads+=1;
println!("weird status_code error: status_code={status_code} url={} body={}",url_and_body.url,url_and_body.body);
println!("weird scuwab error: {scwuab:?}");
break Ok(DownloadResult::Failed);
}
},
@@ -421,7 +403,7 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
}
#[derive(Debug)]
#[expect(dead_code)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -434,23 +416,17 @@ impl std::fmt::Display for ConvertError{
}
}
impl std::error::Error for ConvertError{}
struct Errors{
script_errors:Vec<strafesnet_rbx_loader::RunnerError>,
convert_errors:strafesnet_rbx_loader::RecoverableErrors,
}
fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>{
let entire_file=std::fs::read(path).map_err(ConvertError::IO)?;
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let model=strafesnet_rbx_loader::read(
entire_file.as_slice()
std::io::Cursor::new(entire_file)
).map_err(ConvertError::RobloxRead)?;
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap_or_else(|e|vec![e]);
place.run_scripts();
let (map,convert_errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
@@ -459,37 +435,24 @@ fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(Errors{
script_errors,
convert_errors,
})
Ok(())
}
async fn roblox_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf)->AResult<()>{
async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
// This is wrong! Calling roblox_to_snf multiple times keeps adding permits
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::task::spawn_blocking(move||{
let result=convert_to_snf(path.as_path(),output_folder);
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),output_folder).await;
drop(permit);
match result{
Ok(errors)=>{
for error in errors.script_errors{
println!("Script error: {error}");
}
let error_count=errors.convert_errors.count();
if error_count!=0{
println!("Error count: {error_count}");
println!("Errors: {}",errors.convert_errors);
}
},
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
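
The new comment above flags that the static SEM keeps accumulating permits each time roblox_to_snf is called. As a purely illustrative alternative, not part of this changeset (the run_limited name and signature are assumptions), the semaphore can be scoped to the call so every invocation starts with exactly thread_limit permits:

use std::sync::Arc;
use tokio::sync::Semaphore;

// Hypothetical sketch: a per-call semaphore instead of a static one.
async fn run_limited(thread_limit:usize,paths:Vec<std::path::PathBuf>,output_folder:std::path::PathBuf){
	let sem=Arc::new(Semaphore::new(thread_limit));
	for path in paths{
		// acquire_owned moves the permit into the task; it is released when dropped
		let permit=sem.clone().acquire_owned().await.unwrap();
		let output_folder=output_folder.clone();
		tokio::task::spawn_blocking(move||{
			// ...blocking conversion work for `path` goes here...
			let _=(path,output_folder);
			drop(permit);
		});
	}
}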

View File

@@ -452,7 +452,7 @@ fn bsp_contents(path:PathBuf)->AResult<()>{
}
#[derive(Debug)]
#[expect(dead_code)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
@@ -484,7 +484,7 @@ async fn convert_to_snf(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],output
Ok(())
}
async fn source_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();

View File

@@ -28,12 +28,5 @@ strafesnet_rbx_loader = { path = "../lib/rbx_loader", registry = "strafesnet", o
strafesnet_session = { path = "../engine/session", registry = "strafesnet" }
strafesnet_settings = { path = "../engine/settings", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet", optional = true }
wgpu = "27.0.0"
wgpu = "25.0.0"
winit = "0.30.7"
[profile.dev]
strip = false
opt-level = 3
[lints]
workspace = true

View File

@@ -3,7 +3,7 @@ use std::io::Read;
#[cfg(any(feature="roblox",feature="source"))]
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum ReadError{
#[cfg(feature="roblox")]
@@ -63,7 +63,7 @@ pub fn read<R:Read+std::io::Seek>(input:R)->Result<ReadFormat,ReadError>{
}
}
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum LoadError{
ReadError(ReadError),
@@ -98,15 +98,10 @@ pub fn load<P:AsRef<std::path::Path>>(path:P)->Result<LoadFormat,LoadError>{
#[cfg(feature="roblox")]
ReadFormat::Roblox(model)=>{
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap();
for error in script_errors{
println!("Script error: {error}");
}
let (map,errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?;
if errors.count()!=0{
print!("Errors encountered while loading the map:\n{}",errors);
}
Ok(LoadFormat::Map(map))
place.run_scripts();
Ok(LoadFormat::Map(
place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?
))
},
#[cfg(feature="source")]
ReadFormat::Source(bsp)=>Ok(LoadFormat::Map(

View File

@@ -21,7 +21,7 @@ WorkerDescription{
pub fn new(
mut graphics:graphics::GraphicsState,
mut config:wgpu::SurfaceConfiguration,
surface:wgpu::Surface<'_>,
surface:wgpu::Surface,
device:wgpu::Device,
queue:wgpu::Queue,
)->crate::compat_worker::INWorker<'_,Instruction>{

View File

@@ -119,13 +119,12 @@ impl<'a> SetupContextPartial3<'a>{
let (device, queue)=pollster::block_on(self.adapter
.request_device(
&wgpu::DeviceDescriptor{
label:None,
required_features:(optional_features&self.adapter.features())|required_features,
required_limits:needed_limits,
&wgpu::DeviceDescriptor {
label: None,
required_features: (optional_features & self.adapter.features()) | required_features,
required_limits: needed_limits,
memory_hints:wgpu::MemoryHints::Performance,
trace:wgpu::Trace::Off,
experimental_features:wgpu::ExperimentalFeatures::disabled(),
trace: wgpu::Trace::Off,
},
))
.expect("Unable to find a suitable GPU adapter!");

View File

@@ -188,7 +188,7 @@ impl WindowContext<'_>{
}
}
fn device_event(&mut self,time:SessionTime,event:winit::event::DeviceEvent){
fn device_event(&mut self,time:SessionTime,event: winit::event::DeviceEvent){
match event{
winit::event::DeviceEvent::MouseMotion{
delta,

View File

@@ -1 +0,0 @@
mangohud ../target/release/strafe-client bhop_maps/5692098704.snfm "$@"