Compare commits: sussy-tips...refactor-l
6 commits

| SHA1 |
|---|
| 2c729adf64 |
| 3467bc77b0 |
| 2cf5ff5059 |
| b550778a60 |
| 69599b23be |
| 2d9ad990c2 |
Changed paths:
- Cargo.lock
- Cargo.toml
- engine
- integration-testing
- lib
- bsp_loader
- common
- deferred_loader
- fixed_wide
- linear_ops
- rbx_loader
- rbxassetid
- snf
- map-tool
- strafe-client/src
- tools
Cargo.lock (generated, 2043 changed lines): file diff suppressed because it is too large.
@@ -15,7 +15,6 @@ members = [
"lib/rbxassetid",
"lib/roblox_emulator",
"lib/snf",
"map-tool",
"strafe-client",
]
resolver = "2"
@@ -556,7 +556,7 @@ impl MoveState{
=>None,
}
}
fn next_move_instruction(&self,strafe:&Option<gameplay_style::StrafeSettings>,time:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
fn next_move_instruction(&self,strafe:&Option<gameplay_style::StrafeSettings>,time:Time)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
//check if you have a valid walk state and create an instruction
match self{
MoveState::Walk(walk_state)|MoveState::Ladder(walk_state)=>match &walk_state.target{
@@ -784,7 +784,7 @@ impl TouchingState{
}).collect();
crate::push_solve::push_solve(&contacts,acceleration)
}
fn predict_collision_end(&self,collector:&mut instruction::InstructionCollector<InternalInstruction,Time>,models:&PhysicsModels,hitbox_mesh:&HitboxMesh,body:&Body,start_time:Time){
fn predict_collision_end(&self,collector:&mut instruction::InstructionCollector<InternalInstruction,TimeInner>,models:&PhysicsModels,hitbox_mesh:&HitboxMesh,body:&Body,start_time:Time){
// let relative_body=body.relative_to(&Body::ZERO);
let relative_body=body;
for contact in &self.contacts{
@@ -878,7 +878,7 @@ impl PhysicsState{
fn reset_to_default(&mut self){
*self=Self::default();
}
fn next_move_instruction(&self)->Option<TimedInstruction<InternalInstruction,Time>>{
fn next_move_instruction(&self)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
self.move_state.next_move_instruction(&self.style.strafe,self.time)
}
fn cull_velocity(&mut self,data:&PhysicsData,velocity:Planar64Vec3){
@@ -935,7 +935,7 @@ pub struct PhysicsData{
impl Default for PhysicsData{
fn default()->Self{
Self{
bvh:bvh::BvhNode::empty(),
bvh:bvh::BvhNode::default(),
models:Default::default(),
modes:Default::default(),
hitbox_mesh:StyleModifiers::default().calculate_mesh(),
@@ -950,21 +950,21 @@ pub struct PhysicsContext<'a>{
// the physics consumes both Instruction and PhysicsInternalInstruction,
// but can only emit PhysicsInternalInstruction
impl InstructionConsumer<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,Time>){
type TimeInner=TimeInner;
fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,TimeInner>){
atomic_internal_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionConsumer<Instruction> for PhysicsContext<'_>{
type Time=Time;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Time>){
type TimeInner=TimeInner;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,TimeInner>){
atomic_input_instruction(&mut self.state,&self.data,ins)
}
}
impl InstructionEmitter<InternalInstruction> for PhysicsContext<'_>{
type Time=Time;
type TimeInner=TimeInner;
//this little next instruction function could cache its return value and invalidate the cached value by watching the State.
fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
next_instruction_internal(&self.state,&self.data,time_limit)
}
}
@@ -972,7 +972,7 @@ impl PhysicsContext<'_>{
pub fn run_input_instruction(
state:&mut PhysicsState,
data:&PhysicsData,
instruction:TimedInstruction<Instruction,Time>
instruction:TimedInstruction<Instruction,TimeInner>
){
let mut context=PhysicsContext{state,data};
context.process_exhaustive(instruction.time);
@@ -1121,7 +1121,7 @@ impl PhysicsData{
}

//this is the one who asks
fn next_instruction_internal(state:&PhysicsState,data:&PhysicsData,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
fn next_instruction_internal(state:&PhysicsState,data:&PhysicsData,time_limit:Time)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
//JUST POLLING!!! NO MUTATION
let mut collector=instruction::InstructionCollector::new(time_limit);

@@ -1136,7 +1136,7 @@ impl PhysicsData{
//relative to moving platforms
//let relative_body=state.body.relative_to(&Body::ZERO);
let relative_body=&state.body;
data.bvh.sample_aabb(&aabb,&mut |&convex_mesh_id|{
data.bvh.the_tester(&aabb,&mut |&convex_mesh_id|{
//no checks are needed because of the time limits.
let model_mesh=data.models.mesh(convex_mesh_id);
let minkowski=model_physics::MinkowskiMesh::minkowski_sum(model_mesh,data.hitbox_mesh.transformed_mesh());
@@ -1198,7 +1198,7 @@ fn recalculate_touching(
aabb.inflate(hitbox_mesh.halfsize);
//relative to moving platforms
//let relative_body=state.body.relative_to(&Body::ZERO);
bvh.sample_aabb(&aabb,&mut |&convex_mesh_id|{
bvh.the_tester(&aabb,&mut |&convex_mesh_id|{
//no checks are needed because of the time limits.
let model_mesh=models.mesh(convex_mesh_id);
let minkowski=model_physics::MinkowskiMesh::minkowski_sum(model_mesh,hitbox_mesh.transformed_mesh());
@@ -1651,7 +1651,7 @@ fn collision_end_intersect(
}
}
}
fn atomic_internal_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<InternalInstruction,Time>){
fn atomic_internal_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<InternalInstruction,TimeInner>){
state.time=ins.time;
let (should_advance_body,goober_time)=match ins.instruction{
InternalInstruction::CollisionStart(_,dt)
@@ -1747,7 +1747,7 @@ fn atomic_internal_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:Tim
}
}

fn atomic_input_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<Instruction,Time>){
fn atomic_input_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<Instruction,TimeInner>){
state.time=ins.time;
let should_advance_body=match ins.instruction{
//the body may as well be a quantum wave function
@@ -5,13 +5,13 @@ use strafesnet_common::physics::{
TimeInner as PhysicsTimeInner,
Time as PhysicsTime,
};
use strafesnet_common::session::Time as SessionTime;
use strafesnet_common::session::{Time as SessionTime,TimeInner as SessionTimeInner};
use strafesnet_common::instruction::{InstructionConsumer,InstructionEmitter,TimedInstruction};

type TimedSelfInstruction=TimedInstruction<Instruction,PhysicsTime>;
type DoubleTimedSelfInstruction=TimedInstruction<TimedSelfInstruction,SessionTime>;
type TimedSelfInstruction=TimedInstruction<Instruction,PhysicsTimeInner>;
type DoubleTimedSelfInstruction=TimedInstruction<TimedSelfInstruction,SessionTimeInner>;

type TimedPhysicsInstruction=TimedInstruction<PhysicsInstruction,PhysicsTime>;
type TimedPhysicsInstruction=TimedInstruction<PhysicsInstruction,PhysicsTimeInner>;

const MOUSE_TIMEOUT:SessionTime=SessionTime::from_millis(10);

@@ -89,14 +89,14 @@ pub struct MouseInterpolator{
// Maybe MouseInterpolator manipulation is better expressed using impls
// and called from Instruction trait impls in session
impl InstructionConsumer<TimedSelfInstruction> for MouseInterpolator{
type Time=SessionTime;
type TimeInner=SessionTimeInner;
fn process_instruction(&mut self,ins:DoubleTimedSelfInstruction){
self.push_unbuffered_input(ins.time,ins.instruction.time,ins.instruction.instruction.into())
}
}
impl InstructionEmitter<StepInstruction> for MouseInterpolator{
type Time=SessionTime;
fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::Time>>{
type TimeInner=SessionTimeInner;
fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::TimeInner>>{
self.buffered_instruction_with_timeout(time_limit)
}
}
@@ -108,7 +108,7 @@ impl MouseInterpolator{
output:std::collections::VecDeque::new(),
}
}
fn push_mouse_and_flush_buffer(&mut self,ins:TimedInstruction<MouseInstruction,PhysicsTime>){
fn push_mouse_and_flush_buffer(&mut self,ins:TimedInstruction<MouseInstruction,PhysicsTimeInner>){
self.buffer.push_front(TimedInstruction{
time:ins.time,
instruction:BufferedInstruction::Mouse(ins.instruction).into(),
@@ -219,7 +219,7 @@ impl MouseInterpolator{
}
}
}
fn buffered_instruction_with_timeout(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,SessionTime>>{
fn buffered_instruction_with_timeout(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,SessionTimeInner>>{
match self.get_mouse_timedout_at(time_limit){
Some(timeout)=>Some(TimedInstruction{
time:timeout,
@@ -232,7 +232,7 @@ impl MouseInterpolator{
}),
}
}
pub fn pop_buffered_instruction(&mut self,ins:TimedInstruction<StepInstruction,PhysicsTime>)->Option<TimedPhysicsInstruction>{
pub fn pop_buffered_instruction(&mut self,ins:TimedInstruction<StepInstruction,PhysicsTimeInner>)->Option<TimedPhysicsInstruction>{
match ins.instruction{
StepInstruction::Pop=>(),
StepInstruction::Timeout=>self.timeout_mouse(ins.time),
@@ -244,7 +244,6 @@ impl MouseInterpolator{
#[cfg(test)]
mod test{
use super::*;
use strafesnet_common::session::TimeInner as SessionTimeInner;
#[test]
fn test(){
let mut interpolator=MouseInterpolator::new();
@@ -88,11 +88,11 @@ impl Simulation{

#[derive(Default)]
pub struct Recording{
instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTime>>,
instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTimeInner>>,
}
impl Recording{
pub fn new(
instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTime>>,
instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTimeInner>>,
)->Self{
Self{instructions}
}
@@ -207,8 +207,8 @@ impl Session{
// Session emits DoStep

impl InstructionConsumer<Instruction<'_>> for Session{
type Time=SessionTime;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Self::Time>){
type TimeInner=SessionTimeInner;
fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Self::TimeInner>){
// repetitive procedure macro
macro_rules! run_mouse_interpolator_instruction{
($instruction:expr)=>{
@@ -425,8 +425,8 @@ impl InstructionConsumer<Instruction<'_>> for Session{
}
}
impl InstructionConsumer<StepInstruction> for Session{
type Time=SessionTime;
fn process_instruction(&mut self,ins:TimedInstruction<StepInstruction,Self::Time>){
type TimeInner=SessionTimeInner;
fn process_instruction(&mut self,ins:TimedInstruction<StepInstruction,Self::TimeInner>){
let time=self.simulation.timer.time(ins.time);
if let Some(instruction)=self.mouse_interpolator.pop_buffered_instruction(ins.set_time(time)){
//record
@@ -436,8 +436,8 @@ impl InstructionConsumer<StepInstruction> for Session{
}
}
impl InstructionEmitter<StepInstruction> for Session{
type Time=SessionTime;
fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::Time>>{
type TimeInner=SessionTimeInner;
fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::TimeInner>>{
self.mouse_interpolator.next_instruction(time_limit)
}
}
@@ -4,6 +4,6 @@ version = "0.1.0"
edition = "2021"

[dependencies]
strafesnet_common = { path = "../lib/common", registry = "strafesnet" }
strafesnet_physics = { path = "../engine/physics", registry = "strafesnet" }
strafesnet_common = { version = "0.5.2", path = "../lib/common", registry = "strafesnet" }
strafesnet_physics = { version = "0.1.0", path = "../engine/physics", registry = "strafesnet" }
strafesnet_snf = { path = "../lib/snf", registry = "strafesnet" }
@@ -1,6 +1,6 @@
[package]
name = "strafesnet_bsp_loader"
version = "0.3.0"
version = "0.2.2"
edition = "2021"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -11,8 +11,7 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]

[dependencies]
glam = "0.29.0"
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
vbsp = { version = "0.7.0-codegen1", registry = "strafesnet" }
strafesnet_common = { path = "../common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader" }
vbsp = "0.6.0"
vmdl = "0.2.0"
vpk = "0.2.0"
@@ -1,321 +0,0 @@
use strafesnet_common::integer::Planar64;
use strafesnet_common::{model,integer};
use strafesnet_common::integer::{vec3::Vector3,Fixed,Ratio};

use crate::{valve_transform_normal,valve_transform_dist};

#[derive(Hash,Eq,PartialEq)]
struct Face{
normal:integer::Planar64Vec3,
dot:integer::Planar64,
}

#[derive(Debug)]
struct Faces{
faces:Vec<Vec<integer::Planar64Vec3>>,
}

fn solve3(c0:&Face,c1:&Face,c2:&Face)->Option<Ratio<Vector3<Fixed<3,96>>,Fixed<3,96>>>{
let n0_n1=c0.normal.cross(c1.normal);
let det=c2.normal.dot(n0_n1);
if det.abs().is_zero(){
return None;
}
Some((
c1.normal.cross(c2.normal)*c0.dot
+c2.normal.cross(c0.normal)*c1.dot
+c0.normal.cross(c1.normal)*c2.dot
)/det)
}
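As an aside, `solve3` above reads as the standard three-plane intersection: for planes $n_i \cdot p = d_i$ ($i = 0,1,2$) it returns, as a deferred-division `Ratio`,

$$p = \frac{d_0\,(n_1 \times n_2) + d_1\,(n_2 \times n_0) + d_2\,(n_0 \times n_1)}{n_2 \cdot (n_0 \times n_1)},$$

where the denominator is the scalar triple product of the three normals and vanishes exactly when they are coplanar, which is the `None` branch.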
|
||||
#[derive(Debug)]
|
||||
pub enum PlanesToFacesError{
|
||||
InitFace1,
|
||||
InitFace2,
|
||||
InitIntersection,
|
||||
FindNewIntersection,
|
||||
EmptyFaces,
|
||||
InfiniteLoop1,
|
||||
InfiniteLoop2,
|
||||
}
|
||||
impl std::fmt::Display for PlanesToFacesError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl core::error::Error for PlanesToFacesError{}
|
||||
|
||||
fn planes_to_faces(face_list:std::collections::HashSet<Face>)->Result<Faces,PlanesToFacesError>{
|
||||
let mut faces=Vec::new();
|
||||
// for each face, determine one edge at a time until you complete the face
|
||||
'face: for face0 in &face_list{
|
||||
// 1. find first edge
|
||||
// 2. follow edges around face
|
||||
|
||||
// === finding first edge ===
|
||||
// 1. pick the most perpendicular set of 3 faces
|
||||
// 2. check if any faces occlude the intersection
|
||||
// 3. use this test to replace left and right alternating until they are not occluded
|
||||
|
||||
// find the most perpendicular face to face0
|
||||
let mut face1=face_list.iter().min_by_key(|&p|{
|
||||
face0.normal.dot(p.normal).abs()
|
||||
}).ok_or(PlanesToFacesError::InitFace1)?;
|
||||
|
||||
// direction of edge formed by face0 x face1
|
||||
let edge_dir=face0.normal.cross(face1.normal);
|
||||
|
||||
// find the most perpendicular face to both face0 and face1
|
||||
let mut face2=face_list.iter().max_by_key(|&p|{
|
||||
// find the best *oriented* face (no .abs())
|
||||
edge_dir.dot(p.normal)
|
||||
}).ok_or(PlanesToFacesError::InitFace2)?;
|
||||
|
||||
let mut detect_loop=200u8;
|
||||
|
||||
let mut intersection=solve3(face0,face1,face2).ok_or(PlanesToFacesError::InitIntersection)?;
|
||||
|
||||
// repeatedly update face0, face1 until all faces form part of the convex solid
|
||||
'find: loop{
|
||||
if let Some(a)=detect_loop.checked_sub(1){
|
||||
detect_loop=a;
|
||||
}else{
|
||||
return Err(PlanesToFacesError::InfiniteLoop1);
|
||||
}
|
||||
// test if any *other* faces occlude the intersection
|
||||
for new_face in &face_list{
|
||||
// new face occludes intersection point
|
||||
if (new_face.dot.fix_2()/Planar64::ONE).lt_ratio(new_face.normal.dot(intersection.num)/intersection.den){
|
||||
// replace one of the faces with the new face
|
||||
// dont' try to replace face0 because we are exploring that face in particular
|
||||
if let Some(new_intersection)=solve3(face0,new_face,face2){
|
||||
// face1 does not occlude (or intersect) the new intersection
|
||||
if (face1.dot.fix_2()/Planar64::ONE).gt_ratio(face1.normal.dot(new_intersection.num)/new_intersection.den){
|
||||
face1=new_face;
|
||||
intersection=new_intersection;
|
||||
continue 'find;
|
||||
}
|
||||
}
|
||||
if let Some(new_intersection)=solve3(face0,face1,new_face){
|
||||
// face2 does not occlude (or intersect) the new intersection
|
||||
if (face2.dot.fix_2()/Planar64::ONE).gt_ratio(face2.normal.dot(new_intersection.num)/new_intersection.den){
|
||||
face2=new_face;
|
||||
intersection=new_intersection;
|
||||
continue 'find;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// we have found a set of faces for which the intersection is on the convex solid
|
||||
break 'find;
|
||||
}
|
||||
|
||||
// check if face0 must go, meaning it is a degenerate face and does not contribute anything to the convex solid
|
||||
for new_face in &face_list{
|
||||
if core::ptr::eq(face0,new_face){
|
||||
continue;
|
||||
}
|
||||
if core::ptr::eq(face1,new_face){
|
||||
continue;
|
||||
}
|
||||
if core::ptr::eq(face2,new_face){
|
||||
continue;
|
||||
}
|
||||
if let Some(new_intersection)=solve3(new_face,face1,face2){
|
||||
// face0 does not occlude (or intersect) the new intersection
|
||||
if (face0.dot.fix_2()/Planar64::ONE).lt_ratio(face0.normal.dot(new_intersection.num)/new_intersection.den){
|
||||
// abort! reject face0 entirely
|
||||
continue 'face;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// === follow edges around face ===
|
||||
// Note that we chose face2 such that the 3 faces create a particular winding order.
|
||||
// If we choose a consistent face to follow (face1, face2) it will always wind with a consistent chirality
|
||||
|
||||
let mut detect_loop=200u8;
|
||||
|
||||
// keep looping until we meet this face again
|
||||
let face1=face1;
|
||||
let mut face=Vec::new();
|
||||
loop{
|
||||
// push point onto vertices
|
||||
// problem: this may push a vertex that does not fit in the fixed point range and is thus meaningless
|
||||
face.push(intersection.divide().fix_1());
|
||||
|
||||
// we looped back around to face1, we're done!
|
||||
if core::ptr::eq(face1,face2){
|
||||
break;
|
||||
}
|
||||
|
||||
// the measure
|
||||
let edge_dir=face0.normal.cross(face2.normal);
|
||||
|
||||
// the dot product to beat
|
||||
let d_intersection=edge_dir.dot(intersection.num)/intersection.den;
|
||||
|
||||
// find the next face moving clockwise around face0
|
||||
let (new_face,new_intersection,_)=face_list.iter().filter_map(|new_face|{
|
||||
// ignore faces that are part of the current edge
|
||||
if core::ptr::eq(face0,new_face)
|
||||
|core::ptr::eq(face2,new_face){
|
||||
return None;
|
||||
}
|
||||
let new_intersection=solve3(face0,face2,new_face)?;
|
||||
|
||||
// the d value must be larger
|
||||
let d_new_intersection=edge_dir.dot(new_intersection.num)/new_intersection.den;
|
||||
if d_new_intersection.le_ratio(d_intersection){
|
||||
return None;
|
||||
}
|
||||
|
||||
Some((new_face,new_intersection,d_new_intersection))
|
||||
}).min_by_key(|&(_,_,d)|d).ok_or(PlanesToFacesError::FindNewIntersection)?;
|
||||
|
||||
face2=new_face;
|
||||
intersection=new_intersection;
|
||||
|
||||
if let Some(a)=detect_loop.checked_sub(1){
|
||||
detect_loop=a;
|
||||
}else{
|
||||
return Err(PlanesToFacesError::InfiniteLoop2);
|
||||
}
|
||||
}
|
||||
|
||||
faces.push(face);
|
||||
}
|
||||
|
||||
if faces.is_empty(){
|
||||
Err(PlanesToFacesError::EmptyFaces)
|
||||
}else{
|
||||
Ok(Faces{
|
||||
faces,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum BrushToMeshError{
|
||||
SliceBrushSides,
|
||||
MissingPlane,
|
||||
InvalidFaceCount{
|
||||
count:usize,
|
||||
},
|
||||
InvalidPlanes(PlanesToFacesError),
|
||||
SkipBecauseTexture,
|
||||
}
|
||||
impl std::fmt::Display for BrushToMeshError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl core::error::Error for BrushToMeshError{}
|
||||
|
||||
pub fn faces_to_mesh(faces:Vec<Vec<integer::Planar64Vec3>>)->model::Mesh{
|
||||
// generate the mesh
|
||||
let mut mb=model::MeshBuilder::new();
|
||||
let color=mb.acquire_color_id(glam::Vec4::ONE);
|
||||
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
|
||||
// normals are ignored by physics
|
||||
let normal=mb.acquire_normal_id(integer::vec3::ZERO);
|
||||
|
||||
let polygon_list=faces.into_iter().map(|face|{
|
||||
face.into_iter().map(|pos|{
|
||||
let pos=mb.acquire_pos_id(pos);
|
||||
mb.acquire_vertex_id(model::IndexedVertex{
|
||||
pos,
|
||||
tex,
|
||||
normal,
|
||||
color,
|
||||
})
|
||||
}).collect()
|
||||
}).collect();
|
||||
|
||||
let polygon_groups=vec![model::PolygonGroup::PolygonList(model::PolygonList::new(polygon_list))];
|
||||
let physics_groups=vec![model::IndexedPhysicsGroup{
|
||||
groups:vec![model::PolygonGroupId::new(0)],
|
||||
}];
|
||||
let graphics_groups=vec![];
|
||||
|
||||
mb.build(polygon_groups,graphics_groups,physics_groups)
|
||||
}
|
||||
|
||||
pub fn brush_to_mesh(bsp:&vbsp::Bsp,brush:&vbsp::Brush)->Result<model::Mesh,BrushToMeshError>{
|
||||
let brush_start_idx=brush.brush_side as usize;
|
||||
let sides_range=brush_start_idx..brush_start_idx+brush.num_brush_sides as usize;
|
||||
let sides=bsp.brush_sides.get(sides_range).ok_or(BrushToMeshError::SliceBrushSides)?;
|
||||
for side in sides{
|
||||
if let Some(texture_info)=bsp.textures_info.get(side.texture_info as usize){
|
||||
let texture_info=vbsp::Handle::new(bsp,texture_info);
|
||||
let s=texture_info.name();
|
||||
if s.starts_with("tools/")||s.starts_with("TOOLS/"){
|
||||
return Err(BrushToMeshError::SkipBecauseTexture);
|
||||
}
|
||||
}
|
||||
}
|
||||
let face_list=sides.iter().filter(|side|side.bevel==0).map(|side|{
|
||||
let plane=bsp.plane(side.plane as usize)?;
|
||||
Some(Face{
|
||||
normal:valve_transform_normal(plane.normal.into()),
|
||||
dot:valve_transform_dist(plane.dist.into()),
|
||||
})
|
||||
}).collect::<Option<std::collections::HashSet<_>>>().ok_or(BrushToMeshError::MissingPlane)?;
|
||||
|
||||
if face_list.len()<4{
|
||||
return Err(BrushToMeshError::InvalidFaceCount{count:face_list.len()});
|
||||
}
|
||||
|
||||
let faces=planes_to_faces(face_list).map_err(BrushToMeshError::InvalidPlanes)?;
|
||||
|
||||
let mesh=faces_to_mesh(faces.faces);
|
||||
|
||||
Ok(mesh)
|
||||
}
|
||||
|
||||
pub fn unit_cube()->model::Mesh{
|
||||
let face_list=[
|
||||
Face{normal:integer::vec3::X,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::Y,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::Z,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_X,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_Y,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_Z,dot:Planar64::ONE},
|
||||
].into_iter().collect();
|
||||
let faces=planes_to_faces(face_list).unwrap();
|
||||
let mesh=faces_to_mesh(faces.faces);
|
||||
mesh
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test{
|
||||
use super::*;
|
||||
#[test]
|
||||
fn test_cube(){
|
||||
let face_list=[
|
||||
Face{normal:integer::vec3::X,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::Y,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::Z,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_X,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_Y,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_Z,dot:Planar64::ONE},
|
||||
].into_iter().collect();
|
||||
let faces=planes_to_faces(face_list).unwrap();
|
||||
dbg!(faces);
|
||||
}
|
||||
#[test]
|
||||
fn test_cube_with_degernate_face(){
|
||||
let face_list=[
|
||||
Face{normal:integer::vec3::X,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::Y,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::Z,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_X,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_Y,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_Z,dot:Planar64::ONE},
|
||||
Face{normal:integer::vec3::NEG_Z,dot:Planar64::EPSILON},
|
||||
].into_iter().collect();
|
||||
let faces=planes_to_faces(face_list).unwrap();
|
||||
dbg!(faces);
|
||||
}
|
||||
}
|
@@ -7,31 +7,6 @@ use strafesnet_deferred_loader::texture::{RenderConfigs,Texture};

use crate::valve_transform;

fn ingest_vertex(
mb:&mut model::MeshBuilder,
world_position:vbsp::Vector,
texture_transform_u:glam::Vec4,
texture_transform_v:glam::Vec4,
normal:model::NormalId,
color:model::ColorId,
)->model::VertexId{
//world_model.origin seems to always be 0,0,0
let vertex_xyz=world_position.into();
let pos=mb.acquire_pos_id(valve_transform(vertex_xyz));

//calculate texture coordinates
let pos_4d=glam::Vec3::from_array(vertex_xyz).extend(1.0);
let tex=glam::vec2(texture_transform_u.dot(pos_4d),texture_transform_v.dot(pos_4d));
let tex=mb.acquire_tex_id(tex);

mb.acquire_vertex_id(model::IndexedVertex{
pos,
tex,
normal,
color,
})
}

pub fn convert<'a>(
bsp:&'a crate::Bsp,
render_config_deferred_loader:&mut RenderConfigDeferredLoader<Cow<'a,str>>,
@@ -41,20 +16,21 @@ pub fn convert<'a>(
//figure out real attributes later
let mut unique_attributes=Vec::new();
unique_attributes.push(gameplay_attributes::CollisionAttributes::Decoration);
unique_attributes.push(gameplay_attributes::CollisionAttributes::contact_default());
unique_attributes.push(gameplay_attributes::CollisionAttributes::intersect_default());
const ATTRIBUTE_DECORATION:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(0);
const ATTRIBUTE_CONTACT_DEFAULT:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(1);
const ATTRIBUTE_INTERSECT_DEFAULT:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(2);
const TEMP_TOUCH_ME_ATTRIBUTE:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(0);

let mut prop_mesh_count=0;
//declare all prop models to Loader
let prop_models=bsp.static_props().map(|prop|{
//get or create mesh_id
let mesh_id=mesh_deferred_loader.acquire_mesh_id(prop.model());
//not the most failsafe code but this is just for the map tool lmao
if prop_mesh_count==mesh_id.get(){
prop_mesh_count+=1;
};
let placement=prop.as_prop_placement();
model::Model{
mesh:mesh_id,
attributes:ATTRIBUTE_DECORATION,
attributes:TEMP_TOUCH_ME_ATTRIBUTE,
transform:integer::Planar64Affine3::new(
integer::mat3::try_from_f32_array_2d((
glam::Mat3A::from_diagonal(glam::Vec3::splat(placement.scale))
@@ -71,12 +47,14 @@ pub fn convert<'a>(

//the generated MeshIds in here will collide with the Loader Mesh Ids
//but I can't think of a good workaround other than just remapping one later.
let mut world_meshes:Vec<model::Mesh>=bsp.models().map(|world_model|{
let mut mb=model::MeshBuilder::new();

let color=mb.acquire_color_id(glam::Vec4::ONE);
let world_meshes:Vec<model::Mesh>=bsp.models().map(|world_model|{
//non-deduplicated
let mut spam_pos=Vec::new();
let mut spam_tex=Vec::new();
let mut spam_normal=Vec::new();
let mut spam_vertices=Vec::new();
let mut graphics_groups=Vec::new();
let mut render_id_to_graphics_group_id=std::collections::HashMap::new();
let mut physics_group=model::IndexedPhysicsGroup::default();
let polygon_groups=world_model.faces().enumerate().map(|(polygon_group_id,face)|{
let polygon_group_id=model::PolygonGroupId::new(polygon_group_id as u32);
let face_texture=face.texture();
@ -85,151 +63,107 @@ pub fn convert<'a>(
|
||||
let texture_transform_u=glam::Vec4::from_array(face_texture.texture_transforms_u)/(face_texture_data.width as f32);
|
||||
let texture_transform_v=glam::Vec4::from_array(face_texture.texture_transforms_v)/(face_texture_data.height as f32);
|
||||
|
||||
//this automatically figures out what the texture is trying to do and creates
|
||||
//a render config for it, and then returns the id to that render config
|
||||
let render_id=render_config_deferred_loader.acquire_render_config_id(Some(face_texture_data.name().into()));
|
||||
|
||||
//normal
|
||||
let normal=mb.acquire_normal_id(valve_transform(face.normal().into()));
|
||||
let mut polygon_iter=face.vertex_positions().map(|vertex_position|
|
||||
world_model.origin+vertex_position
|
||||
);
|
||||
let normal=face.normal();
|
||||
let normal_idx=spam_normal.len() as u32;
|
||||
spam_normal.push(valve_transform(normal.into()));
|
||||
let mut polygon_iter=face.vertex_positions().map(|vertex_position|{
|
||||
//world_model.origin seems to always be 0,0,0
|
||||
let vertex_xyz=(world_model.origin+vertex_position).into();
|
||||
let pos_idx=spam_pos.len();
|
||||
spam_pos.push(valve_transform(vertex_xyz));
|
||||
|
||||
//calculate texture coordinates
|
||||
let pos=glam::Vec3::from_array(vertex_xyz).extend(1.0);
|
||||
let tex=glam::vec2(texture_transform_u.dot(pos),texture_transform_v.dot(pos));
|
||||
let tex_idx=spam_tex.len() as u32;
|
||||
spam_tex.push(tex);
|
||||
|
||||
let vertex_id=model::VertexId::new(spam_vertices.len() as u32);
|
||||
spam_vertices.push(model::IndexedVertex{
|
||||
pos:model::PositionId::new(pos_idx as u32),
|
||||
tex:model::TextureCoordinateId::new(tex_idx as u32),
|
||||
normal:model::NormalId::new(normal_idx),
|
||||
color:model::ColorId::new(0),
|
||||
});
|
||||
vertex_id
|
||||
});
|
||||
let polygon_list=std::iter::from_fn(move||{
|
||||
match (polygon_iter.next(),polygon_iter.next(),polygon_iter.next()){
|
||||
(Some(v1),Some(v2),Some(v3))=>Some([v1,v2,v3]),
|
||||
(Some(v1),Some(v2),Some(v3))=>Some(vec![v1,v2,v3]),
|
||||
//ignore extra vertices, not sure what to do in this case, failing the whole conversion could be appropriate
|
||||
_=>None,
|
||||
}
|
||||
}).map(|triplet|{
|
||||
triplet.map(|world_position|
|
||||
ingest_vertex(&mut mb,world_position,texture_transform_u,texture_transform_v,normal,color)
|
||||
).to_vec()
|
||||
}).collect();
|
||||
if face.is_visible(){
|
||||
//this automatically figures out what the texture is trying to do and creates
|
||||
//a render config for it, and then returns the id to that render config
|
||||
let render_id=render_config_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(face_texture_data.name())));
|
||||
//deduplicate graphics groups by render id
|
||||
let graphics_group_id=*render_id_to_graphics_group_id.entry(render_id).or_insert_with(||{
|
||||
let graphics_group_id=graphics_groups.len();
|
||||
graphics_groups.push(model::IndexedGraphicsGroup{
|
||||
render:render_id,
|
||||
groups:vec![],
|
||||
});
|
||||
graphics_group_id
|
||||
});
|
||||
graphics_groups[graphics_group_id].groups.push(polygon_group_id);
|
||||
//TODO: deduplicate graphics groups by render id
|
||||
graphics_groups.push(model::IndexedGraphicsGroup{
|
||||
render:render_id,
|
||||
groups:vec![polygon_group_id],
|
||||
})
|
||||
}
|
||||
physics_group.groups.push(polygon_group_id);
|
||||
model::PolygonGroup::PolygonList(model::PolygonList::new(polygon_list))
|
||||
}).collect();
|
||||
|
||||
mb.build(polygon_groups,graphics_groups,vec![])
|
||||
model::Mesh{
|
||||
unique_pos:spam_pos,
|
||||
unique_tex:spam_tex,
|
||||
unique_normal:spam_normal,
|
||||
unique_color:vec![glam::Vec4::ONE],
|
||||
unique_vertices:spam_vertices,
|
||||
polygon_groups,
|
||||
graphics_groups,
|
||||
physics_groups:vec![physics_group],
|
||||
}
|
||||
}).collect();
|
||||
|
||||
let mut found_spawn=None;
|
||||
|
||||
let mut world_models=Vec::new();
|
||||
|
||||
// the one and only world model 0
|
||||
world_models.push(model::Model{
|
||||
mesh:model::MeshId::new(0),
|
||||
attributes:ATTRIBUTE_DECORATION,
|
||||
transform:integer::Planar64Affine3::IDENTITY,
|
||||
color:glam::Vec4::W,
|
||||
});
|
||||
|
||||
for raw_ent in bsp.entities.iter(){
|
||||
match raw_ent.parse(){
|
||||
Ok(vbsp::basic::Entity::Brush(brush))
|
||||
|Ok(vbsp::basic::Entity::BrushIllusionary(brush))
|
||||
|Ok(vbsp::basic::Entity::BrushWall(brush))
|
||||
|Ok(vbsp::basic::Entity::BrushWallToggle(brush))=>{
|
||||
//The first character of brush.model is '*'
|
||||
match brush.model[1..].parse(){
|
||||
Ok(mesh_id)=>{
|
||||
world_models.push(model::Model{
|
||||
mesh:model::MeshId::new(mesh_id),
|
||||
attributes:ATTRIBUTE_DECORATION,
|
||||
transform:integer::Planar64Affine3::from_translation(
|
||||
valve_transform(brush.origin.into())
|
||||
),
|
||||
color:(glam::Vec3::from_array([
|
||||
brush.color.r as f32,
|
||||
brush.color.g as f32,
|
||||
brush.color.b as f32
|
||||
])/255.0).extend(1.0),
|
||||
});
|
||||
},
|
||||
Err(e)=>{
|
||||
println!("Brush model int parse error: {e}");
|
||||
},
|
||||
}
|
||||
},
|
||||
_=>(),
|
||||
}
|
||||
|
||||
match raw_ent.parse(){
|
||||
Ok(vbsp::css::Entity::InfoPlayerCounterterrorist(spawn))=>{
|
||||
found_spawn=Some(valve_transform(spawn.origin.into()));
|
||||
},
|
||||
Err(e)=>{
|
||||
println!("Bsp Entity parse error: {e}");
|
||||
},
|
||||
_=>(),
|
||||
}
|
||||
}
|
||||
|
||||
// physics models
|
||||
for brush in &bsp.brushes{
|
||||
if !brush.flags.contains(vbsp::BrushFlags::SOLID){
|
||||
continue;
|
||||
}
|
||||
let mesh_result=crate::brush::brush_to_mesh(bsp,brush);
|
||||
match mesh_result{
|
||||
Ok(mesh)=>{
|
||||
let mesh_id=model::MeshId::new(world_meshes.len() as u32);
|
||||
world_meshes.push(mesh);
|
||||
world_models.push(model::Model{
|
||||
mesh:mesh_id,
|
||||
attributes:ATTRIBUTE_CONTACT_DEFAULT,
|
||||
transform:integer::Planar64Affine3::new(
|
||||
integer::mat3::identity(),
|
||||
integer::vec3::ZERO,
|
||||
),
|
||||
color:glam::Vec4::ONE,
|
||||
});
|
||||
},
|
||||
Err(e)=>println!("Brush mesh error: {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
let mut modes_list=Vec::new();
|
||||
if let Some(spawn_point)=found_spawn{
|
||||
// create a new mesh
|
||||
let mesh_id=model::MeshId::new(world_meshes.len() as u32);
|
||||
world_meshes.push(crate::brush::unit_cube());
|
||||
// create a new model
|
||||
let model_id=model::ModelId::new(world_models.len() as u32);
|
||||
world_models.push(model::Model{
|
||||
let world_models:Vec<model::Model>=
|
||||
//one instance of the main world mesh
|
||||
std::iter::once((
|
||||
//world_model
|
||||
model::MeshId::new(0),
|
||||
//model_origin
|
||||
vbsp::Vector::from([0.0,0.0,0.0]),
|
||||
//model_color
|
||||
vbsp::Color{r:255,g:255,b:255},
|
||||
)).chain(
|
||||
//entities sprinkle instances of the other meshes around
|
||||
bsp.entities.iter()
|
||||
.flat_map(|ent|ent.parse())//ignore entity parsing errors
|
||||
.filter_map(|ent|match ent{
|
||||
vbsp::Entity::Brush(brush)=>Some(brush),
|
||||
vbsp::Entity::BrushIllusionary(brush)=>Some(brush),
|
||||
vbsp::Entity::BrushWall(brush)=>Some(brush),
|
||||
vbsp::Entity::BrushWallToggle(brush)=>Some(brush),
|
||||
_=>None,
|
||||
}).flat_map(|brush|
|
||||
//The first character of brush.model is '*'
|
||||
brush.model[1..].parse().map(|mesh_id|//ignore parse int errors
|
||||
(model::MeshId::new(mesh_id),brush.origin,brush.color)
|
||||
)
|
||||
)
|
||||
).map(|(mesh_id,model_origin,vbsp::Color{r,g,b})|{
|
||||
model::Model{
|
||||
mesh:mesh_id,
|
||||
attributes:ATTRIBUTE_INTERSECT_DEFAULT,
|
||||
transform:integer::Planar64Affine3::from_translation(spawn_point),
|
||||
color:glam::Vec4::W,
|
||||
});
|
||||
|
||||
let first_stage=strafesnet_common::gameplay_modes::Stage::empty(model_id);
|
||||
let main_mode=strafesnet_common::gameplay_modes::Mode::new(
|
||||
strafesnet_common::gameplay_style::StyleModifiers::source_bhop(),
|
||||
model_id,
|
||||
std::collections::HashMap::new(),
|
||||
vec![first_stage],
|
||||
std::collections::HashMap::new(),
|
||||
);
|
||||
modes_list.push(main_mode);
|
||||
}
|
||||
attributes:TEMP_TOUCH_ME_ATTRIBUTE,
|
||||
transform:integer::Planar64Affine3::new(
|
||||
integer::mat3::identity(),
|
||||
valve_transform(model_origin.into())
|
||||
),
|
||||
color:(glam::Vec3::from_array([r as f32,g as f32,b as f32])/255.0).extend(1.0),
|
||||
}
|
||||
}).collect();
|
||||
|
||||
PartialMap1{
|
||||
attributes:unique_attributes,
|
||||
world_meshes,
|
||||
prop_models,
|
||||
world_models,
|
||||
modes:strafesnet_common::gameplay_modes::Modes::new(modes_list),
|
||||
modes:strafesnet_common::gameplay_modes::Modes::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1,17 +1,20 @@
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};

mod bsp;
mod mesh;
mod brush;
pub mod loader;

pub struct Bsp(vbsp::Bsp);
impl Bsp{
pub const fn new(value:vbsp::Bsp)->Self{
Self(value)
}
}
impl AsRef<vbsp::Bsp> for Bsp{
fn as_ref(&self)->&vbsp::Bsp{
&self.0
}
}

const VALVE_SCALE:f32=1.0/16.0;
pub(crate) fn valve_transform_dist(d:f32)->strafesnet_common::integer::Planar64{
(d*VALVE_SCALE).try_into().unwrap()
}
pub(crate) fn valve_transform_normal([x,y,z]:[f32;3])->strafesnet_common::integer::Planar64Vec3{
strafesnet_common::integer::vec3::try_from_f32_array([x,z,-y]).unwrap()
}
pub(crate) fn valve_transform([x,y,z]:[f32;3])->strafesnet_common::integer::Planar64Vec3{
strafesnet_common::integer::vec3::try_from_f32_array([x*VALVE_SCALE,z*VALVE_SCALE,-y*VALVE_SCALE]).unwrap()
}
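Read together, the `valve_transform*` helpers above amount to one coordinate change, which looks like a Z-up to Y-up axis swap with a uniform scale:

$$T(x,y,z)=\bigl(s\,x,\;s\,z,\;-s\,y\bigr),\qquad s=\tfrac{1}{16},$$

with `valve_transform_normal` applying the permutation without the scale and `valve_transform_dist` applying the scale alone to plane offsets. The permutation has determinant $+1$, so handedness is preserved.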
@@ -28,38 +31,6 @@ impl std::fmt::Display for ReadError{
}
impl std::error::Error for ReadError{}


#[derive(Debug)]
pub enum LoadError{
Texture(loader::TextureError),
Mesh(loader::MeshError),
}
impl std::fmt::Display for LoadError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for LoadError{}
impl From<loader::TextureError> for LoadError{
fn from(value:loader::TextureError)->Self{
Self::Texture(value)
}
}
impl From<loader::MeshError> for LoadError{
fn from(value:loader::MeshError)->Self{
Self::Mesh(value)
}
}
pub struct Bsp{
bsp:vbsp::Bsp,
case_folded_file_names:std::collections::HashMap<String,String>,
}
impl AsRef<vbsp::Bsp> for Bsp{
fn as_ref(&self)->&vbsp::Bsp{
&self.bsp
}
}

pub fn read<R:std::io::Read>(mut input:R)->Result<Bsp,ReadError>{
let mut s=Vec::new();

@@ -68,66 +39,5 @@ pub fn read<R:std::io::Read>(mut input:R)->Result<Bsp,ReadError>{

vbsp::Bsp::read(s.as_slice()).map(Bsp::new).map_err(ReadError::Bsp)
}
impl Bsp{
pub fn new(bsp:vbsp::Bsp)->Self{
let case_folded_file_names=bsp.pack.clone().into_zip().lock().unwrap().file_names().map(|s|{
(s.to_lowercase(),s.to_owned())
}).collect();
Self{
bsp,
case_folded_file_names,
}
}
pub fn pack_get(&self,name_lowercase:&str)->Result<Option<Vec<u8>>,vbsp::BspError>{
match self.case_folded_file_names.get(name_lowercase){
Some(name_folded)=>self.bsp.pack.get(name_folded),
None=>Ok(None),
}
}
pub fn to_snf(&self,failure_mode:LoadFailureMode,vpk_list:&[Vpk])->Result<strafesnet_common::map::CompleteMap,LoadError>{
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
let mut mesh_deferred_loader=MeshDeferredLoader::new();

let map_step1=bsp::convert(
self,
&mut texture_deferred_loader,
&mut mesh_deferred_loader,
);

let mut mesh_loader=loader::MeshLoader::new(loader::BspFinder{bsp:self,vpks:vpk_list},&mut texture_deferred_loader);
let prop_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,failure_mode).map_err(LoadError::Mesh)?;

let map_step2=map_step1.add_prop_meshes(prop_meshes);

let mut texture_loader=loader::TextureLoader::new();
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;

let map=map_step2.add_render_configs_and_textures(render_configs);

Ok(map)
}
}
pub struct Vpk{
vpk:vpk::VPK,
case_folded_file_names:std::collections::HashMap<String,String>,
}
impl AsRef<vpk::VPK> for Vpk{
fn as_ref(&self)->&vpk::VPK{
&self.vpk
}
}
impl Vpk{
pub fn new(vpk:vpk::VPK)->Vpk{
let case_folded_file_names=vpk.tree.keys().map(|s|{
(s.to_lowercase(),s.to_owned())
}).collect();
Vpk{
vpk,
case_folded_file_names,
}
}
pub fn tree_get(&self,name_lowercase:&str)->Option<&vpk::entry::VPKEntry>{
let name_folded=self.case_folded_file_names.get(name_lowercase)?;
self.vpk.tree.get(name_folded)
}
}
pub use bsp::convert;
@@ -3,7 +3,7 @@ use std::{borrow::Cow, io::Read};
use strafesnet_common::model::Mesh;
use strafesnet_deferred_loader::{loader::Loader,texture::Texture};

use crate::{Bsp,Vpk};
use crate::{mesh::ModelData, Bsp};

#[allow(dead_code)]
#[derive(Debug)]
@ -73,56 +73,25 @@ impl From<vbsp::BspError> for MeshError{
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone,Copy)]
|
||||
pub struct BspFinder<'bsp,'vpk>{
|
||||
pub bsp:&'bsp Bsp,
|
||||
pub vpks:&'vpk [Vpk],
|
||||
pub struct MeshLoader<'a,'b>{
|
||||
bsp:&'a Bsp,
|
||||
deferred_loader:&'b mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
||||
}
|
||||
impl<'bsp,'vpk> BspFinder<'bsp,'vpk>{
|
||||
pub fn find<'a>(&self,path:&str)->Result<Option<Cow<'a,[u8]>>,vbsp::BspError>
|
||||
where
|
||||
'bsp:'a,
|
||||
'vpk:'a,
|
||||
{
|
||||
// search bsp
|
||||
if let Some(data)=self.bsp.pack_get(path)?{
|
||||
return Ok(Some(Cow::Owned(data)));
|
||||
}
|
||||
|
||||
//search each vpk
|
||||
for vpk in self.vpks{
|
||||
if let Some(vpk_entry)=vpk.tree_get(path){
|
||||
return Ok(Some(vpk_entry.get()?));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ModelLoader<'bsp,'vpk,'a>{
|
||||
finder:BspFinder<'bsp,'vpk>,
|
||||
life:core::marker::PhantomData<&'a ()>,
|
||||
}
|
||||
impl ModelLoader<'_,'_,'_>{
|
||||
#[inline]
|
||||
pub const fn new<'bsp,'vpk,'a>(
|
||||
finder:BspFinder<'bsp,'vpk>,
|
||||
)->ModelLoader<'bsp,'vpk,'a>{
|
||||
ModelLoader{
|
||||
finder,
|
||||
life:core::marker::PhantomData,
|
||||
impl MeshLoader<'_,'_>{
|
||||
pub fn new<'a,'b>(
|
||||
bsp:&'a Bsp,
|
||||
deferred_loader:&'b mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
||||
)->MeshLoader<'a,'b>{
|
||||
MeshLoader{
|
||||
bsp,
|
||||
deferred_loader,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<'bsp,'vpk,'a> Loader for ModelLoader<'bsp,'vpk,'a>
|
||||
where
|
||||
'bsp:'a,
|
||||
'vpk:'a,
|
||||
{
|
||||
impl<'a> Loader for MeshLoader<'a,'_>{
|
||||
type Error=MeshError;
|
||||
type Index=&'a str;
|
||||
type Resource=vmdl::Model;
|
||||
type Resource=Mesh;
|
||||
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
|
||||
let mdl_path_lower=index.to_lowercase();
|
||||
//.mdl, .vvd, .dx90.vtx
|
||||
@ -132,44 +101,12 @@ impl<'bsp,'vpk,'a> Loader for ModelLoader<'bsp,'vpk,'a>
|
||||
vvd_path.set_extension("vvd");
|
||||
vtx_path.set_extension("dx90.vtx");
|
||||
// TODO: search more packs, possibly using an index of multiple packs
|
||||
let mdl=self.finder.find(mdl_path_lower.as_str())?.ok_or(MeshError::MissingMdl)?;
|
||||
let vtx=self.finder.find(vtx_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVtx)?;
|
||||
let vvd=self.finder.find(vvd_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVvd)?;
|
||||
Ok(vmdl::Model::from_parts(
|
||||
vmdl::mdl::Mdl::read(mdl.as_ref())?,
|
||||
vmdl::vtx::Vtx::read(vtx.as_ref())?,
|
||||
vmdl::vvd::Vvd::read(vvd.as_ref())?,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MeshLoader<'bsp,'vpk,'load,'a>{
|
||||
finder:BspFinder<'bsp,'vpk>,
|
||||
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
||||
}
|
||||
impl MeshLoader<'_,'_,'_,'_>{
|
||||
#[inline]
|
||||
pub const fn new<'bsp,'vpk,'load,'a>(
|
||||
finder:BspFinder<'bsp,'vpk>,
|
||||
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
||||
)->MeshLoader<'bsp,'vpk,'load,'a>{
|
||||
MeshLoader{
|
||||
finder,
|
||||
deferred_loader
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<'bsp,'vpk,'load,'a> Loader for MeshLoader<'bsp,'vpk,'load,'a>
|
||||
where
|
||||
'bsp:'a,
|
||||
'vpk:'a,
|
||||
{
|
||||
type Error=MeshError;
|
||||
type Index=&'a str;
|
||||
type Resource=Mesh;
|
||||
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
|
||||
let model=ModelLoader::new(self.finder).load(index)?;
|
||||
let mesh=crate::mesh::convert_mesh(model,&mut self.deferred_loader);
|
||||
let bsp=self.bsp.as_ref();
|
||||
let mdl=bsp.pack.get(mdl_path_lower.as_str())?.ok_or(MeshError::MissingMdl)?;
|
||||
let vtx=bsp.pack.get(vvd_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVtx)?;
|
||||
let vvd=bsp.pack.get(vtx_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVvd)?;
|
||||
let model=ModelData{mdl,vtx,vvd};
|
||||
let mesh=model.convert_mesh(&mut self.deferred_loader)?;
|
||||
Ok(mesh)
|
||||
}
|
||||
}
|
||||
|
@ -5,74 +5,85 @@ use strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader;
|
||||
|
||||
use crate::valve_transform;
|
||||
|
||||
fn ingest_vertex(mb:&mut model::MeshBuilder,vertex:&vmdl::vvd::Vertex,color:model::ColorId)->model::VertexId{
|
||||
let pos=mb.acquire_pos_id(valve_transform(vertex.position.into()));
|
||||
let normal=mb.acquire_normal_id(valve_transform(vertex.normal.into()));
|
||||
let tex=mb.acquire_tex_id(glam::Vec2::from_array(vertex.texture_coordinates));
|
||||
mb.acquire_vertex_id(model::IndexedVertex{
|
||||
pos,
|
||||
tex,
|
||||
normal,
|
||||
color,
|
||||
})
|
||||
pub struct ModelData{
|
||||
pub mdl:Vec<u8>,
|
||||
pub vtx:Vec<u8>,
|
||||
pub vvd:Vec<u8>,
|
||||
}
|
||||
|
||||
pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredLoader<Cow<str>>)->model::Mesh{
|
||||
let texture_paths=model.texture_directories();
|
||||
if texture_paths.len()!=1{
|
||||
println!("WARNING: multiple texture paths");
|
||||
}
|
||||
let skin=model.skin_tables().nth(0).unwrap();
|
||||
|
||||
let mut mb=model::MeshBuilder::new();
|
||||
|
||||
let color=mb.acquire_color_id(glam::Vec4::ONE);
|
||||
|
||||
let model_vertices=model.vertices();
|
||||
|
||||
let mut graphics_groups=Vec::new();
|
||||
let mut physics_groups=Vec::new();
|
||||
let polygon_groups=model.meshes().enumerate().map(|(polygon_group_id,mesh)|{
|
||||
let polygon_group_id=model::PolygonGroupId::new(polygon_group_id as u32);
|
||||
|
||||
let render_id=if let (Some(texture_path),Some(texture_name))=(texture_paths.get(0),skin.texture(mesh.material_index())){
|
||||
let mut path=std::path::PathBuf::from(texture_path.as_str());
|
||||
path.push(texture_name);
|
||||
let index=path.as_os_str().to_str().map(|s|Cow::Owned(s.to_owned()));
|
||||
deferred_loader.acquire_render_config_id(index)
|
||||
}else{
|
||||
deferred_loader.acquire_render_config_id(None)
|
||||
};
|
||||
|
||||
graphics_groups.push(model::IndexedGraphicsGroup{
|
||||
render:render_id,
|
||||
groups:vec![polygon_group_id],
|
||||
});
|
||||
physics_groups.push(model::IndexedPhysicsGroup{
|
||||
groups:vec![polygon_group_id],
|
||||
});
|
||||
model::PolygonGroup::PolygonList(model::PolygonList::new(
|
||||
//looking at the code, it would seem that the strips are pre-deindexed into triangle lists when calling this function
|
||||
mesh.vertex_strip_indices().flat_map(|mut strip|{
|
||||
std::iter::from_fn(move ||{
|
||||
match (strip.next(),strip.next(),strip.next()){
|
||||
(Some(v1),Some(v2),Some(v3))=>Some([v1,v2,v3]),
|
||||
//ignore extra vertices, not sure what to do in this case, failing the whole conversion could be appropriate
|
||||
_=>None,
|
||||
}
|
||||
})
|
||||
}).flat_map(|[v1,v2,v3]|{
|
||||
// this should probably be a fatal error :D
|
||||
let v1=model_vertices.get(v1)?;
|
||||
let v2=model_vertices.get(v2)?;
|
||||
let v3=model_vertices.get(v3)?;
|
||||
Some(vec![
|
||||
ingest_vertex(&mut mb,v1,color),
|
||||
ingest_vertex(&mut mb,v2,color),
|
||||
ingest_vertex(&mut mb,v3,color),
|
||||
])
|
||||
}).collect()
|
||||
impl ModelData{
|
||||
fn read_model(&self)->Result<vmdl::Model,vmdl::ModelError>{
|
||||
Ok(vmdl::Model::from_parts(
|
||||
vmdl::mdl::Mdl::read(self.mdl.as_ref())?,
|
||||
vmdl::vtx::Vtx::read(self.vtx.as_ref())?,
|
||||
vmdl::vvd::Vvd::read(self.vvd.as_ref())?,
|
||||
))
|
||||
}).collect();
|
||||
mb.build(polygon_groups,graphics_groups,physics_groups)
|
||||
}
|
||||
pub fn convert_mesh<'a>(self,deferred_loader:&mut RenderConfigDeferredLoader<Cow<'a,str>>)->Result<model::Mesh,vmdl::ModelError>{
|
||||
let model=self.read_model()?;
|
||||
let texture_paths=model.texture_directories();
|
||||
if texture_paths.len()!=1{
|
||||
println!("WARNING: multiple texture paths");
|
||||
}
|
||||
let skin=model.skin_tables().nth(0).unwrap();
|
||||
|
||||
let mut spam_pos=Vec::with_capacity(model.vertices().len());
|
||||
let mut spam_normal=Vec::with_capacity(model.vertices().len());
|
||||
let mut spam_tex=Vec::with_capacity(model.vertices().len());
|
||||
let mut spam_vertices=Vec::with_capacity(model.vertices().len());
|
||||
for (i,vertex) in model.vertices().iter().enumerate(){
|
||||
spam_pos.push(valve_transform(vertex.position.into()));
|
||||
spam_normal.push(valve_transform(vertex.normal.into()));
|
||||
spam_tex.push(glam::Vec2::from_array(vertex.texture_coordinates));
|
||||
spam_vertices.push(model::IndexedVertex{
|
||||
pos:model::PositionId::new(i as u32),
|
||||
tex:model::TextureCoordinateId::new(i as u32),
|
||||
normal:model::NormalId::new(i as u32),
|
||||
color:model::ColorId::new(0),
|
||||
});
|
||||
}
|
||||
let mut graphics_groups=Vec::new();
|
||||
let mut physics_groups=Vec::new();
|
||||
let polygon_groups=model.meshes().enumerate().map(|(polygon_group_id,mesh)|{
|
||||
let polygon_group_id=model::PolygonGroupId::new(polygon_group_id as u32);
|
||||
|
||||
let render_id=if let (Some(texture_path),Some(texture_name))=(texture_paths.get(0),skin.texture(mesh.material_index())){
|
||||
let mut path=std::path::PathBuf::from(texture_path.as_str());
|
||||
path.push(texture_name);
|
||||
let index=path.as_os_str().to_str().map(|s|Cow::Owned(s.to_owned()));
|
||||
deferred_loader.acquire_render_config_id(index)
|
||||
}else{
|
||||
deferred_loader.acquire_render_config_id(None)
|
||||
};
|
||||
|
||||
graphics_groups.push(model::IndexedGraphicsGroup{
|
||||
render:render_id,
|
||||
groups:vec![polygon_group_id],
|
||||
});
|
||||
physics_groups.push(model::IndexedPhysicsGroup{
|
||||
groups:vec![polygon_group_id],
|
||||
});
|
||||
model::PolygonGroup::PolygonList(model::PolygonList::new(
|
||||
//looking at the code, it would seem that the strips are pre-deindexed into triangle lists when calling this function
|
||||
mesh.vertex_strip_indices().flat_map(|mut strip|
|
||||
std::iter::from_fn(move||{
|
||||
match (strip.next(),strip.next(),strip.next()){
|
||||
(Some(v1),Some(v2),Some(v3))=>Some([v1,v2,v3].map(|vertex_id|model::VertexId::new(vertex_id as u32)).to_vec()),
|
||||
//ignore extra vertices, not sure what to do in this case, failing the whole conversion could be appropriate
|
||||
_=>None,
|
||||
}
|
||||
})
|
||||
).collect()
|
||||
))
|
||||
}).collect();
|
||||
Ok(model::Mesh{
|
||||
unique_pos:spam_pos,
|
||||
unique_normal:spam_normal,
|
||||
unique_tex:spam_tex,
|
||||
unique_color:vec![glam::Vec4::ONE],
|
||||
unique_vertices:spam_vertices,
|
||||
polygon_groups,
|
||||
graphics_groups,
|
||||
physics_groups,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
[package]
name = "strafesnet_common"
version = "0.6.0"
version = "0.5.2"
edition = "2021"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
@@ -12,8 +12,8 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
arrayvec = "0.7.4"
bitflags = "2.6.0"
fixed_wide = { version = "0.1.2", path = "../fixed_wide", registry = "strafesnet", features = ["deferred-division","zeroes","wide-mul"] }
linear_ops = { version = "0.1.0", path = "../linear_ops", registry = "strafesnet", features = ["deferred-division","named-fields"] }
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet" }
fixed_wide = { path = "../fixed_wide", registry = "strafesnet", features = ["deferred-division","zeroes","wide-mul"] }
linear_ops = { path = "../linear_ops", registry = "strafesnet", features = ["deferred-division","named-fields"] }
ratio_ops = { path = "../ratio_ops", registry = "strafesnet" }
glam = "0.29.0"
id = { version = "0.1.0", registry = "strafesnet" }
@@ -10,27 +10,35 @@ use crate::aabb::Aabb;
//sort the centerpoints on each axis (3 lists)
//bv is put into octant based on whether it is upper or lower in each list

pub enum RecursiveContent<N,L>{
Branch(Vec<N>),
Leaf(L),
pub enum RecursiveContent<R,T>{
Branch(Vec<R>),
Leaf(T),
}
impl<N,L> RecursiveContent<N,L>{
pub fn empty()->Self{
impl<R,T> Default for RecursiveContent<R,T>{
fn default()->Self{
Self::Branch(Vec::new())
}
}
pub struct BvhNode<L>{
content:RecursiveContent<BvhNode<L>,L>,
pub struct BvhNode<T>{
content:RecursiveContent<BvhNode<T>,T>,
aabb:Aabb,
}
impl<L> BvhNode<L>{
pub fn empty()->Self{
impl<T> Default for BvhNode<T>{
fn default()->Self{
Self{
content:RecursiveContent::empty(),
content:Default::default(),
aabb:Aabb::default(),
}
}
pub fn sample_aabb<F:FnMut(&L)>(&self,aabb:&Aabb,f:&mut F){
}
pub struct BvhWeightNode<W,T>{
content:RecursiveContent<BvhWeightNode<W,T>,T>,
weight:W,
aabb:Aabb,
}

impl<T> BvhNode<T>{
pub fn the_tester<F:FnMut(&T)>(&self,aabb:&Aabb,f:&mut F){
match &self.content{
RecursiveContent::Leaf(model)=>f(model),
RecursiveContent::Branch(children)=>for child in children{
@ -39,15 +47,51 @@ impl<L> BvhNode<L>{
|
||||
//you're probably not going to spend a lot of time outside the map,
|
||||
//so the test is extra work for nothing
|
||||
if aabb.intersects(&child.aabb){
|
||||
child.sample_aabb(aabb,f);
|
||||
child.the_tester(aabb,f);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
pub fn into_inner(self)->(RecursiveContent<BvhNode<L>,L>,Aabb){
|
||||
(self.content,self.aabb)
|
||||
pub fn into_visitor<F:FnMut(T)>(self,f:&mut F){
|
||||
match self.content{
|
||||
RecursiveContent::Leaf(model)=>f(model),
|
||||
RecursiveContent::Branch(children)=>for child in children{
|
||||
child.into_visitor(f)
|
||||
},
|
||||
}
|
||||
}
|
||||
pub fn into_visitor<F:FnMut(L)>(self,f:&mut F){
|
||||
pub fn weigh_contents<W:Copy+std::iter::Sum<W>,F:Fn(&T)->W>(self,f:&F)->BvhWeightNode<W,T>{
|
||||
match self.content{
|
||||
RecursiveContent::Leaf(model)=>BvhWeightNode{
|
||||
weight:f(&model),
|
||||
content:RecursiveContent::Leaf(model),
|
||||
aabb:self.aabb,
|
||||
},
|
||||
RecursiveContent::Branch(children)=>{
|
||||
let branch:Vec<BvhWeightNode<W,T>>=children.into_iter().map(|child|
|
||||
child.weigh_contents(f)
|
||||
).collect();
|
||||
BvhWeightNode{
|
||||
weight:branch.iter().map(|node|node.weight).sum(),
|
||||
content:RecursiveContent::Branch(branch),
|
||||
aabb:self.aabb,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl <W,T> BvhWeightNode<W,T>{
|
||||
pub const fn weight(&self)->&W{
|
||||
&self.weight
|
||||
}
|
||||
pub const fn aabb(&self)->&Aabb{
|
||||
&self.aabb
|
||||
}
|
||||
pub fn into_content(self)->RecursiveContent<BvhWeightNode<W,T>,T>{
|
||||
self.content
|
||||
}
|
||||
pub fn into_visitor<F:FnMut(T)>(self,f:&mut F){
|
||||
match self.content{
|
||||
RecursiveContent::Leaf(model)=>f(model),
|
||||
RecursiveContent::Branch(children)=>for child in children{
|
||||
@ -86,9 +130,9 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
|
||||
sort_y.push((i,center.y));
|
||||
sort_z.push((i,center.z));
|
||||
}
|
||||
sort_x.sort_by_key(|&(_,c)|c);
|
||||
sort_y.sort_by_key(|&(_,c)|c);
|
||||
sort_z.sort_by_key(|&(_,c)|c);
|
||||
sort_x.sort_by(|tup0,tup1|tup0.1.cmp(&tup1.1));
|
||||
sort_y.sort_by(|tup0,tup1|tup0.1.cmp(&tup1.1));
|
||||
sort_z.sort_by(|tup0,tup1|tup0.1.cmp(&tup1.1));
|
||||
let h=n/2;
|
||||
let median_x=sort_x[h].1;
|
||||
let median_y=sort_y[h].1;
|
||||
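On the side of this diff that uses Default, the_tester, and weigh_contents, a caller drives the tree roughly as sketched below (editor's sketch; the strafesnet_common module paths are assumptions, and a populated tree would come from the crate's BVH builder, which this hunk does not show):

use strafesnet_common::aabb::Aabb;
use strafesnet_common::bvh::BvhNode;

fn count_hits(bvh:&BvhNode<u32>,query:&Aabb)->usize{
	//the_tester visits every leaf whose aabb intersects the query
	let mut hits=0;
	bvh.the_tester(query,&mut |_leaf|hits+=1);
	hits
}

fn total_weight(bvh:BvhNode<u32>)->u64{
	//weigh_contents computes a per-leaf weight and sums it up each branch
	let weighted=bvh.weigh_contents(&|_leaf|1u64);
	*weighted.weight()
}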
|
@ -171,7 +171,4 @@ impl CollisionAttributes{
|
||||
pub fn contact_default()->Self{
|
||||
Self::Contact(ContactAttributes::default())
|
||||
}
|
||||
pub fn intersect_default()->Self{
|
||||
Self::Intersect(IntersectAttributes::default())
|
||||
}
|
||||
}
|
||||
|
@ -1,11 +1,13 @@
|
||||
use crate::integer::Time;
|
||||
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct TimedInstruction<I,T>{
|
||||
pub time:T,
|
||||
pub time:Time<T>,
|
||||
pub instruction:I,
|
||||
}
|
||||
impl<I,T> TimedInstruction<I,T>{
|
||||
#[inline]
|
||||
pub fn set_time<T2>(self,new_time:T2)->TimedInstruction<I,T2>{
|
||||
pub fn set_time<TimeInner>(self,new_time:Time<TimeInner>)->TimedInstruction<I,TimeInner>{
|
||||
TimedInstruction{
|
||||
time:new_time,
|
||||
instruction:self.instruction,
|
||||
@ -15,21 +17,21 @@ impl<I,T> TimedInstruction<I,T>{
|
||||
|
||||
/// Ensure all emitted instructions are processed before consuming external instructions
|
||||
pub trait InstructionEmitter<I>{
|
||||
type Time;
|
||||
fn next_instruction(&self,time_limit:Self::Time)->Option<TimedInstruction<I,Self::Time>>;
|
||||
type TimeInner;
|
||||
fn next_instruction(&self,time_limit:Time<Self::TimeInner>)->Option<TimedInstruction<I,Self::TimeInner>>;
|
||||
}
|
||||
/// Apply an atomic state update
|
||||
pub trait InstructionConsumer<I>{
|
||||
type Time;
|
||||
fn process_instruction(&mut self,instruction:TimedInstruction<I,Self::Time>);
|
||||
type TimeInner;
|
||||
fn process_instruction(&mut self,instruction:TimedInstruction<I,Self::TimeInner>);
|
||||
}
|
||||
/// If the object produces its own instructions, allow exhaustively feeding them back in
|
||||
pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>
|
||||
pub trait InstructionFeedback<I,T>:InstructionEmitter<I,TimeInner=T>+InstructionConsumer<I,TimeInner=T>
|
||||
where
|
||||
T:Copy,
|
||||
Time<T>:Copy,
|
||||
{
|
||||
#[inline]
|
||||
fn process_exhaustive(&mut self,time_limit:T){
|
||||
fn process_exhaustive(&mut self,time_limit:Time<T>){
|
||||
while let Some(instruction)=self.next_instruction(time_limit){
|
||||
self.process_instruction(instruction);
|
||||
}
|
||||
@ -37,24 +39,39 @@ pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsu
|
||||
}
|
||||
impl<I,T,X> InstructionFeedback<I,T> for X
|
||||
where
|
||||
T:Copy,
|
||||
X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
|
||||
Time<T>:Copy,
|
||||
X:InstructionEmitter<I,TimeInner=T>+InstructionConsumer<I,TimeInner=T>,
|
||||
{}
|
||||
|
||||
//PROPER PRIVATE FIELDS!!!
|
||||
pub struct InstructionCollector<I,T>{
|
||||
time:T,
|
||||
time:Time<T>,
|
||||
instruction:Option<I>,
|
||||
}
|
||||
impl<I,T> InstructionCollector<I,T>{
|
||||
impl<I,T> InstructionCollector<I,T>
|
||||
where Time<T>:Copy+PartialOrd,
|
||||
{
|
||||
#[inline]
|
||||
pub const fn new(time:T)->Self{
|
||||
pub const fn new(time:Time<T>)->Self{
|
||||
Self{
|
||||
time,
|
||||
instruction:None
|
||||
}
|
||||
}
|
||||
#[inline]
|
||||
pub const fn time(&self)->Time<T>{
|
||||
self.time
|
||||
}
|
||||
#[inline]
|
||||
pub fn collect(&mut self,instruction:Option<TimedInstruction<I,T>>){
|
||||
if let Some(ins)=instruction{
|
||||
if ins.time<self.time{
|
||||
self.time=ins.time;
|
||||
self.instruction=Some(ins.instruction);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[inline]
|
||||
pub fn take(self)->Option<TimedInstruction<I,T>>{
|
||||
//STEAL INSTRUCTION AND DESTROY INSTRUCTIONCOLLECTOR
|
||||
self.instruction.map(|instruction|TimedInstruction{
|
||||
@ -63,20 +80,3 @@ impl<I,T> InstructionCollector<I,T>{
|
||||
})
|
||||
}
|
||||
}
|
||||
impl<I,T:Copy> InstructionCollector<I,T>{
|
||||
#[inline]
|
||||
pub const fn time(&self)->T{
|
||||
self.time
|
||||
}
|
||||
}
|
||||
impl<I,T:PartialOrd> InstructionCollector<I,T>{
|
||||
#[inline]
|
||||
pub fn collect(&mut self,instruction:Option<TimedInstruction<I,T>>){
|
||||
if let Some(ins)=instruction{
|
||||
if ins.time<self.time{
|
||||
self.time=ins.time;
|
||||
self.instruction=Some(ins.instruction);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
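The TimeInner variant of these traits is consumed as sketched here (editor's example; the crate paths and the throwaway &'static str instruction type are assumptions for brevity). A matching InstructionEmitter with the same TimeInner would pick up the blanket InstructionFeedback impl and gain process_exhaustive for free.

use strafesnet_common::instruction::{InstructionConsumer,TimedInstruction};
use strafesnet_common::integer::Time;

//records every instruction it is fed, keeping the Time<T> it arrived with
struct Recorder<T>{
	log:Vec<(Time<T>,&'static str)>,
}
impl<T> InstructionConsumer<&'static str> for Recorder<T>{
	type TimeInner=T;
	fn process_instruction(&mut self,ins:TimedInstruction<&'static str,T>){
		self.log.push((ins.time,ins.instruction));
	}
}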
|
@ -63,12 +63,6 @@ impl<T> From<Planar64> for Time<T>{
|
||||
Self::raw((value*Planar64::raw(1_000_000_000)).fix_1().to_raw())
|
||||
}
|
||||
}
|
||||
impl<T> From<Time<T>> for Ratio<Planar64,Planar64>{
|
||||
#[inline]
|
||||
fn from(value:Time<T>)->Self{
|
||||
value.to_ratio()
|
||||
}
|
||||
}
|
||||
impl<T,Num,Den,N1,T1> From<Ratio<Num,Den>> for Time<T>
|
||||
where
|
||||
Num:core::ops::Mul<Planar64,Output=N1>,
|
||||
@ -654,19 +648,11 @@ pub struct Planar64Affine3{
|
||||
pub translation:Planar64Vec3,
|
||||
}
|
||||
impl Planar64Affine3{
|
||||
pub const IDENTITY:Self=Self::new(mat3::identity(),vec3::ZERO);
|
||||
#[inline]
|
||||
pub const fn new(matrix3:Planar64Mat3,translation:Planar64Vec3)->Self{
|
||||
Self{matrix3,translation}
|
||||
}
|
||||
#[inline]
|
||||
pub const fn from_translation(translation:Planar64Vec3)->Self{
|
||||
Self{
|
||||
matrix3:mat3::identity(),
|
||||
translation,
|
||||
}
|
||||
}
|
||||
#[inline]
|
||||
pub fn transform_point3(&self,point:Planar64Vec3)->vec3::Vector3<Fixed<2,64>>{
|
||||
self.translation.fix_2()+self.matrix3*point
|
||||
}
|
||||
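A hedged usage sketch for the affine type above (editor's example; vec3::int and the integer module path follow their use elsewhere in this diff, and from_translation is the constructor shown in this hunk):

use strafesnet_common::integer::{vec3,Planar64Affine3};

fn translate_point(){
	//pure translation: the matrix part stays the identity
	let m=Planar64Affine3::from_translation(vec3::int(1,2,3));
	//transform_point3 widens the result to Fixed<2,64> components
	let p=m.transform_point3(vec3::int(4,5,6));
	let _=p;//(5,7,9) in the widened representation
}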
|
@ -1,5 +1,3 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::integer::{Planar64Vec3,Planar64Affine3};
|
||||
use crate::gameplay_attributes;
|
||||
|
||||
@ -125,87 +123,6 @@ pub struct Mesh{
|
||||
pub physics_groups:Vec<IndexedPhysicsGroup>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct MeshBuilder{
|
||||
unique_pos:Vec<Planar64Vec3>,//Unit32Vec3
|
||||
unique_normal:Vec<Planar64Vec3>,//Unit32Vec3
|
||||
unique_tex:Vec<TextureCoordinate>,
|
||||
unique_color:Vec<Color4>,
|
||||
unique_vertices:Vec<IndexedVertex>,
|
||||
pos_id_from:HashMap<Planar64Vec3,PositionId>,//Unit32Vec3
|
||||
normal_id_from:HashMap<Planar64Vec3,NormalId>,//Unit32Vec3
|
||||
tex_id_from:HashMap<[u32;2],TextureCoordinateId>,
|
||||
color_id_from:HashMap<[u32;4],ColorId>,
|
||||
vertex_id_from:HashMap<IndexedVertex,VertexId>,
|
||||
}
|
||||
impl MeshBuilder{
|
||||
pub fn new()->Self{
|
||||
Self::default()
|
||||
}
|
||||
pub fn build(
|
||||
self,
|
||||
polygon_groups:Vec<PolygonGroup>,
|
||||
graphics_groups:Vec<IndexedGraphicsGroup>,
|
||||
physics_groups:Vec<IndexedPhysicsGroup>,
|
||||
)->Mesh{
|
||||
let MeshBuilder{
|
||||
unique_pos,
|
||||
unique_normal,
|
||||
unique_tex,
|
||||
unique_color,
|
||||
unique_vertices,
|
||||
..
|
||||
}=self;
|
||||
Mesh{
|
||||
unique_pos,
|
||||
unique_normal,
|
||||
unique_tex,
|
||||
unique_color,
|
||||
unique_vertices,
|
||||
polygon_groups,
|
||||
graphics_groups,
|
||||
physics_groups,
|
||||
}
|
||||
}
|
||||
pub fn acquire_pos_id(&mut self,pos:Planar64Vec3)->PositionId{
|
||||
*self.pos_id_from.entry(pos).or_insert_with(||{
|
||||
let pos_id=PositionId::new(self.unique_pos.len() as u32);
|
||||
self.unique_pos.push(pos);
|
||||
pos_id
|
||||
})
|
||||
}
|
||||
pub fn acquire_normal_id(&mut self,normal:Planar64Vec3)->NormalId{
|
||||
*self.normal_id_from.entry(normal).or_insert_with(||{
|
||||
let normal_id=NormalId::new(self.unique_normal.len() as u32);
|
||||
self.unique_normal.push(normal);
|
||||
normal_id
|
||||
})
|
||||
}
|
||||
pub fn acquire_tex_id(&mut self,tex:TextureCoordinate)->TextureCoordinateId{
|
||||
let h=tex.to_array().map(f32::to_bits);
|
||||
*self.tex_id_from.entry(h).or_insert_with(||{
|
||||
let tex_id=TextureCoordinateId::new(self.unique_tex.len() as u32);
|
||||
self.unique_tex.push(tex);
|
||||
tex_id
|
||||
})
|
||||
}
|
||||
pub fn acquire_color_id(&mut self,color:Color4)->ColorId{
|
||||
let h=color.to_array().map(f32::to_bits);
|
||||
*self.color_id_from.entry(h).or_insert_with(||{
|
||||
let color_id=ColorId::new(self.unique_color.len() as u32);
|
||||
self.unique_color.push(color);
|
||||
color_id
|
||||
})
|
||||
}
|
||||
pub fn acquire_vertex_id(&mut self,vertex:IndexedVertex)->VertexId{
|
||||
*self.vertex_id_from.entry(vertex.clone()).or_insert_with(||{
|
||||
let vertex_id=VertexId::new(self.unique_vertices.len() as u32);
|
||||
self.unique_vertices.push(vertex);
|
||||
vertex_id
|
||||
})
|
||||
}
|
||||
}
|
||||
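On the side of the diff that includes MeshBuilder, the builder deduplicates attributes through the acquire_*_id methods and then assembles the final Mesh; a hedged sketch (editor's example, crate paths assumed):

use strafesnet_common::integer::vec3;
use strafesnet_common::model::{Mesh,MeshBuilder};

fn tiny_mesh()->Mesh{
	let mut mb=MeshBuilder::new();
	//acquiring the same position twice returns the same PositionId,
	//so unique_pos stays deduplicated
	let _p0=mb.acquire_pos_id(vec3::int(0,0,0));
	let _p1=mb.acquire_pos_id(vec3::int(0,0,0));
	//polygon, graphics and physics groups are passed in at the end
	mb.build(Vec::new(),Vec::new(),Vec::new())
}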
|
||||
#[derive(Debug,Clone,Copy,Hash,id::Id,Eq,PartialEq)]
|
||||
pub struct ModelId(u32);
|
||||
pub struct Model{
|
||||
|
@ -10,4 +10,4 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
|
||||
strafesnet_common = { path = "../common", registry = "strafesnet" }
|
||||
|
@ -4,7 +4,6 @@ use crate::mesh::Meshes;
|
||||
use crate::texture::{RenderConfigs,Texture};
|
||||
use strafesnet_common::model::{Mesh,MeshId,RenderConfig,RenderConfigId,TextureId};
|
||||
|
||||
#[derive(Clone,Copy,Debug)]
|
||||
pub enum LoadFailureMode{
|
||||
DefaultToNone,
|
||||
Fatal,
|
||||
@ -42,10 +41,7 @@ impl<H:core::hash::Hash+Eq> RenderConfigDeferredLoader<H>{
|
||||
render_id
|
||||
})
|
||||
}
|
||||
pub fn into_indices(self)->impl Iterator<Item=H>{
|
||||
self.render_config_id_from_asset_id.into_keys().flatten()
|
||||
}
|
||||
pub fn into_render_configs<L:Loader<Resource=Texture,Index=H>>(mut self,loader:&mut L,failure_mode:LoadFailureMode)->Result<RenderConfigs,L::Error>{
|
||||
pub fn into_render_configs<L:Loader<Index=H,Resource=Texture>>(mut self,loader:&mut L,failure_mode:LoadFailureMode)->Result<RenderConfigs,L::Error>{
|
||||
let mut sorted_textures=vec![None;self.texture_count as usize];
|
||||
for (index_option,render_config_id) in self.render_config_id_from_asset_id{
|
||||
let render_config=&mut self.render_configs[render_config_id.get() as usize];
|
||||
@ -57,7 +53,7 @@ impl<H:core::hash::Hash+Eq> RenderConfigDeferredLoader<H>{
|
||||
Ok(texture)=>Some(texture),
|
||||
Err(e)=>{
|
||||
render_config.texture=None;
|
||||
println!("Error loading texture: {e}");
|
||||
println!("Error loading resource: {e}");
|
||||
None
|
||||
},
|
||||
},
|
||||
@ -90,10 +86,7 @@ impl<H:core::hash::Hash+Eq> MeshDeferredLoader<H>{
|
||||
let mesh_id=MeshId::new(self.mesh_id_from_asset_id.len() as u32);
|
||||
*self.mesh_id_from_asset_id.entry(index).or_insert(mesh_id)
|
||||
}
|
||||
pub fn into_indices(self)->impl Iterator<Item=H>{
|
||||
self.mesh_id_from_asset_id.into_keys()
|
||||
}
|
||||
pub fn into_meshes<L:Loader<Resource=Mesh,Index=H>>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes,L::Error>{
|
||||
pub fn into_meshes<L:Loader<Index=H,Resource=Mesh>>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes,L::Error>{
|
||||
let mut mesh_list=vec![None;self.mesh_id_from_asset_id.len()];
|
||||
for (index,mesh_id) in self.mesh_id_from_asset_id{
|
||||
let resource_result=loader.load(index);
|
||||
@ -102,7 +95,7 @@ impl<H:core::hash::Hash+Eq> MeshDeferredLoader<H>{
|
||||
LoadFailureMode::DefaultToNone=>match resource_result{
|
||||
Ok(mesh)=>Some(mesh),
|
||||
Err(e)=>{
|
||||
println!("Error loading mesh: {e}");
|
||||
println!("Error loading resource: {e}");
|
||||
None
|
||||
},
|
||||
},
|
||||
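A hedged sketch of what a Loader implementation needs to provide to drive into_meshes (editor's example; it serves meshes from memory rather than disk, and the trait path follows its use elsewhere in this diff):

use std::collections::HashMap;
use strafesnet_common::model::Mesh;
use strafesnet_deferred_loader::loader::Loader;

struct InMemoryMeshLoader{
	meshes:HashMap<String,Mesh>,
}
impl Loader for InMemoryMeshLoader{
	type Error=std::io::Error;
	type Index=String;
	type Resource=Mesh;
	fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
		//the deferred loader requests each index at most once,
		//so handing out the owned mesh is fine here
		self.meshes.remove(&index)
			.ok_or_else(||std::io::Error::new(std::io::ErrorKind::NotFound,index))
	}
}

//usage with the deferred loader above would look roughly like:
//let meshes=mesh_deferred_loader.into_meshes(&mut loader,LoadFailureMode::DefaultToNone)?;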
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "fixed_wide"
|
||||
version = "0.1.2"
|
||||
version = "0.1.1"
|
||||
edition = "2021"
|
||||
repository = "https://git.itzana.me/StrafesNET/strafe-project"
|
||||
license = "MIT OR Apache-2.0"
|
||||
@ -17,4 +17,4 @@ zeroes=["dep:arrayvec"]
|
||||
bnum = "0.12.0"
|
||||
arrayvec = { version = "0.7.6", optional = true }
|
||||
paste = "1.0.15"
|
||||
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
|
||||
ratio_ops = { path = "../ratio_ops", registry = "strafesnet", optional = true }
|
||||
|
@ -14,8 +14,8 @@ fixed-wide=["dep:fixed_wide","dep:paste"]
|
||||
deferred-division=["dep:ratio_ops"]
|
||||
|
||||
[dependencies]
|
||||
ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
|
||||
fixed_wide = { version = "0.1.2", path = "../fixed_wide", registry = "strafesnet", optional = true }
|
||||
ratio_ops = { path = "../ratio_ops", registry = "strafesnet", optional = true }
|
||||
fixed_wide = { path = "../fixed_wide", registry = "strafesnet", optional = true }
|
||||
paste = { version = "1.0.15", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
|
@ -1,6 +1,5 @@
|
||||
use crate::vector::Vector;
|
||||
|
||||
#[repr(transparent)]
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
|
||||
pub struct Matrix<const X:usize,const Y:usize,T>{
|
||||
pub(crate) array:[[T;Y];X],
|
||||
|
@ -3,7 +3,6 @@
|
||||
/// v.x += v.z;
|
||||
/// println!("v.x={}",v.x);
|
||||
|
||||
#[repr(transparent)]
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
|
||||
pub struct Vector<const N:usize,T>{
|
||||
pub(crate) array:[T;N],
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "strafesnet_rbx_loader"
|
||||
version = "0.6.0"
|
||||
version = "0.5.2"
|
||||
edition = "2021"
|
||||
repository = "https://git.itzana.me/StrafesNET/strafe-project"
|
||||
license = "MIT OR Apache-2.0"
|
||||
@ -15,10 +15,10 @@ glam = "0.29.0"
|
||||
lazy-regex = "3.1.0"
|
||||
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
|
||||
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
|
||||
rbx_mesh = "0.3.1"
|
||||
rbx_mesh = "0.1.2"
|
||||
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
|
||||
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
|
||||
rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
|
||||
roblox_emulator = { version = "0.4.7", path = "../roblox_emulator", registry = "strafesnet" }
|
||||
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
|
||||
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
|
||||
rbxassetid = { version = "0.1.0", path = "../rbxassetid" }
|
||||
roblox_emulator = { path = "../roblox_emulator", registry = "strafesnet" }
|
||||
strafesnet_common = { path = "../common", registry = "strafesnet" }
|
||||
strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader" }
|
||||
|
@ -1,10 +1,8 @@
|
||||
use std::io::Read;
|
||||
use rbx_dom_weak::WeakDom;
|
||||
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
|
||||
|
||||
mod rbx;
|
||||
mod mesh;
|
||||
mod union;
|
||||
pub mod loader;
|
||||
mod primitives;
|
||||
|
||||
@ -33,9 +31,6 @@ impl Model{
|
||||
let services=context.convert_into_place();
|
||||
Place{dom,services}
|
||||
}
|
||||
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
|
||||
to_snf(self,failure_mode)
|
||||
}
|
||||
}
|
||||
impl AsRef<WeakDom> for Model{
|
||||
fn as_ref(&self)->&WeakDom{
|
||||
@ -67,9 +62,6 @@ impl Place{
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
|
||||
to_snf(self,failure_mode)
|
||||
}
|
||||
}
|
||||
impl AsRef<WeakDom> for Place{
|
||||
fn as_ref(&self)->&WeakDom{
|
||||
@ -101,49 +93,6 @@ pub fn read<R:Read>(input:R)->Result<Model,ReadError>{
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum LoadError{
|
||||
Texture(loader::TextureError),
|
||||
Mesh(loader::MeshError),
|
||||
}
|
||||
impl std::fmt::Display for LoadError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for LoadError{}
|
||||
impl From<loader::TextureError> for LoadError{
|
||||
fn from(value:loader::TextureError)->Self{
|
||||
Self::Texture(value)
|
||||
}
|
||||
}
|
||||
impl From<loader::MeshError> for LoadError{
|
||||
fn from(value:loader::MeshError)->Self{
|
||||
Self::Mesh(value)
|
||||
}
|
||||
}
|
||||
//ConvertError
|
||||
|
||||
fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
|
||||
let dom=dom.as_ref();
|
||||
|
||||
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
|
||||
let mut mesh_deferred_loader=MeshDeferredLoader::new();
|
||||
|
||||
let map_step1=rbx::convert(
|
||||
dom,
|
||||
&mut texture_deferred_loader,
|
||||
&mut mesh_deferred_loader,
|
||||
);
|
||||
|
||||
let mut mesh_loader=loader::MeshLoader::new();
|
||||
let meshpart_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,failure_mode).map_err(LoadError::Mesh)?;
|
||||
|
||||
let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(meshpart_meshes);
|
||||
|
||||
let mut texture_loader=loader::TextureLoader::new();
|
||||
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;
|
||||
|
||||
let map=map_step2.add_render_configs_and_textures(render_configs);
|
||||
|
||||
Ok(map)
|
||||
}
|
||||
pub use rbx::convert;
|
||||
|
@ -4,14 +4,6 @@ use strafesnet_common::model::Mesh;
|
||||
use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
|
||||
|
||||
use crate::data::RobloxMeshBytes;
|
||||
use crate::rbx::RobloxFaceTextureDescription;
|
||||
|
||||
fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::Error>{
|
||||
let mut file=std::fs::File::open(path)?;
|
||||
let mut data=Vec::new();
|
||||
file.read_to_end(&mut data)?;
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug)]
|
||||
@ -49,7 +41,9 @@ impl<'a> Loader for TextureLoader<'a>{
|
||||
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
|
||||
let RobloxAssetId(asset_id)=index.parse()?;
|
||||
let file_name=format!("textures/{}.dds",asset_id);
|
||||
let data=read_entire_file(file_name)?;
|
||||
let mut file=std::fs::File::open(file_name)?;
|
||||
let mut data=Vec::new();
|
||||
file.read_to_end(&mut data)?;
|
||||
Ok(Texture::ImageDDS(data))
|
||||
}
|
||||
}
|
||||
@ -59,11 +53,8 @@ impl<'a> Loader for TextureLoader<'a>{
|
||||
pub enum MeshError{
|
||||
Io(std::io::Error),
|
||||
RobloxAssetIdParse(RobloxAssetIdParseErr),
|
||||
Mesh(crate::mesh::Error),
|
||||
Union(crate::union::Error),
|
||||
DecodeBinary(rbx_binary::DecodeError),
|
||||
OneChildPolicy,
|
||||
MissingInstance,
|
||||
Mesh(crate::mesh::Error)
|
||||
|
||||
}
|
||||
impl std::fmt::Display for MeshError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
@ -86,57 +77,6 @@ impl From<crate::mesh::Error> for MeshError{
|
||||
Self::Mesh(value)
|
||||
}
|
||||
}
|
||||
impl From<crate::union::Error> for MeshError{
|
||||
fn from(value:crate::union::Error)->Self{
|
||||
Self::Union(value)
|
||||
}
|
||||
}
|
||||
impl From<rbx_binary::DecodeError> for MeshError{
|
||||
fn from(value:rbx_binary::DecodeError)->Self{
|
||||
Self::DecodeBinary(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Hash,Eq,PartialEq)]
|
||||
pub enum MeshType<'a>{
|
||||
FileMesh,
|
||||
Union{
|
||||
mesh_data:&'a [u8],
|
||||
physics_data:&'a [u8],
|
||||
size_float_bits:[u32;3],
|
||||
part_texture_description:[Option<RobloxFaceTextureDescription>;6],
|
||||
},
|
||||
}
|
||||
#[derive(Hash,Eq,PartialEq)]
|
||||
pub struct MeshIndex<'a>{
|
||||
mesh_type:MeshType<'a>,
|
||||
content:&'a str,
|
||||
}
|
||||
impl MeshIndex<'_>{
|
||||
pub fn file_mesh(content:&str)->MeshIndex{
|
||||
MeshIndex{
|
||||
mesh_type:MeshType::FileMesh,
|
||||
content,
|
||||
}
|
||||
}
|
||||
pub fn union<'a>(
|
||||
content:&'a str,
|
||||
mesh_data:&'a [u8],
|
||||
physics_data:&'a [u8],
|
||||
size:&rbx_dom_weak::types::Vector3,
|
||||
part_texture_description:crate::rbx::RobloxPartDescription,
|
||||
)->MeshIndex<'a>{
|
||||
MeshIndex{
|
||||
mesh_type:MeshType::Union{
|
||||
mesh_data,
|
||||
physics_data,
|
||||
size_float_bits:[size.x.to_bits(),size.y.to_bits(),size.z.to_bits()],
|
||||
part_texture_description,
|
||||
},
|
||||
content,
|
||||
}
|
||||
}
|
||||
}
|
||||
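A hedged sketch of how these constructors key the deferred mesh loader used elsewhere in this diff (editor's example, crate-internal paths assumed; the union arm additionally carries the embedded MeshData/PhysicsData slices and part size shown above):

use strafesnet_common::model::MeshId;
use strafesnet_deferred_loader::deferred_loader::MeshDeferredLoader;
use crate::loader::MeshIndex;

fn queue_file_mesh<'a>(
	mesh_deferred_loader:&mut MeshDeferredLoader<MeshIndex<'a>>,
	mesh_asset_id:&'a str,
)->MeshId{
	//equal MeshIndex values hash to the same deferred MeshId,
	//so MeshParts sharing an asset are loaded once
	mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id))
}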
|
||||
pub struct MeshLoader<'a>(std::marker::PhantomData<&'a ()>);
|
||||
impl MeshLoader<'_>{
|
||||
@ -146,46 +86,17 @@ impl MeshLoader<'_>{
|
||||
}
|
||||
impl<'a> Loader for MeshLoader<'a>{
|
||||
type Error=MeshError;
|
||||
type Index=MeshIndex<'a>;
|
||||
type Index=&'a str;
|
||||
type Resource=Mesh;
|
||||
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
|
||||
let mesh=match index.mesh_type{
|
||||
MeshType::FileMesh=>{
|
||||
let RobloxAssetId(asset_id)=index.content.parse()?;
|
||||
let file_name=format!("meshes/{}",asset_id);
|
||||
let data=read_entire_file(file_name)?;
|
||||
crate::mesh::convert(RobloxMeshBytes::new(data))?
|
||||
},
|
||||
MeshType::Union{mut physics_data,mut mesh_data,size_float_bits,part_texture_description}=>{
|
||||
// decode asset
|
||||
let size=glam::Vec3::from_array(size_float_bits.map(f32::from_bits));
|
||||
if !index.content.is_empty()&&(physics_data.is_empty()||mesh_data.is_empty()){
|
||||
let RobloxAssetId(asset_id)=index.content.parse()?;
|
||||
let file_name=format!("unions/{}",asset_id);
|
||||
let data=read_entire_file(file_name)?;
|
||||
let dom=rbx_binary::from_reader(std::io::Cursor::new(data))?;
|
||||
let &[referent]=dom.root().children()else{
|
||||
return Err(MeshError::OneChildPolicy);
|
||||
};
|
||||
let Some(instance)=dom.get_by_ref(referent)else{
|
||||
return Err(MeshError::MissingInstance);
|
||||
};
|
||||
if physics_data.is_empty(){
|
||||
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get("PhysicsData"){
|
||||
physics_data=data.as_ref();
|
||||
}
|
||||
}
|
||||
if mesh_data.is_empty(){
|
||||
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get("MeshData"){
|
||||
mesh_data=data.as_ref();
|
||||
}
|
||||
}
|
||||
crate::union::convert(physics_data,mesh_data,size,part_texture_description)?
|
||||
}else{
|
||||
crate::union::convert(physics_data,mesh_data,size,part_texture_description)?
|
||||
}
|
||||
},
|
||||
};
|
||||
let RobloxAssetId(asset_id)=index.parse()?;
|
||||
let file_name=format!("meshes/{}",asset_id);
|
||||
let mut file=std::fs::File::open(file_name)?;
|
||||
// reading the entire file is way faster than
|
||||
// round tripping to disk every read from the parser
|
||||
let mut data=Vec::new();
|
||||
file.read_to_end(&mut data)?;
|
||||
let mesh=crate::mesh::convert(RobloxMeshBytes::new(data))?;
|
||||
Ok(mesh)
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use rbx_mesh::mesh::{Vertex2,Vertex2Truncated};
|
||||
use strafesnet_common::{integer::vec3,model::{self,ColorId,IndexedVertex,NormalId,PolygonGroup,PolygonList,PositionId,RenderConfigId,TextureCoordinateId,VertexId}};
|
||||
use rbx_mesh::mesh::{Vertex2, Vertex2Truncated};
|
||||
use strafesnet_common::{integer::vec3,model::{self, ColorId, IndexedVertex, NormalId, PolygonGroup, PolygonList, PositionId, TextureCoordinateId, VertexId}};
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug)]
|
||||
@ -205,13 +205,7 @@ pub fn convert(roblox_mesh_bytes:crate::data::RobloxMeshBytes)->Result<model::Me
|
||||
unique_vertices,
|
||||
polygon_groups,
|
||||
//these should probably be moved to the model...
|
||||
//but what if models want to use the same texture
|
||||
graphics_groups:vec![model::IndexedGraphicsGroup{
|
||||
render:RenderConfigId::new(0),
|
||||
//the lowest lod is highest quality
|
||||
groups:vec![model::PolygonGroupId::new(0)]
|
||||
}],
|
||||
//disable physics
|
||||
graphics_groups:Vec::new(),
|
||||
physics_groups:Vec::new(),
|
||||
})
|
||||
}
|
||||
|
@ -42,6 +42,50 @@ const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
|
||||
vec3::int( 0,-1, 0),//CubeFace::Bottom
|
||||
vec3::int( 0, 0,-1),//CubeFace::Front
|
||||
];
|
||||
const CUBE_DEFAULT_POLYS:[[[u32;3];4];6]=[
|
||||
// right (1, 0, 0)
|
||||
[
|
||||
[6,2,0],//[vertex,tex,norm]
|
||||
[5,1,0],
|
||||
[2,0,0],
|
||||
[1,3,0],
|
||||
],
|
||||
// top (0, 1, 0)
|
||||
[
|
||||
[5,3,1],
|
||||
[4,2,1],
|
||||
[3,1,1],
|
||||
[2,0,1],
|
||||
],
|
||||
// back (0, 0, 1)
|
||||
[
|
||||
[0,3,2],
|
||||
[1,2,2],
|
||||
[2,1,2],
|
||||
[3,0,2],
|
||||
],
|
||||
// left (-1, 0, 0)
|
||||
[
|
||||
[0,2,3],
|
||||
[3,1,3],
|
||||
[4,0,3],
|
||||
[7,3,3],
|
||||
],
|
||||
// bottom (0,-1, 0)
|
||||
[
|
||||
[1,1,4],
|
||||
[0,0,4],
|
||||
[7,3,4],
|
||||
[6,2,4],
|
||||
],
|
||||
// front (0, 0,-1)
|
||||
[
|
||||
[4,1,5],
|
||||
[5,0,5],
|
||||
[6,3,5],
|
||||
[7,2,5],
|
||||
],
|
||||
];
|
||||
|
||||
#[derive(Hash,PartialEq,Eq)]
|
||||
pub enum WedgeFace{
|
||||
@ -88,8 +132,8 @@ impl CubeFaceDescription{
|
||||
pub fn insert(&mut self,index:CubeFace,value:FaceDescription){
|
||||
self.0[index as usize]=Some(value);
|
||||
}
|
||||
pub fn pairs(self)->impl Iterator<Item=(usize,FaceDescription)>{
|
||||
self.0.into_iter().enumerate().filter_map(|(i,v)|v.map(|u|(i,u)))
|
||||
pub fn pairs(self)->std::iter::FilterMap<std::iter::Enumerate<std::array::IntoIter<Option<FaceDescription>,6>>,impl FnMut((usize,Option<FaceDescription>))->Option<(usize,FaceDescription)>>{
|
||||
self.0.into_iter().enumerate().filter_map(|v|v.1.map(|u|(v.0,u)))
|
||||
}
|
||||
}
|
||||
pub fn unit_cube(render:RenderConfigId)->Mesh{
|
||||
@ -157,50 +201,6 @@ impl FaceDescription{
|
||||
}
|
||||
}
|
||||
pub fn generate_partial_unit_cube(face_descriptions:CubeFaceDescription)->Mesh{
|
||||
const CUBE_DEFAULT_POLYS:[[[u32;3];4];6]=[
|
||||
// right (1, 0, 0)
|
||||
[
|
||||
[6,2,0],//[vertex,tex,norm]
|
||||
[5,1,0],
|
||||
[2,0,0],
|
||||
[1,3,0],
|
||||
],
|
||||
// top (0, 1, 0)
|
||||
[
|
||||
[5,3,1],
|
||||
[4,2,1],
|
||||
[3,1,1],
|
||||
[2,0,1],
|
||||
],
|
||||
// back (0, 0, 1)
|
||||
[
|
||||
[0,3,2],
|
||||
[1,2,2],
|
||||
[2,1,2],
|
||||
[3,0,2],
|
||||
],
|
||||
// left (-1, 0, 0)
|
||||
[
|
||||
[0,2,3],
|
||||
[3,1,3],
|
||||
[4,0,3],
|
||||
[7,3,3],
|
||||
],
|
||||
// bottom (0,-1, 0)
|
||||
[
|
||||
[1,1,4],
|
||||
[0,0,4],
|
||||
[7,3,4],
|
||||
[6,2,4],
|
||||
],
|
||||
// front (0, 0,-1)
|
||||
[
|
||||
[4,1,5],
|
||||
[5,0,5],
|
||||
[6,3,5],
|
||||
[7,2,5],
|
||||
],
|
||||
];
|
||||
let mut generated_pos=Vec::new();
|
||||
let mut generated_tex=Vec::new();
|
||||
let mut generated_normal=Vec::new();
|
||||
@ -279,35 +279,35 @@ pub fn generate_partial_unit_cube(face_descriptions:CubeFaceDescription)->Mesh{
|
||||
}
|
||||
//don't think too hard about the copy paste because this is all going into the map tool eventually...
|
||||
pub fn generate_partial_unit_wedge(face_descriptions:WedgeFaceDescription)->Mesh{
|
||||
const WEDGE_DEFAULT_POLYS:[&[[u32;3]];5]=[
|
||||
let wedge_default_polys=[
|
||||
// right (1, 0, 0)
|
||||
&[
|
||||
vec![
|
||||
[6,2,0],//[vertex,tex,norm]
|
||||
[2,0,0],
|
||||
[1,3,0],
|
||||
],
|
||||
// FrontTop (0, 1, -1)
|
||||
&[
|
||||
vec![
|
||||
[3,1,1],
|
||||
[2,0,1],
|
||||
[6,3,1],
|
||||
[7,2,1],
|
||||
],
|
||||
// back (0, 0, 1)
|
||||
&[
|
||||
vec![
|
||||
[0,3,2],
|
||||
[1,2,2],
|
||||
[2,1,2],
|
||||
[3,0,2],
|
||||
],
|
||||
// left (-1, 0, 0)
|
||||
&[
|
||||
vec![
|
||||
[0,2,3],
|
||||
[3,1,3],
|
||||
[7,3,3],
|
||||
],
|
||||
// bottom (0,-1, 0)
|
||||
&[
|
||||
vec![
|
||||
[1,1,4],
|
||||
[0,0,4],
|
||||
[7,3,4],
|
||||
@ -351,7 +351,7 @@ pub fn generate_partial_unit_wedge(face_descriptions:WedgeFaceDescription)->Mesh
|
||||
//push vertices as they are needed
|
||||
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
|
||||
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(vec![
|
||||
WEDGE_DEFAULT_POLYS[face_id].iter().map(|tup|{
|
||||
wedge_default_polys[face_id].iter().map(|tup|{
|
||||
let pos=CUBE_DEFAULT_VERTICES[tup[0] as usize];
|
||||
let pos_index=if let Some(pos_index)=generated_pos.iter().position(|&p|p==pos){
|
||||
pos_index
|
||||
@ -392,34 +392,34 @@ pub fn generate_partial_unit_wedge(face_descriptions:WedgeFaceDescription)->Mesh
|
||||
}
|
||||
|
||||
pub fn generate_partial_unit_cornerwedge(face_descriptions:CornerWedgeFaceDescription)->Mesh{
|
||||
const CORNERWEDGE_DEFAULT_POLYS:[&[[u32;3]];5]=[
|
||||
let cornerwedge_default_polys=[
|
||||
// right (1, 0, 0)
|
||||
&[
|
||||
vec![
|
||||
[6,2,0],//[vertex,tex,norm]
|
||||
[5,1,0],
|
||||
[1,3,0],
|
||||
],
|
||||
// BackTop (0, 1, 1)
|
||||
&[
|
||||
vec![
|
||||
[5,3,1],
|
||||
[0,1,1],
|
||||
[1,0,1],
|
||||
],
|
||||
// LeftTop (-1, 1, 0)
|
||||
&[
|
||||
vec![
|
||||
[5,3,2],
|
||||
[7,2,2],
|
||||
[0,1,2],
|
||||
],
|
||||
// bottom (0,-1, 0)
|
||||
&[
|
||||
vec![
|
||||
[1,1,3],
|
||||
[0,0,3],
|
||||
[7,3,3],
|
||||
[6,2,3],
|
||||
],
|
||||
// front (0, 0,-1)
|
||||
&[
|
||||
vec![
|
||||
[5,0,4],
|
||||
[6,3,4],
|
||||
[7,2,4],
|
||||
@ -462,7 +462,7 @@ pub fn generate_partial_unit_cornerwedge(face_descriptions:CornerWedgeFaceDescri
|
||||
//push vertices as they are needed
|
||||
let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
|
||||
polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(vec![
|
||||
CORNERWEDGE_DEFAULT_POLYS[face_id].iter().map(|tup|{
|
||||
cornerwedge_default_polys[face_id].iter().map(|tup|{
|
||||
let pos=CUBE_DEFAULT_VERTICES[tup[0] as usize];
|
||||
let pos_index=if let Some(pos_index)=generated_pos.iter().position(|&p|p==pos){
|
||||
pos_index
|
||||
|
@ -1,7 +1,5 @@
|
||||
use std::collections::HashMap;
|
||||
use crate::loader::MeshIndex;
|
||||
use crate::primitives;
|
||||
use strafesnet_common::aabb::Aabb;
|
||||
use strafesnet_common::map;
|
||||
use strafesnet_common::model;
|
||||
use strafesnet_common::gameplay_modes;
|
||||
@ -346,103 +344,58 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone,Copy)]
|
||||
pub struct RobloxTextureTransform{
|
||||
offset_studs_u:f32,
|
||||
offset_studs_v:f32,
|
||||
studs_per_tile_u:f32,
|
||||
studs_per_tile_v:f32,
|
||||
size_u:f32,
|
||||
size_v:f32,
|
||||
#[derive(Clone,Copy,PartialEq)]
|
||||
struct RobloxTextureTransform{
|
||||
offset_u:f32,
|
||||
offset_v:f32,
|
||||
scale_u:f32,
|
||||
scale_v:f32,
|
||||
}
|
||||
#[derive(Clone,Copy,Hash,Eq,PartialEq)]
|
||||
pub struct RobloxTextureTransformBits{
|
||||
offset_studs_u:u32,
|
||||
offset_studs_v:u32,
|
||||
studs_per_tile_u:u32,
|
||||
studs_per_tile_v:u32,
|
||||
size_u:u32,
|
||||
size_v:u32,
|
||||
impl std::cmp::Eq for RobloxTextureTransform{}//????
|
||||
impl std::default::Default for RobloxTextureTransform{
|
||||
fn default()->Self{
|
||||
Self{offset_u:0.0,offset_v:0.0,scale_u:1.0,scale_v:1.0}
|
||||
}
|
||||
}
|
||||
impl RobloxTextureTransform{
|
||||
fn identity()->Self{
|
||||
Self{
|
||||
offset_studs_u:0.0,
|
||||
offset_studs_v:0.0,
|
||||
studs_per_tile_u:1.0,
|
||||
studs_per_tile_v:1.0,
|
||||
size_u:1.0,
|
||||
size_v:1.0,
|
||||
}
|
||||
}
|
||||
pub fn to_bits(self)->RobloxTextureTransformBits{
|
||||
RobloxTextureTransformBits{
|
||||
offset_studs_u:self.offset_studs_u.to_bits(),
|
||||
offset_studs_v:self.offset_studs_v.to_bits(),
|
||||
studs_per_tile_u:self.studs_per_tile_u.to_bits(),
|
||||
studs_per_tile_v:self.studs_per_tile_v.to_bits(),
|
||||
size_u:self.size_u.to_bits(),
|
||||
size_v:self.size_v.to_bits(),
|
||||
}
|
||||
}
|
||||
pub fn affine(&self)->glam::Affine2{
|
||||
glam::Affine2::from_translation(
|
||||
glam::vec2(self.offset_studs_u/self.studs_per_tile_u,self.offset_studs_v/self.studs_per_tile_v)
|
||||
)
|
||||
*glam::Affine2::from_scale(
|
||||
glam::vec2(self.size_u/self.studs_per_tile_u,self.size_v/self.studs_per_tile_v)
|
||||
)
|
||||
}
|
||||
pub fn set_size(&mut self,size_u:f32,size_v:f32){
|
||||
self.size_u=size_u;
|
||||
self.size_v=size_v;
|
||||
impl std::hash::Hash for RobloxTextureTransform{
|
||||
fn hash<H:std::hash::Hasher>(&self,state:&mut H) {
|
||||
self.offset_u.to_ne_bytes().hash(state);
|
||||
self.offset_v.to_ne_bytes().hash(state);
|
||||
self.scale_u.to_ne_bytes().hash(state);
|
||||
self.scale_v.to_ne_bytes().hash(state);
|
||||
}
|
||||
}
|
||||
impl core::hash::Hash for RobloxTextureTransform{
|
||||
fn hash<H:core::hash::Hasher>(&self,state:&mut H){
|
||||
self.to_bits().hash(state);
|
||||
}
|
||||
}
|
||||
#[derive(Clone,Copy,Hash,Eq,PartialEq)]
|
||||
pub struct RobloxFaceTextureDescriptionBits{
|
||||
#[derive(Clone,PartialEq)]
|
||||
struct RobloxFaceTextureDescription{
|
||||
render:RenderConfigId,
|
||||
color:[u32;4],
|
||||
transform:RobloxTextureTransformBits,
|
||||
color:glam::Vec4,
|
||||
transform:RobloxTextureTransform,
|
||||
}
|
||||
#[derive(Clone,Copy)]
|
||||
pub struct RobloxFaceTextureDescription{
|
||||
pub render:RenderConfigId,
|
||||
pub color:glam::Vec4,
|
||||
pub transform:RobloxTextureTransform,
|
||||
}
|
||||
impl core::cmp::PartialEq for RobloxFaceTextureDescription{
|
||||
fn eq(&self,other:&Self)->bool{
|
||||
self.to_bits().eq(&other.to_bits())
|
||||
}
|
||||
}
|
||||
impl core::cmp::Eq for RobloxFaceTextureDescription{}
|
||||
impl core::hash::Hash for RobloxFaceTextureDescription{
|
||||
fn hash<H:core::hash::Hasher>(&self,state:&mut H){
|
||||
self.to_bits().hash(state);
|
||||
}
|
||||
impl std::cmp::Eq for RobloxFaceTextureDescription{}//????
|
||||
impl std::hash::Hash for RobloxFaceTextureDescription{
|
||||
fn hash<H:std::hash::Hasher>(&self,state:&mut H){
|
||||
self.render.hash(state);
|
||||
self.transform.hash(state);
|
||||
for &el in self.color.as_ref().iter(){
|
||||
el.to_ne_bytes().hash(state);
|
||||
}
|
||||
}
|
||||
}
|
||||
impl RobloxFaceTextureDescription{
|
||||
pub fn to_bits(self)->RobloxFaceTextureDescriptionBits{
|
||||
RobloxFaceTextureDescriptionBits{
|
||||
render:self.render,
|
||||
color:self.color.to_array().map(f32::to_bits),
|
||||
transform:self.transform.to_bits(),
|
||||
}
|
||||
}
|
||||
pub fn to_face_description(&self)->primitives::FaceDescription{
|
||||
fn to_face_description(&self)->primitives::FaceDescription{
|
||||
primitives::FaceDescription{
|
||||
render:self.render,
|
||||
transform:self.transform.affine(),
|
||||
transform:glam::Affine2::from_translation(
|
||||
glam::vec2(self.transform.offset_u,self.transform.offset_v)
|
||||
)
|
||||
*glam::Affine2::from_scale(
|
||||
glam::vec2(self.transform.scale_u,self.transform.scale_v)
|
||||
),
|
||||
color:self.color,
|
||||
}
|
||||
}
|
||||
}
|
||||
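A hedged numeric sketch of the texture transform composition above, whether written as the affine() helper or inlined in to_face_description (editor's example): a face 8 studs wide with StudsPerTileU=4 tiles the texture twice along U, and OffsetStudsU=2 shifts it by half a tile.

fn uv_transform_example(){
	let studs_per_tile_u=4.0f32;
	let size_u=8.0f32;
	let offset_studs_u=2.0f32;
	//same composition as above: translate by offset/studs_per_tile,
	//then scale by size/studs_per_tile
	let transform=glam::Affine2::from_translation(
		glam::vec2(offset_studs_u/studs_per_tile_u,0.0)
	)*glam::Affine2::from_scale(
		glam::vec2(size_u/studs_per_tile_u,1.0)
	);
	//the unit UV corner (1,0) lands at u = 2.0*1.0 + 0.5 = 2.5 tiles
	let uv=transform.transform_point2(glam::vec2(1.0,0.0));
	assert!((uv.x-2.5).abs()<1e-6);
}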
pub type RobloxPartDescription=[Option<RobloxFaceTextureDescription>;6];
|
||||
type RobloxPartDescription=[Option<RobloxFaceTextureDescription>;6];
|
||||
type RobloxWedgeDescription=[Option<RobloxFaceTextureDescription>;5];
|
||||
type RobloxCornerWedgeDescription=[Option<RobloxFaceTextureDescription>;5];
|
||||
#[derive(Clone,Eq,Hash,PartialEq)]
|
||||
@ -453,128 +406,41 @@ enum RobloxBasePartDescription{
|
||||
Wedge(RobloxWedgeDescription),
|
||||
CornerWedge(RobloxCornerWedgeDescription),
|
||||
}
|
||||
fn get_texture_description<'a>(
|
||||
temp_objects:&mut Vec<rbx_dom_weak::types::Ref>,
|
||||
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
|
||||
dom:&'a rbx_dom_weak::WeakDom,
|
||||
object:&rbx_dom_weak::Instance,
|
||||
size:&rbx_dom_weak::types::Vector3,
|
||||
)->RobloxPartDescription{
|
||||
//use the biggest one and cut it down later...
|
||||
let mut part_texture_description:RobloxPartDescription=[None,None,None,None,None,None];
|
||||
temp_objects.clear();
|
||||
recursive_collect_superclass(temp_objects,&dom,object,"Decal");
|
||||
for &mut decal_ref in temp_objects{
|
||||
if let Some(decal)=dom.get_by_ref(decal_ref){
|
||||
if let (
|
||||
Some(rbx_dom_weak::types::Variant::Content(content)),
|
||||
Some(rbx_dom_weak::types::Variant::Enum(normalid)),
|
||||
Some(rbx_dom_weak::types::Variant::Color3(decal_color3)),
|
||||
Some(rbx_dom_weak::types::Variant::Float32(decal_transparency)),
|
||||
) = (
|
||||
decal.properties.get("Texture"),
|
||||
decal.properties.get("Face"),
|
||||
decal.properties.get("Color3"),
|
||||
decal.properties.get("Transparency"),
|
||||
) {
|
||||
let render_id=render_config_deferred_loader.acquire_render_config_id(Some(content.as_ref()));
|
||||
let normal_id=normalid.to_u32();
|
||||
if normal_id<6{
|
||||
let (roblox_texture_color,roblox_texture_transform)=if decal.class=="Texture"{
|
||||
//generate transform
|
||||
if let (
|
||||
Some(&rbx_dom_weak::types::Variant::Float32(offset_studs_u)),
|
||||
Some(&rbx_dom_weak::types::Variant::Float32(offset_studs_v)),
|
||||
Some(&rbx_dom_weak::types::Variant::Float32(studs_per_tile_u)),
|
||||
Some(&rbx_dom_weak::types::Variant::Float32(studs_per_tile_v)),
|
||||
) = (
|
||||
decal.properties.get("OffsetStudsU"),
|
||||
decal.properties.get("OffsetStudsV"),
|
||||
decal.properties.get("StudsPerTileU"),
|
||||
decal.properties.get("StudsPerTileV"),
|
||||
)
|
||||
{
|
||||
let (size_u,size_v)=match normal_id{
|
||||
0=>(size.z,size.y),//right
|
||||
1=>(size.x,size.z),//top
|
||||
2=>(size.x,size.y),//back
|
||||
3=>(size.z,size.y),//left
|
||||
4=>(size.x,size.z),//bottom
|
||||
5=>(size.x,size.y),//front
|
||||
_=>unreachable!(),
|
||||
};
|
||||
(
|
||||
glam::vec4(decal_color3.r,decal_color3.g,decal_color3.b,1.0-*decal_transparency),
|
||||
RobloxTextureTransform{
|
||||
offset_studs_u,
|
||||
offset_studs_v,
|
||||
studs_per_tile_u,
|
||||
studs_per_tile_v,
|
||||
size_u,
|
||||
size_v,
|
||||
}
|
||||
)
|
||||
}else{
|
||||
(glam::Vec4::ONE,RobloxTextureTransform::identity())
|
||||
}
|
||||
}else{
|
||||
(glam::Vec4::ONE,RobloxTextureTransform::identity())
|
||||
};
|
||||
part_texture_description[normal_id as usize]=Some(RobloxFaceTextureDescription{
|
||||
render:render_id,
|
||||
color:roblox_texture_color,
|
||||
transform:roblox_texture_transform,
|
||||
});
|
||||
}else{
|
||||
println!("NormalId={} is invalid",normal_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
part_texture_description
|
||||
}
|
||||
enum Shape{
|
||||
Primitive(primitives::Primitives),
|
||||
MeshPart,
|
||||
PhysicsData,
|
||||
}
|
||||
enum MeshAvailability{
|
||||
Immediate,
|
||||
DeferredMesh(RenderConfigId),
|
||||
DeferredUnion(RobloxPartDescription),
|
||||
Deferred(RenderConfigId),
|
||||
}
|
||||
struct DeferredModelDeferredAttributes<'a>{
|
||||
struct DeferredModelDeferredAttributes{
|
||||
render:RenderConfigId,
|
||||
model:ModelDeferredAttributes<'a>,
|
||||
model:ModelDeferredAttributes,
|
||||
}
|
||||
struct ModelDeferredAttributes<'a>{
|
||||
struct ModelDeferredAttributes{
|
||||
mesh:model::MeshId,
|
||||
deferred_attributes:GetAttributesArgs<'a>,
|
||||
deferred_attributes:GetAttributesArgs,
|
||||
color:model::Color4,//transparency is in here
|
||||
transform:Planar64Affine3,
|
||||
}
|
||||
struct DeferredUnionDeferredAttributes<'a>{
|
||||
render:RobloxPartDescription,
|
||||
model:ModelDeferredAttributes<'a>,
|
||||
}
|
||||
struct ModelOwnedAttributes{
|
||||
mesh:model::MeshId,
|
||||
attributes:attr::CollisionAttributes,
|
||||
color:model::Color4,//transparency is in here
|
||||
transform:Planar64Affine3,
|
||||
}
|
||||
struct GetAttributesArgs<'a>{
|
||||
name:&'a str,
|
||||
struct GetAttributesArgs{
|
||||
name:Box<str>,
|
||||
can_collide:bool,
|
||||
velocity:Planar64Vec3,
|
||||
}
|
||||
pub fn convert<'a>(
|
||||
dom:&'a rbx_dom_weak::WeakDom,
|
||||
render_config_deferred_loader:&mut RenderConfigDeferredLoader<&'a str>,
|
||||
mesh_deferred_loader:&mut MeshDeferredLoader<MeshIndex<'a>>,
|
||||
)->PartialMap1<'a>{
|
||||
mesh_deferred_loader:&mut MeshDeferredLoader<&'a str>,
|
||||
)->PartialMap1{
|
||||
let mut deferred_models_deferred_attributes=Vec::new();
|
||||
let mut deferred_unions_deferred_attributes=Vec::new();
|
||||
let mut primitive_models_deferred_attributes=Vec::new();
|
||||
let mut primitive_meshes=Vec::new();
|
||||
let mut mesh_id_from_description=HashMap::new();
|
||||
@ -635,7 +501,6 @@ pub fn convert<'a>(
|
||||
"WedgePart"=>Shape::Primitive(primitives::Primitives::Wedge),
|
||||
"CornerWedgePart"=>Shape::Primitive(primitives::Primitives::CornerWedge),
|
||||
"MeshPart"=>Shape::MeshPart,
|
||||
"UnionOperation"=>Shape::PhysicsData,
|
||||
_=>{
|
||||
println!("Unsupported BasePart ClassName={}; defaulting to cube",object.class);
|
||||
Shape::Primitive(primitives::Primitives::Cube)
|
||||
@ -644,8 +509,74 @@ pub fn convert<'a>(
|
||||
|
||||
let (availability,mesh_id)=match shape{
|
||||
Shape::Primitive(primitive_shape)=>{
|
||||
//TODO: TAB TAB
|
||||
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
|
||||
//TODO: TAB TAB
|
||||
//use the biggest one and cut it down later...
|
||||
let mut part_texture_description:RobloxPartDescription=[None,None,None,None,None,None];
|
||||
temp_objects.clear();
|
||||
recursive_collect_superclass(&mut temp_objects, &dom, object,"Decal");
|
||||
for &decal_ref in &temp_objects{
|
||||
if let Some(decal)=dom.get_by_ref(decal_ref){
|
||||
if let (
|
||||
Some(rbx_dom_weak::types::Variant::Content(content)),
|
||||
Some(rbx_dom_weak::types::Variant::Enum(normalid)),
|
||||
Some(rbx_dom_weak::types::Variant::Color3(decal_color3)),
|
||||
Some(rbx_dom_weak::types::Variant::Float32(decal_transparency)),
|
||||
) = (
|
||||
decal.properties.get("Texture"),
|
||||
decal.properties.get("Face"),
|
||||
decal.properties.get("Color3"),
|
||||
decal.properties.get("Transparency"),
|
||||
) {
|
||||
let render_id=render_config_deferred_loader.acquire_render_config_id(Some(content.as_ref()));
|
||||
let normal_id=normalid.to_u32();
|
||||
if normal_id<6{
|
||||
let (roblox_texture_color,roblox_texture_transform)=if decal.class=="Texture"{
|
||||
//generate transform
|
||||
if let (
|
||||
Some(rbx_dom_weak::types::Variant::Float32(ox)),
|
||||
Some(rbx_dom_weak::types::Variant::Float32(oy)),
|
||||
Some(rbx_dom_weak::types::Variant::Float32(sx)),
|
||||
Some(rbx_dom_weak::types::Variant::Float32(sy)),
|
||||
) = (
|
||||
decal.properties.get("OffsetStudsU"),
|
||||
decal.properties.get("OffsetStudsV"),
|
||||
decal.properties.get("StudsPerTileU"),
|
||||
decal.properties.get("StudsPerTileV"),
|
||||
)
|
||||
{
|
||||
let (size_u,size_v)=match normal_id{
|
||||
0=>(size.z,size.y),//right
|
||||
1=>(size.x,size.z),//top
|
||||
2=>(size.x,size.y),//back
|
||||
3=>(size.z,size.y),//left
|
||||
4=>(size.x,size.z),//bottom
|
||||
5=>(size.x,size.y),//front
|
||||
_=>unreachable!(),
|
||||
};
|
||||
(
|
||||
glam::vec4(decal_color3.r,decal_color3.g,decal_color3.b,1.0-*decal_transparency),
|
||||
RobloxTextureTransform{
|
||||
offset_u:*ox/(*sx),offset_v:*oy/(*sy),
|
||||
scale_u:size_u/(*sx),scale_v:size_v/(*sy),
|
||||
}
|
||||
)
|
||||
}else{
|
||||
(glam::Vec4::ONE,RobloxTextureTransform::default())
|
||||
}
|
||||
}else{
|
||||
(glam::Vec4::ONE,RobloxTextureTransform::default())
|
||||
};
|
||||
part_texture_description[normal_id as usize]=Some(RobloxFaceTextureDescription{
|
||||
render:render_id,
|
||||
color:roblox_texture_color,
|
||||
transform:roblox_texture_transform,
|
||||
});
|
||||
}else{
|
||||
println!("NormalId={} unsupported for shape={:?}",normal_id,primitive_shape);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
//obscure rust syntax "slice pattern"
|
||||
let [
|
||||
f0,//Cube::Right
|
||||
@ -662,7 +593,7 @@ pub fn convert<'a>(
|
||||
//use front face texture first and use top face texture as a fallback
|
||||
primitives::Primitives::Wedge=>RobloxBasePartDescription::Wedge([
|
||||
f0,//Cube::Right->Wedge::Right
|
||||
f5.or(f1),//Cube::Front|Cube::Top->Wedge::TopFront
|
||||
if f5.is_some(){f5}else{f1},//Cube::Front|Cube::Top->Wedge::TopFront
|
||||
f2,//Cube::Back->Wedge::Back
|
||||
f3,//Cube::Left->Wedge::Left
|
||||
f4,//Cube::Bottom->Wedge::Bottom
|
||||
@ -670,8 +601,8 @@ pub fn convert<'a>(
|
||||
//TODO: fix Left+Back texture coordinates to match roblox when not overridden by Top
|
||||
primitives::Primitives::CornerWedge=>RobloxBasePartDescription::CornerWedge([
|
||||
f0,//Cube::Right->CornerWedge::Right
|
||||
f2.or(f1.clone()),//Cube::Back|Cube::Top->CornerWedge::TopBack
|
||||
f3.or(f1),//Cube::Left|Cube::Top->CornerWedge::TopLeft
|
||||
if f2.is_some(){f2}else{f1.clone()},//Cube::Back|Cube::Top->CornerWedge::TopBack
|
||||
if f3.is_some(){f3}else{f1},//Cube::Left|Cube::Top->CornerWedge::TopLeft
|
||||
f4,//Cube::Bottom->CornerWedge::Bottom
|
||||
f5,//Cube::Front->CornerWedge::Front
|
||||
]),
|
||||
@ -758,51 +689,29 @@ pub fn convert<'a>(
|
||||
object.properties.get("TextureID"),
|
||||
){
|
||||
(
|
||||
MeshAvailability::DeferredMesh(render_config_deferred_loader.acquire_render_config_id(Some(texture_asset_id.as_ref()))),
|
||||
mesh_deferred_loader.acquire_mesh_id(MeshIndex::file_mesh(mesh_asset_id.as_ref())),
|
||||
MeshAvailability::Deferred(render_config_deferred_loader.acquire_render_config_id(Some(texture_asset_id.as_ref()))),
|
||||
mesh_deferred_loader.acquire_mesh_id(mesh_asset_id.as_ref()),
|
||||
)
|
||||
}else{
|
||||
panic!("Mesh has no Mesh or Texture");
|
||||
},
|
||||
Shape::PhysicsData=>{
|
||||
let mut content="";
|
||||
let mut mesh_data:&[u8]=&[];
|
||||
let mut physics_data:&[u8]=&[];
|
||||
if let Some(rbx_dom_weak::types::Variant::Content(asset_id))=object.properties.get("AssetId"){
|
||||
content=asset_id.as_ref();
|
||||
}
|
||||
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get("MeshData"){
|
||||
mesh_data=data.as_ref();
|
||||
}
|
||||
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=object.properties.get("PhysicsData"){
|
||||
physics_data=data.as_ref();
|
||||
}
|
||||
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
|
||||
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
|
||||
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
|
||||
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
|
||||
},
|
||||
};
|
||||
let model_deferred_attributes=ModelDeferredAttributes{
|
||||
mesh:mesh_id,
|
||||
transform:model_transform,
|
||||
color:glam::vec4(color3.r as f32/255f32, color3.g as f32/255f32, color3.b as f32/255f32, 1.0-*transparency),
|
||||
deferred_attributes:GetAttributesArgs{
|
||||
name:object.name.as_str(),
|
||||
name:object.name.as_str().into(),
|
||||
can_collide:*can_collide,
|
||||
velocity:vec3::try_from_f32_array([velocity.x,velocity.y,velocity.z]).unwrap(),
|
||||
},
|
||||
};
|
||||
match availability{
|
||||
MeshAvailability::Immediate=>primitive_models_deferred_attributes.push(model_deferred_attributes),
|
||||
MeshAvailability::DeferredMesh(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
|
||||
MeshAvailability::Deferred(render)=>deferred_models_deferred_attributes.push(DeferredModelDeferredAttributes{
|
||||
render,
|
||||
model:model_deferred_attributes
|
||||
}),
|
||||
MeshAvailability::DeferredUnion(part_texture_description)=>deferred_unions_deferred_attributes.push(DeferredUnionDeferredAttributes{
|
||||
render:part_texture_description,
|
||||
model:model_deferred_attributes,
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -811,68 +720,18 @@ pub fn convert<'a>(
|
||||
primitive_meshes,
|
||||
primitive_models_deferred_attributes,
|
||||
deferred_models_deferred_attributes,
|
||||
deferred_unions_deferred_attributes,
|
||||
}
|
||||
}
|
||||
struct MeshWithAabb{
|
||||
mesh:model::Mesh,
|
||||
aabb:Aabb,
|
||||
aabb:strafesnet_common::aabb::Aabb,
|
||||
}
|
||||
fn acquire_mesh_id_from_render_config_id<'a>(
|
||||
primitive_meshes:&mut Vec<model::Mesh>,
|
||||
mesh_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RenderConfigId,model::MeshId>>,
|
||||
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
|
||||
old_mesh_id:model::MeshId,
|
||||
render:RenderConfigId,
|
||||
)->Option<(model::MeshId,&'a Aabb)>{
|
||||
//ignore meshes that fail to load completely for now
|
||||
loaded_meshes.get(&old_mesh_id).map(|mesh_with_aabb|(
|
||||
*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
|
||||
.entry(render).or_insert_with(||{
|
||||
let mesh_id=model::MeshId::new(primitive_meshes.len() as u32);
|
||||
let mut mesh_clone=mesh_with_aabb.mesh.clone();
|
||||
//set the render group lool
|
||||
if let Some(graphics_group)=mesh_clone.graphics_groups.first_mut(){
|
||||
graphics_group.render=render;
|
||||
}
|
||||
primitive_meshes.push(mesh_clone);
|
||||
mesh_id
|
||||
}),
|
||||
&mesh_with_aabb.aabb,
|
||||
))
|
||||
}
|
||||
fn acquire_union_id_from_render_config_id<'a>(
|
||||
primitive_meshes:&mut Vec<model::Mesh>,
|
||||
union_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RobloxPartDescription,model::MeshId>>,
|
||||
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
|
||||
old_union_id:model::MeshId,
|
||||
part_texture_description:RobloxPartDescription,
|
||||
)->Option<(model::MeshId,&'a Aabb)>{
|
||||
//ignore unions that fail to load completely for now
|
||||
loaded_meshes.get(&old_union_id).map(|union_with_aabb|(
|
||||
*union_id_from_render_config_id.entry(old_union_id).or_insert_with(||HashMap::new())
|
||||
.entry(part_texture_description.clone()).or_insert_with(||{
|
||||
let union_id=model::MeshId::new(primitive_meshes.len() as u32);
|
||||
let mut union_clone=union_with_aabb.mesh.clone();
|
||||
//set the render groups
|
||||
for (graphics_group,maybe_face_texture_description) in union_clone.graphics_groups.iter_mut().zip(part_texture_description){
|
||||
if let Some(face_texture_description)=maybe_face_texture_description{
|
||||
graphics_group.render=face_texture_description.render;
|
||||
}
|
||||
}
|
||||
primitive_meshes.push(union_clone);
|
||||
union_id
|
||||
}),
|
||||
&union_with_aabb.aabb,
|
||||
))
|
||||
}
|
||||
pub struct PartialMap1<'a>{
|
||||
pub struct PartialMap1{
|
||||
primitive_meshes:Vec<model::Mesh>,
|
||||
primitive_models_deferred_attributes:Vec<ModelDeferredAttributes<'a>>,
|
||||
deferred_models_deferred_attributes:Vec<DeferredModelDeferredAttributes<'a>>,
|
||||
deferred_unions_deferred_attributes:Vec<DeferredUnionDeferredAttributes<'a>>,
|
||||
primitive_models_deferred_attributes:Vec<ModelDeferredAttributes>,
|
||||
deferred_models_deferred_attributes:Vec<DeferredModelDeferredAttributes>,
|
||||
}
|
||||
impl PartialMap1<'_>{
|
||||
impl PartialMap1{
|
||||
pub fn add_meshpart_meshes_and_calculate_attributes(
|
||||
mut self,
|
||||
meshpart_meshes:Meshes,
|
||||
@ -899,21 +758,32 @@ impl PartialMap1<'_>{
|
||||
})
|
||||
}).collect();
|
||||
|
||||
// SAFETY: I have no idea what I'm doing and this is definitely unsound in some subtle way
|
||||
// I just want to chain iterators together man
|
||||
let aint_no_way=core::cell::UnsafeCell::new(&mut self.primitive_meshes);
|
||||
|
||||
let mut mesh_id_from_render_config_id=HashMap::new();
|
||||
let mut union_id_from_render_config_id=HashMap::new();
|
||||
//ignore meshes that fail to load completely for now
|
||||
let mut acquire_mesh_id_from_render_config_id=|old_mesh_id,render|{
|
||||
loaded_meshes.get(&old_mesh_id).map(|mesh_with_aabb|(
|
||||
*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
|
||||
.entry(render).or_insert_with(||{
|
||||
let mesh_id=model::MeshId::new(self.primitive_meshes.len() as u32);
|
||||
let mut mesh_clone=mesh_with_aabb.mesh.clone();
|
||||
//add a render group lool
|
||||
mesh_clone.graphics_groups.push(model::IndexedGraphicsGroup{
|
||||
render,
|
||||
//the lowest lod is highest quality
|
||||
groups:vec![model::PolygonGroupId::new(0)]
|
||||
});
|
||||
self.primitive_meshes.push(mesh_clone);
|
||||
mesh_id
|
||||
}),
|
||||
&mesh_with_aabb.aabb,
|
||||
))
|
||||
};
|
||||
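The UnsafeCell above exists only so two chained flat_map closures can both reach &mut primitive_meshes; a hedged, safe alternative (editor's sketch, not code from either commit) is to put the Vec behind a RefCell and take a runtime-checked borrow inside each closure:

use std::cell::RefCell;

fn chain_with_refcell(){
	//stand-in for primitive_meshes; u32 replaces model::Mesh to stay standalone
	let primitive_meshes:RefCell<Vec<u32>>=RefCell::new(Vec::new());
	let deferred=(0..3u32).map(|i|{primitive_meshes.borrow_mut().push(i);i});
	let unions=(3..6u32).map(|i|{primitive_meshes.borrow_mut().push(i);i});
	//both closures capture only a shared reference to the RefCell,
	//so the chained iterator compiles without unsafe
	let collected:Vec<u32>=deferred.chain(unions).collect();
	assert_eq!(collected.len(),6);
	assert_eq!(primitive_meshes.borrow().len(),6);
}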
//now that the meshes are loaded, these models can be generated
|
||||
let models_owned_attributes:Vec<ModelOwnedAttributes>=
|
||||
self.deferred_models_deferred_attributes.into_iter().flat_map(|deferred_model_deferred_attributes|{
|
||||
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
|
||||
//insert into primitive_meshes
|
||||
let (mesh,aabb)=acquire_mesh_id_from_render_config_id(
|
||||
unsafe{*aint_no_way.get()},
|
||||
&mut mesh_id_from_render_config_id,
|
||||
&loaded_meshes,
|
||||
deferred_model_deferred_attributes.model.mesh,
|
||||
deferred_model_deferred_attributes.render
|
||||
)?;
|
||||
@ -931,32 +801,7 @@ impl PartialMap1<'_>{
|
||||
deferred_model_deferred_attributes.model.transform.translation
|
||||
),
|
||||
})
|
||||
}).chain(self.deferred_unions_deferred_attributes.into_iter().flat_map(|deferred_union_deferred_attributes|{
|
||||
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
|
||||
//insert into primitive_meshes
|
||||
let (mesh,aabb)=acquire_union_id_from_render_config_id(
|
||||
unsafe{*aint_no_way.get()},
|
||||
&mut union_id_from_render_config_id,
|
||||
&loaded_meshes,
|
||||
deferred_union_deferred_attributes.model.mesh,
|
||||
deferred_union_deferred_attributes.render
|
||||
)?;
|
||||
let size=aabb.size();
|
||||
Some(ModelDeferredAttributes{
|
||||
mesh,
|
||||
deferred_attributes:deferred_union_deferred_attributes.model.deferred_attributes,
|
||||
color:deferred_union_deferred_attributes.model.color,
|
||||
transform:Planar64Affine3::new(
|
||||
Planar64Mat3::from_cols([
|
||||
(deferred_union_deferred_attributes.model.transform.matrix3.x_axis*2/size.x).divide().fix_1(),
|
||||
(deferred_union_deferred_attributes.model.transform.matrix3.y_axis*2/size.y).divide().fix_1(),
|
||||
(deferred_union_deferred_attributes.model.transform.matrix3.z_axis*2/size.z).divide().fix_1()
|
||||
]),
|
||||
deferred_union_deferred_attributes.model.transform.translation
|
||||
),
|
||||
})
|
||||
}))
|
||||
.chain(self.primitive_models_deferred_attributes.into_iter())
|
||||
}).chain(self.primitive_models_deferred_attributes.into_iter())
|
||||
.enumerate().map(|(model_id,model_deferred_attributes)|{
|
||||
let model_id=model::ModelId::new(model_id as u32);
|
||||
ModelOwnedAttributes{
|
||||
|
@ -1,177 +0,0 @@
|
||||
use rbx_mesh::mesh_data::NormalId2 as MeshDataNormalId2;
use strafesnet_common::model::{self,IndexedVertex,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId};
use strafesnet_common::integer::vec3;

#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Block,
MissingVertexId(u32),
Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
RobloxPhysicsData(rbx_mesh::physics_data::Error),
RobloxMeshData(rbx_mesh::mesh_data::Error),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}

// wacky state machine to make sure all vertices in a face agree upon what NormalId to use.
// Roblox duplicates this information per vertex when it should only exist per-face.
enum MeshDataNormalStatus{
Agree(MeshDataNormalId2),
Conflicting,
}
struct MeshDataNormalChecker{
status:Option<MeshDataNormalStatus>,
}
impl MeshDataNormalChecker{
fn new()->Self{
Self{status:None}
}
fn check(&mut self,normal:MeshDataNormalId2){
self.status=match self.status.take(){
None=>Some(MeshDataNormalStatus::Agree(normal)),
Some(MeshDataNormalStatus::Agree(old_normal))=>{
if old_normal==normal{
Some(MeshDataNormalStatus::Agree(old_normal))
}else{
Some(MeshDataNormalStatus::Conflicting)
}
},
Some(MeshDataNormalStatus::Conflicting)=>Some(MeshDataNormalStatus::Conflicting),
};
}
fn into_agreed_normal(self)->Option<MeshDataNormalId2>{
self.status.and_then(|status|match status{
MeshDataNormalStatus::Agree(normal)=>Some(normal),
MeshDataNormalStatus::Conflicting=>None,
})
}
}
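Not part of the diff: a minimal sketch of how the checker above is intended to be driven for one triangle; the three `NormalId2` values are hypothetical inputs.

```rust
// Illustrative only (not from the diff): feed the checker the three per-vertex
// NormalId2 values of one face; Some(id) comes back only if all three agree.
fn face_normal_id(vertex_normals:[MeshDataNormalId2;3])->Option<MeshDataNormalId2>{
	let mut checker=MeshDataNormalChecker::new();
	for normal in vertex_normals{
		checker.check(normal);
	}
	checker.into_agreed_normal()
}
```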
impl std::error::Error for Error{}
pub fn convert(
roblox_physics_data:&[u8],
roblox_mesh_data:&[u8],
size:glam::Vec3,
part_texture_description:crate::rbx::RobloxPartDescription,
)->Result<model::Mesh,Error>{
const NORMAL_FACES:usize=6;
let mut polygon_groups_normal_id=vec![Vec::new();NORMAL_FACES];

// build graphics and physics meshes
let mut mb=strafesnet_common::model::MeshBuilder::new();
// graphics
let graphics_groups=if !roblox_mesh_data.is_empty(){
// create per-face texture coordinate affine transforms
let cube_face_description=part_texture_description.map(|opt|opt.map(|mut t|{
t.transform.set_size(1.0,1.0);
t.to_face_description()
}));

let mesh_data=rbx_mesh::read_mesh_data_versioned(
std::io::Cursor::new(roblox_mesh_data)
).map_err(Error::RobloxMeshData)?;
let graphics_mesh=match mesh_data{
rbx_mesh::mesh_data::MeshData::CSGK(_)=>return Err(Error::Block),
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL2(mesh_data2))=>mesh_data2.mesh,
rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL4(mesh_data4))=>mesh_data4.mesh,
};
for [vertex_id0,vertex_id1,vertex_id2] in graphics_mesh.faces{
let face=[
graphics_mesh.vertices.get(vertex_id0.0 as usize).ok_or(Error::MissingVertexId(vertex_id0.0))?,
graphics_mesh.vertices.get(vertex_id1.0 as usize).ok_or(Error::MissingVertexId(vertex_id1.0))?,
graphics_mesh.vertices.get(vertex_id2.0 as usize).ok_or(Error::MissingVertexId(vertex_id2.0))?,
];
let mut normal_agreement_checker=MeshDataNormalChecker::new();
let face=face.into_iter().map(|vertex|{
normal_agreement_checker.check(vertex.normal_id);
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
let tex_coord=glam::Vec2::from_array(vertex.tex);
let maybe_face_description=&cube_face_description[vertex.normal_id as usize-1];
let (tex,color)=match maybe_face_description{
Some(face_description)=>{
// transform texture coordinates and set decal color
let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
let color=mb.acquire_color_id(face_description.color);
(tex,color)
},
None=>{
// texture coordinates don't matter and pass through mesh vertex color
let tex=mb.acquire_tex_id(tex_coord);
let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
(tex,color)
},
};
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
polygon_groups_normal_id[normal_id as usize-1].push(face);
}else{
panic!("Empty face!");
}
}
(0..NORMAL_FACES).map(|polygon_group_id|{
model::IndexedGraphicsGroup{
render:cube_face_description[polygon_group_id].as_ref().map_or(RenderConfigId::new(0),|face_description|face_description.render),
groups:vec![PolygonGroupId::new(polygon_group_id as u32)]
}
}).collect()
}else{
Vec::new()
};

//physics
let physics_convex_meshes=if !roblox_physics_data.is_empty(){
let physics_data=rbx_mesh::read_physics_data_versioned(
std::io::Cursor::new(roblox_physics_data)
).map_err(Error::RobloxPhysicsData)?;
let physics_convex_meshes=match physics_data{
rbx_mesh::physics_data::PhysicsData::CSGK(_)
// have not seen this format in practice
|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Block)
=>return Err(Error::Block),
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Meshes(meshes))
=>meshes.meshes,
rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::PhysicsInfoMesh(pim))
=>vec![pim.mesh],
};
physics_convex_meshes
}else{
Vec::new()
};
let polygon_groups:Vec<PolygonGroup>=polygon_groups_normal_id.into_iter().map(|faces|
// graphics polygon groups (to be rendered)
Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
).chain(physics_convex_meshes.into_iter().map(|mesh|{
// this can be factored out of the loop but I am lazy
let color=mb.acquire_color_id(glam::Vec4::ONE);
let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
// physics polygon groups (to do physics)
Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[vertex_id0,vertex_id1,vertex_id2]|{
let face=[
mesh.vertices.get(vertex_id0.0 as usize).ok_or(Error::MissingVertexId(vertex_id0.0))?,
mesh.vertices.get(vertex_id1.0 as usize).ok_or(Error::MissingVertexId(vertex_id1.0))?,
mesh.vertices.get(vertex_id2.0 as usize).ok_or(Error::MissingVertexId(vertex_id2.0))?,
].map(|v|glam::Vec3::from_slice(v)/size);
let vertex_norm=(face[1]-face[0])
.cross(face[2]-face[0]);
let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
face.into_iter().map(|vertex_pos|{
let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
}).collect()
}).collect::<Result<_,_>>()?)))
})).collect::<Result<_,_>>()?;
let physics_groups=(NORMAL_FACES..polygon_groups.len()).map(|id|model::IndexedPhysicsGroup{
groups:vec![PolygonGroupId::new(id as u32)]
}).collect();
Ok(mb.build(
polygon_groups,
graphics_groups,
physics_groups,
))
}
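Not part of the diff: a hypothetical caller of the removed converter, assuming the physics/mesh byte blobs and part description come from a Roblox UnionOperation; the function name and the example size are illustrative only.

```rust
// Hypothetical usage sketch, not from the diff: the blobs are the raw
// PhysicsData/MeshData of a UnionOperation, `size` is the part size used to
// normalize the physics vertices.
fn convert_union_example(
	physics_data:&[u8],
	mesh_data:&[u8],
	part_description:crate::rbx::RobloxPartDescription,
)->Result<model::Mesh,Error>{
	let size=glam::Vec3::new(4.0,1.0,2.0);
	convert(physics_data,mesh_data,size,part_description)
}
```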
@ -2,10 +2,6 @@
name = "rbxassetid"
version = "0.1.0"
edition = "2021"
repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0"
description = "Parse Roblox asset id from 'Content' urls."
authors = ["Rhys Lloyd <krakow20@gmail.com>"]

[dependencies]
url = "2.5.4"
@ -1,176 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
@ -1,23 +0,0 @@
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
@ -1,26 +0,0 @@
Roblox Asset Id
===============

## Example

```rust
use rbxassetid::RobloxAssetId;

let content="rbxassetid://255299419";
let RobloxAssetId(asset_id)=content.parse()?;
```

#### License

<sup>
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
</sup>

<br>

<sub>
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.
</sub>
@ -33,9 +33,3 @@ impl std::str::FromStr for RobloxAssetId{
Ok(Self(parsed_asset_id.map_err(RobloxAssetIdParseErr::ParseInt)?))
}
}

#[test]
fn test_rbxassetid(){
let content="rbxassetid://255299419";
let RobloxAssetId(_asset_id)=content.parse().unwrap();
}
@ -1,6 +1,6 @@
[package]
name = "strafesnet_snf"
version = "0.3.0"
version = "0.2.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -8,4 +8,4 @@ edition = "2021"
[dependencies]
binrw = "0.14.0"
id = { version = "0.1.0", registry = "strafesnet" }
strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
strafesnet_common = { path = "../common", registry = "strafesnet" }
@ -6,7 +6,7 @@ use strafesnet_common::physics::Time;

const VERSION:u32=0;

type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::TimeInner>;

#[derive(Debug)]
pub enum Error{
@ -6,7 +6,7 @@ use crate::file::BlockId;
use binrw::{binrw,BinReaderExt,BinWriterExt};
use strafesnet_common::model;
use strafesnet_common::aabb::Aabb;
use strafesnet_common::bvh::{BvhNode,RecursiveContent};
use strafesnet_common::bvh::BvhNode;
use strafesnet_common::gameplay_modes;

#[derive(Debug)]
@ -233,7 +233,7 @@ impl<R:BinReaderExt> StreamableMap<R>{
}
pub fn get_intersecting_region_block_ids(&self,aabb:&Aabb)->Vec<BlockId>{
let mut block_ids=Vec::new();
self.bvh.sample_aabb(aabb,&mut |&block_id|block_ids.push(block_id));
self.bvh.the_tester(aabb,&mut |&block_id|block_ids.push(block_id));
block_ids
}
pub fn load_region(&mut self,block_id:BlockId)->Result<Vec<(model::ModelId,model::Model)>,Error>{
@ -287,60 +287,12 @@ impl<R:BinReaderExt> StreamableMap<R>{
}
}

// silly redefinition of Bvh for determining the size of subnodes
// without duplicating work by running weight calculation recursion top down on every node
pub struct BvhWeightNode<W,T>{
content:RecursiveContent<BvhWeightNode<W,T>,T>,
weight:W,
aabb:Aabb,
}
impl <W,T> BvhWeightNode<W,T>{
pub const fn weight(&self)->&W{
&self.weight
}
pub const fn aabb(&self)->&Aabb{
&self.aabb
}
pub fn into_content(self)->RecursiveContent<BvhWeightNode<W,T>,T>{
self.content
}
pub fn into_visitor<F:FnMut(T)>(self,f:&mut F){
match self.content{
RecursiveContent::Leaf(model)=>f(model),
RecursiveContent::Branch(children)=>for child in children{
child.into_visitor(f)
},
}
}
}

pub fn weigh_contents<T,W:Copy+std::iter::Sum<W>,F:Fn(&T)->W>(node:BvhNode<T>,f:&F)->BvhWeightNode<W,T>{
let (content,aabb)=node.into_inner();
match content{
RecursiveContent::Leaf(model)=>BvhWeightNode{
weight:f(&model),
content:RecursiveContent::Leaf(model),
aabb,
},
RecursiveContent::Branch(children)=>{
let branch:Vec<BvhWeightNode<W,T>>=children.into_iter().map(|child|
weigh_contents(child,f)
).collect();
BvhWeightNode{
weight:branch.iter().map(|node|node.weight).sum(),
content:RecursiveContent::Branch(branch),
aabb,
}
},
}
}

const BVH_NODE_MAX_WEIGHT:usize=64*1024;//64 kB
fn collect_spacial_blocks(
block_location:&mut Vec<u64>,
block_headers:&mut Vec<SpacialBlockHeader>,
sequential_block_data:&mut std::io::Cursor<&mut Vec<u8>>,
bvh_node:BvhWeightNode<usize,(model::ModelId,newtypes::model::Model)>
bvh_node:strafesnet_common::bvh::BvhWeightNode<usize,(model::ModelId,newtypes::model::Model)>
)->Result<(),Error>{
//inspect the node weights top-down.
//When a node weighs less than the limit,
@ -390,7 +342,7 @@ pub fn write_map<W:BinWriterExt>(mut writer:W,map:strafesnet_common::map::Comple
}
Ok(((model::ModelId::new(model_id as u32),model.into()),aabb))
}).collect::<Result<Vec<_>,_>>()?;
let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|std::mem::size_of::<newtypes::model::Model>());
let bvh=strafesnet_common::bvh::generate_bvh(boxen).weigh_contents(&|_|std::mem::size_of::<newtypes::model::Model>());
//build blocks
//block location is initialized with two values
//the first value represents the location of the first byte after the file header
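Not part of the diff hunks above: a rough sketch of the relocated weighing API, assuming the `BvhWeightNode` moved into `strafesnet_common::bvh` keeps the `weight()` and `into_visitor()` methods shown in the removed code.

```rust
// Sketch only: weigh each subtree once, bottom-up, then consume the leaves.
let weighted=strafesnet_common::bvh::generate_bvh(boxen)
	.weigh_contents(&|_|std::mem::size_of::<newtypes::model::Model>());
println!("total serialized size estimate: {} bytes",weighted.weight());
weighted.into_visitor(&mut |(model_id,_model)|{
	// each leaf is a (ModelId, Model) pair collected bottom-up
	let _=model_id;
});
```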
@ -1,7 +1,7 @@
use super::integer::Time;
use super::common::{bool_from_u8,bool_into_u8};

type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::TimeInner>;

#[binrw::binrw]
#[brw(little)]
@ -1,37 +0,0 @@
[package]
name = "map-tool"
version = "1.7.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"] }
flate2 = "1.0.27"
futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
lazy-regex = "3.1.0"
rbx_asset = { version = "0.2.5", registry = "strafesnet" }
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.0", path = "../lib/bsp_loader", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", path = "../lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.6.0", path = "../lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.0", path = "../lib/snf", registry = "strafesnet" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
vbsp = { version = "0.7.0-codegen1", registry = "strafesnet" }
vmdl = "0.2.0"
vmt-parser = "0.2.0"
vpk = "0.2.0"
vtf = "0.3.0"

#[profile.release]
#lto = true
#strip = true
#codegen-units = 1
@ -1,23 +0,0 @@
@ -1,2 +0,0 @@
# map-tool

@ -1,30 +0,0 @@
mod roblox;
mod source;

use clap::{Parser,Subcommand};
use anyhow::Result as AResult;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Cli {
#[command(subcommand)]
command: Commands,
}

#[derive(Subcommand)]
enum Commands{
#[command(flatten)]
Roblox(roblox::Commands),
#[command(flatten)]
Source(source::Commands),
}

#[tokio::main]
async fn main()->AResult<()>{
let cli=Cli::parse();
match cli.command{
Commands::Roblox(commands)=>commands.run().await,
Commands::Source(commands)=>commands.run().await,
}
}
@ -1,431 +0,0 @@
use std::path::{Path,PathBuf};
use std::io::{Cursor,Read,Seek};
use std::collections::HashSet;
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use rbx_dom_weak::Instance;
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
use rbxassetid::RobloxAssetId;
use tokio::io::AsyncReadExt;

const DOWNLOAD_LIMIT:usize=16;

#[derive(Subcommand)]
pub enum Commands{
RobloxToSNF(RobloxToSNFSubcommand),
DownloadAssets(DownloadAssetsSubcommand),
}

#[derive(Args)]
pub struct RobloxToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
}
#[derive(Args)]
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
// #[arg(long)]
// cookie_file:Option<String>,
}

impl Commands{
pub async fn run(self)->AResult<()>{
match self{
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
Commands::DownloadAssets(subcommand)=>download_assets(
subcommand.roblox_files,
rbx_asset::cookie::Cookie::new("".to_string()),
).await,
}
}
}

#[allow(unused)]
#[derive(Debug)]
enum LoadDomError{
IO(std::io::Error),
Binary(rbx_binary::DecodeError),
Xml(rbx_xml::DecodeError),
UnknownFormat,
}
fn load_dom<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{
let mut first_8=[0u8;8];
input.read_exact(&mut first_8).map_err(LoadDomError::IO)?;
input.rewind().map_err(LoadDomError::IO)?;
match &first_8{
b"<roblox!"=>rbx_binary::from_reader(input).map_err(LoadDomError::Binary),
b"<roblox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(LoadDomError::Xml),
_=>Err(LoadDomError::UnknownFormat),
}
}

/* The ones I'm interested in:
Beam.Texture
Decal.Texture
FileMesh.MeshId
FileMesh.TextureId
MaterialVariant.ColorMap
MaterialVariant.MetalnessMap
MaterialVariant.NormalMap
MaterialVariant.RoughnessMap
MeshPart.MeshId
MeshPart.TextureID
ParticleEmitter.Texture
Sky.MoonTextureId
Sky.SkyboxBk
Sky.SkyboxDn
Sky.SkyboxFt
Sky.SkyboxLf
Sky.SkyboxRt
Sky.SkyboxUp
Sky.SunTextureId
SurfaceAppearance.ColorMap
SurfaceAppearance.MetalnessMap
SurfaceAppearance.NormalMap
SurfaceAppearance.RoughnessMap
SurfaceAppearance.TexturePack
*/
fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&str){
if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
let url:&str=content.as_ref();
if let Ok(asset_id)=url.parse(){
content_list.insert(asset_id);
}else{
println!("Content failed to parse into AssetID: {:?}",content);
}
}else{
println!("property={} does not exist for class={}",property,object.class.as_str());
}
}
async fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let mut file=tokio::fs::File::open(path).await?;
let mut data=Vec::new();
file.read_to_end(&mut data).await?;
Ok(Cursor::new(data))
}
#[derive(Default)]
struct UniqueAssets{
meshes:HashSet<RobloxAssetId>,
unions:HashSet<RobloxAssetId>,
textures:HashSet<RobloxAssetId>,
}
impl UniqueAssets{
fn collect(&mut self,object:&Instance){
match object.class.as_str(){
"Beam"=>accumulate_content_id(&mut self.textures,object,"Texture"),
"Decal"=>accumulate_content_id(&mut self.textures,object,"Texture"),
"Texture"=>accumulate_content_id(&mut self.textures,object,"Texture"),
"FileMesh"=>accumulate_content_id(&mut self.textures,object,"TextureId"),
"MeshPart"=>{
accumulate_content_id(&mut self.textures,object,"TextureID");
accumulate_content_id(&mut self.meshes,object,"MeshId");
},
"SpecialMesh"=>accumulate_content_id(&mut self.meshes,object,"MeshId"),
"ParticleEmitter"=>accumulate_content_id(&mut self.textures,object,"Texture"),
"Sky"=>{
accumulate_content_id(&mut self.textures,object,"MoonTextureId");
accumulate_content_id(&mut self.textures,object,"SkyboxBk");
accumulate_content_id(&mut self.textures,object,"SkyboxDn");
accumulate_content_id(&mut self.textures,object,"SkyboxFt");
accumulate_content_id(&mut self.textures,object,"SkyboxLf");
accumulate_content_id(&mut self.textures,object,"SkyboxRt");
accumulate_content_id(&mut self.textures,object,"SkyboxUp");
accumulate_content_id(&mut self.textures,object,"SunTextureId");
},
"UnionOperation"=>accumulate_content_id(&mut self.unions,object,"AssetId"),
_=>(),
}
}
}

#[allow(unused)]
#[derive(Debug)]
enum UniqueAssetError{
IO(std::io::Error),
LoadDom(LoadDomError),
}
async fn unique_assets(path:&Path)->Result<UniqueAssets,UniqueAssetError>{
// read entire file
let mut assets=UniqueAssets::default();
let data=read_entire_file(path).await.map_err(UniqueAssetError::IO)?;
let dom=load_dom(data).map_err(UniqueAssetError::LoadDom)?;
for object in dom.into_raw().1.into_values(){
assets.collect(&object);
}
Ok(assets)
}
enum DownloadType{
Texture(RobloxAssetId),
Mesh(RobloxAssetId),
Union(RobloxAssetId),
}
impl DownloadType{
fn path(&self)->PathBuf{
match self{
DownloadType::Texture(asset_id)=>format!("downloaded_textures/{}",asset_id.0.to_string()).into(),
DownloadType::Mesh(asset_id)=>format!("meshes/{}",asset_id.0.to_string()).into(),
DownloadType::Union(asset_id)=>format!("unions/{}",asset_id.0.to_string()).into(),
}
}
fn asset_id(&self)->u64{
match self{
DownloadType::Texture(asset_id)=>asset_id.0,
DownloadType::Mesh(asset_id)=>asset_id.0,
DownloadType::Union(asset_id)=>asset_id.0,
}
}
}
enum DownloadResult{
Cached(PathBuf),
Data(Vec<u8>),
Failed,
}
#[derive(Default,Debug)]
struct Stats{
total_assets:u32,
cached_assets:u32,
downloaded_assets:u32,
failed_downloads:u32,
timed_out_downloads:u32,
}
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::CookieContext,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
stats.total_assets+=1;
let download_instruction=download_instruction;
// check if file exists on disk
let path=download_instruction.path();
if tokio::fs::try_exists(path.as_path()).await?{
stats.cached_assets+=1;
return Ok(DownloadResult::Cached(path));
}
let asset_id=download_instruction.asset_id();
// if not, download file
let mut retry=0;
const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
let mut backoff=1000f32;
loop{
let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{
asset_id,
version:None,
}).await;
match asset_result{
Ok(asset_result)=>{
stats.downloaded_assets+=1;
tokio::fs::write(path,&asset_result).await?;
break Ok(DownloadResult::Data(asset_result));
},
Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
if scwuab.status_code.as_u16()==429{
if retry==12{
println!("Giving up asset download {asset_id}");
stats.timed_out_downloads+=1;
break Ok(DownloadResult::Failed);
}
println!("Hit roblox rate limit, waiting {:.0}ms...",backoff);
tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
backoff*=BACKOFF_MUL;
retry+=1;
}else{
stats.failed_downloads+=1;
println!("weird scuwab error: {scwuab:?}");
break Ok(DownloadResult::Failed);
}
},
Err(e)=>{
stats.failed_downloads+=1;
println!("sadly error: {e}");
break Ok(DownloadResult::Failed);
},
}
}
}
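Not part of the diff: the retry schedule above as a standalone calculation. With `BACKOFF_MUL = exp(1/3)` the wait roughly triples every three retries, starting from 1000 ms, and the loop gives up after retry 12.

```rust
// Illustrative sketch only: the nth rate-limit wait used by download_retry.
fn nth_backoff_ms(n:u32)->f32{
	// 1000 * exp(n/3): 1000ms, ~1396ms, ~1948ms, ~2718ms, ...
	1000f32*1.3956124f32.powi(n as i32)
}
```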
#[derive(Debug,thiserror::Error)]
enum ConvertTextureError{
#[error("Io error {0:?}")]
Io(#[from]std::io::Error),
#[error("Image error {0:?}")]
Image(#[from]image::ImageError),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
async fn convert_texture(asset_id:RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
let data=match download_result{
DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
DownloadResult::Data(data)=>data,
DownloadResult::Failed=>return Ok(()),
};
// image::ImageFormat::Png
// image::ImageFormat::Jpeg
let image=image::load_from_memory(&data)?.to_rgba8();

// pick format
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};

//this fails if the image dimensions are not a multiple of 4
let dds=image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;

let file_name=format!("textures/{}.dds",asset_id.0);
let mut file=std::fs::File::create(file_name)?;
dds.write(&mut file)?;
Ok(())
}
async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{
tokio::try_join!(
tokio::fs::create_dir_all("downloaded_textures"),
tokio::fs::create_dir_all("textures"),
tokio::fs::create_dir_all("meshes"),
tokio::fs::create_dir_all("unions"),
)?;
// use mpsc
let thread_limit=std::thread::available_parallelism()?.get();
let (send_assets,mut recv_assets)=tokio::sync::mpsc::channel(DOWNLOAD_LIMIT);
let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
// map decode dispatcher
// read files multithreaded
// produce UniqueAssetsResult per file
tokio::spawn(async move{
// move send so it gets dropped when all maps have been decoded
// closing the channel
let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let send=send_assets.clone();
tokio::spawn(async move{
let result=unique_assets(path.as_path()).await;
_=send.send(result).await;
drop(permit);
});
}
});
// download manager
// insert into global unique assets guy
// add to download queue if the asset is globally unique and does not already exist on disk
let mut stats=Stats::default();
let context=rbx_asset::cookie::CookieContext::new(cookie);
let mut globally_unique_assets=UniqueAssets::default();
// pop a job = retry_queue.pop_front() or ingest(recv.recv().await)
// SLOW MODE:
// acquire all permits
// drop all permits
// pop one job
// if it succeeds go into fast mode
// FAST MODE:
// acquire one permit
// pop a job
let download_thread=tokio::spawn(async move{
while let Some(result)=recv_assets.recv().await{
let unique_assets=match result{
Ok(unique_assets)=>unique_assets,
Err(e)=>{
println!("error: {e:?}");
continue;
},
};
for texture_id in unique_assets.textures{
if globally_unique_assets.textures.insert(texture_id){
let data=download_retry(&mut stats,&context,DownloadType::Texture(texture_id)).await?;
send_texture.send((texture_id,data)).await?;
}
}
for mesh_id in unique_assets.meshes{
if globally_unique_assets.meshes.insert(mesh_id){
download_retry(&mut stats,&context,DownloadType::Mesh(mesh_id)).await?;
}
}
for union_id in unique_assets.unions{
if globally_unique_assets.unions.insert(union_id){
download_retry(&mut stats,&context,DownloadType::Union(union_id)).await?;
}
}
}
dbg!(stats);
Ok::<(),anyhow::Error>(())
});
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){
tokio::spawn(async move{
let result=convert_texture(asset_id,download_result).await;
drop(permit);
result.unwrap();
});
}
download_thread.await??;
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
Ok(())
}

#[derive(Debug)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
RobloxRead(strafesnet_rbx_loader::ReadError),
RobloxLoad(strafesnet_rbx_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;

let model=strafesnet_rbx_loader::read(
std::io::Cursor::new(entire_file)
).map_err(ConvertError::RobloxRead)?;

let mut place=model.into_place();
place.run_scripts();

let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;

let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;

strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;

Ok(())
}

async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now();

let thread_limit=std::thread::available_parallelism()?.get();
let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);

while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),output_folder).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
_=SEM.acquire_many(thread_limit as u32).await.unwrap();

println!("elapsed={:?}", start.elapsed());
Ok(())
}
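Not part of the diff: both `download_assets` and `roblox_to_snf` above bound their fan-out with a static `Semaphore` rather than a `JoinSet`. A minimal standalone sketch of that pattern, with a hypothetical `work` function standing in for `unique_assets`/`convert_to_snf`:

```rust
// Hypothetical per-file job; placeholder for the real work done per path.
async fn work(path:std::path::PathBuf){
	let _=path;
}
// Minimal sketch of the bounded fan-out used above.
async fn fan_out(paths:Vec<std::path::PathBuf>,thread_limit:usize){
	static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
	SEM.add_permits(thread_limit);
	for path in paths{
		// at most thread_limit tasks hold a permit at once
		let permit=SEM.acquire().await.unwrap();
		tokio::spawn(async move{
			work(path).await;
			drop(permit);
		});
	}
	// reclaim every permit to wait for the last tasks to finish
	_=SEM.acquire_many(thread_limit as u32).await.unwrap();
}
```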
@ -1,464 +0,0 @@
|
||||
use std::path::{Path,PathBuf};
|
||||
use std::borrow::Cow;
|
||||
use clap::{Args,Subcommand};
|
||||
use anyhow::Result as AResult;
|
||||
use futures::StreamExt;
|
||||
use strafesnet_bsp_loader::loader::BspFinder;
|
||||
use strafesnet_deferred_loader::loader::Loader;
|
||||
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum Commands{
|
||||
SourceToSNF(SourceToSNFSubcommand),
|
||||
ExtractTextures(ExtractTexturesSubcommand),
|
||||
VPKContents(VPKContentsSubcommand),
|
||||
BSPContents(BSPContentsSubcommand),
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct SourceToSNFSubcommand {
|
||||
#[arg(long)]
|
||||
output_folder:PathBuf,
|
||||
#[arg(required=true)]
|
||||
input_files:Vec<PathBuf>,
|
||||
#[arg(long)]
|
||||
vpks:Vec<PathBuf>,
|
||||
}
|
||||
#[derive(Args)]
|
||||
pub struct ExtractTexturesSubcommand{
|
||||
#[arg(required=true)]
|
||||
bsp_files:Vec<PathBuf>,
|
||||
#[arg(long)]
|
||||
vpks:Vec<PathBuf>,
|
||||
}
|
||||
#[derive(Args)]
|
||||
pub struct VPKContentsSubcommand {
|
||||
#[arg(long)]
|
||||
input_file:PathBuf,
|
||||
}
|
||||
#[derive(Args)]
|
||||
pub struct BSPContentsSubcommand {
|
||||
#[arg(long)]
|
||||
input_file:PathBuf,
|
||||
}
|
||||
|
||||
impl Commands{
|
||||
pub async fn run(self)->AResult<()>{
|
||||
match self{
|
||||
Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder,subcommand.vpks).await,
|
||||
Commands::ExtractTextures(subcommand)=>extract_textures(subcommand.bsp_files,subcommand.vpks).await,
|
||||
Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
|
||||
Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
enum VMTContent{
|
||||
VMT(String),
|
||||
VTF(String),
|
||||
Patch(vmt_parser::material::PatchMaterial),
|
||||
Unsupported,//don't want to deal with whatever vmt variant
|
||||
Unresolved,//could not locate a texture because of vmt content
|
||||
}
|
||||
impl VMTContent{
|
||||
fn vtf(opt:Option<String>)->Self{
|
||||
match opt{
|
||||
Some(s)=>Self::VTF(s),
|
||||
None=>Self::Unresolved,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_some_texture(material:vmt_parser::material::Material)->VMTContent{
|
||||
//just grab some texture from somewhere for now
|
||||
match material{
|
||||
vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||
vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
|
||||
vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
|
||||
vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
|
||||
vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
|
||||
vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
|
||||
vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||
vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
|
||||
vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
|
||||
vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||
vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
|
||||
vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||
vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
|
||||
vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||
vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||
vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||
vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
|
||||
vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
|
||||
_=>unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug,thiserror::Error)]
|
||||
enum GetVMTError{
|
||||
#[error("Bsp error {0:?}")]
|
||||
Bsp(#[from]vbsp::BspError),
|
||||
#[error("Utf8 error {0:?}")]
|
||||
Utf8(#[from]std::str::Utf8Error),
|
||||
#[error("Vdf error {0:?}")]
|
||||
Vdf(#[from]vmt_parser::VdfError),
|
||||
#[error("Vmt not found")]
|
||||
NotFound,
|
||||
}
|
||||
|
||||
fn get_vmt(finder:BspFinder,search_name:&str)->Result<vmt_parser::material::Material,GetVMTError>{
|
||||
let vmt_data=finder.find(search_name)?.ok_or(GetVMTError::NotFound)?;
|
||||
//decode vmt and then write
|
||||
let vmt_str=core::str::from_utf8(&vmt_data)?;
|
||||
let material=vmt_parser::from_str(vmt_str)?;
|
||||
//println!("vmt material={:?}",material);
|
||||
Ok(material)
|
||||
}
|
||||
|
||||
#[derive(Debug,thiserror::Error)]
|
||||
enum LoadVMTError{
|
||||
#[error("Bsp error {0:?}")]
|
||||
Bsp(#[from]vbsp::BspError),
|
||||
#[error("GetVMT error {0:?}")]
|
||||
GetVMT(#[from]GetVMTError),
|
||||
#[error("FromUtf8 error {0:?}")]
|
||||
FromUtf8(#[from]std::string::FromUtf8Error),
|
||||
#[error("Vdf error {0:?}")]
|
||||
Vdf(#[from]vmt_parser::VdfError),
|
||||
#[error("Vmt unsupported")]
|
||||
Unsupported,
|
||||
#[error("Vmt unresolved")]
|
||||
Unresolved,
|
||||
#[error("Vmt not found")]
|
||||
NotFound,
|
||||
}
|
||||
fn recursive_vmt_loader<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,material:vmt_parser::material::Material)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
|
||||
where
|
||||
'bsp:'a,
|
||||
'vpk:'a,
|
||||
{
|
||||
match get_some_texture(material){
|
||||
VMTContent::VMT(mut s)=>{
|
||||
s.make_ascii_lowercase();
|
||||
recursive_vmt_loader(finder,get_vmt(finder,&s)?)
|
||||
},
|
||||
VMTContent::VTF(s)=>{
|
||||
let mut texture_file_name=PathBuf::from("materials");
|
||||
texture_file_name.push(s);
|
||||
texture_file_name.set_extension("vtf");
|
||||
texture_file_name.as_mut_os_str().make_ascii_lowercase();
|
||||
Ok(finder.find(texture_file_name.to_str().unwrap())?)
|
||||
},
|
||||
VMTContent::Patch(mat)=>recursive_vmt_loader(finder,
|
||||
mat.resolve(|search_name|{
|
||||
let name_lowercase=search_name.to_lowercase();
|
||||
match finder.find(&name_lowercase)?{
|
||||
Some(bytes)=>Ok(String::from_utf8(bytes.into_owned())?),
|
||||
None=>Err(LoadVMTError::NotFound),
|
||||
}
|
||||
})?
|
||||
),
|
||||
VMTContent::Unsupported=>Err(LoadVMTError::Unsupported),
|
||||
VMTContent::Unresolved=>Err(LoadVMTError::Unresolved),
|
||||
}
|
||||
}
|
||||
fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
|
||||
where
|
||||
'bsp:'a,
|
||||
'vpk:'a,
|
||||
{
|
||||
let mut texture_file_name=PathBuf::from("materials");
|
||||
//lower case
|
||||
texture_file_name.push(texture_name);
|
||||
texture_file_name.as_mut_os_str().make_ascii_lowercase();
|
||||
//remove stem and search for both vtf and vmt files
|
||||
let stem=texture_file_name.file_stem().unwrap().to_owned();
|
||||
texture_file_name.pop();
|
||||
texture_file_name.push(stem);
|
||||
if let Some(stuff)=finder.find(texture_file_name.to_str().unwrap())?{
|
||||
return Ok(Some(stuff));
|
||||
}
|
||||
|
||||
// search for both vmt,vtf
|
||||
let mut texture_file_name_vmt=texture_file_name.clone();
|
||||
texture_file_name_vmt.set_extension("vmt");
|
||||
|
||||
let get_vmt_result=get_vmt(finder,texture_file_name_vmt.to_str().unwrap());
|
||||
match get_vmt_result{
|
||||
Ok(material)=>{
|
||||
let vmt_result=recursive_vmt_loader(finder,material);
|
||||
match vmt_result{
|
||||
Ok(Some(stuff))=>return Ok(Some(stuff)),
|
||||
Ok(None)
|
||||
|Err(LoadVMTError::NotFound)=>(),
|
||||
|Err(LoadVMTError::GetVMT(GetVMTError::NotFound))=>(),
|
||||
Err(e)=>return Err(e),
|
||||
}
|
||||
}
|
||||
|Err(GetVMTError::NotFound)=>(),
|
||||
Err(e)=>Err(e)?,
|
||||
}
|
||||
|
||||
// try looking for vtf
|
||||
let mut texture_file_name_vtf=texture_file_name.clone();
|
||||
texture_file_name_vtf.set_extension("vtf");
|
||||
|
||||
let get_vtf_result=get_vmt(finder,texture_file_name_vtf.to_str().unwrap());
|
||||
match get_vtf_result{
|
||||
Ok(material)=>{
|
||||
let vtf_result=recursive_vmt_loader(finder,material);
|
||||
match vtf_result{
|
||||
Ok(Some(stuff))=>return Ok(Some(stuff)),
|
||||
Ok(None)
|
||||
|Err(LoadVMTError::NotFound)=>(),
|
||||
|Err(LoadVMTError::GetVMT(GetVMTError::NotFound))=>(),
|
||||
Err(e)=>return Err(e),
|
||||
}
|
||||
}
|
||||
|Err(GetVMTError::NotFound)=>(),
|
||||
Err(e)=>Err(e)?,
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
#[derive(Debug,thiserror::Error)]
|
||||
enum ExtractTextureError{
|
||||
#[error("Io error {0:?}")]
|
||||
Io(#[from]std::io::Error),
|
||||
#[error("Bsp error {0:?}")]
|
||||
Bsp(#[from]vbsp::BspError),
|
||||
#[error("MeshLoad error {0:?}")]
|
||||
MeshLoad(#[from]strafesnet_bsp_loader::loader::MeshError),
|
||||
#[error("Load VMT error {0:?}")]
|
||||
LoadVMT(#[from]LoadVMTError),
|
||||
}
|
||||
async fn gimme_them_textures(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{
|
||||
let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?;
|
||||
let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp);
|
||||
let bsp=loader_bsp.as_ref();
|
||||
|
||||
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
|
||||
for texture in bsp.textures(){
|
||||
texture_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(texture.name())));
|
||||
}
|
||||
|
||||
let mut mesh_deferred_loader=MeshDeferredLoader::new();
|
||||
for prop in bsp.static_props(){
|
||||
mesh_deferred_loader.acquire_mesh_id(prop.model());
|
||||
}
|
||||
|
||||
let finder=BspFinder{
|
||||
bsp:&loader_bsp,
|
||||
vpks:vpk_list
|
||||
};
|
||||
|
||||
let mut mesh_loader=strafesnet_bsp_loader::loader::ModelLoader::new(finder);
|
||||
// load models and collect requested textures
|
||||
for model_path in mesh_deferred_loader.into_indices(){
|
||||
let model:vmdl::Model=match mesh_loader.load(model_path){
|
||||
Ok(model)=>model,
|
||||
Err(e)=>{
|
||||
println!("Model={model_path} Load model error: {e}");
|
||||
continue;
|
||||
},
|
||||
};
|
||||
for texture in model.textures(){
|
||||
for search_path in &texture.search_paths{
|
||||
let mut path=PathBuf::from(search_path.as_str());
|
||||
path.push(texture.name.as_str());
|
||||
let path=path.to_str().unwrap().to_owned();
|
||||
texture_deferred_loader.acquire_render_config_id(Some(Cow::Owned(path)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for texture_path in texture_deferred_loader.into_indices(){
|
||||
match load_texture(finder,&texture_path){
|
||||
Ok(Some(texture))=>send_texture.send(
|
||||
(texture.into_owned(),texture_path.into_owned())
|
||||
).await.unwrap(),
|
||||
Ok(None)=>(),
|
||||
Err(e)=>println!("Texture={texture_path} Load error: {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}


#[derive(Debug,thiserror::Error)]
enum ConvertTextureError{
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("Vtf error {0:?}")]
Vtf(#[from]vtf::Error),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
#[error("Io error {0:?}")]
Io(#[from]std::io::Error),
}

async fn convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<Path>)->Result<(),ConvertTextureError>{
let image=vtf::from_bytes(&texture)?.highres_image.decode(0)?.to_rgba8();

let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
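// BC7 is block-compressed in 4x4 texel blocks, so images whose dimensions are not multiples of 4 fall back to uncompressed RGBA8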
//this fails if the image dimensions are not a multiple of 4
let dds = image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;

//write dds
let mut dest=PathBuf::from("textures");
dest.push(write_file_name);
dest.set_extension("dds");
std::fs::create_dir_all(dest.parent().unwrap())?;
let mut writer=std::io::BufWriter::new(std::fs::File::create(dest)?);
dds.write(&mut writer)?;

Ok(())
}

async fn read_vpks(vpk_paths:Vec<PathBuf>,thread_limit:usize)->Vec<strafesnet_bsp_loader::Vpk>{
futures::stream::iter(vpk_paths).map(|vpk_path|async{
// idk why it doesn't want to pass out the errors but this is fatal anyways
tokio::task::spawn_blocking(move||Ok::<_,vpk::Error>(strafesnet_bsp_loader::Vpk::new(vpk::VPK::read(&vpk_path)?))).await.unwrap().unwrap()
})
.buffer_unordered(thread_limit)
.collect().await
}

async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
tokio::try_join!(
tokio::fs::create_dir_all("extracted_textures"),
tokio::fs::create_dir_all("textures"),
tokio::fs::create_dir_all("meshes"),
)?;
let thread_limit=std::thread::available_parallelism()?.get();

// load vpk list and leak for static lifetime
let vpk_list:&[strafesnet_bsp_loader::Vpk]=read_vpks(vpk_paths,thread_limit).await.leak();
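// the leaked slice has a 'static lifetime, so it can be captured by the tokio::spawn'd tasks below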

let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
let mut it=paths.into_iter();
let extract_thread=tokio::spawn(async move{
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
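// the semaphore starts at zero permits because a static initializer must be const;
// adding thread_limit permits at runtime caps how many maps are extracted concurrently,
// and the loop below waits for a free permit before pulling the next path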
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let send=send_texture.clone();
tokio::spawn(async move{
let result=gimme_them_textures(&path,vpk_list,send).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Map={path:?} Decode error: {e:?}"),
}
});
}
});

// convert images
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){
// TODO: dedup dest?
tokio::spawn(async move{
let result=convert_texture(data,dest).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
extract_thread.await?;
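// taking back every permit waits for all in-flight conversion tasks to drop theirs before returning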
_=SEM.acquire_many(thread_limit as u32).await?;
Ok(())
}

fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
let vpk_index=vpk::VPK::read(&vpk_path)?;
for (label,entry) in vpk_index.tree.into_iter(){
println!("vpk label={} entry={:?}",label,entry);
}
Ok(())
}

fn bsp_contents(path:PathBuf)->AResult<()>{
let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
println!("file_name={:?}",file_name);
}
Ok(())
}

#[derive(Debug)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
BspRead(strafesnet_bsp_loader::ReadError),
BspLoad(strafesnet_bsp_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}

async fn convert_to_snf(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;

let bsp=strafesnet_bsp_loader::read(
std::io::Cursor::new(entire_file)
).map_err(ConvertError::BspRead)?;

let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;

let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;

strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;

Ok(())
}
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now();

let thread_limit=std::thread::available_parallelism()?.get();

// load vpk list and leak for static lifetime
let vpk_list:&[strafesnet_bsp_loader::Vpk]=read_vpks(vpk_paths,thread_limit).await.leak();

let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);

while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),vpk_list,output_folder).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
_=SEM.acquire_many(thread_limit as u32).await.unwrap();

println!("elapsed={:?}", start.elapsed());
Ok(())
}
@ -1,16 +1,16 @@
use crate::window::Instruction;
use strafesnet_common::integer;
use strafesnet_common::instruction::TimedInstruction;
use strafesnet_common::session::Time as SessionTime;
use strafesnet_common::session::TimeInner as SessionTimeInner;

pub struct App<'a>{
root_time:std::time::Instant,
window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>,
window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>,
}
impl<'a> App<'a>{
pub fn new(
root_time:std::time::Instant,
window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>,
window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>,
)->App<'a>{
Self{
root_time,
@ -1,7 +1,6 @@
use std::io::Read;

#[cfg(any(feature="roblox",feature="source"))]
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};

#[allow(dead_code)]
#[derive(Debug)]
@ -69,9 +68,13 @@ pub enum LoadError{
ReadError(ReadError),
File(std::io::Error),
#[cfg(feature="roblox")]
LoadRoblox(strafesnet_rbx_loader::LoadError),
LoadRobloxMesh(strafesnet_rbx_loader::loader::MeshError),
#[cfg(feature="roblox")]
LoadRobloxTexture(strafesnet_rbx_loader::loader::TextureError),
#[cfg(feature="source")]
LoadSource(strafesnet_bsp_loader::LoadError),
LoadSourceMesh(strafesnet_bsp_loader::loader::MeshError),
#[cfg(feature="source")]
LoadSourceTexture(strafesnet_bsp_loader::loader::TextureError),
}
impl std::fmt::Display for LoadError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@ -81,7 +84,7 @@ impl std::fmt::Display for LoadError{
impl std::error::Error for LoadError{}

pub enum LoadFormat{
#[cfg(any(feature="snf",feature="roblox",feature="source"))]
#[cfg(feature="snf")]
Map(strafesnet_common::map::CompleteMap),
#[cfg(feature="snf")]
Bot(strafesnet_snf::bot::Segment),
@ -99,13 +102,50 @@ pub fn load<P:AsRef<std::path::Path>>(path:P)->Result<LoadFormat,LoadError>{
ReadFormat::Roblox(model)=>{
let mut place=model.into_place();
place.run_scripts();
Ok(LoadFormat::Map(
place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?
))

let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
let mut mesh_deferred_loader=MeshDeferredLoader::new();

let map_step1=strafesnet_rbx_loader::convert(
place.as_ref(),
&mut texture_deferred_loader,
&mut mesh_deferred_loader,
);

let mut mesh_loader=strafesnet_rbx_loader::loader::MeshLoader::new();
let meshpart_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRobloxMesh)?;

let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(meshpart_meshes);

let mut texture_loader=strafesnet_rbx_loader::loader::TextureLoader::new();
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRobloxTexture)?;

let map=map_step2.add_render_configs_and_textures(render_configs);

Ok(LoadFormat::Map(map))
},
#[cfg(feature="source")]
ReadFormat::Source(bsp)=>Ok(LoadFormat::Map(
bsp.to_snf(LoadFailureMode::DefaultToNone,&[]).map_err(LoadError::LoadSource)?
)),
ReadFormat::Source(bsp)=>{
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
let mut mesh_deferred_loader=MeshDeferredLoader::new();

let map_step1=strafesnet_bsp_loader::convert(
&bsp,
&mut texture_deferred_loader,
&mut mesh_deferred_loader,
);

let mut mesh_loader=strafesnet_bsp_loader::loader::MeshLoader::new(&bsp,&mut texture_deferred_loader);
let prop_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadSourceMesh)?;

let map_step2=map_step1.add_prop_meshes(prop_meshes);

let mut texture_loader=strafesnet_bsp_loader::loader::TextureLoader::new();
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadSourceTexture)?;

let map=map_step2.add_render_configs_and_textures(render_configs);

Ok(LoadFormat::Map(map))
},
}
}
@ -2,6 +2,7 @@ mod app;
mod file;
mod setup;
mod window;
mod worker;
mod compat_worker;
mod physics_worker;
mod graphics_worker;
@ -6,7 +6,7 @@ use strafesnet_session::session::{
};
use strafesnet_common::instruction::{TimedInstruction,InstructionConsumer};
use strafesnet_common::physics::Time as PhysicsTime;
use strafesnet_common::session::Time as SessionTime;
use strafesnet_common::session::{Time as SessionTime,TimeInner as SessionTimeInner};
use strafesnet_common::timer::Timer;

pub enum Instruction{
@ -23,7 +23,7 @@ pub fn new<'a>(
mut graphics_worker:crate::compat_worker::INWorker<'a,crate::graphics_worker::Instruction>,
directories:Directories,
user_settings:settings::UserSettings,
)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>{
)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>{
let physics=strafesnet_physics::physics::PhysicsState::default();
let timer=Timer::unpaused(SessionTime::ZERO,PhysicsTime::ZERO);
let simulation=Simulation::new(timer,physics);
@ -32,7 +32,7 @@ pub fn new<'a>(
directories,
simulation,
);
crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTime>|{
crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTimeInner>|{
// excruciating pain
macro_rules! run_session_instruction{
($time:expr,$instruction:expr)=>{
@ -1,5 +1,5 @@
use strafesnet_common::instruction::TimedInstruction;
use strafesnet_common::session::Time as SessionTime;
use strafesnet_common::session::{Time as SessionTime,TimeInner as SessionTimeInner};
use strafesnet_common::physics::{MiscInstruction,SetControlInstruction};
use crate::file::LoadFormat;
use crate::physics_worker::Instruction as PhysicsWorkerInstruction;
@ -17,7 +17,7 @@ struct WindowContext<'a>{
mouse_pos:glam::DVec2,
screen_size:glam::UVec2,
window:&'a winit::window::Window,
physics_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<PhysicsWorkerInstruction,SessionTime>>,
physics_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<PhysicsWorkerInstruction,SessionTimeInner>>,
}

impl WindowContext<'_>{
@ -223,7 +223,7 @@ impl WindowContext<'_>{
pub fn worker<'a>(
window:&'a winit::window::Window,
setup_context:crate::setup::SetupContext<'a>,
)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>{
)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>{
// WindowContextSetup::new
#[cfg(feature="user-install")]
let directories=Directories::user().unwrap();
@ -252,7 +252,7 @@ pub fn worker<'a>(
};

//WindowContextSetup::into_worker
crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTime>|{
crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTimeInner>|{
match ins.instruction{
Instruction::WindowEvent(window_event)=>{
window_context.window_event(ins.time,window_event);
@ -1 +1 @@
/run/media/quat/Files/Documents/map-files/strafesnet/maps/bhop_snfm
/run/media/quat/Files/Documents/map-files/verify-scripts/maps/bhop_snfm
@ -1 +0,0 @@
/run/media/quat/Files/Documents/map-files/strafesnet/meshes
@ -1 +1 @@
/run/media/quat/Files/Documents/map-files/strafesnet/replays
/run/media/quat/Files/Documents/map-files/verify-scripts/replays
@ -1 +1 @@
/run/media/quat/Files/Documents/map-files/strafesnet/maps/surf_snfm
/run/media/quat/Files/Documents/map-files/verify-scripts/maps/surf_snfm
@ -1 +0,0 @@
/run/media/quat/Files/Documents/map-files/strafesnet/textures
@ -1 +0,0 @@
/run/media/quat/Files/Documents/map-files/strafesnet/unions