Compare commits: sussy-tips...union2
19 commits:
dc35865eca, d51415af1b, 4439f1e8fe, bd70712bae, d4315c7df4, 446aeec299,
2be1ad0c65, 453e439849, 926e57790e, 012d2d1837, 93c462aa81, 52172e94fd,
720ab43f95, 2c729adf64, 3467bc77b0, 2cf5ff5059, b550778a60, 69599b23be, 2d9ad990c2
Changed paths:
Cargo.lock
Cargo.toml
engine
integration-testing
lib/bsp_loader
lib/common
lib/deferred_loader
lib/fixed_wide
lib/linear_ops
lib/rbx_loader
lib/rbxassetid
lib/snf
map-tool
strafe-client/src
tools
Cargo.lock (generated, 2043 lines): file diff suppressed because it is too large.
@@ -15,7 +15,6 @@ members = [
 "lib/rbxassetid",
 "lib/roblox_emulator",
 "lib/snf",
-"map-tool",
 "strafe-client",
 ]
 resolver = "2"
@@ -556,7 +556,7 @@ impl MoveState{
 =>None,
 }
 }
-fn next_move_instruction(&self,strafe:&Option<gameplay_style::StrafeSettings>,time:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
+fn next_move_instruction(&self,strafe:&Option<gameplay_style::StrafeSettings>,time:Time)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
 //check if you have a valid walk state and create an instruction
 match self{
 MoveState::Walk(walk_state)|MoveState::Ladder(walk_state)=>match &walk_state.target{
@@ -784,7 +784,7 @@ impl TouchingState{
 }).collect();
 crate::push_solve::push_solve(&contacts,acceleration)
 }
-fn predict_collision_end(&self,collector:&mut instruction::InstructionCollector<InternalInstruction,Time>,models:&PhysicsModels,hitbox_mesh:&HitboxMesh,body:&Body,start_time:Time){
+fn predict_collision_end(&self,collector:&mut instruction::InstructionCollector<InternalInstruction,TimeInner>,models:&PhysicsModels,hitbox_mesh:&HitboxMesh,body:&Body,start_time:Time){
 // let relative_body=body.relative_to(&Body::ZERO);
 let relative_body=body;
 for contact in &self.contacts{
@@ -878,7 +878,7 @@ impl PhysicsState{
 fn reset_to_default(&mut self){
 *self=Self::default();
 }
-fn next_move_instruction(&self)->Option<TimedInstruction<InternalInstruction,Time>>{
+fn next_move_instruction(&self)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
 self.move_state.next_move_instruction(&self.style.strafe,self.time)
 }
 fn cull_velocity(&mut self,data:&PhysicsData,velocity:Planar64Vec3){
@@ -935,7 +935,7 @@ pub struct PhysicsData{
 impl Default for PhysicsData{
 fn default()->Self{
 Self{
-bvh:bvh::BvhNode::empty(),
+bvh:bvh::BvhNode::default(),
 models:Default::default(),
 modes:Default::default(),
 hitbox_mesh:StyleModifiers::default().calculate_mesh(),
@@ -950,21 +950,21 @@ pub struct PhysicsContext<'a>{
 // the physics consumes both Instruction and PhysicsInternalInstruction,
 // but can only emit PhysicsInternalInstruction
 impl InstructionConsumer<InternalInstruction> for PhysicsContext<'_>{
-type Time=Time;
-fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,Time>){
+type TimeInner=TimeInner;
+fn process_instruction(&mut self,ins:TimedInstruction<InternalInstruction,TimeInner>){
 atomic_internal_instruction(&mut self.state,&self.data,ins)
 }
 }
 impl InstructionConsumer<Instruction> for PhysicsContext<'_>{
-type Time=Time;
-fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Time>){
+type TimeInner=TimeInner;
+fn process_instruction(&mut self,ins:TimedInstruction<Instruction,TimeInner>){
 atomic_input_instruction(&mut self.state,&self.data,ins)
 }
 }
 impl InstructionEmitter<InternalInstruction> for PhysicsContext<'_>{
-type Time=Time;
+type TimeInner=TimeInner;
 //this little next instruction function could cache its return value and invalidate the cached value by watching the State.
-fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
+fn next_instruction(&self,time_limit:Time)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
 next_instruction_internal(&self.state,&self.data,time_limit)
 }
 }
@@ -972,7 +972,7 @@ impl PhysicsContext<'_>{
 pub fn run_input_instruction(
 state:&mut PhysicsState,
 data:&PhysicsData,
-instruction:TimedInstruction<Instruction,Time>
+instruction:TimedInstruction<Instruction,TimeInner>
 ){
 let mut context=PhysicsContext{state,data};
 context.process_exhaustive(instruction.time);
@@ -1121,7 +1121,7 @@ impl PhysicsData{
 }

 //this is the one who asks
-fn next_instruction_internal(state:&PhysicsState,data:&PhysicsData,time_limit:Time)->Option<TimedInstruction<InternalInstruction,Time>>{
+fn next_instruction_internal(state:&PhysicsState,data:&PhysicsData,time_limit:Time)->Option<TimedInstruction<InternalInstruction,TimeInner>>{
 //JUST POLLING!!! NO MUTATION
 let mut collector=instruction::InstructionCollector::new(time_limit);

@@ -1136,7 +1136,7 @@ impl PhysicsData{
 //relative to moving platforms
 //let relative_body=state.body.relative_to(&Body::ZERO);
 let relative_body=&state.body;
-data.bvh.sample_aabb(&aabb,&mut |&convex_mesh_id|{
+data.bvh.the_tester(&aabb,&mut |&convex_mesh_id|{
 //no checks are needed because of the time limits.
 let model_mesh=data.models.mesh(convex_mesh_id);
 let minkowski=model_physics::MinkowskiMesh::minkowski_sum(model_mesh,data.hitbox_mesh.transformed_mesh());
@@ -1198,7 +1198,7 @@ fn recalculate_touching(
 aabb.inflate(hitbox_mesh.halfsize);
 //relative to moving platforms
 //let relative_body=state.body.relative_to(&Body::ZERO);
-bvh.sample_aabb(&aabb,&mut |&convex_mesh_id|{
+bvh.the_tester(&aabb,&mut |&convex_mesh_id|{
 //no checks are needed because of the time limits.
 let model_mesh=models.mesh(convex_mesh_id);
 let minkowski=model_physics::MinkowskiMesh::minkowski_sum(model_mesh,hitbox_mesh.transformed_mesh());
@@ -1651,7 +1651,7 @@ fn collision_end_intersect(
 }
 }
 }
-fn atomic_internal_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<InternalInstruction,Time>){
+fn atomic_internal_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<InternalInstruction,TimeInner>){
 state.time=ins.time;
 let (should_advance_body,goober_time)=match ins.instruction{
 InternalInstruction::CollisionStart(_,dt)
@@ -1747,7 +1747,7 @@ fn atomic_internal_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:Tim
 }
 }

-fn atomic_input_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<Instruction,Time>){
+fn atomic_input_instruction(state:&mut PhysicsState,data:&PhysicsData,ins:TimedInstruction<Instruction,TimeInner>){
 state.time=ins.time;
 let should_advance_body=match ins.instruction{
 //the body may as well be a quantum wave function
@@ -5,13 +5,13 @@ use strafesnet_common::physics::{
 TimeInner as PhysicsTimeInner,
 Time as PhysicsTime,
 };
-use strafesnet_common::session::Time as SessionTime;
+use strafesnet_common::session::{Time as SessionTime,TimeInner as SessionTimeInner};
 use strafesnet_common::instruction::{InstructionConsumer,InstructionEmitter,TimedInstruction};

-type TimedSelfInstruction=TimedInstruction<Instruction,PhysicsTime>;
-type DoubleTimedSelfInstruction=TimedInstruction<TimedSelfInstruction,SessionTime>;
+type TimedSelfInstruction=TimedInstruction<Instruction,PhysicsTimeInner>;
+type DoubleTimedSelfInstruction=TimedInstruction<TimedSelfInstruction,SessionTimeInner>;

-type TimedPhysicsInstruction=TimedInstruction<PhysicsInstruction,PhysicsTime>;
+type TimedPhysicsInstruction=TimedInstruction<PhysicsInstruction,PhysicsTimeInner>;

 const MOUSE_TIMEOUT:SessionTime=SessionTime::from_millis(10);

@@ -89,14 +89,14 @@ pub struct MouseInterpolator{
 // Maybe MouseInterpolator manipulation is better expressed using impls
 // and called from Instruction trait impls in session
 impl InstructionConsumer<TimedSelfInstruction> for MouseInterpolator{
-type Time=SessionTime;
+type TimeInner=SessionTimeInner;
 fn process_instruction(&mut self,ins:DoubleTimedSelfInstruction){
 self.push_unbuffered_input(ins.time,ins.instruction.time,ins.instruction.instruction.into())
 }
 }
 impl InstructionEmitter<StepInstruction> for MouseInterpolator{
-type Time=SessionTime;
-fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::Time>>{
+type TimeInner=SessionTimeInner;
+fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::TimeInner>>{
 self.buffered_instruction_with_timeout(time_limit)
 }
 }
@@ -108,7 +108,7 @@ impl MouseInterpolator{
 output:std::collections::VecDeque::new(),
 }
 }
-fn push_mouse_and_flush_buffer(&mut self,ins:TimedInstruction<MouseInstruction,PhysicsTime>){
+fn push_mouse_and_flush_buffer(&mut self,ins:TimedInstruction<MouseInstruction,PhysicsTimeInner>){
 self.buffer.push_front(TimedInstruction{
 time:ins.time,
 instruction:BufferedInstruction::Mouse(ins.instruction).into(),
@@ -219,7 +219,7 @@ impl MouseInterpolator{
 }
 }
 }
-fn buffered_instruction_with_timeout(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,SessionTime>>{
+fn buffered_instruction_with_timeout(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,SessionTimeInner>>{
 match self.get_mouse_timedout_at(time_limit){
 Some(timeout)=>Some(TimedInstruction{
 time:timeout,
@@ -232,7 +232,7 @@ impl MouseInterpolator{
 }),
 }
 }
-pub fn pop_buffered_instruction(&mut self,ins:TimedInstruction<StepInstruction,PhysicsTime>)->Option<TimedPhysicsInstruction>{
+pub fn pop_buffered_instruction(&mut self,ins:TimedInstruction<StepInstruction,PhysicsTimeInner>)->Option<TimedPhysicsInstruction>{
 match ins.instruction{
 StepInstruction::Pop=>(),
 StepInstruction::Timeout=>self.timeout_mouse(ins.time),
@@ -244,7 +244,6 @@ impl MouseInterpolator{
 #[cfg(test)]
 mod test{
 use super::*;
-use strafesnet_common::session::TimeInner as SessionTimeInner;
 #[test]
 fn test(){
 let mut interpolator=MouseInterpolator::new();
@@ -88,11 +88,11 @@ impl Simulation{

 #[derive(Default)]
 pub struct Recording{
-instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTime>>,
+instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTimeInner>>,
 }
 impl Recording{
 pub fn new(
-instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTime>>,
+instructions:Vec<TimedInstruction<PhysicsInputInstruction,PhysicsTimeInner>>,
 )->Self{
 Self{instructions}
 }
@@ -207,8 +207,8 @@ impl Session{
 // Session emits DoStep

 impl InstructionConsumer<Instruction<'_>> for Session{
-type Time=SessionTime;
-fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Self::Time>){
+type TimeInner=SessionTimeInner;
+fn process_instruction(&mut self,ins:TimedInstruction<Instruction,Self::TimeInner>){
 // repetitive procedure macro
 macro_rules! run_mouse_interpolator_instruction{
 ($instruction:expr)=>{
@@ -425,8 +425,8 @@ impl InstructionConsumer<Instruction<'_>> for Session{
 }
 }
 impl InstructionConsumer<StepInstruction> for Session{
-type Time=SessionTime;
-fn process_instruction(&mut self,ins:TimedInstruction<StepInstruction,Self::Time>){
+type TimeInner=SessionTimeInner;
+fn process_instruction(&mut self,ins:TimedInstruction<StepInstruction,Self::TimeInner>){
 let time=self.simulation.timer.time(ins.time);
 if let Some(instruction)=self.mouse_interpolator.pop_buffered_instruction(ins.set_time(time)){
 //record
@@ -436,8 +436,8 @@ impl InstructionConsumer<StepInstruction> for Session{
 }
 }
 impl InstructionEmitter<StepInstruction> for Session{
-type Time=SessionTime;
-fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::Time>>{
+type TimeInner=SessionTimeInner;
+fn next_instruction(&self,time_limit:SessionTime)->Option<TimedInstruction<StepInstruction,Self::TimeInner>>{
 self.mouse_interpolator.next_instruction(time_limit)
 }
 }
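The recurring change across the physics, mouse-interpolator, and session hunks above is one migration: each `InstructionConsumer`/`InstructionEmitter` impl switches its associated type from `type Time` to `type TimeInner`, and `TimedInstruction` is parameterized by that inner time representation. A minimal sketch of the shape this converges on, using simplified stand-in definitions rather than the real `strafesnet_common` API:

```rust
// Simplified stand-ins for illustration only — not the actual strafesnet_common types.
pub struct TimedInstruction<I, T> {
    pub time: T,          // timestamp in the inner representation
    pub instruction: I,   // the payload being scheduled
}

pub trait InstructionConsumer<I> {
    // Associated inner time type, mirroring `type TimeInner=...;` in the diff.
    type TimeInner;
    fn process_instruction(&mut self, ins: TimedInstruction<I, Self::TimeInner>);
}

// A toy consumer that counts instructions timestamped with i64 ticks.
struct Counter { count: usize }

impl InstructionConsumer<&'static str> for Counter {
    type TimeInner = i64;
    fn process_instruction(&mut self, _ins: TimedInstruction<&'static str, Self::TimeInner>) {
        self.count += 1;
    }
}
```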
@@ -4,6 +4,6 @@ version = "0.1.0"
 edition = "2021"

 [dependencies]
-strafesnet_common = { path = "../lib/common", registry = "strafesnet" }
-strafesnet_physics = { path = "../engine/physics", registry = "strafesnet" }
+strafesnet_common = { version = "0.5.2", path = "../lib/common", registry = "strafesnet" }
+strafesnet_physics = { version = "0.1.0", path = "../engine/physics", registry = "strafesnet" }
 strafesnet_snf = { path = "../lib/snf", registry = "strafesnet" }
@@ -1,6 +1,6 @@
 [package]
 name = "strafesnet_bsp_loader"
-version = "0.3.0"
+version = "0.2.2"
 edition = "2021"
 repository = "https://git.itzana.me/StrafesNET/strafe-project"
 license = "MIT OR Apache-2.0"
@@ -11,8 +11,7 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]

 [dependencies]
 glam = "0.29.0"
-strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
-strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
-vbsp = { version = "0.7.0-codegen1", registry = "strafesnet" }
+strafesnet_common = { path = "../common", registry = "strafesnet" }
+strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader" }
+vbsp = "0.6.0"
 vmdl = "0.2.0"
-vpk = "0.2.0"
@@ -1,321 +0,0 @@
-use strafesnet_common::integer::Planar64;
-use strafesnet_common::{model,integer};
-use strafesnet_common::integer::{vec3::Vector3,Fixed,Ratio};
-
-use crate::{valve_transform_normal,valve_transform_dist};
-
-#[derive(Hash,Eq,PartialEq)]
-struct Face{
-normal:integer::Planar64Vec3,
-dot:integer::Planar64,
-}
-
-#[derive(Debug)]
-struct Faces{
-faces:Vec<Vec<integer::Planar64Vec3>>,
-}
-
-fn solve3(c0:&Face,c1:&Face,c2:&Face)->Option<Ratio<Vector3<Fixed<3,96>>,Fixed<3,96>>>{
-let n0_n1=c0.normal.cross(c1.normal);
-let det=c2.normal.dot(n0_n1);
-if det.abs().is_zero(){
-return None;
-}
-Some((
-c1.normal.cross(c2.normal)*c0.dot
-+c2.normal.cross(c0.normal)*c1.dot
-+c0.normal.cross(c1.normal)*c2.dot
-)/det)
-}
-
-#[derive(Debug)]
-pub enum PlanesToFacesError{
-InitFace1,
-InitFace2,
-InitIntersection,
-FindNewIntersection,
-EmptyFaces,
-InfiniteLoop1,
-InfiniteLoop2,
-}
-impl std::fmt::Display for PlanesToFacesError{
-fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
-write!(f,"{self:?}")
-}
-}
-impl core::error::Error for PlanesToFacesError{}
-
-fn planes_to_faces(face_list:std::collections::HashSet<Face>)->Result<Faces,PlanesToFacesError>{
-let mut faces=Vec::new();
-// for each face, determine one edge at a time until you complete the face
-'face: for face0 in &face_list{
-// 1. find first edge
-// 2. follow edges around face
-
-// === finding first edge ===
-// 1. pick the most perpendicular set of 3 faces
-// 2. check if any faces occlude the intersection
-// 3. use this test to replace left and right alternating until they are not occluded
-
-// find the most perpendicular face to face0
-let mut face1=face_list.iter().min_by_key(|&p|{
-face0.normal.dot(p.normal).abs()
-}).ok_or(PlanesToFacesError::InitFace1)?;
-
-// direction of edge formed by face0 x face1
-let edge_dir=face0.normal.cross(face1.normal);
-
-// find the most perpendicular face to both face0 and face1
-let mut face2=face_list.iter().max_by_key(|&p|{
-// find the best *oriented* face (no .abs())
-edge_dir.dot(p.normal)
-}).ok_or(PlanesToFacesError::InitFace2)?;
-
-let mut detect_loop=200u8;
-
-let mut intersection=solve3(face0,face1,face2).ok_or(PlanesToFacesError::InitIntersection)?;
-
-// repeatedly update face0, face1 until all faces form part of the convex solid
-'find: loop{
-if let Some(a)=detect_loop.checked_sub(1){
-detect_loop=a;
-}else{
-return Err(PlanesToFacesError::InfiniteLoop1);
-}
-// test if any *other* faces occlude the intersection
-for new_face in &face_list{
-// new face occludes intersection point
-if (new_face.dot.fix_2()/Planar64::ONE).lt_ratio(new_face.normal.dot(intersection.num)/intersection.den){
-// replace one of the faces with the new face
-// dont' try to replace face0 because we are exploring that face in particular
-if let Some(new_intersection)=solve3(face0,new_face,face2){
-// face1 does not occlude (or intersect) the new intersection
-if (face1.dot.fix_2()/Planar64::ONE).gt_ratio(face1.normal.dot(new_intersection.num)/new_intersection.den){
-face1=new_face;
-intersection=new_intersection;
-continue 'find;
-}
-}
-if let Some(new_intersection)=solve3(face0,face1,new_face){
-// face2 does not occlude (or intersect) the new intersection
-if (face2.dot.fix_2()/Planar64::ONE).gt_ratio(face2.normal.dot(new_intersection.num)/new_intersection.den){
-face2=new_face;
-intersection=new_intersection;
-continue 'find;
-}
-}
-}
-}
-
-// we have found a set of faces for which the intersection is on the convex solid
-break 'find;
-}
-
-// check if face0 must go, meaning it is a degenerate face and does not contribute anything to the convex solid
-for new_face in &face_list{
-if core::ptr::eq(face0,new_face){
-continue;
-}
-if core::ptr::eq(face1,new_face){
-continue;
-}
-if core::ptr::eq(face2,new_face){
-continue;
-}
-if let Some(new_intersection)=solve3(new_face,face1,face2){
-// face0 does not occlude (or intersect) the new intersection
-if (face0.dot.fix_2()/Planar64::ONE).lt_ratio(face0.normal.dot(new_intersection.num)/new_intersection.den){
-// abort! reject face0 entirely
-continue 'face;
-}
-}
-}
-
-// === follow edges around face ===
-// Note that we chose face2 such that the 3 faces create a particular winding order.
-// If we choose a consistent face to follow (face1, face2) it will always wind with a consistent chirality
-
-let mut detect_loop=200u8;
-
-// keep looping until we meet this face again
-let face1=face1;
-let mut face=Vec::new();
-loop{
-// push point onto vertices
-// problem: this may push a vertex that does not fit in the fixed point range and is thus meaningless
-face.push(intersection.divide().fix_1());
-
-// we looped back around to face1, we're done!
-if core::ptr::eq(face1,face2){
-break;
-}
-
-// the measure
-let edge_dir=face0.normal.cross(face2.normal);
-
-// the dot product to beat
-let d_intersection=edge_dir.dot(intersection.num)/intersection.den;
-
-// find the next face moving clockwise around face0
-let (new_face,new_intersection,_)=face_list.iter().filter_map(|new_face|{
-// ignore faces that are part of the current edge
-if core::ptr::eq(face0,new_face)
-|core::ptr::eq(face2,new_face){
-return None;
-}
-let new_intersection=solve3(face0,face2,new_face)?;
-
-// the d value must be larger
-let d_new_intersection=edge_dir.dot(new_intersection.num)/new_intersection.den;
-if d_new_intersection.le_ratio(d_intersection){
-return None;
-}
-
-Some((new_face,new_intersection,d_new_intersection))
-}).min_by_key(|&(_,_,d)|d).ok_or(PlanesToFacesError::FindNewIntersection)?;
-
-face2=new_face;
-intersection=new_intersection;
-
-if let Some(a)=detect_loop.checked_sub(1){
-detect_loop=a;
-}else{
-return Err(PlanesToFacesError::InfiniteLoop2);
-}
-}
-
-faces.push(face);
-}
-
-if faces.is_empty(){
-Err(PlanesToFacesError::EmptyFaces)
-}else{
-Ok(Faces{
-faces,
-})
-}
-}
-
-#[derive(Debug)]
-pub enum BrushToMeshError{
-SliceBrushSides,
-MissingPlane,
-InvalidFaceCount{
-count:usize,
-},
-InvalidPlanes(PlanesToFacesError),
-SkipBecauseTexture,
-}
-impl std::fmt::Display for BrushToMeshError{
-fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
-write!(f,"{self:?}")
-}
-}
-impl core::error::Error for BrushToMeshError{}
-
-pub fn faces_to_mesh(faces:Vec<Vec<integer::Planar64Vec3>>)->model::Mesh{
-// generate the mesh
-let mut mb=model::MeshBuilder::new();
-let color=mb.acquire_color_id(glam::Vec4::ONE);
-let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
-// normals are ignored by physics
-let normal=mb.acquire_normal_id(integer::vec3::ZERO);
-
-let polygon_list=faces.into_iter().map(|face|{
-face.into_iter().map(|pos|{
-let pos=mb.acquire_pos_id(pos);
-mb.acquire_vertex_id(model::IndexedVertex{
-pos,
-tex,
-normal,
-color,
-})
-}).collect()
-}).collect();
-
-let polygon_groups=vec![model::PolygonGroup::PolygonList(model::PolygonList::new(polygon_list))];
-let physics_groups=vec![model::IndexedPhysicsGroup{
-groups:vec![model::PolygonGroupId::new(0)],
-}];
-let graphics_groups=vec![];
-
-mb.build(polygon_groups,graphics_groups,physics_groups)
-}
-
-pub fn brush_to_mesh(bsp:&vbsp::Bsp,brush:&vbsp::Brush)->Result<model::Mesh,BrushToMeshError>{
-let brush_start_idx=brush.brush_side as usize;
-let sides_range=brush_start_idx..brush_start_idx+brush.num_brush_sides as usize;
-let sides=bsp.brush_sides.get(sides_range).ok_or(BrushToMeshError::SliceBrushSides)?;
-for side in sides{
-if let Some(texture_info)=bsp.textures_info.get(side.texture_info as usize){
-let texture_info=vbsp::Handle::new(bsp,texture_info);
-let s=texture_info.name();
-if s.starts_with("tools/")||s.starts_with("TOOLS/"){
-return Err(BrushToMeshError::SkipBecauseTexture);
-}
-}
-}
-let face_list=sides.iter().filter(|side|side.bevel==0).map(|side|{
-let plane=bsp.plane(side.plane as usize)?;
-Some(Face{
-normal:valve_transform_normal(plane.normal.into()),
-dot:valve_transform_dist(plane.dist.into()),
-})
-}).collect::<Option<std::collections::HashSet<_>>>().ok_or(BrushToMeshError::MissingPlane)?;
-
-if face_list.len()<4{
-return Err(BrushToMeshError::InvalidFaceCount{count:face_list.len()});
-}
-
-let faces=planes_to_faces(face_list).map_err(BrushToMeshError::InvalidPlanes)?;
-
-let mesh=faces_to_mesh(faces.faces);
-
-Ok(mesh)
-}
-
-pub fn unit_cube()->model::Mesh{
-let face_list=[
-Face{normal:integer::vec3::X,dot:Planar64::ONE},
-Face{normal:integer::vec3::Y,dot:Planar64::ONE},
-Face{normal:integer::vec3::Z,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_X,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_Y,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_Z,dot:Planar64::ONE},
-].into_iter().collect();
-let faces=planes_to_faces(face_list).unwrap();
-let mesh=faces_to_mesh(faces.faces);
-mesh
-}
-
-#[cfg(test)]
-mod test{
-use super::*;
-#[test]
-fn test_cube(){
-let face_list=[
-Face{normal:integer::vec3::X,dot:Planar64::ONE},
-Face{normal:integer::vec3::Y,dot:Planar64::ONE},
-Face{normal:integer::vec3::Z,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_X,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_Y,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_Z,dot:Planar64::ONE},
-].into_iter().collect();
-let faces=planes_to_faces(face_list).unwrap();
-dbg!(faces);
-}
-#[test]
-fn test_cube_with_degernate_face(){
-let face_list=[
-Face{normal:integer::vec3::X,dot:Planar64::ONE},
-Face{normal:integer::vec3::Y,dot:Planar64::ONE},
-Face{normal:integer::vec3::Z,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_X,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_Y,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_Z,dot:Planar64::ONE},
-Face{normal:integer::vec3::NEG_Z,dot:Planar64::EPSILON},
-].into_iter().collect();
-let faces=planes_to_faces(face_list).unwrap();
-dbg!(faces);
-}
-}
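For reference, the deleted solve3 above evaluates the standard three-plane intersection: for planes n_i · x = d_i (i = 0, 1, 2), the solution is x = (d0 (n1 × n2) + d1 (n2 × n0) + d2 (n0 × n1)) / (n0 · (n1 × n2)), undefined when the scalar triple product in the denominator is zero — exactly the num/den Ratio the function builds before dividing.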
@@ -7,31 +7,6 @@ use strafesnet_deferred_loader::texture::{RenderConfigs,Texture};

 use crate::valve_transform;

-fn ingest_vertex(
-mb:&mut model::MeshBuilder,
-world_position:vbsp::Vector,
-texture_transform_u:glam::Vec4,
-texture_transform_v:glam::Vec4,
-normal:model::NormalId,
-color:model::ColorId,
-)->model::VertexId{
-//world_model.origin seems to always be 0,0,0
-let vertex_xyz=world_position.into();
-let pos=mb.acquire_pos_id(valve_transform(vertex_xyz));
-
-//calculate texture coordinates
-let pos_4d=glam::Vec3::from_array(vertex_xyz).extend(1.0);
-let tex=glam::vec2(texture_transform_u.dot(pos_4d),texture_transform_v.dot(pos_4d));
-let tex=mb.acquire_tex_id(tex);
-
-mb.acquire_vertex_id(model::IndexedVertex{
-pos,
-tex,
-normal,
-color,
-})
-}
-
 pub fn convert<'a>(
 bsp:&'a crate::Bsp,
 render_config_deferred_loader:&mut RenderConfigDeferredLoader<Cow<'a,str>>,
@@ -41,20 +16,21 @@ pub fn convert<'a>(
 //figure out real attributes later
 let mut unique_attributes=Vec::new();
 unique_attributes.push(gameplay_attributes::CollisionAttributes::Decoration);
-unique_attributes.push(gameplay_attributes::CollisionAttributes::contact_default());
-unique_attributes.push(gameplay_attributes::CollisionAttributes::intersect_default());
-const ATTRIBUTE_DECORATION:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(0);
-const ATTRIBUTE_CONTACT_DEFAULT:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(1);
-const ATTRIBUTE_INTERSECT_DEFAULT:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(2);
+const TEMP_TOUCH_ME_ATTRIBUTE:gameplay_attributes::CollisionAttributesId=gameplay_attributes::CollisionAttributesId::new(0);

+let mut prop_mesh_count=0;
 //declare all prop models to Loader
 let prop_models=bsp.static_props().map(|prop|{
 //get or create mesh_id
 let mesh_id=mesh_deferred_loader.acquire_mesh_id(prop.model());
+//not the most failsafe code but this is just for the map tool lmao
+if prop_mesh_count==mesh_id.get(){
+prop_mesh_count+=1;
+};
 let placement=prop.as_prop_placement();
 model::Model{
 mesh:mesh_id,
-attributes:ATTRIBUTE_DECORATION,
+attributes:TEMP_TOUCH_ME_ATTRIBUTE,
 transform:integer::Planar64Affine3::new(
 integer::mat3::try_from_f32_array_2d((
 glam::Mat3A::from_diagonal(glam::Vec3::splat(placement.scale))
@@ -71,12 +47,14 @@ pub fn convert<'a>(

 //the generated MeshIds in here will collide with the Loader Mesh Ids
 //but I can't think of a good workaround other than just remapping one later.
-let mut world_meshes:Vec<model::Mesh>=bsp.models().map(|world_model|{
-let mut mb=model::MeshBuilder::new();
-let color=mb.acquire_color_id(glam::Vec4::ONE);
+let world_meshes:Vec<model::Mesh>=bsp.models().map(|world_model|{
+//non-deduplicated
+let mut spam_pos=Vec::new();
+let mut spam_tex=Vec::new();
+let mut spam_normal=Vec::new();
+let mut spam_vertices=Vec::new();
 let mut graphics_groups=Vec::new();
-let mut render_id_to_graphics_group_id=std::collections::HashMap::new();
+let mut physics_group=model::IndexedPhysicsGroup::default();
 let polygon_groups=world_model.faces().enumerate().map(|(polygon_group_id,face)|{
 let polygon_group_id=model::PolygonGroupId::new(polygon_group_id as u32);
 let face_texture=face.texture();
@@ -85,151 +63,107 @@ pub fn convert<'a>(
 let texture_transform_u=glam::Vec4::from_array(face_texture.texture_transforms_u)/(face_texture_data.width as f32);
 let texture_transform_v=glam::Vec4::from_array(face_texture.texture_transforms_v)/(face_texture_data.height as f32);

+//this automatically figures out what the texture is trying to do and creates
+//a render config for it, and then returns the id to that render config
+let render_id=render_config_deferred_loader.acquire_render_config_id(Some(face_texture_data.name().into()));
+
 //normal
-let normal=mb.acquire_normal_id(valve_transform(face.normal().into()));
-let mut polygon_iter=face.vertex_positions().map(|vertex_position|
-world_model.origin+vertex_position
-);
+let normal=face.normal();
+let normal_idx=spam_normal.len() as u32;
+spam_normal.push(valve_transform(normal.into()));
+let mut polygon_iter=face.vertex_positions().map(|vertex_position|{
+//world_model.origin seems to always be 0,0,0
+let vertex_xyz=(world_model.origin+vertex_position).into();
+let pos_idx=spam_pos.len();
+spam_pos.push(valve_transform(vertex_xyz));
+
+//calculate texture coordinates
+let pos=glam::Vec3::from_array(vertex_xyz).extend(1.0);
+let tex=glam::vec2(texture_transform_u.dot(pos),texture_transform_v.dot(pos));
+let tex_idx=spam_tex.len() as u32;
+spam_tex.push(tex);
+
+let vertex_id=model::VertexId::new(spam_vertices.len() as u32);
+spam_vertices.push(model::IndexedVertex{
+pos:model::PositionId::new(pos_idx as u32),
+tex:model::TextureCoordinateId::new(tex_idx as u32),
+normal:model::NormalId::new(normal_idx),
+color:model::ColorId::new(0),
+});
+vertex_id
+});
 let polygon_list=std::iter::from_fn(move||{
 match (polygon_iter.next(),polygon_iter.next(),polygon_iter.next()){
-(Some(v1),Some(v2),Some(v3))=>Some([v1,v2,v3]),
+(Some(v1),Some(v2),Some(v3))=>Some(vec![v1,v2,v3]),
 //ignore extra vertices, not sure what to do in this case, failing the whole conversion could be appropriate
 _=>None,
 }
-}).map(|triplet|{
-triplet.map(|world_position|
-ingest_vertex(&mut mb,world_position,texture_transform_u,texture_transform_v,normal,color)
-).to_vec()
 }).collect();
 if face.is_visible(){
-//this automatically figures out what the texture is trying to do and creates
-//a render config for it, and then returns the id to that render config
-let render_id=render_config_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(face_texture_data.name())));
-//deduplicate graphics groups by render id
-let graphics_group_id=*render_id_to_graphics_group_id.entry(render_id).or_insert_with(||{
-let graphics_group_id=graphics_groups.len();
-graphics_groups.push(model::IndexedGraphicsGroup{
-render:render_id,
-groups:vec![],
-});
-graphics_group_id
-});
-graphics_groups[graphics_group_id].groups.push(polygon_group_id);
+//TODO: deduplicate graphics groups by render id
+graphics_groups.push(model::IndexedGraphicsGroup{
+render:render_id,
+groups:vec![polygon_group_id],
+})
 }
+physics_group.groups.push(polygon_group_id);
 model::PolygonGroup::PolygonList(model::PolygonList::new(polygon_list))
 }).collect();
-mb.build(polygon_groups,graphics_groups,vec![])
+model::Mesh{
+unique_pos:spam_pos,
+unique_tex:spam_tex,
+unique_normal:spam_normal,
+unique_color:vec![glam::Vec4::ONE],
+unique_vertices:spam_vertices,
+polygon_groups,
+graphics_groups,
+physics_groups:vec![physics_group],
+}
 }).collect();

-let mut found_spawn=None;
-let mut world_models=Vec::new();
-// the one and only world model 0
-world_models.push(model::Model{
-mesh:model::MeshId::new(0),
-attributes:ATTRIBUTE_DECORATION,
-transform:integer::Planar64Affine3::IDENTITY,
-color:glam::Vec4::W,
-});
-for raw_ent in bsp.entities.iter(){
-match raw_ent.parse(){
-Ok(vbsp::basic::Entity::Brush(brush))
-|Ok(vbsp::basic::Entity::BrushIllusionary(brush))
-|Ok(vbsp::basic::Entity::BrushWall(brush))
-|Ok(vbsp::basic::Entity::BrushWallToggle(brush))=>{
-//The first character of brush.model is '*'
-match brush.model[1..].parse(){
-Ok(mesh_id)=>{
-world_models.push(model::Model{
-mesh:model::MeshId::new(mesh_id),
-attributes:ATTRIBUTE_DECORATION,
-transform:integer::Planar64Affine3::from_translation(
-valve_transform(brush.origin.into())
-),
-color:(glam::Vec3::from_array([
-brush.color.r as f32,
-brush.color.g as f32,
-brush.color.b as f32
-])/255.0).extend(1.0),
-});
-},
-Err(e)=>{
-println!("Brush model int parse error: {e}");
-},
-}
-},
-_=>(),
-}
-
-match raw_ent.parse(){
-Ok(vbsp::css::Entity::InfoPlayerCounterterrorist(spawn))=>{
-found_spawn=Some(valve_transform(spawn.origin.into()));
-},
-Err(e)=>{
-println!("Bsp Entity parse error: {e}");
-},
-_=>(),
-}
-}
-
-// physics models
-for brush in &bsp.brushes{
-if !brush.flags.contains(vbsp::BrushFlags::SOLID){
-continue;
-}
-let mesh_result=crate::brush::brush_to_mesh(bsp,brush);
-match mesh_result{
-Ok(mesh)=>{
-let mesh_id=model::MeshId::new(world_meshes.len() as u32);
-world_meshes.push(mesh);
-world_models.push(model::Model{
-mesh:mesh_id,
-attributes:ATTRIBUTE_CONTACT_DEFAULT,
-transform:integer::Planar64Affine3::new(
-integer::mat3::identity(),
-integer::vec3::ZERO,
-),
-color:glam::Vec4::ONE,
-});
-},
-Err(e)=>println!("Brush mesh error: {e}"),
-}
-}
-
-let mut modes_list=Vec::new();
-if let Some(spawn_point)=found_spawn{
-// create a new mesh
-let mesh_id=model::MeshId::new(world_meshes.len() as u32);
-world_meshes.push(crate::brush::unit_cube());
-// create a new model
-let model_id=model::ModelId::new(world_models.len() as u32);
-world_models.push(model::Model{
+let world_models:Vec<model::Model>=
+//one instance of the main world mesh
+std::iter::once((
+//world_model
+model::MeshId::new(0),
+//model_origin
+vbsp::Vector::from([0.0,0.0,0.0]),
+//model_color
+vbsp::Color{r:255,g:255,b:255},
+)).chain(
+//entities sprinkle instances of the other meshes around
+bsp.entities.iter()
+.flat_map(|ent|ent.parse())//ignore entity parsing errors
+.filter_map(|ent|match ent{
+vbsp::Entity::Brush(brush)=>Some(brush),
+vbsp::Entity::BrushIllusionary(brush)=>Some(brush),
+vbsp::Entity::BrushWall(brush)=>Some(brush),
+vbsp::Entity::BrushWallToggle(brush)=>Some(brush),
+_=>None,
+}).flat_map(|brush|
+//The first character of brush.model is '*'
+brush.model[1..].parse().map(|mesh_id|//ignore parse int errors
+(model::MeshId::new(mesh_id),brush.origin,brush.color)
+)
+)
+).map(|(mesh_id,model_origin,vbsp::Color{r,g,b})|{
+model::Model{
 mesh:mesh_id,
-attributes:ATTRIBUTE_INTERSECT_DEFAULT,
-transform:integer::Planar64Affine3::from_translation(spawn_point),
-color:glam::Vec4::W,
-});
-
-let first_stage=strafesnet_common::gameplay_modes::Stage::empty(model_id);
-let main_mode=strafesnet_common::gameplay_modes::Mode::new(
-strafesnet_common::gameplay_style::StyleModifiers::source_bhop(),
-model_id,
-std::collections::HashMap::new(),
-vec![first_stage],
-std::collections::HashMap::new(),
-);
-modes_list.push(main_mode);
-}
+attributes:TEMP_TOUCH_ME_ATTRIBUTE,
+transform:integer::Planar64Affine3::new(
+integer::mat3::identity(),
+valve_transform(model_origin.into())
+),
+color:(glam::Vec3::from_array([r as f32,g as f32,b as f32])/255.0).extend(1.0),
+}
+}).collect();

 PartialMap1{
 attributes:unique_attributes,
 world_meshes,
 prop_models,
 world_models,
-modes:strafesnet_common::gameplay_modes::Modes::new(modes_list),
+modes:strafesnet_common::gameplay_modes::Modes::new(Vec::new()),
 }
 }
@@ -1,17 +1,20 @@
-use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};

 mod bsp;
 mod mesh;
-mod brush;
 pub mod loader;

+pub struct Bsp(vbsp::Bsp);
+impl Bsp{
+pub const fn new(value:vbsp::Bsp)->Self{
+Self(value)
+}
+}
+impl AsRef<vbsp::Bsp> for Bsp{
+fn as_ref(&self)->&vbsp::Bsp{
+&self.0
+}
+}
 const VALVE_SCALE:f32=1.0/16.0;
-pub(crate) fn valve_transform_dist(d:f32)->strafesnet_common::integer::Planar64{
-(d*VALVE_SCALE).try_into().unwrap()
-}
-pub(crate) fn valve_transform_normal([x,y,z]:[f32;3])->strafesnet_common::integer::Planar64Vec3{
-strafesnet_common::integer::vec3::try_from_f32_array([x,z,-y]).unwrap()
-}
 pub(crate) fn valve_transform([x,y,z]:[f32;3])->strafesnet_common::integer::Planar64Vec3{
 strafesnet_common::integer::vec3::try_from_f32_array([x*VALVE_SCALE,z*VALVE_SCALE,-y*VALVE_SCALE]).unwrap()
 }
@@ -28,38 +31,6 @@ impl std::fmt::Display for ReadError{
 }
 impl std::error::Error for ReadError{}

-
-#[derive(Debug)]
-pub enum LoadError{
-Texture(loader::TextureError),
-Mesh(loader::MeshError),
-}
-impl std::fmt::Display for LoadError{
-fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
-write!(f,"{self:?}")
-}
-}
-impl std::error::Error for LoadError{}
-impl From<loader::TextureError> for LoadError{
-fn from(value:loader::TextureError)->Self{
-Self::Texture(value)
-}
-}
-impl From<loader::MeshError> for LoadError{
-fn from(value:loader::MeshError)->Self{
-Self::Mesh(value)
-}
-}
-pub struct Bsp{
-bsp:vbsp::Bsp,
-case_folded_file_names:std::collections::HashMap<String,String>,
-}
-impl AsRef<vbsp::Bsp> for Bsp{
-fn as_ref(&self)->&vbsp::Bsp{
-&self.bsp
-}
-}
-
 pub fn read<R:std::io::Read>(mut input:R)->Result<Bsp,ReadError>{
 let mut s=Vec::new();

@@ -68,66 +39,5 @@ pub fn read<R:std::io::Read>(mut input:R)->Result<Bsp,ReadError>{

 vbsp::Bsp::read(s.as_slice()).map(Bsp::new).map_err(ReadError::Bsp)
 }
-impl Bsp{
-pub fn new(bsp:vbsp::Bsp)->Self{
-let case_folded_file_names=bsp.pack.clone().into_zip().lock().unwrap().file_names().map(|s|{
-(s.to_lowercase(),s.to_owned())
-}).collect();
-Self{
-bsp,
-case_folded_file_names,
-}
-}
-pub fn pack_get(&self,name_lowercase:&str)->Result<Option<Vec<u8>>,vbsp::BspError>{
-match self.case_folded_file_names.get(name_lowercase){
-Some(name_folded)=>self.bsp.pack.get(name_folded),
-None=>Ok(None),
-}
-}
-pub fn to_snf(&self,failure_mode:LoadFailureMode,vpk_list:&[Vpk])->Result<strafesnet_common::map::CompleteMap,LoadError>{
-let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
-let mut mesh_deferred_loader=MeshDeferredLoader::new();
-
-let map_step1=bsp::convert(
-self,
-&mut texture_deferred_loader,
-&mut mesh_deferred_loader,
-);
-
-let mut mesh_loader=loader::MeshLoader::new(loader::BspFinder{bsp:self,vpks:vpk_list},&mut texture_deferred_loader);
-let prop_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,failure_mode).map_err(LoadError::Mesh)?;
-
-let map_step2=map_step1.add_prop_meshes(prop_meshes);
-
-let mut texture_loader=loader::TextureLoader::new();
-let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;
-
-let map=map_step2.add_render_configs_and_textures(render_configs);
-
-Ok(map)
-}
-}
-pub struct Vpk{
-vpk:vpk::VPK,
-case_folded_file_names:std::collections::HashMap<String,String>,
-}
-impl AsRef<vpk::VPK> for Vpk{
-fn as_ref(&self)->&vpk::VPK{
-&self.vpk
-}
-}
-impl Vpk{
-pub fn new(vpk:vpk::VPK)->Vpk{
-let case_folded_file_names=vpk.tree.keys().map(|s|{
-(s.to_lowercase(),s.to_owned())
-}).collect();
-Vpk{
-vpk,
-case_folded_file_names,
-}
-}
-pub fn tree_get(&self,name_lowercase:&str)->Option<&vpk::entry::VPKEntry>{
-let name_folded=self.case_folded_file_names.get(name_lowercase)?;
-self.vpk.tree.get(name_folded)
-}
-}
+pub use bsp::convert;
@ -3,7 +3,7 @@ use std::{borrow::Cow, io::Read};
|
|||||||
use strafesnet_common::model::Mesh;
|
use strafesnet_common::model::Mesh;
|
||||||
use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
|
use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
|
||||||
|
|
||||||
use crate::{Bsp,Vpk};
|
use crate::{mesh::ModelData, Bsp};
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@ -73,56 +73,25 @@ impl From<vbsp::BspError> for MeshError{
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone,Copy)]
|
pub struct MeshLoader<'a,'b>{
|
||||||
pub struct BspFinder<'bsp,'vpk>{
|
bsp:&'a Bsp,
|
||||||
pub bsp:&'bsp Bsp,
|
deferred_loader:&'b mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
||||||
pub vpks:&'vpk [Vpk],
|
|
||||||
}
|
}
|
||||||
impl<'bsp,'vpk> BspFinder<'bsp,'vpk>{
|
impl MeshLoader<'_,'_>{
|
||||||
pub fn find<'a>(&self,path:&str)->Result<Option<Cow<'a,[u8]>>,vbsp::BspError>
|
pub fn new<'a,'b>(
|
||||||
where
|
bsp:&'a Bsp,
|
||||||
'bsp:'a,
|
deferred_loader:&'b mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
||||||
'vpk:'a,
|
)->MeshLoader<'a,'b>{
|
||||||
{
|
MeshLoader{
|
||||||
// search bsp
|
bsp,
|
||||||
if let Some(data)=self.bsp.pack_get(path)?{
|
deferred_loader,
|
||||||
return Ok(Some(Cow::Owned(data)));
|
|
||||||
}
|
|
||||||
|
|
||||||
//search each vpk
|
|
||||||
for vpk in self.vpks{
|
|
||||||
if let Some(vpk_entry)=vpk.tree_get(path){
|
|
||||||
return Ok(Some(vpk_entry.get()?));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct ModelLoader<'bsp,'vpk,'a>{
|
|
||||||
finder:BspFinder<'bsp,'vpk>,
|
|
||||||
life:core::marker::PhantomData<&'a ()>,
|
|
||||||
}
|
|
||||||
impl ModelLoader<'_,'_,'_>{
|
|
||||||
#[inline]
|
|
||||||
pub const fn new<'bsp,'vpk,'a>(
|
|
||||||
finder:BspFinder<'bsp,'vpk>,
|
|
||||||
)->ModelLoader<'bsp,'vpk,'a>{
|
|
||||||
ModelLoader{
|
|
||||||
finder,
|
|
||||||
life:core::marker::PhantomData,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<'bsp,'vpk,'a> Loader for ModelLoader<'bsp,'vpk,'a>
|
impl<'a> Loader for MeshLoader<'a,'_>{
|
||||||
where
|
|
||||||
'bsp:'a,
|
|
||||||
'vpk:'a,
|
|
||||||
{
|
|
||||||
type Error=MeshError;
|
type Error=MeshError;
|
||||||
type Index=&'a str;
|
type Index=&'a str;
|
||||||
type Resource=vmdl::Model;
|
type Resource=Mesh;
|
||||||
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
|
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
|
||||||
let mdl_path_lower=index.to_lowercase();
|
let mdl_path_lower=index.to_lowercase();
|
||||||
//.mdl, .vvd, .dx90.vtx
|
//.mdl, .vvd, .dx90.vtx
|
||||||
@ -132,44 +101,12 @@ impl<'bsp,'vpk,'a> Loader for ModelLoader<'bsp,'vpk,'a>
|
|||||||
vvd_path.set_extension("vvd");
|
vvd_path.set_extension("vvd");
|
||||||
vtx_path.set_extension("dx90.vtx");
|
vtx_path.set_extension("dx90.vtx");
|
||||||
// TODO: search more packs, possibly using an index of multiple packs
|
// TODO: search more packs, possibly using an index of multiple packs
|
||||||
let mdl=self.finder.find(mdl_path_lower.as_str())?.ok_or(MeshError::MissingMdl)?;
|
let bsp=self.bsp.as_ref();
|
||||||
let vtx=self.finder.find(vtx_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVtx)?;
|
let mdl=bsp.pack.get(mdl_path_lower.as_str())?.ok_or(MeshError::MissingMdl)?;
|
||||||
let vvd=self.finder.find(vvd_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVvd)?;
|
let vtx=bsp.pack.get(vvd_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVtx)?;
|
||||||
Ok(vmdl::Model::from_parts(
|
let vvd=bsp.pack.get(vtx_path.as_os_str().to_str().unwrap())?.ok_or(MeshError::MissingVvd)?;
|
||||||
vmdl::mdl::Mdl::read(mdl.as_ref())?,
|
let model=ModelData{mdl,vtx,vvd};
|
||||||
vmdl::vtx::Vtx::read(vtx.as_ref())?,
|
let mesh=model.convert_mesh(&mut self.deferred_loader)?;
|
||||||
vmdl::vvd::Vvd::read(vvd.as_ref())?,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct MeshLoader<'bsp,'vpk,'load,'a>{
|
|
||||||
finder:BspFinder<'bsp,'vpk>,
|
|
||||||
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
|
||||||
}
|
|
||||||
impl MeshLoader<'_,'_,'_,'_>{
|
|
||||||
#[inline]
|
|
||||||
pub const fn new<'bsp,'vpk,'load,'a>(
|
|
||||||
finder:BspFinder<'bsp,'vpk>,
|
|
||||||
deferred_loader:&'load mut strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader<Cow<'a,str>>,
|
|
||||||
)->MeshLoader<'bsp,'vpk,'load,'a>{
|
|
||||||
MeshLoader{
|
|
||||||
finder,
|
|
||||||
deferred_loader
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl<'bsp,'vpk,'load,'a> Loader for MeshLoader<'bsp,'vpk,'load,'a>
|
|
||||||
where
|
|
||||||
'bsp:'a,
|
|
||||||
'vpk:'a,
|
|
||||||
{
|
|
||||||
type Error=MeshError;
|
|
||||||
type Index=&'a str;
|
|
||||||
type Resource=Mesh;
|
|
||||||
fn load(&mut self,index:Self::Index)->Result<Self::Resource,Self::Error>{
|
|
||||||
let model=ModelLoader::new(self.finder).load(index)?;
|
|
||||||
let mesh=crate::mesh::convert_mesh(model,&mut self.deferred_loader);
|
|
||||||
Ok(mesh)
|
Ok(mesh)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
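The removed-side loader above resolves `.mdl`/`.vtx`/`.vvd` paths through a `BspFinder` that checks the map's own pak lump before falling back to shared VPK archives. As a rough, self-contained sketch of that lookup order only (plain `HashMap`s stand in for the real `Bsp` pack and `Vpk` trees, so none of these names are the actual API):

```rust
use std::collections::HashMap;

/// Stand-in archives: the map's own pack first, then any number of shared packs.
struct Finder<'a>{
	pack:&'a HashMap<String,Vec<u8>>,
	vpks:&'a [HashMap<String,Vec<u8>>],
}

impl Finder<'_>{
	/// Return the first hit, searching the map pack before the shared archives.
	fn find(&self,path:&str)->Option<&[u8]>{
		if let Some(data)=self.pack.get(path){
			return Some(data);
		}
		self.vpks.iter().find_map(|vpk|vpk.get(path).map(Vec::as_slice))
	}
}

fn main(){
	let pack=HashMap::from([("models/a.mdl".to_string(),vec![1u8])]);
	let shared=vec![HashMap::from([("models/b.mdl".to_string(),vec![2u8])])];
	let finder=Finder{pack:&pack,vpks:&shared};
	assert!(finder.find("models/a.mdl").is_some());
	assert!(finder.find("models/b.mdl").is_some());
	assert!(finder.find("models/c.mdl").is_none());
}
```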
@@ -5,74 +5,85 @@ use strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader;
 
 use crate::valve_transform;
 
-fn ingest_vertex(mb:&mut model::MeshBuilder,vertex:&vmdl::vvd::Vertex,color:model::ColorId)->model::VertexId{
-	let pos=mb.acquire_pos_id(valve_transform(vertex.position.into()));
-	let normal=mb.acquire_normal_id(valve_transform(vertex.normal.into()));
-	let tex=mb.acquire_tex_id(glam::Vec2::from_array(vertex.texture_coordinates));
-	mb.acquire_vertex_id(model::IndexedVertex{
-		pos,
-		tex,
-		normal,
-		color,
-	})
+pub struct ModelData{
+	pub mdl:Vec<u8>,
+	pub vtx:Vec<u8>,
+	pub vvd:Vec<u8>,
 }
-pub fn convert_mesh(model:vmdl::Model,deferred_loader:&mut RenderConfigDeferredLoader<Cow<str>>)->model::Mesh{
-	let texture_paths=model.texture_directories();
-	if texture_paths.len()!=1{
-		println!("WARNING: multiple texture paths");
-	}
-	let skin=model.skin_tables().nth(0).unwrap();
-
-	let mut mb=model::MeshBuilder::new();
-
-	let color=mb.acquire_color_id(glam::Vec4::ONE);
-
-	let model_vertices=model.vertices();
-
-	let mut graphics_groups=Vec::new();
-	let mut physics_groups=Vec::new();
-	let polygon_groups=model.meshes().enumerate().map(|(polygon_group_id,mesh)|{
-		let polygon_group_id=model::PolygonGroupId::new(polygon_group_id as u32);
-
-		let render_id=if let (Some(texture_path),Some(texture_name))=(texture_paths.get(0),skin.texture(mesh.material_index())){
-			let mut path=std::path::PathBuf::from(texture_path.as_str());
-			path.push(texture_name);
-			let index=path.as_os_str().to_str().map(|s|Cow::Owned(s.to_owned()));
-			deferred_loader.acquire_render_config_id(index)
-		}else{
-			deferred_loader.acquire_render_config_id(None)
-		};
-
-		graphics_groups.push(model::IndexedGraphicsGroup{
-			render:render_id,
-			groups:vec![polygon_group_id],
-		});
-		physics_groups.push(model::IndexedPhysicsGroup{
-			groups:vec![polygon_group_id],
-		});
-		model::PolygonGroup::PolygonList(model::PolygonList::new(
-			//looking at the code, it would seem that the strips are pre-deindexed into triangle lists when calling this function
-			mesh.vertex_strip_indices().flat_map(|mut strip|{
-				std::iter::from_fn(move ||{
-					match (strip.next(),strip.next(),strip.next()){
-						(Some(v1),Some(v2),Some(v3))=>Some([v1,v2,v3]),
-						//ignore extra vertices, not sure what to do in this case, failing the whole conversion could be appropriate
-						_=>None,
-					}
-				})
-			}).flat_map(|[v1,v2,v3]|{
-				// this should probably be a fatal error :D
-				let v1=model_vertices.get(v1)?;
-				let v2=model_vertices.get(v2)?;
-				let v3=model_vertices.get(v3)?;
-				Some(vec![
-					ingest_vertex(&mut mb,v1,color),
-					ingest_vertex(&mut mb,v2,color),
-					ingest_vertex(&mut mb,v3,color),
-				])
-			}).collect()
+impl ModelData{
+	fn read_model(&self)->Result<vmdl::Model,vmdl::ModelError>{
+		Ok(vmdl::Model::from_parts(
+			vmdl::mdl::Mdl::read(self.mdl.as_ref())?,
+			vmdl::vtx::Vtx::read(self.vtx.as_ref())?,
+			vmdl::vvd::Vvd::read(self.vvd.as_ref())?,
		))
-	}).collect();
-	mb.build(polygon_groups,graphics_groups,physics_groups)
+	}
+	pub fn convert_mesh<'a>(self,deferred_loader:&mut RenderConfigDeferredLoader<Cow<'a,str>>)->Result<model::Mesh,vmdl::ModelError>{
+		let model=self.read_model()?;
+		let texture_paths=model.texture_directories();
+		if texture_paths.len()!=1{
+			println!("WARNING: multiple texture paths");
+		}
+		let skin=model.skin_tables().nth(0).unwrap();
+
+		let mut spam_pos=Vec::with_capacity(model.vertices().len());
+		let mut spam_normal=Vec::with_capacity(model.vertices().len());
+		let mut spam_tex=Vec::with_capacity(model.vertices().len());
+		let mut spam_vertices=Vec::with_capacity(model.vertices().len());
+		for (i,vertex) in model.vertices().iter().enumerate(){
+			spam_pos.push(valve_transform(vertex.position.into()));
+			spam_normal.push(valve_transform(vertex.normal.into()));
+			spam_tex.push(glam::Vec2::from_array(vertex.texture_coordinates));
+			spam_vertices.push(model::IndexedVertex{
+				pos:model::PositionId::new(i as u32),
+				tex:model::TextureCoordinateId::new(i as u32),
+				normal:model::NormalId::new(i as u32),
+				color:model::ColorId::new(0),
+			});
+		}
+		let mut graphics_groups=Vec::new();
+		let mut physics_groups=Vec::new();
+		let polygon_groups=model.meshes().enumerate().map(|(polygon_group_id,mesh)|{
+			let polygon_group_id=model::PolygonGroupId::new(polygon_group_id as u32);
+
+			let render_id=if let (Some(texture_path),Some(texture_name))=(texture_paths.get(0),skin.texture(mesh.material_index())){
+				let mut path=std::path::PathBuf::from(texture_path.as_str());
+				path.push(texture_name);
+				let index=path.as_os_str().to_str().map(|s|Cow::Owned(s.to_owned()));
+				deferred_loader.acquire_render_config_id(index)
+			}else{
+				deferred_loader.acquire_render_config_id(None)
+			};
+
+			graphics_groups.push(model::IndexedGraphicsGroup{
+				render:render_id,
+				groups:vec![polygon_group_id],
+			});
+			physics_groups.push(model::IndexedPhysicsGroup{
+				groups:vec![polygon_group_id],
+			});
+			model::PolygonGroup::PolygonList(model::PolygonList::new(
+				//looking at the code, it would seem that the strips are pre-deindexed into triangle lists when calling this function
+				mesh.vertex_strip_indices().flat_map(|mut strip|
+					std::iter::from_fn(move||{
+						match (strip.next(),strip.next(),strip.next()){
+							(Some(v1),Some(v2),Some(v3))=>Some([v1,v2,v3].map(|vertex_id|model::VertexId::new(vertex_id as u32)).to_vec()),
+							//ignore extra vertices, not sure what to do in this case, failing the whole conversion could be appropriate
+							_=>None,
+						}
+					})
+				).collect()
+			))
+		}).collect();
+		Ok(model::Mesh{
+			unique_pos:spam_pos,
+			unique_normal:spam_normal,
+			unique_tex:spam_tex,
+			unique_color:vec![glam::Vec4::ONE],
+			unique_vertices:spam_vertices,
+			polygon_groups,
+			graphics_groups,
+			physics_groups,
+		})
+	}
 }
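Both versions of the conversion walk `vertex_strip_indices()` and regroup the flat index stream into triangles with `std::iter::from_fn`, silently dropping an incomplete tail. A minimal stand-alone sketch of just that regrouping step (simplified to `usize` indices rather than the real `VertexId` types):

```rust
/// Group a flat index stream into triangles, dropping a trailing partial triple.
fn triangles<I:Iterator<Item=usize>>(mut indices:I)->impl Iterator<Item=[usize;3]>{
	std::iter::from_fn(move||{
		match (indices.next(),indices.next(),indices.next()){
			(Some(a),Some(b),Some(c))=>Some([a,b,c]),
			// an incomplete tail is silently ignored here, like in the loader
			_=>None,
		}
	})
}

fn main(){
	let tris:Vec<_>=triangles([0,1,2,2,3,0,7].into_iter()).collect();
	assert_eq!(tris,vec![[0,1,2],[2,3,0]]);
}
```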
@@ -1,6 +1,6 @@
 [package]
 name = "strafesnet_common"
-version = "0.6.0"
+version = "0.5.2"
 edition = "2021"
 repository = "https://git.itzana.me/StrafesNET/strafe-project"
 license = "MIT OR Apache-2.0"
@@ -12,8 +12,8 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
 [dependencies]
 arrayvec = "0.7.4"
 bitflags = "2.6.0"
-fixed_wide = { version = "0.1.2", path = "../fixed_wide", registry = "strafesnet", features = ["deferred-division","zeroes","wide-mul"] }
-linear_ops = { version = "0.1.0", path = "../linear_ops", registry = "strafesnet", features = ["deferred-division","named-fields"] }
-ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet" }
+fixed_wide = { path = "../fixed_wide", registry = "strafesnet", features = ["deferred-division","zeroes","wide-mul"] }
+linear_ops = { path = "../linear_ops", registry = "strafesnet", features = ["deferred-division","named-fields"] }
+ratio_ops = { path = "../ratio_ops", registry = "strafesnet" }
 glam = "0.29.0"
 id = { version = "0.1.0", registry = "strafesnet" }
@@ -10,27 +10,35 @@ use crate::aabb::Aabb;
 //sort the centerpoints on each axis (3 lists)
 //bv is put into octant based on whether it is upper or lower in each list
 
-pub enum RecursiveContent<N,L>{
-	Branch(Vec<N>),
-	Leaf(L),
+pub enum RecursiveContent<R,T>{
+	Branch(Vec<R>),
+	Leaf(T),
 }
-impl<N,L> RecursiveContent<N,L>{
-	pub fn empty()->Self{
+impl<R,T> Default for RecursiveContent<R,T>{
+	fn default()->Self{
		Self::Branch(Vec::new())
	}
 }
-pub struct BvhNode<L>{
-	content:RecursiveContent<BvhNode<L>,L>,
+pub struct BvhNode<T>{
+	content:RecursiveContent<BvhNode<T>,T>,
	aabb:Aabb,
 }
-impl<L> BvhNode<L>{
-	pub fn empty()->Self{
+impl<T> Default for BvhNode<T>{
+	fn default()->Self{
		Self{
-			content:RecursiveContent::empty(),
+			content:Default::default(),
			aabb:Aabb::default(),
		}
	}
-	pub fn sample_aabb<F:FnMut(&L)>(&self,aabb:&Aabb,f:&mut F){
+}
+pub struct BvhWeightNode<W,T>{
+	content:RecursiveContent<BvhWeightNode<W,T>,T>,
+	weight:W,
+	aabb:Aabb,
+}
+
+impl<T> BvhNode<T>{
+	pub fn the_tester<F:FnMut(&T)>(&self,aabb:&Aabb,f:&mut F){
		match &self.content{
			RecursiveContent::Leaf(model)=>f(model),
			RecursiveContent::Branch(children)=>for child in children{
@@ -39,15 +47,51 @@ impl<L> BvhNode<L>{
				//you're probably not going to spend a lot of time outside the map,
				//so the test is extra work for nothing
				if aabb.intersects(&child.aabb){
-					child.sample_aabb(aabb,f);
+					child.the_tester(aabb,f);
				}
			},
		}
	}
-	pub fn into_inner(self)->(RecursiveContent<BvhNode<L>,L>,Aabb){
-		(self.content,self.aabb)
+	pub fn into_visitor<F:FnMut(T)>(self,f:&mut F){
+		match self.content{
+			RecursiveContent::Leaf(model)=>f(model),
+			RecursiveContent::Branch(children)=>for child in children{
+				child.into_visitor(f)
+			},
+		}
	}
-	pub fn into_visitor<F:FnMut(L)>(self,f:&mut F){
+	pub fn weigh_contents<W:Copy+std::iter::Sum<W>,F:Fn(&T)->W>(self,f:&F)->BvhWeightNode<W,T>{
+		match self.content{
+			RecursiveContent::Leaf(model)=>BvhWeightNode{
+				weight:f(&model),
+				content:RecursiveContent::Leaf(model),
+				aabb:self.aabb,
+			},
+			RecursiveContent::Branch(children)=>{
+				let branch:Vec<BvhWeightNode<W,T>>=children.into_iter().map(|child|
+					child.weigh_contents(f)
+				).collect();
+				BvhWeightNode{
+					weight:branch.iter().map(|node|node.weight).sum(),
+					content:RecursiveContent::Branch(branch),
+					aabb:self.aabb,
+				}
+			},
+		}
+	}
+}
+
+impl <W,T> BvhWeightNode<W,T>{
+	pub const fn weight(&self)->&W{
+		&self.weight
+	}
+	pub const fn aabb(&self)->&Aabb{
+		&self.aabb
+	}
+	pub fn into_content(self)->RecursiveContent<BvhWeightNode<W,T>,T>{
+		self.content
+	}
+	pub fn into_visitor<F:FnMut(T)>(self,f:&mut F){
		match self.content{
			RecursiveContent::Leaf(model)=>f(model),
			RecursiveContent::Branch(children)=>for child in children{
@@ -86,9 +130,9 @@ fn generate_bvh_node<T>(boxen:Vec<(T,Aabb)>,force:bool)->BvhNode<T>{
		sort_y.push((i,center.y));
		sort_z.push((i,center.z));
	}
-	sort_x.sort_by_key(|&(_,c)|c);
-	sort_y.sort_by_key(|&(_,c)|c);
-	sort_z.sort_by_key(|&(_,c)|c);
+	sort_x.sort_by(|tup0,tup1|tup0.1.cmp(&tup1.1));
+	sort_y.sort_by(|tup0,tup1|tup0.1.cmp(&tup1.1));
+	sort_z.sort_by(|tup0,tup1|tup0.1.cmp(&tup1.1));
	let h=n/2;
	let median_x=sort_x[h].1;
	let median_y=sort_y[h].1;
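The added `weigh_contents` folds a weight up the tree: leaves are weighed directly and branches sum their children. A tiny self-contained sketch of the same bottom-up fold on a simplified recursive enum (not the real `BvhNode`):

```rust
enum Node<T>{
	Leaf(T),
	Branch(Vec<Node<T>>),
}

/// Sum a per-leaf weight over the whole tree, the same fold `weigh_contents` performs.
fn weigh<T,W:std::iter::Sum<W>,F:Fn(&T)->W>(node:&Node<T>,f:&F)->W{
	match node{
		Node::Leaf(value)=>f(value),
		Node::Branch(children)=>children.iter().map(|child|weigh(child,f)).sum(),
	}
}

fn main(){
	let tree=Node::Branch(vec![Node::Leaf(2u32),Node::Branch(vec![Node::Leaf(3),Node::Leaf(5)])]);
	assert_eq!(weigh(&tree,&|&v|v),10);
}
```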
@@ -171,7 +171,4 @@ impl CollisionAttributes{
	pub fn contact_default()->Self{
		Self::Contact(ContactAttributes::default())
	}
-	pub fn intersect_default()->Self{
-		Self::Intersect(IntersectAttributes::default())
-	}
 }
@@ -1,11 +1,13 @@
+use crate::integer::Time;
+
 #[derive(Clone,Debug)]
 pub struct TimedInstruction<I,T>{
-	pub time:T,
+	pub time:Time<T>,
	pub instruction:I,
 }
 impl<I,T> TimedInstruction<I,T>{
	#[inline]
-	pub fn set_time<T2>(self,new_time:T2)->TimedInstruction<I,T2>{
+	pub fn set_time<TimeInner>(self,new_time:Time<TimeInner>)->TimedInstruction<I,TimeInner>{
		TimedInstruction{
			time:new_time,
			instruction:self.instruction,
@@ -15,21 +17,21 @@ impl<I,T> TimedInstruction<I,T>{
 
 /// Ensure all emitted instructions are processed before consuming external instructions
 pub trait InstructionEmitter<I>{
-	type Time;
-	fn next_instruction(&self,time_limit:Self::Time)->Option<TimedInstruction<I,Self::Time>>;
+	type TimeInner;
+	fn next_instruction(&self,time_limit:Time<Self::TimeInner>)->Option<TimedInstruction<I,Self::TimeInner>>;
 }
 /// Apply an atomic state update
 pub trait InstructionConsumer<I>{
-	type Time;
-	fn process_instruction(&mut self,instruction:TimedInstruction<I,Self::Time>);
+	type TimeInner;
+	fn process_instruction(&mut self,instruction:TimedInstruction<I,Self::TimeInner>);
 }
 /// If the object produces its own instructions, allow exhaustively feeding them back in
-pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>
+pub trait InstructionFeedback<I,T>:InstructionEmitter<I,TimeInner=T>+InstructionConsumer<I,TimeInner=T>
	where
-		T:Copy,
+		Time<T>:Copy,
 {
	#[inline]
-	fn process_exhaustive(&mut self,time_limit:T){
+	fn process_exhaustive(&mut self,time_limit:Time<T>){
		while let Some(instruction)=self.next_instruction(time_limit){
			self.process_instruction(instruction);
		}
@@ -37,24 +39,39 @@ pub trait InstructionFeedback<I,T>:InstructionEmitter<I,Time=T>+InstructionConsu
 }
 impl<I,T,X> InstructionFeedback<I,T> for X
	where
-		T:Copy,
-		X:InstructionEmitter<I,Time=T>+InstructionConsumer<I,Time=T>,
+		Time<T>:Copy,
+		X:InstructionEmitter<I,TimeInner=T>+InstructionConsumer<I,TimeInner=T>,
 {}
 
 //PROPER PRIVATE FIELDS!!!
 pub struct InstructionCollector<I,T>{
-	time:T,
+	time:Time<T>,
	instruction:Option<I>,
 }
-impl<I,T> InstructionCollector<I,T>{
+impl<I,T> InstructionCollector<I,T>
+	where Time<T>:Copy+PartialOrd,
+{
	#[inline]
-	pub const fn new(time:T)->Self{
+	pub const fn new(time:Time<T>)->Self{
		Self{
			time,
			instruction:None
		}
	}
	#[inline]
+	pub const fn time(&self)->Time<T>{
+		self.time
+	}
+	#[inline]
+	pub fn collect(&mut self,instruction:Option<TimedInstruction<I,T>>){
+		if let Some(ins)=instruction{
+			if ins.time<self.time{
+				self.time=ins.time;
+				self.instruction=Some(ins.instruction);
+			}
+		}
+	}
+	#[inline]
	pub fn take(self)->Option<TimedInstruction<I,T>>{
		//STEAL INSTRUCTION AND DESTROY INSTRUCTIONCOLLECTOR
		self.instruction.map(|instruction|TimedInstruction{
@@ -63,20 +80,3 @@ impl<I,T> InstructionCollector<I,T>{
		})
	}
 }
-impl<I,T:Copy> InstructionCollector<I,T>{
-	#[inline]
-	pub const fn time(&self)->T{
-		self.time
-	}
-}
-impl<I,T:PartialOrd> InstructionCollector<I,T>{
-	#[inline]
-	pub fn collect(&mut self,instruction:Option<TimedInstruction<I,T>>){
-		if let Some(ins)=instruction{
-			if ins.time<self.time{
-				self.time=ins.time;
-				self.instruction=Some(ins.instruction);
-			}
-		}
-	}
-}
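`InstructionCollector::collect` keeps whichever candidate instruction arrives earliest, starting from the time limit passed to `new`, and `take` hands back the winner. A minimal stand-in with plain `u64` timestamps instead of `Time<T>`:

```rust
/// Minimal stand-in for the collector: keep the earliest instruction before `time`.
struct Collector<I>{
	time:u64,
	instruction:Option<I>,
}

impl<I> Collector<I>{
	fn new(time:u64)->Self{
		Self{time,instruction:None}
	}
	fn collect(&mut self,candidate:Option<(u64,I)>){
		if let Some((time,instruction))=candidate{
			if time<self.time{
				self.time=time;
				self.instruction=Some(instruction);
			}
		}
	}
	fn take(self)->Option<(u64,I)>{
		self.instruction.map(|instruction|(self.time,instruction))
	}
}

fn main(){
	let mut collector=Collector::new(100);
	collector.collect(Some((70,"jump")));
	collector.collect(Some((90,"land")));// later than 70, so it is ignored
	collector.collect(None);
	assert_eq!(collector.take(),Some((70,"jump")));
}
```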
@@ -63,12 +63,6 @@ impl<T> From<Planar64> for Time<T>{
		Self::raw((value*Planar64::raw(1_000_000_000)).fix_1().to_raw())
	}
 }
-impl<T> From<Time<T>> for Ratio<Planar64,Planar64>{
-	#[inline]
-	fn from(value:Time<T>)->Self{
-		value.to_ratio()
-	}
-}
 impl<T,Num,Den,N1,T1> From<Ratio<Num,Den>> for Time<T>
	where
		Num:core::ops::Mul<Planar64,Output=N1>,
@@ -654,19 +648,11 @@ pub struct Planar64Affine3{
	pub translation:Planar64Vec3,
 }
 impl Planar64Affine3{
-	pub const IDENTITY:Self=Self::new(mat3::identity(),vec3::ZERO);
	#[inline]
	pub const fn new(matrix3:Planar64Mat3,translation:Planar64Vec3)->Self{
		Self{matrix3,translation}
	}
	#[inline]
-	pub const fn from_translation(translation:Planar64Vec3)->Self{
-		Self{
-			matrix3:mat3::identity(),
-			translation,
-		}
-	}
-	#[inline]
	pub fn transform_point3(&self,point:Planar64Vec3)->vec3::Vector3<Fixed<2,64>>{
		self.translation.fix_2()+self.matrix3*point
	}
@@ -1,5 +1,3 @@
-use std::collections::HashMap;
-
 use crate::integer::{Planar64Vec3,Planar64Affine3};
 use crate::gameplay_attributes;
 
@@ -125,87 +123,6 @@ pub struct Mesh{
	pub physics_groups:Vec<IndexedPhysicsGroup>,
 }
 
-#[derive(Default)]
-pub struct MeshBuilder{
-	unique_pos:Vec<Planar64Vec3>,//Unit32Vec3
-	unique_normal:Vec<Planar64Vec3>,//Unit32Vec3
-	unique_tex:Vec<TextureCoordinate>,
-	unique_color:Vec<Color4>,
-	unique_vertices:Vec<IndexedVertex>,
-	pos_id_from:HashMap<Planar64Vec3,PositionId>,//Unit32Vec3
-	normal_id_from:HashMap<Planar64Vec3,NormalId>,//Unit32Vec3
-	tex_id_from:HashMap<[u32;2],TextureCoordinateId>,
-	color_id_from:HashMap<[u32;4],ColorId>,
-	vertex_id_from:HashMap<IndexedVertex,VertexId>,
-}
-impl MeshBuilder{
-	pub fn new()->Self{
-		Self::default()
-	}
-	pub fn build(
-		self,
-		polygon_groups:Vec<PolygonGroup>,
-		graphics_groups:Vec<IndexedGraphicsGroup>,
-		physics_groups:Vec<IndexedPhysicsGroup>,
-	)->Mesh{
-		let MeshBuilder{
-			unique_pos,
-			unique_normal,
-			unique_tex,
-			unique_color,
-			unique_vertices,
-			..
-		}=self;
-		Mesh{
-			unique_pos,
-			unique_normal,
-			unique_tex,
-			unique_color,
-			unique_vertices,
-			polygon_groups,
-			graphics_groups,
-			physics_groups,
-		}
-	}
-	pub fn acquire_pos_id(&mut self,pos:Planar64Vec3)->PositionId{
-		*self.pos_id_from.entry(pos).or_insert_with(||{
-			let pos_id=PositionId::new(self.unique_pos.len() as u32);
-			self.unique_pos.push(pos);
-			pos_id
-		})
-	}
-	pub fn acquire_normal_id(&mut self,normal:Planar64Vec3)->NormalId{
-		*self.normal_id_from.entry(normal).or_insert_with(||{
-			let normal_id=NormalId::new(self.unique_normal.len() as u32);
-			self.unique_normal.push(normal);
-			normal_id
-		})
-	}
-	pub fn acquire_tex_id(&mut self,tex:TextureCoordinate)->TextureCoordinateId{
-		let h=tex.to_array().map(f32::to_bits);
-		*self.tex_id_from.entry(h).or_insert_with(||{
-			let tex_id=TextureCoordinateId::new(self.unique_tex.len() as u32);
-			self.unique_tex.push(tex);
-			tex_id
-		})
-	}
-	pub fn acquire_color_id(&mut self,color:Color4)->ColorId{
-		let h=color.to_array().map(f32::to_bits);
-		*self.color_id_from.entry(h).or_insert_with(||{
-			let color_id=ColorId::new(self.unique_color.len() as u32);
-			self.unique_color.push(color);
-			color_id
-		})
-	}
-	pub fn acquire_vertex_id(&mut self,vertex:IndexedVertex)->VertexId{
-		*self.vertex_id_from.entry(vertex.clone()).or_insert_with(||{
-			let vertex_id=VertexId::new(self.unique_vertices.len() as u32);
-			self.unique_vertices.push(vertex);
-			vertex_id
-		})
-	}
-}
-
 #[derive(Debug,Clone,Copy,Hash,id::Id,Eq,PartialEq)]
 pub struct ModelId(u32);
 pub struct Model{
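The removed `MeshBuilder` interns each attribute through `HashMap::entry(..).or_insert_with(..)` so repeated values reuse their first id. A generic sketch of that interning pattern (it relies on the 2021 edition's disjoint closure captures, which the crate's Cargo.toml declares):

```rust
use std::collections::HashMap;
use std::hash::Hash;

/// Intern values into a growing list, returning a stable index for repeats.
struct Interner<T>{
	unique:Vec<T>,
	id_from:HashMap<T,u32>,
}

impl<T:Clone+Eq+Hash> Interner<T>{
	fn new()->Self{
		Self{unique:Vec::new(),id_from:HashMap::new()}
	}
	fn acquire(&mut self,value:T)->u32{
		// edition 2021 keeps the `id_from` and `unique` borrows disjoint
		*self.id_from.entry(value.clone()).or_insert_with(||{
			let id=self.unique.len() as u32;
			self.unique.push(value);
			id
		})
	}
}

fn main(){
	let mut positions=Interner::new();
	assert_eq!(positions.acquire([0,0,0]),0);
	assert_eq!(positions.acquire([1,0,0]),1);
	assert_eq!(positions.acquire([0,0,0]),0);// deduplicated
	assert_eq!(positions.unique.len(),2);
}
```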
@@ -10,4 +10,4 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
+strafesnet_common = { path = "../common", registry = "strafesnet" }
@@ -4,7 +4,6 @@ use crate::mesh::Meshes;
 use crate::texture::{RenderConfigs,Texture};
 use strafesnet_common::model::{Mesh,MeshId,RenderConfig,RenderConfigId,TextureId};
 
-#[derive(Clone,Copy,Debug)]
 pub enum LoadFailureMode{
	DefaultToNone,
	Fatal,
@@ -42,10 +41,7 @@ impl<H:core::hash::Hash+Eq> RenderConfigDeferredLoader<H>{
			render_id
		})
	}
-	pub fn into_indices(self)->impl Iterator<Item=H>{
-		self.render_config_id_from_asset_id.into_keys().flatten()
-	}
-	pub fn into_render_configs<L:Loader<Resource=Texture,Index=H>>(mut self,loader:&mut L,failure_mode:LoadFailureMode)->Result<RenderConfigs,L::Error>{
+	pub fn into_render_configs<L:Loader<Index=H,Resource=Texture>>(mut self,loader:&mut L,failure_mode:LoadFailureMode)->Result<RenderConfigs,L::Error>{
		let mut sorted_textures=vec![None;self.texture_count as usize];
		for (index_option,render_config_id) in self.render_config_id_from_asset_id{
			let render_config=&mut self.render_configs[render_config_id.get() as usize];
@@ -57,7 +53,7 @@ impl<H:core::hash::Hash+Eq> RenderConfigDeferredLoader<H>{
					Ok(texture)=>Some(texture),
					Err(e)=>{
						render_config.texture=None;
-						println!("Error loading texture: {e}");
+						println!("Error loading resource: {e}");
						None
					},
				},
@@ -90,10 +86,7 @@ impl<H:core::hash::Hash+Eq> MeshDeferredLoader<H>{
		let mesh_id=MeshId::new(self.mesh_id_from_asset_id.len() as u32);
		*self.mesh_id_from_asset_id.entry(index).or_insert(mesh_id)
	}
-	pub fn into_indices(self)->impl Iterator<Item=H>{
-		self.mesh_id_from_asset_id.into_keys()
-	}
-	pub fn into_meshes<L:Loader<Resource=Mesh,Index=H>>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes,L::Error>{
+	pub fn into_meshes<L:Loader<Index=H,Resource=Mesh>>(self,loader:&mut L,failure_mode:LoadFailureMode)->Result<Meshes,L::Error>{
		let mut mesh_list=vec![None;self.mesh_id_from_asset_id.len()];
		for (index,mesh_id) in self.mesh_id_from_asset_id{
			let resource_result=loader.load(index);
@@ -102,7 +95,7 @@ impl<H:core::hash::Hash+Eq> MeshDeferredLoader<H>{
				LoadFailureMode::DefaultToNone=>match resource_result{
					Ok(mesh)=>Some(mesh),
					Err(e)=>{
-						println!("Error loading mesh: {e}");
+						println!("Error loading resource: {e}");
						None
					},
				},
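Both deferred loaders follow the same two-phase shape: hand out ids while the map is being converted, then resolve every referenced resource in one pass while honouring the `LoadFailureMode`. A simplified, hypothetical sketch of that second pass only (the real API returns `Meshes`/`RenderConfigs`, not a plain `Vec`):

```rust
use std::collections::HashMap;

#[derive(Clone,Copy)]
enum FailureMode{
	DefaultToNone,
	Fatal,
}

/// Phase 2: load everything that was referenced during conversion.
fn load_all(
	id_from_name:HashMap<String,u32>,
	loader:impl Fn(&str)->Result<Vec<u8>,String>,
	failure_mode:FailureMode,
)->Result<Vec<Option<Vec<u8>>>,String>{
	let mut resources=vec![None;id_from_name.len()];
	for (name,id) in id_from_name{
		let result=loader(&name);
		resources[id as usize]=match failure_mode{
			FailureMode::DefaultToNone=>result.ok(),
			FailureMode::Fatal=>Some(result?),
		};
	}
	Ok(resources)
}

fn main(){
	let ids=HashMap::from([("grass".to_string(),0u32),("missing".to_string(),1)]);
	let loader=|name:&str|if name=="grass"{Ok(vec![1u8,2,3])}else{Err("not found".to_string())};
	let loaded=load_all(ids,loader,FailureMode::DefaultToNone).unwrap();
	assert!(loaded[0].is_some()&&loaded[1].is_none());
}
```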
@@ -1,6 +1,6 @@
 [package]
 name = "fixed_wide"
-version = "0.1.2"
+version = "0.1.1"
 edition = "2021"
 repository = "https://git.itzana.me/StrafesNET/strafe-project"
 license = "MIT OR Apache-2.0"
@@ -17,4 +17,4 @@ zeroes=["dep:arrayvec"]
 bnum = "0.12.0"
 arrayvec = { version = "0.7.6", optional = true }
 paste = "1.0.15"
-ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
+ratio_ops = { path = "../ratio_ops", registry = "strafesnet", optional = true }
@@ -14,8 +14,8 @@ fixed-wide=["dep:fixed_wide","dep:paste"]
 deferred-division=["dep:ratio_ops"]
 
 [dependencies]
-ratio_ops = { version = "0.1.0", path = "../ratio_ops", registry = "strafesnet", optional = true }
-fixed_wide = { version = "0.1.2", path = "../fixed_wide", registry = "strafesnet", optional = true }
+ratio_ops = { path = "../ratio_ops", registry = "strafesnet", optional = true }
+fixed_wide = { path = "../fixed_wide", registry = "strafesnet", optional = true }
 paste = { version = "1.0.15", optional = true }
 
 [dev-dependencies]
@@ -1,6 +1,5 @@
 use crate::vector::Vector;
 
-#[repr(transparent)]
 #[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
 pub struct Matrix<const X:usize,const Y:usize,T>{
	pub(crate) array:[[T;Y];X],
@@ -3,7 +3,6 @@
 /// v.x += v.z;
 /// println!("v.x={}",v.x);
 
-#[repr(transparent)]
 #[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
 pub struct Vector<const N:usize,T>{
	pub(crate) array:[T;N],
@@ -1,6 +1,6 @@
 [package]
 name = "strafesnet_rbx_loader"
-version = "0.6.0"
+version = "0.5.2"
 edition = "2021"
 repository = "https://git.itzana.me/StrafesNET/strafe-project"
 license = "MIT OR Apache-2.0"
@@ -15,10 +15,10 @@ glam = "0.29.0"
 lazy-regex = "3.1.0"
 rbx_binary = { version = "0.7.4", registry = "strafesnet" }
 rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
-rbx_mesh = "0.3.1"
+rbx_mesh = "0.2.0"
 rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
 rbx_xml = { version = "0.13.3", registry = "strafesnet" }
-rbxassetid = { version = "0.1.0", path = "../rbxassetid", registry = "strafesnet" }
-roblox_emulator = { version = "0.4.7", path = "../roblox_emulator", registry = "strafesnet" }
-strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
-strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader", registry = "strafesnet" }
+rbxassetid = { version = "0.1.0", path = "../rbxassetid" }
+roblox_emulator = { path = "../roblox_emulator", registry = "strafesnet" }
+strafesnet_common = { path = "../common", registry = "strafesnet" }
+strafesnet_deferred_loader = { version = "0.5.0", path = "../deferred_loader" }
lib/rbx_loader/src/directories.rs (new file)
@@ -0,0 +1,6 @@
+// TODO: make a directories structure like strafe client
+struct Directories{
+	textures:PathBuf,
+	meshes:PathBuf,
+	unions:PathBuf,
+}
@@ -1,6 +1,5 @@
 use std::io::Read;
 use rbx_dom_weak::WeakDom;
-use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
 
 mod rbx;
 mod mesh;
@@ -33,9 +32,6 @@ impl Model{
		let services=context.convert_into_place();
		Place{dom,services}
	}
-	pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
-		to_snf(self,failure_mode)
-	}
 }
 impl AsRef<WeakDom> for Model{
	fn as_ref(&self)->&WeakDom{
@@ -67,9 +63,6 @@ impl Place{
			}
		}
	}
-	pub fn to_snf(&self,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
-		to_snf(self,failure_mode)
-	}
 }
 impl AsRef<WeakDom> for Place{
	fn as_ref(&self)->&WeakDom{
@@ -101,49 +94,6 @@ pub fn read<R:Read>(input:R)->Result<Model,ReadError>{
	}
 }
 
-#[derive(Debug)]
-pub enum LoadError{
-	Texture(loader::TextureError),
-	Mesh(loader::MeshError),
-}
-impl std::fmt::Display for LoadError{
-	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
-		write!(f,"{self:?}")
-	}
-}
-impl std::error::Error for LoadError{}
-impl From<loader::TextureError> for LoadError{
-	fn from(value:loader::TextureError)->Self{
-		Self::Texture(value)
-	}
-}
-impl From<loader::MeshError> for LoadError{
-	fn from(value:loader::MeshError)->Self{
-		Self::Mesh(value)
-	}
-}
-
-fn to_snf(dom:impl AsRef<WeakDom>,failure_mode:LoadFailureMode)->Result<strafesnet_common::map::CompleteMap,LoadError>{
-	let dom=dom.as_ref();
-
-	let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
-	let mut mesh_deferred_loader=MeshDeferredLoader::new();
-
-	let map_step1=rbx::convert(
-		dom,
-		&mut texture_deferred_loader,
-		&mut mesh_deferred_loader,
-	);
-
-	let mut mesh_loader=loader::MeshLoader::new();
-	let meshpart_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,failure_mode).map_err(LoadError::Mesh)?;
-
-	let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(meshpart_meshes);
-
-	let mut texture_loader=loader::TextureLoader::new();
-	let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,failure_mode).map_err(LoadError::Texture)?;
-
-	let map=map_step2.add_render_configs_and_textures(render_configs);
-
-	Ok(map)
-}
+//ConvertError
+pub use rbx::convert;
@@ -4,7 +4,6 @@ use strafesnet_common::model::Mesh;
 use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
 
 use crate::data::RobloxMeshBytes;
-use crate::rbx::RobloxFaceTextureDescription;
 
 fn read_entire_file(path:impl AsRef<std::path::Path>)->Result<Vec<u8>,std::io::Error>{
	let mut file=std::fs::File::open(path)?;
@@ -103,8 +102,6 @@ pub enum MeshType<'a>{
	Union{
		mesh_data:&'a [u8],
		physics_data:&'a [u8],
-		size_float_bits:[u32;3],
-		part_texture_description:[Option<RobloxFaceTextureDescription>;6],
	},
 }
 #[derive(Hash,Eq,PartialEq)]
@@ -123,16 +120,9 @@ impl MeshIndex<'_>{
		content:&'a str,
		mesh_data:&'a [u8],
		physics_data:&'a [u8],
-		size:&rbx_dom_weak::types::Vector3,
-		part_texture_description:crate::rbx::RobloxPartDescription,
	)->MeshIndex<'a>{
		MeshIndex{
-			mesh_type:MeshType::Union{
-				mesh_data,
-				physics_data,
-				size_float_bits:[size.x.to_bits(),size.y.to_bits(),size.z.to_bits()],
-				part_texture_description,
-			},
+			mesh_type:MeshType::Union{mesh_data,physics_data},
			content,
		}
	}
@@ -156,9 +146,8 @@ impl<'a> Loader for MeshLoader<'a>{
				let data=read_entire_file(file_name)?;
				crate::mesh::convert(RobloxMeshBytes::new(data))?
			},
-			MeshType::Union{mut physics_data,mut mesh_data,size_float_bits,part_texture_description}=>{
+			MeshType::Union{mut physics_data,mut mesh_data}=>{
				// decode asset
-				let size=glam::Vec3::from_array(size_float_bits.map(f32::from_bits));
				if !index.content.is_empty()&&(physics_data.is_empty()||mesh_data.is_empty()){
					let RobloxAssetId(asset_id)=index.content.parse()?;
					let file_name=format!("unions/{}",asset_id);
@@ -180,9 +169,9 @@ impl<'a> Loader for MeshLoader<'a>{
						mesh_data=data.as_ref();
					}
				}
-				crate::union::convert(physics_data,mesh_data,size,part_texture_description)?
+				crate::union::convert(physics_data,mesh_data)?
			}else{
-				crate::union::convert(physics_data,mesh_data,size,part_texture_description)?
+				crate::union::convert(physics_data,mesh_data)?
			}
		},
	};
@@ -42,6 +42,50 @@ const CUBE_DEFAULT_NORMALS:[Planar64Vec3;6]=[
	vec3::int( 0,-1, 0),//CubeFace::Bottom
	vec3::int( 0, 0,-1),//CubeFace::Front
 ];
+const CUBE_DEFAULT_POLYS:[[[u32;3];4];6]=[
+	// right (1, 0, 0)
+	[
+		[6,2,0],//[vertex,tex,norm]
+		[5,1,0],
+		[2,0,0],
+		[1,3,0],
+	],
+	// top (0, 1, 0)
+	[
+		[5,3,1],
+		[4,2,1],
+		[3,1,1],
+		[2,0,1],
+	],
+	// back (0, 0, 1)
+	[
+		[0,3,2],
+		[1,2,2],
+		[2,1,2],
+		[3,0,2],
+	],
+	// left (-1, 0, 0)
+	[
+		[0,2,3],
+		[3,1,3],
+		[4,0,3],
+		[7,3,3],
+	],
+	// bottom (0,-1, 0)
+	[
+		[1,1,4],
+		[0,0,4],
+		[7,3,4],
+		[6,2,4],
+	],
+	// front (0, 0,-1)
+	[
+		[4,1,5],
+		[5,0,5],
+		[6,3,5],
+		[7,2,5],
+	],
+];
 
 #[derive(Hash,PartialEq,Eq)]
 pub enum WedgeFace{
@@ -88,8 +132,8 @@ impl CubeFaceDescription{
	pub fn insert(&mut self,index:CubeFace,value:FaceDescription){
		self.0[index as usize]=Some(value);
	}
-	pub fn pairs(self)->impl Iterator<Item=(usize,FaceDescription)>{
-		self.0.into_iter().enumerate().filter_map(|(i,v)|v.map(|u|(i,u)))
+	pub fn pairs(self)->std::iter::FilterMap<std::iter::Enumerate<std::array::IntoIter<Option<FaceDescription>,6>>,impl FnMut((usize,Option<FaceDescription>))->Option<(usize,FaceDescription)>>{
+		self.0.into_iter().enumerate().filter_map(|v|v.1.map(|u|(v.0,u)))
	}
 }
 pub fn unit_cube(render:RenderConfigId)->Mesh{
@@ -157,50 +201,6 @@ impl FaceDescription{
	}
 }
 pub fn generate_partial_unit_cube(face_descriptions:CubeFaceDescription)->Mesh{
-	const CUBE_DEFAULT_POLYS:[[[u32;3];4];6]=[
-		// right (1, 0, 0)
-		[
-			[6,2,0],//[vertex,tex,norm]
-			[5,1,0],
-			[2,0,0],
-			[1,3,0],
-		],
-		// top (0, 1, 0)
-		[
-			[5,3,1],
-			[4,2,1],
-			[3,1,1],
-			[2,0,1],
-		],
-		// back (0, 0, 1)
-		[
-			[0,3,2],
-			[1,2,2],
-			[2,1,2],
-			[3,0,2],
-		],
-		// left (-1, 0, 0)
-		[
-			[0,2,3],
-			[3,1,3],
-			[4,0,3],
-			[7,3,3],
-		],
-		// bottom (0,-1, 0)
-		[
-			[1,1,4],
-			[0,0,4],
-			[7,3,4],
-			[6,2,4],
-		],
-		// front (0, 0,-1)
-		[
-			[4,1,5],
-			[5,0,5],
-			[6,3,5],
-			[7,2,5],
-		],
-	];
	let mut generated_pos=Vec::new();
	let mut generated_tex=Vec::new();
	let mut generated_normal=Vec::new();
@@ -279,35 +279,35 @@ pub fn generate_partial_unit_cube(face_descriptions:CubeFaceDescription)->Mesh{
 }
 //don't think too hard about the copy paste because this is all going into the map tool eventually...
 pub fn generate_partial_unit_wedge(face_descriptions:WedgeFaceDescription)->Mesh{
-	const WEDGE_DEFAULT_POLYS:[&[[u32;3]];5]=[
+	let wedge_default_polys=[
		// right (1, 0, 0)
-		&[
+		vec![
			[6,2,0],//[vertex,tex,norm]
			[2,0,0],
			[1,3,0],
		],
		// FrontTop (0, 1, -1)
-		&[
+		vec![
			[3,1,1],
			[2,0,1],
			[6,3,1],
			[7,2,1],
		],
		// back (0, 0, 1)
-		&[
+		vec![
			[0,3,2],
			[1,2,2],
			[2,1,2],
			[3,0,2],
		],
		// left (-1, 0, 0)
-		&[
+		vec![
			[0,2,3],
			[3,1,3],
			[7,3,3],
		],
		// bottom (0,-1, 0)
-		&[
+		vec![
			[1,1,4],
			[0,0,4],
			[7,3,4],
@@ -351,7 +351,7 @@ pub fn generate_partial_unit_wedge(face_descriptions:WedgeFaceDescription)->Mesh
		//push vertices as they are needed
		let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
		polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(vec![
-			WEDGE_DEFAULT_POLYS[face_id].iter().map(|tup|{
+			wedge_default_polys[face_id].iter().map(|tup|{
				let pos=CUBE_DEFAULT_VERTICES[tup[0] as usize];
				let pos_index=if let Some(pos_index)=generated_pos.iter().position(|&p|p==pos){
					pos_index
@@ -392,34 +392,34 @@ pub fn generate_partial_unit_wedge(face_descriptions:WedgeFaceDescription)->Mesh
 }
 
 pub fn generate_partial_unit_cornerwedge(face_descriptions:CornerWedgeFaceDescription)->Mesh{
-	const CORNERWEDGE_DEFAULT_POLYS:[&[[u32;3]];5]=[
+	let cornerwedge_default_polys=[
		// right (1, 0, 0)
-		&[
+		vec![
			[6,2,0],//[vertex,tex,norm]
			[5,1,0],
			[1,3,0],
		],
		// BackTop (0, 1, 1)
-		&[
+		vec![
			[5,3,1],
			[0,1,1],
			[1,0,1],
		],
		// LeftTop (-1, 1, 0)
-		&[
+		vec![
			[5,3,2],
			[7,2,2],
			[0,1,2],
		],
		// bottom (0,-1, 0)
-		&[
+		vec![
			[1,1,3],
			[0,0,3],
			[7,3,3],
			[6,2,3],
		],
		// front (0, 0,-1)
-		&[
+		vec![
			[5,0,4],
			[6,3,4],
			[7,2,4],
@@ -462,7 +462,7 @@ pub fn generate_partial_unit_cornerwedge(face_descriptions:CornerWedgeFaceDescri
		//push vertices as they are needed
		let group_id=PolygonGroupId::new(polygon_groups.len() as u32);
		polygon_groups.push(PolygonGroup::PolygonList(PolygonList::new(vec![
-			CORNERWEDGE_DEFAULT_POLYS[face_id].iter().map(|tup|{
+			cornerwedge_default_polys[face_id].iter().map(|tup|{
				let pos=CUBE_DEFAULT_VERTICES[tup[0] as usize];
				let pos_index=if let Some(pos_index)=generated_pos.iter().position(|&p|p==pos){
					pos_index
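Each face above is a list of `[vertex,tex,norm]` index triples, and the generators deduplicate positions on the fly with `iter().position()` before pushing new ones. A small sketch of that push-if-missing step, using integer coordinates in place of `Planar64Vec3`:

```rust
/// Return the index of `value`, pushing it first if it has not been seen yet.
fn acquire<T:PartialEq>(generated:&mut Vec<T>,value:T)->usize{
	if let Some(index)=generated.iter().position(|existing|*existing==value){
		index
	}else{
		generated.push(value);
		generated.len()-1
	}
}

fn main(){
	// a quad referenced by [vertex,tex,norm] triples into a unit-cube corner table
	let quad:[[usize;3];4]=[[6,2,0],[5,1,0],[2,0,0],[1,3,0]];
	let vertices=[[0,0,0],[1,0,0],[1,1,0],[0,1,0],[0,0,1],[1,0,1],[1,1,1],[0,1,1]];
	let mut generated_pos=Vec::new();
	let indices:Vec<usize>=quad.iter().map(|tup|acquire(&mut generated_pos,vertices[tup[0]])).collect();
	assert_eq!(indices,vec![0,1,2,3]);
	assert_eq!(generated_pos.len(),4);
}
```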
@@ -1,7 +1,6 @@
 use std::collections::HashMap;
 use crate::loader::MeshIndex;
 use crate::primitives;
-use strafesnet_common::aabb::Aabb;
 use strafesnet_common::map;
 use strafesnet_common::model;
 use strafesnet_common::gameplay_modes;
@@ -346,103 +345,58 @@ fn get_attributes(name:&str,can_collide:bool,velocity:Planar64Vec3,model_id:mode
	}
 }
 
-#[derive(Clone,Copy)]
-pub struct RobloxTextureTransform{
-	offset_studs_u:f32,
-	offset_studs_v:f32,
-	studs_per_tile_u:f32,
-	studs_per_tile_v:f32,
-	size_u:f32,
-	size_v:f32,
+#[derive(Clone,Copy,PartialEq)]
+struct RobloxTextureTransform{
+	offset_u:f32,
+	offset_v:f32,
+	scale_u:f32,
+	scale_v:f32,
 }
-#[derive(Clone,Copy,Hash,Eq,PartialEq)]
-pub struct RobloxTextureTransformBits{
-	offset_studs_u:u32,
-	offset_studs_v:u32,
-	studs_per_tile_u:u32,
-	studs_per_tile_v:u32,
-	size_u:u32,
-	size_v:u32,
+impl std::cmp::Eq for RobloxTextureTransform{}//????
+impl std::default::Default for RobloxTextureTransform{
+	fn default()->Self{
+		Self{offset_u:0.0,offset_v:0.0,scale_u:1.0,scale_v:1.0}
+	}
 }
-impl RobloxTextureTransform{
-	fn identity()->Self{
-		Self{
-			offset_studs_u:0.0,
-			offset_studs_v:0.0,
-			studs_per_tile_u:1.0,
-			studs_per_tile_v:1.0,
-			size_u:1.0,
-			size_v:1.0,
-		}
-	}
-	pub fn to_bits(self)->RobloxTextureTransformBits{
-		RobloxTextureTransformBits{
-			offset_studs_u:self.offset_studs_u.to_bits(),
-			offset_studs_v:self.offset_studs_v.to_bits(),
-			studs_per_tile_u:self.studs_per_tile_u.to_bits(),
-			studs_per_tile_v:self.studs_per_tile_v.to_bits(),
-			size_u:self.size_u.to_bits(),
-			size_v:self.size_v.to_bits(),
-		}
-	}
-	pub fn affine(&self)->glam::Affine2{
-		glam::Affine2::from_translation(
-			glam::vec2(self.offset_studs_u/self.studs_per_tile_u,self.offset_studs_v/self.studs_per_tile_v)
-		)
-		*glam::Affine2::from_scale(
-			glam::vec2(self.size_u/self.studs_per_tile_u,self.size_v/self.studs_per_tile_v)
-		)
-	}
-	pub fn set_size(&mut self,size_u:f32,size_v:f32){
-		self.size_u=size_u;
-		self.size_v=size_v;
+impl std::hash::Hash for RobloxTextureTransform{
+	fn hash<H:std::hash::Hasher>(&self,state:&mut H) {
+		self.offset_u.to_ne_bytes().hash(state);
+		self.offset_v.to_ne_bytes().hash(state);
+		self.scale_u.to_ne_bytes().hash(state);
+		self.scale_v.to_ne_bytes().hash(state);
	}
 }
-impl core::hash::Hash for RobloxTextureTransform{
-	fn hash<H:core::hash::Hasher>(&self,state:&mut H){
-		self.to_bits().hash(state);
-	}
-}
-#[derive(Clone,Copy,Hash,Eq,PartialEq)]
-pub struct RobloxFaceTextureDescriptionBits{
+#[derive(Clone,PartialEq)]
+struct RobloxFaceTextureDescription{
	render:RenderConfigId,
-	color:[u32;4],
-	transform:RobloxTextureTransformBits,
+	color:glam::Vec4,
+	transform:RobloxTextureTransform,
 }
-#[derive(Clone,Copy)]
-pub struct RobloxFaceTextureDescription{
-	pub render:RenderConfigId,
-	pub color:glam::Vec4,
-	pub transform:RobloxTextureTransform,
-}
-impl core::cmp::PartialEq for RobloxFaceTextureDescription{
-	fn eq(&self,other:&Self)->bool{
-		self.to_bits().eq(&other.to_bits())
-	}
-}
-impl core::cmp::Eq for RobloxFaceTextureDescription{}
-impl core::hash::Hash for RobloxFaceTextureDescription{
-	fn hash<H:core::hash::Hasher>(&self,state:&mut H){
-		self.to_bits().hash(state);
-	}
+impl std::cmp::Eq for RobloxFaceTextureDescription{}//????
+impl std::hash::Hash for RobloxFaceTextureDescription{
+	fn hash<H:std::hash::Hasher>(&self,state:&mut H){
+		self.render.hash(state);
+		self.transform.hash(state);
+		for &el in self.color.as_ref().iter(){
+			el.to_ne_bytes().hash(state);
+		}
+	}
 }
 impl RobloxFaceTextureDescription{
-	pub fn to_bits(self)->RobloxFaceTextureDescriptionBits{
-		RobloxFaceTextureDescriptionBits{
-			render:self.render,
-			color:self.color.to_array().map(f32::to_bits),
-			transform:self.transform.to_bits(),
-		}
-	}
-	pub fn to_face_description(&self)->primitives::FaceDescription{
+	fn to_face_description(&self)->primitives::FaceDescription{
		primitives::FaceDescription{
			render:self.render,
-			transform:self.transform.affine(),
+			transform:glam::Affine2::from_translation(
+				glam::vec2(self.transform.offset_u,self.transform.offset_v)
+			)
+			*glam::Affine2::from_scale(
+				glam::vec2(self.transform.scale_u,self.transform.scale_v)
+			),
			color:self.color,
		}
	}
 }
-pub type RobloxPartDescription=[Option<RobloxFaceTextureDescription>;6];
+type RobloxPartDescription=[Option<RobloxFaceTextureDescription>;6];
 type RobloxWedgeDescription=[Option<RobloxFaceTextureDescription>;5];
 type RobloxCornerWedgeDescription=[Option<RobloxFaceTextureDescription>;5];
 #[derive(Clone,Eq,Hash,PartialEq)]
@@ -483,10 +437,10 @@ fn get_texture_description<'a>(
	let (roblox_texture_color,roblox_texture_transform)=if decal.class=="Texture"{
		//generate tranform
		if let (
-			Some(&rbx_dom_weak::types::Variant::Float32(offset_studs_u)),
-			Some(&rbx_dom_weak::types::Variant::Float32(offset_studs_v)),
-			Some(&rbx_dom_weak::types::Variant::Float32(studs_per_tile_u)),
-			Some(&rbx_dom_weak::types::Variant::Float32(studs_per_tile_v)),
+			Some(rbx_dom_weak::types::Variant::Float32(ox)),
+			Some(rbx_dom_weak::types::Variant::Float32(oy)),
+			Some(rbx_dom_weak::types::Variant::Float32(sx)),
+			Some(rbx_dom_weak::types::Variant::Float32(sy)),
		) = (
			decal.properties.get("OffsetStudsU"),
			decal.properties.get("OffsetStudsV"),
@@ -506,19 +460,15 @@
			(
				glam::vec4(decal_color3.r,decal_color3.g,decal_color3.b,1.0-*decal_transparency),
				RobloxTextureTransform{
-					offset_studs_u,
-					offset_studs_v,
-					studs_per_tile_u,
-					studs_per_tile_v,
-					size_u,
-					size_v,
+					offset_u:*ox/(*sx),offset_v:*oy/(*sy),
+					scale_u:size_u/(*sx),scale_v:size_v/(*sy),
				}
			)
		}else{
-			(glam::Vec4::ONE,RobloxTextureTransform::identity())
+			(glam::Vec4::ONE,RobloxTextureTransform::default())
		}
	}else{
-		(glam::Vec4::ONE,RobloxTextureTransform::identity())
+		(glam::Vec4::ONE,RobloxTextureTransform::default())
	};
	part_texture_description[normal_id as usize]=Some(RobloxFaceTextureDescription{
		render:render_id,
@@ -603,7 +553,7 @@ pub fn convert<'a>(
			object.properties.get("CanCollide"),
		)
		{
-			let model_transform=planar64_affine3_from_roblox(cf,size);
+			let mut model_transform=planar64_affine3_from_roblox(cf,size);
 
			if model_transform.matrix3.det().is_zero(){
				let mut parent_ref=object.parent();
@@ -662,7 +612,7 @@
				//use front face texture first and use top face texture as a fallback
				primitives::Primitives::Wedge=>RobloxBasePartDescription::Wedge([
					f0,//Cube::Right->Wedge::Right
-					f5.or(f1),//Cube::Front|Cube::Top->Wedge::TopFront
+					if f5.is_some(){f5}else{f1},//Cube::Front|Cube::Top->Wedge::TopFront
					f2,//Cube::Back->Wedge::Back
					f3,//Cube::Left->Wedge::Left
					f4,//Cube::Bottom->Wedge::Bottom
@@ -670,8 +620,8 @@
				//TODO: fix Left+Back texture coordinates to match roblox when not overwridden by Top
				primitives::Primitives::CornerWedge=>RobloxBasePartDescription::CornerWedge([
					f0,//Cube::Right->CornerWedge::Right
-					f2.or(f1.clone()),//Cube::Back|Cube::Top->CornerWedge::TopBack
-					f3.or(f1),//Cube::Left|Cube::Top->CornerWedge::TopLeft
+					if f2.is_some(){f2}else{f1.clone()},//Cube::Back|Cube::Top->CornerWedge::TopBack
+					if f3.is_some(){f3}else{f1},//Cube::Left|Cube::Top->CornerWedge::TopLeft
					f4,//Cube::Bottom->CornerWedge::Bottom
					f5,//Cube::Front->CornerWedge::Front
				]),
@@ -765,6 +715,9 @@
					panic!("Mesh has no Mesh or Texture");
				},
				Shape::PhysicsData=>{
+					//The union mesh is sized already
+					model_transform=planar64_affine3_from_roblox(cf,&rbx_dom_weak::types::Vector3{x:2.0,y:2.0,z:2.0});
+
					let mut content="";
					let mut mesh_data:&[u8]=&[];
					let mut physics_data:&[u8]=&[];
@@ -778,7 +731,7 @@
						physics_data=data.as_ref();
					}
-					let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
|
let part_texture_description=get_texture_description(&mut temp_objects,render_config_deferred_loader,dom,object,size);
|
||||||
let mesh_index=MeshIndex::union(content,mesh_data,physics_data,size,part_texture_description.clone());
|
let mesh_index=MeshIndex::union(content,mesh_data,physics_data);
|
||||||
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
|
let mesh_id=mesh_deferred_loader.acquire_mesh_id(mesh_index);
|
||||||
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
|
(MeshAvailability::DeferredUnion(part_texture_description),mesh_id)
|
||||||
},
|
},
|
||||||
@ -816,55 +769,7 @@ pub fn convert<'a>(
|
|||||||
}
|
}
|
||||||
struct MeshWithAabb{
|
struct MeshWithAabb{
|
||||||
mesh:model::Mesh,
|
mesh:model::Mesh,
|
||||||
aabb:Aabb,
|
aabb:strafesnet_common::aabb::Aabb,
|
||||||
}
|
|
||||||
fn acquire_mesh_id_from_render_config_id<'a>(
|
|
||||||
primitive_meshes:&mut Vec<model::Mesh>,
|
|
||||||
mesh_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RenderConfigId,model::MeshId>>,
|
|
||||||
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
|
|
||||||
old_mesh_id:model::MeshId,
|
|
||||||
render:RenderConfigId,
|
|
||||||
)->Option<(model::MeshId,&'a Aabb)>{
|
|
||||||
//ignore meshes that fail to load completely for now
|
|
||||||
loaded_meshes.get(&old_mesh_id).map(|mesh_with_aabb|(
|
|
||||||
*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
|
|
||||||
.entry(render).or_insert_with(||{
|
|
||||||
let mesh_id=model::MeshId::new(primitive_meshes.len() as u32);
|
|
||||||
let mut mesh_clone=mesh_with_aabb.mesh.clone();
|
|
||||||
//set the render group lool
|
|
||||||
if let Some(graphics_group)=mesh_clone.graphics_groups.first_mut(){
|
|
||||||
graphics_group.render=render;
|
|
||||||
}
|
|
||||||
primitive_meshes.push(mesh_clone);
|
|
||||||
mesh_id
|
|
||||||
}),
|
|
||||||
&mesh_with_aabb.aabb,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
fn acquire_union_id_from_render_config_id<'a>(
|
|
||||||
primitive_meshes:&mut Vec<model::Mesh>,
|
|
||||||
union_id_from_render_config_id:&mut HashMap<model::MeshId,HashMap<RobloxPartDescription,model::MeshId>>,
|
|
||||||
loaded_meshes:&'a HashMap<model::MeshId,MeshWithAabb>,
|
|
||||||
old_union_id:model::MeshId,
|
|
||||||
part_texture_description:RobloxPartDescription,
|
|
||||||
)->Option<(model::MeshId,&'a Aabb)>{
|
|
||||||
//ignore uniones that fail to load completely for now
|
|
||||||
loaded_meshes.get(&old_union_id).map(|union_with_aabb|(
|
|
||||||
*union_id_from_render_config_id.entry(old_union_id).or_insert_with(||HashMap::new())
|
|
||||||
.entry(part_texture_description.clone()).or_insert_with(||{
|
|
||||||
let union_id=model::MeshId::new(primitive_meshes.len() as u32);
|
|
||||||
let mut union_clone=union_with_aabb.mesh.clone();
|
|
||||||
//set the render groups
|
|
||||||
for (graphics_group,maybe_face_texture_description) in union_clone.graphics_groups.iter_mut().zip(part_texture_description){
|
|
||||||
if let Some(face_texture_description)=maybe_face_texture_description{
|
|
||||||
graphics_group.render=face_texture_description.render;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
primitive_meshes.push(union_clone);
|
|
||||||
union_id
|
|
||||||
}),
|
|
||||||
&union_with_aabb.aabb,
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
pub struct PartialMap1<'a>{
|
pub struct PartialMap1<'a>{
|
||||||
primitive_meshes:Vec<model::Mesh>,
|
primitive_meshes:Vec<model::Mesh>,
|
||||||
@ -899,21 +804,30 @@ impl PartialMap1<'_>{
|
|||||||
})
|
})
|
||||||
}).collect();
|
}).collect();
|
||||||
|
|
||||||
// SAFETY: I have no idea what I'm doing and this is definitely unsound in some subtle way
|
|
||||||
// I just want to chain iterators together man
|
|
||||||
let aint_no_way=core::cell::UnsafeCell::new(&mut self.primitive_meshes);
|
|
||||||
|
|
||||||
let mut mesh_id_from_render_config_id=HashMap::new();
|
let mut mesh_id_from_render_config_id=HashMap::new();
|
||||||
let mut union_id_from_render_config_id=HashMap::new();
|
//ignore meshes that fail to load completely for now
|
||||||
|
let mut acquire_mesh_id_from_render_config_id=|old_mesh_id,render|{
|
||||||
|
loaded_meshes.get(&old_mesh_id).map(|mesh_with_aabb|(
|
||||||
|
*mesh_id_from_render_config_id.entry(old_mesh_id).or_insert_with(||HashMap::new())
|
||||||
|
.entry(render).or_insert_with(||{
|
||||||
|
let mesh_id=model::MeshId::new(self.primitive_meshes.len() as u32);
|
||||||
|
let mut mesh_clone=mesh_with_aabb.mesh.clone();
|
||||||
|
//set the render group lool
|
||||||
|
if let Some(graphics_group)=mesh_clone.graphics_groups.first_mut(){
|
||||||
|
graphics_group.render=render;
|
||||||
|
}
|
||||||
|
self.primitive_meshes.push(mesh_clone);
|
||||||
|
mesh_id
|
||||||
|
}),
|
||||||
|
&mesh_with_aabb.aabb,
|
||||||
|
))
|
||||||
|
};
|
||||||
//now that the meshes are loaded, these models can be generated
|
//now that the meshes are loaded, these models can be generated
|
||||||
let models_owned_attributes:Vec<ModelOwnedAttributes>=
|
let models_owned_attributes:Vec<ModelOwnedAttributes>=
|
||||||
self.deferred_models_deferred_attributes.into_iter().flat_map(|deferred_model_deferred_attributes|{
|
self.deferred_models_deferred_attributes.into_iter().flat_map(|deferred_model_deferred_attributes|{
|
||||||
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
|
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
|
||||||
//insert into primitive_meshes
|
//insert into primitive_meshes
|
||||||
let (mesh,aabb)=acquire_mesh_id_from_render_config_id(
|
let (mesh,aabb)=acquire_mesh_id_from_render_config_id(
|
||||||
unsafe{*aint_no_way.get()},
|
|
||||||
&mut mesh_id_from_render_config_id,
|
|
||||||
&loaded_meshes,
|
|
||||||
deferred_model_deferred_attributes.model.mesh,
|
deferred_model_deferred_attributes.model.mesh,
|
||||||
deferred_model_deferred_attributes.render
|
deferred_model_deferred_attributes.render
|
||||||
)?;
|
)?;
|
||||||
@ -931,32 +845,7 @@ impl PartialMap1<'_>{
|
|||||||
deferred_model_deferred_attributes.model.transform.translation
|
deferred_model_deferred_attributes.model.transform.translation
|
||||||
),
|
),
|
||||||
})
|
})
|
||||||
}).chain(self.deferred_unions_deferred_attributes.into_iter().flat_map(|deferred_union_deferred_attributes|{
|
}).chain(self.primitive_models_deferred_attributes.into_iter())
|
||||||
//meshes need to be cloned from loaded_meshes with a new id when they are used with a new render_id
|
|
||||||
//insert into primitive_meshes
|
|
||||||
let (mesh,aabb)=acquire_union_id_from_render_config_id(
|
|
||||||
unsafe{*aint_no_way.get()},
|
|
||||||
&mut union_id_from_render_config_id,
|
|
||||||
&loaded_meshes,
|
|
||||||
deferred_union_deferred_attributes.model.mesh,
|
|
||||||
deferred_union_deferred_attributes.render
|
|
||||||
)?;
|
|
||||||
let size=aabb.size();
|
|
||||||
Some(ModelDeferredAttributes{
|
|
||||||
mesh,
|
|
||||||
deferred_attributes:deferred_union_deferred_attributes.model.deferred_attributes,
|
|
||||||
color:deferred_union_deferred_attributes.model.color,
|
|
||||||
transform:Planar64Affine3::new(
|
|
||||||
Planar64Mat3::from_cols([
|
|
||||||
(deferred_union_deferred_attributes.model.transform.matrix3.x_axis*2/size.x).divide().fix_1(),
|
|
||||||
(deferred_union_deferred_attributes.model.transform.matrix3.y_axis*2/size.y).divide().fix_1(),
|
|
||||||
(deferred_union_deferred_attributes.model.transform.matrix3.z_axis*2/size.z).divide().fix_1()
|
|
||||||
]),
|
|
||||||
deferred_union_deferred_attributes.model.transform.translation
|
|
||||||
),
|
|
||||||
})
|
|
||||||
}))
|
|
||||||
.chain(self.primitive_models_deferred_attributes.into_iter())
|
|
||||||
.enumerate().map(|(model_id,model_deferred_attributes)|{
|
.enumerate().map(|(model_id,model_deferred_attributes)|{
|
||||||
let model_id=model::ModelId::new(model_id as u32);
|
let model_id=model::ModelId::new(model_id as u32);
|
||||||
ModelOwnedAttributes{
|
ModelOwnedAttributes{
|
||||||
|
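A minimal sketch (not part of the diff) of the arithmetic introduced above: the Texture's OffsetStudsU/V and StudsPerTileU/V are normalized into a per-face UV offset and scale, and to_face_description composes them as a translation times a scale. Field and parameter names follow the RobloxTextureTransform shown in this diff; the free function is illustrative only.

```rust
// Illustrative only: how the studs-based Texture properties collapse into the
// normalized UV affine used by to_face_description in the diff above.
// size_u/size_v are the face dimensions in studs.
fn face_uv_transform(
	offset_studs_u:f32,offset_studs_v:f32,
	studs_per_tile_u:f32,studs_per_tile_v:f32,
	size_u:f32,size_v:f32,
)->glam::Affine2{
	let offset=glam::vec2(offset_studs_u/studs_per_tile_u,offset_studs_v/studs_per_tile_v);
	let scale=glam::vec2(size_u/studs_per_tile_u,size_v/studs_per_tile_v);
	// translation applied after scaling, matching from_translation(..)*from_scale(..)
	glam::Affine2::from_translation(offset)*glam::Affine2::from_scale(scale)
}
```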
@@ -1,11 +1,13 @@
-use rbx_mesh::mesh_data::NormalId2 as MeshDataNormalId2;
-use strafesnet_common::model::{self,IndexedVertex,PolygonGroup,PolygonGroupId,PolygonList,RenderConfigId};
+use std::collections::HashMap;
+use strafesnet_common::model::{self, ColorId, IndexedVertex, NormalId, PolygonGroup, PolygonGroupId, PolygonList, PositionId, RenderConfigId, TextureCoordinateId, VertexId};
 use strafesnet_common::integer::vec3;

 #[allow(dead_code)]
 #[derive(Debug)]
 pub enum Error{
 	Block,
+	NotSupposedToHappen,
 	MissingVertexId(u32),
 	Planar64Vec3(strafesnet_common::integer::Planar64TryFromFloatError),
 	RobloxPhysicsData(rbx_mesh::physics_data::Error),
@@ -17,161 +19,125 @@ impl std::fmt::Display for Error{
 	}
 }

-// wacky state machine to make sure all vertices in a face agree upon what NormalId to use.
-// Roblox duplicates this information per vertex when it should only exist per-face.
-enum MeshDataNormalStatus{
-	Agree(MeshDataNormalId2),
-	Conflicting,
-}
-struct MeshDataNormalChecker{
-	status:Option<MeshDataNormalStatus>,
-}
-impl MeshDataNormalChecker{
-	fn new()->Self{
-		Self{status:None}
-	}
-	fn check(&mut self,normal:MeshDataNormalId2){
-		self.status=match self.status.take(){
-			None=>Some(MeshDataNormalStatus::Agree(normal)),
-			Some(MeshDataNormalStatus::Agree(old_normal))=>{
-				if old_normal==normal{
-					Some(MeshDataNormalStatus::Agree(old_normal))
-				}else{
-					Some(MeshDataNormalStatus::Conflicting)
-				}
-			},
-			Some(MeshDataNormalStatus::Conflicting)=>Some(MeshDataNormalStatus::Conflicting),
-		};
-	}
-	fn into_agreed_normal(self)->Option<MeshDataNormalId2>{
-		self.status.and_then(|status|match status{
-			MeshDataNormalStatus::Agree(normal)=>Some(normal),
-			MeshDataNormalStatus::Conflicting=>None,
-		})
-	}
-}
-
 impl std::error::Error for Error{}
-pub fn convert(
-	roblox_physics_data:&[u8],
-	roblox_mesh_data:&[u8],
-	size:glam::Vec3,
-	part_texture_description:crate::rbx::RobloxPartDescription,
-)->Result<model::Mesh,Error>{
-	const NORMAL_FACES:usize=6;
-	let mut polygon_groups_normal_id=vec![Vec::new();NORMAL_FACES];
-
-	// build graphics and physics meshes
-	let mut mb=strafesnet_common::model::MeshBuilder::new();
-	// graphics
-	let graphics_groups=if !roblox_mesh_data.is_empty(){
-		// create per-face texture coordinate affine transforms
-		let cube_face_description=part_texture_description.map(|opt|opt.map(|mut t|{
-			t.transform.set_size(1.0,1.0);
-			t.to_face_description()
-		}));
-
-		let mesh_data=rbx_mesh::read_mesh_data_versioned(
-			std::io::Cursor::new(roblox_mesh_data)
-		).map_err(Error::RobloxMeshData)?;
-		let graphics_mesh=match mesh_data{
-			rbx_mesh::mesh_data::MeshData::CSGK(_)=>return Err(Error::Block),
-			rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL2(mesh_data2))=>mesh_data2.mesh,
-			rbx_mesh::mesh_data::MeshData::CSGMDL(rbx_mesh::mesh_data::CSGMDL::CSGMDL4(mesh_data4))=>mesh_data4.mesh,
-		};
-		for [vertex_id0,vertex_id1,vertex_id2] in graphics_mesh.faces{
-			let face=[
-				graphics_mesh.vertices.get(vertex_id0.0 as usize).ok_or(Error::MissingVertexId(vertex_id0.0))?,
-				graphics_mesh.vertices.get(vertex_id1.0 as usize).ok_or(Error::MissingVertexId(vertex_id1.0))?,
-				graphics_mesh.vertices.get(vertex_id2.0 as usize).ok_or(Error::MissingVertexId(vertex_id2.0))?,
-			];
-			let mut normal_agreement_checker=MeshDataNormalChecker::new();
-			let face=face.into_iter().map(|vertex|{
-				normal_agreement_checker.check(vertex.normal_id);
-				let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex.pos)?);
-				let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex.norm)?);
-				let tex_coord=glam::Vec2::from_array(vertex.tex);
-				let maybe_face_description=&cube_face_description[vertex.normal_id as usize-1];
-				let (tex,color)=match maybe_face_description{
-					Some(face_description)=>{
-						// transform texture coordinates and set decal color
-						let tex=mb.acquire_tex_id(face_description.transform.transform_point2(tex_coord));
-						let color=mb.acquire_color_id(face_description.color);
-						(tex,color)
-					},
-					None=>{
-						// texture coordinates don't matter and pass through mesh vertex color
-						let tex=mb.acquire_tex_id(tex_coord);
-						let color=mb.acquire_color_id(glam::Vec4::from_array(vertex.color.map(|f|f as f32/255.0f32)));
-						(tex,color)
-					},
-				};
-				Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
-			}).collect::<Result<Vec<_>,_>>().map_err(Error::Planar64Vec3)?;
-			if let Some(normal_id)=normal_agreement_checker.into_agreed_normal(){
-				polygon_groups_normal_id[normal_id as usize-1].push(face);
-			}else{
-				panic!("Empty face!");
-			}
-		}
-		(0..NORMAL_FACES).map(|polygon_group_id|{
-			model::IndexedGraphicsGroup{
-				render:cube_face_description[polygon_group_id].as_ref().map_or(RenderConfigId::new(0),|face_description|face_description.render),
-				groups:vec![PolygonGroupId::new(polygon_group_id as u32)]
-			}
-		}).collect()
-	}else{
-		Vec::new()
+pub fn convert(roblox_physics_data:&[u8],roblox_mesh_data:&[u8])->Result<model::Mesh,Error>{
+	match (roblox_physics_data,roblox_mesh_data){
+		(b"",b"")=>return Err(Error::Block),
+		(b"",_)
+		|(_,b"")=>return Err(Error::NotSupposedToHappen),
+		_=>(),
+	}
+	// graphical
+	let mesh_data=rbx_mesh::read_mesh_data_versioned(
+		std::io::Cursor::new(roblox_mesh_data)
+	).map_err(Error::RobloxMeshData)?;
+	let graphics_mesh=match mesh_data{
+		rbx_mesh::mesh_data::CSGPHS::CSGK(_)=>return Err(Error::NotSupposedToHappen),
+		rbx_mesh::mesh_data::CSGPHS::CSGPHS2(mesh_data2)=>mesh_data2.mesh,
+		rbx_mesh::mesh_data::CSGPHS::CSGPHS4(mesh_data4)=>mesh_data4.mesh,
 	};

-	//physics
-	let physics_convex_meshes=if !roblox_physics_data.is_empty(){
-		let physics_data=rbx_mesh::read_physics_data_versioned(
-			std::io::Cursor::new(roblox_physics_data)
-		).map_err(Error::RobloxPhysicsData)?;
-		let physics_convex_meshes=match physics_data{
-			rbx_mesh::physics_data::PhysicsData::CSGK(_)
-			// have not seen this format in practice
-			|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Block)
-			=>return Err(Error::Block),
-			rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Meshes(meshes))
-			=>meshes.meshes,
-			rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::PhysicsInfoMesh(pim))
-			=>vec![pim.mesh],
-		};
-		physics_convex_meshes
-	}else{
-		Vec::new()
+	// physical
+	let physics_data=rbx_mesh::read_physics_data(
+		std::io::Cursor::new(roblox_physics_data)
+	).map_err(Error::RobloxPhysicsData)?;
+	let physics_convex_meshes=match physics_data{
+		rbx_mesh::physics_data::PhysicsData::CSGK(_)
+		// have not seen this format in practice
+		|rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Block)
+		=>return Err(Error::NotSupposedToHappen),
+		rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::Meshes(meshes))
+		=>meshes.meshes,
+		rbx_mesh::physics_data::PhysicsData::CSGPHS(rbx_mesh::physics_data::CSGPHS::PhysicsInfoMesh(pim))
+		=>vec![pim.mesh],
 	};
-	let polygon_groups:Vec<PolygonGroup>=polygon_groups_normal_id.into_iter().map(|faces|
-		// graphics polygon groups (to be rendered)
-		Ok(PolygonGroup::PolygonList(PolygonList::new(faces)))
-	).chain(physics_convex_meshes.into_iter().map(|mesh|{
-		// this can be factored out of the loop but I am lazy
-		let color=mb.acquire_color_id(glam::Vec4::ONE);
-		let tex=mb.acquire_tex_id(glam::Vec2::ZERO);
-		// physics polygon groups (to do physics)
+	let mut unique_pos=Vec::new();
+	let mut pos_id_from=HashMap::new();
+	let mut unique_tex=Vec::new();
+	let mut tex_id_from=HashMap::new();
+	let mut unique_normal=Vec::new();
+	let mut normal_id_from=HashMap::new();
+	let mut unique_color=Vec::new();
+	let mut color_id_from=HashMap::new();
+	let mut unique_vertices=Vec::new();
+	let mut vertex_id_from=HashMap::new();
+	let mut acquire_pos_id=|pos|{
+		let p=vec3::try_from_f32_array(pos).map_err(Error::Planar64Vec3)?;
+		Ok(*pos_id_from.entry(p).or_insert_with(||{
+			let pos_id=PositionId::new(unique_pos.len() as u32);
+			unique_pos.push(p);
+			pos_id
+		}))
+	};
+	let mut acquire_tex_id=|tex|{
+		let h=bytemuck::cast::<[f32;2],[u32;2]>(tex);
+		*tex_id_from.entry(h).or_insert_with(||{
+			let tex_id=TextureCoordinateId::new(unique_tex.len() as u32);
+			unique_tex.push(glam::Vec2::from_array(tex));
+			tex_id
+		})
+	};
+	let mut acquire_normal_id=|normal|{
+		let n=vec3::try_from_f32_array(normal).map_err(Error::Planar64Vec3)?;
+		Ok(*normal_id_from.entry(n).or_insert_with(||{
+			let normal_id=NormalId::new(unique_normal.len() as u32);
+			unique_normal.push(n);
+			normal_id
+		}))
+	};
+	let mut acquire_color_id=|color|{
+		let h=bytemuck::cast::<[f32;4],[u32;4]>(color);
+		*color_id_from.entry(h).or_insert_with(||{
+			let color_id=ColorId::new(unique_color.len() as u32);
+			unique_color.push(glam::Vec4::from_array(color));
+			color_id
+		})
+	};
+	let mut acquire_vertex_id=|vertex:IndexedVertex|{
+		*vertex_id_from.entry(vertex.clone()).or_insert_with(||{
+			let vertex_id=VertexId::new(unique_vertices.len() as u32);
+			unique_vertices.push(vertex);
+			vertex_id
+		})
+	};
+	let color=acquire_color_id([1.0f32;4]);
+	let tex=acquire_tex_id([0.0f32;2]);
+	let polygon_groups:Vec<PolygonGroup>=physics_convex_meshes.into_iter().map(|mesh|{
 		Ok(PolygonGroup::PolygonList(PolygonList::new(mesh.faces.into_iter().map(|[vertex_id0,vertex_id1,vertex_id2]|{
-			let face=[
-				mesh.vertices.get(vertex_id0.0 as usize).ok_or(Error::MissingVertexId(vertex_id0.0))?,
-				mesh.vertices.get(vertex_id1.0 as usize).ok_or(Error::MissingVertexId(vertex_id1.0))?,
-				mesh.vertices.get(vertex_id2.0 as usize).ok_or(Error::MissingVertexId(vertex_id2.0))?,
-			].map(|v|glam::Vec3::from_slice(v)/size);
-			let vertex_norm=(face[1]-face[0])
-				.cross(face[2]-face[0]);
-			let normal=mb.acquire_normal_id(vec3::try_from_f32_array(vertex_norm.to_array()).map_err(Error::Planar64Vec3)?);
-			face.into_iter().map(|vertex_pos|{
-				let pos=mb.acquire_pos_id(vec3::try_from_f32_array(vertex_pos.to_array()).map_err(Error::Planar64Vec3)?);
-				Ok(mb.acquire_vertex_id(IndexedVertex{pos,tex,normal,color}))
-			}).collect()
+			let v0=mesh.vertices.get(vertex_id0.0 as usize).ok_or(Error::MissingVertexId(vertex_id0.0))?;
+			let v1=mesh.vertices.get(vertex_id1.0 as usize).ok_or(Error::MissingVertexId(vertex_id1.0))?;
+			let v2=mesh.vertices.get(vertex_id2.0 as usize).ok_or(Error::MissingVertexId(vertex_id2.0))?;
+			let vertex_norm=(glam::Vec3::from_slice(v1)-glam::Vec3::from_slice(v0))
+				.cross(glam::Vec3::from_slice(v2)-glam::Vec3::from_slice(v0)).to_array();
+			let mut ingest_vertex_id=|&vertex_pos:&[f32;3]|Ok(acquire_vertex_id(IndexedVertex{
+				pos:acquire_pos_id(vertex_pos)?,
+				tex,
+				normal:acquire_normal_id(vertex_norm)?,
+				color,
+			}));
+			Ok(vec![
+				ingest_vertex_id(v0)?,
+				ingest_vertex_id(v1)?,
+				ingest_vertex_id(v2)?,
+			])
 		}).collect::<Result<_,_>>()?)))
-	})).collect::<Result<_,_>>()?;
-	let physics_groups=(NORMAL_FACES..polygon_groups.len()).map(|id|model::IndexedPhysicsGroup{
+	}).collect::<Result<_,_>>()?;
+	let graphics_groups=vec![model::IndexedGraphicsGroup{
+		render:RenderConfigId::new(0),
+		groups:(0..polygon_groups.len()).map(|id|PolygonGroupId::new(id as u32)).collect()
+	}];
+	let physics_groups=(0..polygon_groups.len()).map(|id|model::IndexedPhysicsGroup{
 		groups:vec![PolygonGroupId::new(id as u32)]
 	}).collect();
-	Ok(mb.build(
+	Ok(model::Mesh{
+		unique_pos,
+		unique_normal,
+		unique_tex,
+		unique_color,
+		unique_vertices,
 		polygon_groups,
 		graphics_groups,
 		physics_groups,
-	))
+	})
 }
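The replacement convert above interns positions, normals, texture coordinates, colors, and vertices by pairing a Vec of unique values with a HashMap from value to index (float keys are first bit-cast to integer arrays with bytemuck so they can be hashed). A stripped-down sketch of that acquire-id pattern, using assumed generic types rather than the crate's id newtypes:

```rust
use std::collections::HashMap;

// Minimal sketch of the "acquire id" interning pattern used above:
// each distinct value is pushed once; later occurrences reuse its index.
fn acquire_id<T:Clone+Eq+std::hash::Hash>(
	unique:&mut Vec<T>,
	id_from:&mut HashMap<T,u32>,
	value:T,
)->u32{
	*id_from.entry(value.clone()).or_insert_with(||{
		let id=unique.len() as u32;
		unique.push(value);
		id
	})
}

fn main(){
	let mut unique=Vec::new();
	let mut id_from=HashMap::new();
	assert_eq!(acquire_id(&mut unique,&mut id_from,[0u32,1,2]),0);
	assert_eq!(acquire_id(&mut unique,&mut id_from,[0u32,1,2]),0);// deduplicated
	assert_eq!(unique.len(),1);
}
```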
@@ -2,10 +2,6 @@
 name = "rbxassetid"
 version = "0.1.0"
 edition = "2021"
-repository = "https://git.itzana.me/StrafesNET/strafe-project"
-license = "MIT OR Apache-2.0"
-description = "Parse Roblox asset id from 'Content' urls."
-authors = ["Rhys Lloyd <krakow20@gmail.com>"]

 [dependencies]
 url = "2.5.4"
@@ -1,176 +0,0 @@
@@ -1,23 +0,0 @@
@@ -1,26 +0,0 @@
-Roblox Asset Id
-===============
-
-## Example
-
-```rust
-use rbxassetid::RobloxAssetId;
-
-let content="rbxassetid://255299419";
-let RobloxAssetId(asset_id)=content.parse()?;
-```
-
-#### License
-
-<sup>
-Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
-2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
-</sup>
-
-<br>
-
-<sub>
-Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
-be dual licensed as above, without any additional terms or conditions.
-</sub>
@@ -33,9 +33,3 @@ impl std::str::FromStr for RobloxAssetId{
 		Ok(Self(parsed_asset_id.map_err(RobloxAssetIdParseErr::ParseInt)?))
 	}
 }
-
-#[test]
-fn test_rbxassetid(){
-	let content="rbxassetid://255299419";
-	let RobloxAssetId(_asset_id)=content.parse().unwrap();
-}
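A minimal usage sketch based on the removed README example and test above (assumes the rbxassetid crate as a dependency; the tuple-struct pattern and URL format are taken from the diff):

```rust
// Based on the removed README/test: parse a Roblox content URL into its numeric asset id.
use rbxassetid::RobloxAssetId;

fn main(){
	let content="rbxassetid://255299419";
	// RobloxAssetId is a tuple struct wrapping the numeric id (used as u64 elsewhere in this diff).
	let RobloxAssetId(asset_id)=content.parse().unwrap();
	println!("{asset_id}");
}
```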
@@ -1,6 +1,6 @@
 [package]
 name = "strafesnet_snf"
-version = "0.3.0"
+version = "0.2.0"
 edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -8,4 +8,4 @@ edition = "2021"
 [dependencies]
 binrw = "0.14.0"
 id = { version = "0.1.0", registry = "strafesnet" }
-strafesnet_common = { version = "0.6.0", path = "../common", registry = "strafesnet" }
+strafesnet_common = { path = "../common", registry = "strafesnet" }
@@ -6,7 +6,7 @@ use strafesnet_common::physics::Time;

 const VERSION:u32=0;

-type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
+type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::TimeInner>;

 #[derive(Debug)]
 pub enum Error{
@@ -6,7 +6,7 @@ use crate::file::BlockId;
 use binrw::{binrw,BinReaderExt,BinWriterExt};
 use strafesnet_common::model;
 use strafesnet_common::aabb::Aabb;
-use strafesnet_common::bvh::{BvhNode,RecursiveContent};
+use strafesnet_common::bvh::BvhNode;
 use strafesnet_common::gameplay_modes;

 #[derive(Debug)]
@@ -233,7 +233,7 @@ impl<R:BinReaderExt> StreamableMap<R>{
 	}
 	pub fn get_intersecting_region_block_ids(&self,aabb:&Aabb)->Vec<BlockId>{
 		let mut block_ids=Vec::new();
-		self.bvh.sample_aabb(aabb,&mut |&block_id|block_ids.push(block_id));
+		self.bvh.the_tester(aabb,&mut |&block_id|block_ids.push(block_id));
 		block_ids
 	}
 	pub fn load_region(&mut self,block_id:BlockId)->Result<Vec<(model::ModelId,model::Model)>,Error>{
@@ -287,60 +287,12 @@ impl<R:BinReaderExt> StreamableMap<R>{
 	}
 }

-// silly redefinition of Bvh for determining the size of subnodes
-// without duplicating work by running weight calculation recursion top down on every node
-pub struct BvhWeightNode<W,T>{
-	content:RecursiveContent<BvhWeightNode<W,T>,T>,
-	weight:W,
-	aabb:Aabb,
-}
-impl <W,T> BvhWeightNode<W,T>{
-	pub const fn weight(&self)->&W{
-		&self.weight
-	}
-	pub const fn aabb(&self)->&Aabb{
-		&self.aabb
-	}
-	pub fn into_content(self)->RecursiveContent<BvhWeightNode<W,T>,T>{
-		self.content
-	}
-	pub fn into_visitor<F:FnMut(T)>(self,f:&mut F){
-		match self.content{
-			RecursiveContent::Leaf(model)=>f(model),
-			RecursiveContent::Branch(children)=>for child in children{
-				child.into_visitor(f)
-			},
-		}
-	}
-}
-
-pub fn weigh_contents<T,W:Copy+std::iter::Sum<W>,F:Fn(&T)->W>(node:BvhNode<T>,f:&F)->BvhWeightNode<W,T>{
-	let (content,aabb)=node.into_inner();
-	match content{
-		RecursiveContent::Leaf(model)=>BvhWeightNode{
-			weight:f(&model),
-			content:RecursiveContent::Leaf(model),
-			aabb,
-		},
-		RecursiveContent::Branch(children)=>{
-			let branch:Vec<BvhWeightNode<W,T>>=children.into_iter().map(|child|
-				weigh_contents(child,f)
-			).collect();
-			BvhWeightNode{
-				weight:branch.iter().map(|node|node.weight).sum(),
-				content:RecursiveContent::Branch(branch),
-				aabb,
-			}
-		},
-	}
-}
-
 const BVH_NODE_MAX_WEIGHT:usize=64*1024;//64 kB
 fn collect_spacial_blocks(
 	block_location:&mut Vec<u64>,
 	block_headers:&mut Vec<SpacialBlockHeader>,
 	sequential_block_data:&mut std::io::Cursor<&mut Vec<u8>>,
-	bvh_node:BvhWeightNode<usize,(model::ModelId,newtypes::model::Model)>
+	bvh_node:strafesnet_common::bvh::BvhWeightNode<usize,(model::ModelId,newtypes::model::Model)>
 )->Result<(),Error>{
 	//inspect the node weights top-down.
 	//When a node weighs less than the limit,
@@ -390,7 +342,7 @@ pub fn write_map<W:BinWriterExt>(mut writer:W,map:strafesnet_common::map::Comple
 		}
 		Ok(((model::ModelId::new(model_id as u32),model.into()),aabb))
 	}).collect::<Result<Vec<_>,_>>()?;
-	let bvh=weigh_contents(strafesnet_common::bvh::generate_bvh(boxen),&|_|std::mem::size_of::<newtypes::model::Model>());
+	let bvh=strafesnet_common::bvh::generate_bvh(boxen).weigh_contents(&|_|std::mem::size_of::<newtypes::model::Model>());
 	//build blocks
 	//block location is initialized with two values
 	//the first value represents the location of the first byte after the file header
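The BvhWeightNode/weigh_contents pair shown above computes every subtree's total weight bottom-up so that collect_spacial_blocks can later walk top-down and emit a block as soon as a subtree fits under BVH_NODE_MAX_WEIGHT, without recomputing sums per node. A self-contained sketch of that bottom-up fold, using a simplified stand-in tree type rather than the strafesnet_common API:

```rust
// Standalone sketch of the bottom-up weighing idea behind weigh_contents:
// annotate every subtree with its total weight in one pass so a later
// top-down walk can cut off blocks once a subtree fits under a size limit.
enum Tree<T>{
	Leaf(T),
	Branch(Vec<Tree<T>>),
}
struct Weighed<T>{
	weight:usize,
	node:WeighedNode<T>,
}
enum WeighedNode<T>{
	Leaf(T),
	Branch(Vec<Weighed<T>>),
}
fn weigh<T>(tree:Tree<T>,weight_of:&impl Fn(&T)->usize)->Weighed<T>{
	match tree{
		Tree::Leaf(item)=>Weighed{weight:weight_of(&item),node:WeighedNode::Leaf(item)},
		Tree::Branch(children)=>{
			let children:Vec<_>=children.into_iter().map(|child|weigh(child,weight_of)).collect();
			Weighed{
				// a branch weighs the sum of its already-weighed children
				weight:children.iter().map(|child|child.weight).sum(),
				node:WeighedNode::Branch(children),
			}
		},
	}
}
```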
@@ -1,7 +1,7 @@
 use super::integer::Time;
 use super::common::{bool_from_u8,bool_into_u8};

-type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::Time>;
+type TimedPhysicsInstruction=strafesnet_common::instruction::TimedInstruction<strafesnet_common::physics::Instruction,strafesnet_common::physics::TimeInner>;

 #[binrw::binrw]
 #[brw(little)]
@@ -1,37 +0,0 @@
-[package]
-name = "map-tool"
-version = "1.7.0"
-edition = "2021"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-anyhow = "1.0.75"
-clap = { version = "4.4.2", features = ["derive"] }
-flate2 = "1.0.27"
-futures = "0.3.31"
-image = "0.25.2"
-image_dds = "0.7.1"
-lazy-regex = "3.1.0"
-rbx_asset = { version = "0.2.5", registry = "strafesnet" }
-rbx_binary = { version = "0.7.4", registry = "strafesnet" }
-rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
-rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
-rbx_xml = { version = "0.13.3", registry = "strafesnet" }
-rbxassetid = { version = "0.1.0", registry = "strafesnet" }
-strafesnet_bsp_loader = { version = "0.3.0", path = "../lib/bsp_loader", registry = "strafesnet" }
-strafesnet_deferred_loader = { version = "0.5.0", path = "../lib/deferred_loader", registry = "strafesnet" }
-strafesnet_rbx_loader = { version = "0.6.0", path = "../lib/rbx_loader", registry = "strafesnet" }
-strafesnet_snf = { version = "0.3.0", path = "../lib/snf", registry = "strafesnet" }
-thiserror = "2.0.11"
-tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
-vbsp = { version = "0.7.0-codegen1", registry = "strafesnet" }
-vmdl = "0.2.0"
-vmt-parser = "0.2.0"
-vpk = "0.2.0"
-vtf = "0.3.0"
-
-#[profile.release]
-#lto = true
-#strip = true
-#codegen-units = 1
@@ -1,23 +0,0 @@
@@ -1,2 +0,0 @@
-# map-tool
-
@@ -1,30 +0,0 @@
-mod roblox;
-mod source;
-
-use clap::{Parser,Subcommand};
-use anyhow::Result as AResult;
-
-#[derive(Parser)]
-#[command(author, version, about, long_about = None)]
-#[command(propagate_version = true)]
-struct Cli {
-	#[command(subcommand)]
-	command: Commands,
-}
-
-#[derive(Subcommand)]
-enum Commands{
-	#[command(flatten)]
-	Roblox(roblox::Commands),
-	#[command(flatten)]
-	Source(source::Commands),
-}
-
-#[tokio::main]
-async fn main()->AResult<()>{
-	let cli=Cli::parse();
-	match cli.command{
-		Commands::Roblox(commands)=>commands.run().await,
-		Commands::Source(commands)=>commands.run().await,
-	}
-}
@ -1,431 +0,0 @@
|
|||||||
use std::path::{Path,PathBuf};
|
|
||||||
use std::io::{Cursor,Read,Seek};
|
|
||||||
use std::collections::HashSet;
|
|
||||||
use clap::{Args,Subcommand};
|
|
||||||
use anyhow::Result as AResult;
|
|
||||||
use rbx_dom_weak::Instance;
|
|
||||||
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
|
|
||||||
use rbxassetid::RobloxAssetId;
|
|
||||||
use tokio::io::AsyncReadExt;
|
|
||||||
|
|
||||||
const DOWNLOAD_LIMIT:usize=16;
|
|
||||||
|
|
||||||
#[derive(Subcommand)]
|
|
||||||
pub enum Commands{
|
|
||||||
RobloxToSNF(RobloxToSNFSubcommand),
|
|
||||||
DownloadAssets(DownloadAssetsSubcommand),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Args)]
|
|
||||||
pub struct RobloxToSNFSubcommand {
|
|
||||||
#[arg(long)]
|
|
||||||
output_folder:PathBuf,
|
|
||||||
#[arg(required=true)]
|
|
||||||
input_files:Vec<PathBuf>,
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
pub struct DownloadAssetsSubcommand{
|
|
||||||
#[arg(required=true)]
|
|
||||||
roblox_files:Vec<PathBuf>,
|
|
||||||
// #[arg(long)]
|
|
||||||
// cookie_file:Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Commands{
|
|
||||||
pub async fn run(self)->AResult<()>{
|
|
||||||
match self{
|
|
||||||
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
|
|
||||||
Commands::DownloadAssets(subcommand)=>download_assets(
|
|
||||||
subcommand.roblox_files,
|
|
||||||
rbx_asset::cookie::Cookie::new("".to_string()),
|
|
||||||
).await,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum LoadDomError{
|
|
||||||
IO(std::io::Error),
|
|
||||||
Binary(rbx_binary::DecodeError),
|
|
||||||
Xml(rbx_xml::DecodeError),
|
|
||||||
UnknownFormat,
|
|
||||||
}
|
|
||||||
fn load_dom<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{
|
|
||||||
let mut first_8=[0u8;8];
|
|
||||||
input.read_exact(&mut first_8).map_err(LoadDomError::IO)?;
|
|
||||||
input.rewind().map_err(LoadDomError::IO)?;
|
|
||||||
match &first_8{
|
|
||||||
b"<roblox!"=>rbx_binary::from_reader(input).map_err(LoadDomError::Binary),
|
|
||||||
b"<roblox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(LoadDomError::Xml),
|
|
||||||
_=>Err(LoadDomError::UnknownFormat),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* The ones I'm interested in:
|
|
||||||
Beam.Texture
|
|
||||||
Decal.Texture
|
|
||||||
FileMesh.MeshId
|
|
||||||
FileMesh.TextureId
|
|
||||||
MaterialVariant.ColorMap
|
|
||||||
MaterialVariant.MetalnessMap
|
|
||||||
MaterialVariant.NormalMap
|
|
||||||
MaterialVariant.RoughnessMap
|
|
||||||
MeshPart.MeshId
|
|
||||||
MeshPart.TextureID
|
|
||||||
ParticleEmitter.Texture
|
|
||||||
Sky.MoonTextureId
|
|
||||||
Sky.SkyboxBk
|
|
||||||
Sky.SkyboxDn
|
|
||||||
Sky.SkyboxFt
|
|
||||||
Sky.SkyboxLf
|
|
||||||
Sky.SkyboxRt
|
|
||||||
Sky.SkyboxUp
|
|
||||||
Sky.SunTextureId
|
|
||||||
SurfaceAppearance.ColorMap
|
|
||||||
SurfaceAppearance.MetalnessMap
|
|
||||||
SurfaceAppearance.NormalMap
|
|
||||||
SurfaceAppearance.RoughnessMap
|
|
||||||
SurfaceAppearance.TexturePack
|
|
||||||
*/
|
|
||||||
fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&str){
|
|
||||||
if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
|
|
||||||
let url:&str=content.as_ref();
|
|
||||||
if let Ok(asset_id)=url.parse(){
|
|
||||||
content_list.insert(asset_id);
|
|
||||||
}else{
|
|
||||||
println!("Content failed to parse into AssetID: {:?}",content);
|
|
||||||
}
|
|
||||||
}else{
|
|
||||||
println!("property={} does not exist for class={}",property,object.class.as_str());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
async fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
|
|
||||||
let mut file=tokio::fs::File::open(path).await?;
|
|
||||||
let mut data=Vec::new();
|
|
||||||
file.read_to_end(&mut data).await?;
|
|
||||||
Ok(Cursor::new(data))
|
|
||||||
}
|
|
||||||
#[derive(Default)]
|
|
||||||
struct UniqueAssets{
|
|
||||||
meshes:HashSet<RobloxAssetId>,
|
|
||||||
unions:HashSet<RobloxAssetId>,
|
|
||||||
textures:HashSet<RobloxAssetId>,
|
|
||||||
}
|
|
||||||
impl UniqueAssets{
|
|
||||||
fn collect(&mut self,object:&Instance){
|
|
||||||
match object.class.as_str(){
|
|
||||||
"Beam"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
|
||||||
"Decal"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
|
||||||
"Texture"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
|
||||||
"FileMesh"=>accumulate_content_id(&mut self.textures,object,"TextureId"),
|
|
||||||
"MeshPart"=>{
|
|
||||||
accumulate_content_id(&mut self.textures,object,"TextureID");
|
|
||||||
accumulate_content_id(&mut self.meshes,object,"MeshId");
|
|
||||||
},
|
|
||||||
"SpecialMesh"=>accumulate_content_id(&mut self.meshes,object,"MeshId"),
|
|
||||||
"ParticleEmitter"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
|
||||||
"Sky"=>{
|
|
||||||
accumulate_content_id(&mut self.textures,object,"MoonTextureId");
|
|
||||||
accumulate_content_id(&mut self.textures,object,"SkyboxBk");
|
|
||||||
accumulate_content_id(&mut self.textures,object,"SkyboxDn");
|
|
||||||
accumulate_content_id(&mut self.textures,object,"SkyboxFt");
|
|
||||||
accumulate_content_id(&mut self.textures,object,"SkyboxLf");
|
|
||||||
accumulate_content_id(&mut self.textures,object,"SkyboxRt");
|
|
||||||
accumulate_content_id(&mut self.textures,object,"SkyboxUp");
|
|
||||||
accumulate_content_id(&mut self.textures,object,"SunTextureId");
|
|
||||||
},
|
|
||||||
"UnionOperation"=>accumulate_content_id(&mut self.unions,object,"AssetId"),
|
|
||||||
_=>(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
#[derive(Debug)]
enum UniqueAssetError{
    IO(std::io::Error),
    LoadDom(LoadDomError),
}
async fn unique_assets(path:&Path)->Result<UniqueAssets,UniqueAssetError>{
    // read entire file
    let mut assets=UniqueAssets::default();
    let data=read_entire_file(path).await.map_err(UniqueAssetError::IO)?;
    let dom=load_dom(data).map_err(UniqueAssetError::LoadDom)?;
    for object in dom.into_raw().1.into_values(){
        assets.collect(&object);
    }
    Ok(assets)
}
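// The helper below is a usage sketch added for illustration only (it is not part of
// this commit): it drives `unique_assets` for a single map file and reports how many
// distinct meshes, unions and textures were referenced. The function name is hypothetical.
#[allow(dead_code)]
async fn print_asset_counts(path:&Path)->Result<(),UniqueAssetError>{
    let assets=unique_assets(path).await?;
    println!(
        "meshes={} unions={} textures={}",
        assets.meshes.len(),
        assets.unions.len(),
        assets.textures.len(),
    );
    Ok(())
}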
enum DownloadType{
    Texture(RobloxAssetId),
    Mesh(RobloxAssetId),
    Union(RobloxAssetId),
}
impl DownloadType{
    fn path(&self)->PathBuf{
        match self{
            DownloadType::Texture(asset_id)=>format!("downloaded_textures/{}",asset_id.0).into(),
            DownloadType::Mesh(asset_id)=>format!("meshes/{}",asset_id.0).into(),
            DownloadType::Union(asset_id)=>format!("unions/{}",asset_id.0).into(),
        }
    }
    fn asset_id(&self)->u64{
        match self{
            DownloadType::Texture(asset_id)=>asset_id.0,
            DownloadType::Mesh(asset_id)=>asset_id.0,
            DownloadType::Union(asset_id)=>asset_id.0,
        }
    }
}
enum DownloadResult{
    Cached(PathBuf),
    Data(Vec<u8>),
    Failed,
}
#[derive(Default,Debug)]
struct Stats{
    total_assets:u32,
    cached_assets:u32,
    downloaded_assets:u32,
    failed_downloads:u32,
    timed_out_downloads:u32,
}
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::CookieContext,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
    stats.total_assets+=1;
    // check if file exists on disk
    let path=download_instruction.path();
    if tokio::fs::try_exists(path.as_path()).await?{
        stats.cached_assets+=1;
        return Ok(DownloadResult::Cached(path));
    }
    let asset_id=download_instruction.asset_id();
    // if not, download file
    let mut retry=0;
    const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
    let mut backoff=1000f32;
    loop{
        let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{
            asset_id,
            version:None,
        }).await;
        match asset_result{
            Ok(asset_result)=>{
                stats.downloaded_assets+=1;
                tokio::fs::write(path,&asset_result).await?;
                break Ok(DownloadResult::Data(asset_result));
            },
            Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
                if scwuab.status_code.as_u16()==429{
                    if retry==12{
                        println!("Giving up asset download {asset_id}");
                        stats.timed_out_downloads+=1;
                        break Ok(DownloadResult::Failed);
                    }
                    println!("Hit roblox rate limit, waiting {:.0}ms...",backoff);
                    tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
                    backoff*=BACKOFF_MUL;
                    retry+=1;
                }else{
                    stats.failed_downloads+=1;
                    println!("weird scwuab error: {scwuab:?}");
                    break Ok(DownloadResult::Failed);
                }
            },
            Err(e)=>{
                stats.failed_downloads+=1;
                println!("sadly error: {e}");
                break Ok(DownloadResult::Failed);
            },
        }
    }
}
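// Illustrative sketch (not part of this commit) of the retry schedule implemented in
// download_retry above: the delay starts at 1000ms and is multiplied by exp(1/3) after
// each 429 response, so the 12 permitted sleeps last roughly 1.0s, 1.4s, 1.9s, ... 39.1s,
// about 135 seconds in total before the asset is given up on.
#[cfg(test)]
mod backoff_schedule_sketch{
    #[test]
    fn total_backoff_is_about_135_seconds(){
        const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
        let mut backoff=1000f32;
        let mut total=0f32;
        for _ in 0..12{
            total+=backoff;
            backoff*=BACKOFF_MUL;
        }
        assert!((total-135_000.0).abs()<2_000.0);
    }
}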
#[derive(Debug,thiserror::Error)]
enum ConvertTextureError{
    #[error("Io error {0:?}")]
    Io(#[from]std::io::Error),
    #[error("Image error {0:?}")]
    Image(#[from]image::ImageError),
    #[error("DDS create error {0:?}")]
    DDS(#[from]image_dds::CreateDdsError),
    #[error("DDS write error {0:?}")]
    DDSWrite(#[from]image_dds::ddsfile::Error),
}
async fn convert_texture(asset_id:RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
    let data=match download_result{
        DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
        DownloadResult::Data(data)=>data,
        DownloadResult::Failed=>return Ok(()),
    };
    // decode the source image (png, jpeg, ...)
    let image=image::load_from_memory(&data)?.to_rgba8();

    // pick format
    let format=if image.width()%4!=0||image.height()%4!=0{
        image_dds::ImageFormat::Rgba8UnormSrgb
    }else{
        image_dds::ImageFormat::BC7RgbaUnormSrgb
    };

    // BC7 compression fails if the image dimensions are not a multiple of 4
    let dds=image_dds::dds_from_image(
        &image,
        format,
        image_dds::Quality::Slow,
        image_dds::Mipmaps::GeneratedAutomatic,
    )?;

    let file_name=format!("textures/{}.dds",asset_id.0);
    let mut file=std::fs::File::create(file_name)?;
    dds.write(&mut file)?;
    Ok(())
}
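// Hypothetical helper (not part of this commit) spelling out the format choice used in
// convert_texture above: BC7 compresses 4x4 texel blocks, so it is only selected when
// both dimensions are multiples of 4; anything else falls back to uncompressed RGBA8.
#[allow(dead_code)]
fn pick_dds_format(width:u32,height:u32)->image_dds::ImageFormat{
    if width%4!=0||height%4!=0{
        image_dds::ImageFormat::Rgba8UnormSrgb
    }else{
        image_dds::ImageFormat::BC7RgbaUnormSrgb
    }
}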
async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{
    tokio::try_join!(
        tokio::fs::create_dir_all("downloaded_textures"),
        tokio::fs::create_dir_all("textures"),
        tokio::fs::create_dir_all("meshes"),
        tokio::fs::create_dir_all("unions"),
    )?;
    // use mpsc
    let thread_limit=std::thread::available_parallelism()?.get();
    let (send_assets,mut recv_assets)=tokio::sync::mpsc::channel(DOWNLOAD_LIMIT);
    let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
    // map decode dispatcher
    // read files multithreaded
    // produce UniqueAssetsResult per file
    tokio::spawn(async move{
        // move send so it gets dropped when all maps have been decoded
        // closing the channel
        let mut it=paths.into_iter();
        static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
        SEM.add_permits(thread_limit);
        while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
            let send=send_assets.clone();
            tokio::spawn(async move{
                let result=unique_assets(path.as_path()).await;
                _=send.send(result).await;
                drop(permit);
            });
        }
    });
    // download manager
    // insert into global unique assets guy
    // add to download queue if the asset is globally unique and does not already exist on disk
    let mut stats=Stats::default();
    let context=rbx_asset::cookie::CookieContext::new(cookie);
    let mut globally_unique_assets=UniqueAssets::default();
    // pop a job = retry_queue.pop_front() or ingest(recv.recv().await)
    // SLOW MODE:
    // acquire all permits
    // drop all permits
    // pop one job
    // if it succeeds go into fast mode
    // FAST MODE:
    // acquire one permit
    // pop a job
    let download_thread=tokio::spawn(async move{
        while let Some(result)=recv_assets.recv().await{
            let unique_assets=match result{
                Ok(unique_assets)=>unique_assets,
                Err(e)=>{
                    println!("error: {e:?}");
                    continue;
                },
            };
            for texture_id in unique_assets.textures{
                if globally_unique_assets.textures.insert(texture_id){
                    let data=download_retry(&mut stats,&context,DownloadType::Texture(texture_id)).await?;
                    send_texture.send((texture_id,data)).await?;
                }
            }
            for mesh_id in unique_assets.meshes{
                if globally_unique_assets.meshes.insert(mesh_id){
                    download_retry(&mut stats,&context,DownloadType::Mesh(mesh_id)).await?;
                }
            }
            for union_id in unique_assets.unions{
                if globally_unique_assets.unions.insert(union_id){
                    download_retry(&mut stats,&context,DownloadType::Union(union_id)).await?;
                }
            }
        }
        dbg!(stats);
        Ok::<(),anyhow::Error>(())
    });
    static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
    SEM.add_permits(thread_limit);
    while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){
        tokio::spawn(async move{
            let result=convert_texture(asset_id,download_result).await;
            drop(permit);
            result.unwrap();
        });
    }
    download_thread.await??;
    _=SEM.acquire_many(thread_limit as u32).await.unwrap();
    Ok(())
}

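// Generic sketch (not part of this commit, name hypothetical) of the concurrency-limiting
// idiom used by download_assets and the other drivers in this file: a static semaphore is
// seeded with thread_limit permits to cap the number of in-flight tasks, and re-acquiring
// every permit at the end waits for all spawned tasks to finish.
#[allow(dead_code)]
async fn for_each_path_limited(paths:Vec<PathBuf>,thread_limit:usize,work:fn(PathBuf)){
    static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
    SEM.add_permits(thread_limit);
    let mut it=paths.into_iter();
    while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
        tokio::spawn(async move{
            // at most thread_limit of these run concurrently
            work(path);
            // dropping the permit frees a slot for the next item
            drop(permit);
        });
    }
    // taking every permit back waits for the remaining tasks to complete
    _=SEM.acquire_many(thread_limit as u32).await.unwrap();
}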
#[derive(Debug)]
#[allow(dead_code)]
enum ConvertError{
    IO(std::io::Error),
    SNFMap(strafesnet_snf::map::Error),
    RobloxRead(strafesnet_rbx_loader::ReadError),
    RobloxLoad(strafesnet_rbx_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
    fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
        write!(f,"{self:?}")
    }
}
impl std::error::Error for ConvertError{}
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
    let entire_file=tokio::fs::read(path).await?;

    let model=strafesnet_rbx_loader::read(
        std::io::Cursor::new(entire_file)
    ).map_err(ConvertError::RobloxRead)?;

    let mut place=model.into_place();
    place.run_scripts();

    let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;

    let mut dest=output_folder;
    dest.push(path.file_stem().unwrap());
    dest.set_extension("snfm");
    let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;

    strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;

    Ok(())
}

async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
    let start=std::time::Instant::now();

    let thread_limit=std::thread::available_parallelism()?.get();
    let mut it=paths.into_iter();
    static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
    SEM.add_permits(thread_limit);

    while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
        let output_folder=output_folder.clone();
        tokio::spawn(async move{
            let result=convert_to_snf(path.as_path(),output_folder).await;
            drop(permit);
            match result{
                Ok(())=>(),
                Err(e)=>println!("Convert error: {e:?}"),
            }
        });
    }
    _=SEM.acquire_many(thread_limit as u32).await.unwrap();

    println!("elapsed={:?}", start.elapsed());
    Ok(())
}
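// Hypothetical CLI wiring (not part of this commit): roblox_to_snf could be exposed as a
// clap subcommand in the same style as the map-tool CLI that is removed further down in
// this diff. The RobloxToSNFSubcommand name and its fields are assumptions for illustration.
#[derive(clap::Args)]
pub struct RobloxToSNFSubcommand{
    #[arg(long)]
    output_folder:PathBuf,
    #[arg(required=true)]
    input_files:Vec<PathBuf>,
}
impl RobloxToSNFSubcommand{
    pub async fn run(self)->AResult<()>{
        roblox_to_snf(self.input_files,self.output_folder).await
    }
}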
@@ -1,464 +0,0 @@
use std::path::{Path,PathBuf};
use std::borrow::Cow;
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use strafesnet_bsp_loader::loader::BspFinder;
use strafesnet_deferred_loader::loader::Loader;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};

#[derive(Subcommand)]
pub enum Commands{
    SourceToSNF(SourceToSNFSubcommand),
    ExtractTextures(ExtractTexturesSubcommand),
    VPKContents(VPKContentsSubcommand),
    BSPContents(BSPContentsSubcommand),
}

#[derive(Args)]
pub struct SourceToSNFSubcommand {
    #[arg(long)]
    output_folder:PathBuf,
    #[arg(required=true)]
    input_files:Vec<PathBuf>,
    #[arg(long)]
    vpks:Vec<PathBuf>,
}
#[derive(Args)]
pub struct ExtractTexturesSubcommand{
    #[arg(required=true)]
    bsp_files:Vec<PathBuf>,
    #[arg(long)]
    vpks:Vec<PathBuf>,
}
#[derive(Args)]
pub struct VPKContentsSubcommand {
    #[arg(long)]
    input_file:PathBuf,
}
#[derive(Args)]
pub struct BSPContentsSubcommand {
    #[arg(long)]
    input_file:PathBuf,
}

impl Commands{
    pub async fn run(self)->AResult<()>{
        match self{
            Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder,subcommand.vpks).await,
            Commands::ExtractTextures(subcommand)=>extract_textures(subcommand.bsp_files,subcommand.vpks).await,
            Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
            Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
        }
    }
}

enum VMTContent{
    VMT(String),
    VTF(String),
    Patch(vmt_parser::material::PatchMaterial),
    Unsupported,//don't want to deal with whatever vmt variant
    Unresolved,//could not locate a texture because of vmt content
}
impl VMTContent{
    fn vtf(opt:Option<String>)->Self{
        match opt{
            Some(s)=>Self::VTF(s),
            None=>Self::Unresolved,
        }
    }
}

fn get_some_texture(material:vmt_parser::material::Material)->VMTContent{
    //just grab some texture from somewhere for now
    match material{
        vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
        vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
        vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
        vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
        vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
        vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
        vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
        vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
        vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
        vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
        vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
        vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
        vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
        vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
        vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
        vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
        vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
        vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
        _=>unreachable!(),
    }
}

#[derive(Debug,thiserror::Error)]
enum GetVMTError{
    #[error("Bsp error {0:?}")]
    Bsp(#[from]vbsp::BspError),
    #[error("Utf8 error {0:?}")]
    Utf8(#[from]std::str::Utf8Error),
    #[error("Vdf error {0:?}")]
    Vdf(#[from]vmt_parser::VdfError),
    #[error("Vmt not found")]
    NotFound,
}

fn get_vmt(finder:BspFinder,search_name:&str)->Result<vmt_parser::material::Material,GetVMTError>{
    let vmt_data=finder.find(search_name)?.ok_or(GetVMTError::NotFound)?;
    //decode vmt and then write
    let vmt_str=core::str::from_utf8(&vmt_data)?;
    let material=vmt_parser::from_str(vmt_str)?;
    //println!("vmt material={:?}",material);
    Ok(material)
}

#[derive(Debug,thiserror::Error)]
enum LoadVMTError{
    #[error("Bsp error {0:?}")]
    Bsp(#[from]vbsp::BspError),
    #[error("GetVMT error {0:?}")]
    GetVMT(#[from]GetVMTError),
    #[error("FromUtf8 error {0:?}")]
    FromUtf8(#[from]std::string::FromUtf8Error),
    #[error("Vdf error {0:?}")]
    Vdf(#[from]vmt_parser::VdfError),
    #[error("Vmt unsupported")]
    Unsupported,
    #[error("Vmt unresolved")]
    Unresolved,
    #[error("Vmt not found")]
    NotFound,
}
fn recursive_vmt_loader<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,material:vmt_parser::material::Material)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
where
    'bsp:'a,
    'vpk:'a,
{
    match get_some_texture(material){
        VMTContent::VMT(mut s)=>{
            s.make_ascii_lowercase();
            recursive_vmt_loader(finder,get_vmt(finder,&s)?)
        },
        VMTContent::VTF(s)=>{
            let mut texture_file_name=PathBuf::from("materials");
            texture_file_name.push(s);
            texture_file_name.set_extension("vtf");
            texture_file_name.as_mut_os_str().make_ascii_lowercase();
            Ok(finder.find(texture_file_name.to_str().unwrap())?)
        },
        VMTContent::Patch(mat)=>recursive_vmt_loader(finder,
            mat.resolve(|search_name|{
                let name_lowercase=search_name.to_lowercase();
                match finder.find(&name_lowercase)?{
                    Some(bytes)=>Ok(String::from_utf8(bytes.into_owned())?),
                    None=>Err(LoadVMTError::NotFound),
                }
            })?
        ),
        VMTContent::Unsupported=>Err(LoadVMTError::Unsupported),
        VMTContent::Unresolved=>Err(LoadVMTError::Unresolved),
    }
}
fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
where
    'bsp:'a,
    'vpk:'a,
{
    let mut texture_file_name=PathBuf::from("materials");
    //lower case
    texture_file_name.push(texture_name);
    texture_file_name.as_mut_os_str().make_ascii_lowercase();
    //remove stem and search for both vtf and vmt files
    let stem=texture_file_name.file_stem().unwrap().to_owned();
    texture_file_name.pop();
    texture_file_name.push(stem);
    if let Some(stuff)=finder.find(texture_file_name.to_str().unwrap())?{
        return Ok(Some(stuff));
    }

    // search for both vmt,vtf
    let mut texture_file_name_vmt=texture_file_name.clone();
    texture_file_name_vmt.set_extension("vmt");

    let get_vmt_result=get_vmt(finder,texture_file_name_vmt.to_str().unwrap());
    match get_vmt_result{
        Ok(material)=>{
            let vmt_result=recursive_vmt_loader(finder,material);
            match vmt_result{
                Ok(Some(stuff))=>return Ok(Some(stuff)),
                Ok(None)
                |Err(LoadVMTError::NotFound)=>(),
                |Err(LoadVMTError::GetVMT(GetVMTError::NotFound))=>(),
                Err(e)=>return Err(e),
            }
        }
        |Err(GetVMTError::NotFound)=>(),
        Err(e)=>Err(e)?,
    }

    // try looking for vtf
    let mut texture_file_name_vtf=texture_file_name.clone();
    texture_file_name_vtf.set_extension("vtf");

    let get_vtf_result=get_vmt(finder,texture_file_name_vtf.to_str().unwrap());
    match get_vtf_result{
        Ok(material)=>{
            let vtf_result=recursive_vmt_loader(finder,material);
            match vtf_result{
                Ok(Some(stuff))=>return Ok(Some(stuff)),
                Ok(None)
                |Err(LoadVMTError::NotFound)=>(),
                |Err(LoadVMTError::GetVMT(GetVMTError::NotFound))=>(),
                Err(e)=>return Err(e),
            }
        }
        |Err(GetVMTError::NotFound)=>(),
        Err(e)=>Err(e)?,
    }

    Ok(None)
}
#[derive(Debug,thiserror::Error)]
enum ExtractTextureError{
    #[error("Io error {0:?}")]
    Io(#[from]std::io::Error),
    #[error("Bsp error {0:?}")]
    Bsp(#[from]vbsp::BspError),
    #[error("MeshLoad error {0:?}")]
    MeshLoad(#[from]strafesnet_bsp_loader::loader::MeshError),
    #[error("Load VMT error {0:?}")]
    LoadVMT(#[from]LoadVMTError),
}
async fn gimme_them_textures(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{
    let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?;
    let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp);
    let bsp=loader_bsp.as_ref();

    let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
    for texture in bsp.textures(){
        texture_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(texture.name())));
    }

    let mut mesh_deferred_loader=MeshDeferredLoader::new();
    for prop in bsp.static_props(){
        mesh_deferred_loader.acquire_mesh_id(prop.model());
    }

    let finder=BspFinder{
        bsp:&loader_bsp,
        vpks:vpk_list
    };

    let mut mesh_loader=strafesnet_bsp_loader::loader::ModelLoader::new(finder);
    // load models and collect requested textures
    for model_path in mesh_deferred_loader.into_indices(){
        let model:vmdl::Model=match mesh_loader.load(model_path){
            Ok(model)=>model,
            Err(e)=>{
                println!("Model={model_path} Load model error: {e}");
                continue;
            },
        };
        for texture in model.textures(){
            for search_path in &texture.search_paths{
                let mut path=PathBuf::from(search_path.as_str());
                path.push(texture.name.as_str());
                let path=path.to_str().unwrap().to_owned();
                texture_deferred_loader.acquire_render_config_id(Some(Cow::Owned(path)));
            }
        }
    }

    for texture_path in texture_deferred_loader.into_indices(){
        match load_texture(finder,&texture_path){
            Ok(Some(texture))=>send_texture.send(
                (texture.into_owned(),texture_path.into_owned())
            ).await.unwrap(),
            Ok(None)=>(),
            Err(e)=>println!("Texture={texture_path} Load error: {e}"),
        }
    }

    Ok(())
}

#[derive(Debug,thiserror::Error)]
enum ConvertTextureError{
    #[error("Bsp error {0:?}")]
    Bsp(#[from]vbsp::BspError),
    #[error("Vtf error {0:?}")]
    Vtf(#[from]vtf::Error),
    #[error("DDS create error {0:?}")]
    DDS(#[from]image_dds::CreateDdsError),
    #[error("DDS write error {0:?}")]
    DDSWrite(#[from]image_dds::ddsfile::Error),
    #[error("Io error {0:?}")]
    Io(#[from]std::io::Error),
}

async fn convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<Path>)->Result<(),ConvertTextureError>{
    let image=vtf::from_bytes(&texture)?.highres_image.decode(0)?.to_rgba8();

    let format=if image.width()%4!=0||image.height()%4!=0{
        image_dds::ImageFormat::Rgba8UnormSrgb
    }else{
        image_dds::ImageFormat::BC7RgbaUnormSrgb
    };
    //this fails if the image dimensions are not a multiple of 4
    let dds = image_dds::dds_from_image(
        &image,
        format,
        image_dds::Quality::Slow,
        image_dds::Mipmaps::GeneratedAutomatic,
    )?;

    //write dds
    let mut dest=PathBuf::from("textures");
    dest.push(write_file_name);
    dest.set_extension("dds");
    std::fs::create_dir_all(dest.parent().unwrap())?;
    let mut writer=std::io::BufWriter::new(std::fs::File::create(dest)?);
    dds.write(&mut writer)?;

    Ok(())
}

async fn read_vpks(vpk_paths:Vec<PathBuf>,thread_limit:usize)->Vec<strafesnet_bsp_loader::Vpk>{
    futures::stream::iter(vpk_paths).map(|vpk_path|async{
        // idk why it doesn't want to pass out the errors but this is fatal anyways
        tokio::task::spawn_blocking(move||Ok::<_,vpk::Error>(strafesnet_bsp_loader::Vpk::new(vpk::VPK::read(&vpk_path)?))).await.unwrap().unwrap()
    })
    .buffer_unordered(thread_limit)
    .collect().await
}

async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
    tokio::try_join!(
        tokio::fs::create_dir_all("extracted_textures"),
        tokio::fs::create_dir_all("textures"),
        tokio::fs::create_dir_all("meshes"),
    )?;
    let thread_limit=std::thread::available_parallelism()?.get();

    // load vpk list and leak for static lifetime
    let vpk_list:&[strafesnet_bsp_loader::Vpk]=read_vpks(vpk_paths,thread_limit).await.leak();

    let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
    let mut it=paths.into_iter();
    let extract_thread=tokio::spawn(async move{
        static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
        SEM.add_permits(thread_limit);
        while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
            let send=send_texture.clone();
            tokio::spawn(async move{
                let result=gimme_them_textures(&path,vpk_list,send).await;
                drop(permit);
                match result{
                    Ok(())=>(),
                    Err(e)=>println!("Map={path:?} Decode error: {e:?}"),
                }
            });
        }
    });

    // convert images
    static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
    SEM.add_permits(thread_limit);
    while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){
        // TODO: dedup dest?
        tokio::spawn(async move{
            let result=convert_texture(data,dest).await;
            drop(permit);
            match result{
                Ok(())=>(),
                Err(e)=>println!("Convert error: {e:?}"),
            }
        });
    }
    extract_thread.await?;
    _=SEM.acquire_many(thread_limit as u32).await?;
    Ok(())
}

fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
    let vpk_index=vpk::VPK::read(&vpk_path)?;
    for (label,entry) in vpk_index.tree.into_iter(){
        println!("vpk label={} entry={:?}",label,entry);
    }
    Ok(())
}

fn bsp_contents(path:PathBuf)->AResult<()>{
    let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
    for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
        println!("file_name={:?}",file_name);
    }
    Ok(())
}

#[derive(Debug)]
#[allow(dead_code)]
enum ConvertError{
    IO(std::io::Error),
    SNFMap(strafesnet_snf::map::Error),
    BspRead(strafesnet_bsp_loader::ReadError),
    BspLoad(strafesnet_bsp_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
    fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
        write!(f,"{self:?}")
    }
}
impl std::error::Error for ConvertError{}

async fn convert_to_snf(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],output_folder:PathBuf)->AResult<()>{
    let entire_file=tokio::fs::read(path).await?;

    let bsp=strafesnet_bsp_loader::read(
        std::io::Cursor::new(entire_file)
    ).map_err(ConvertError::BspRead)?;

    let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;

    let mut dest=output_folder;
    dest.push(path.file_stem().unwrap());
    dest.set_extension("snfm");
    let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;

    strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;

    Ok(())
}
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
    let start=std::time::Instant::now();

    let thread_limit=std::thread::available_parallelism()?.get();

    // load vpk list and leak for static lifetime
    let vpk_list:&[strafesnet_bsp_loader::Vpk]=read_vpks(vpk_paths,thread_limit).await.leak();

    let mut it=paths.into_iter();
    static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
    SEM.add_permits(thread_limit);

    while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
        let output_folder=output_folder.clone();
        tokio::spawn(async move{
            let result=convert_to_snf(path.as_path(),vpk_list,output_folder).await;
            drop(permit);
            match result{
                Ok(())=>(),
                Err(e)=>println!("Convert error: {e:?}"),
            }
        });
    }
    _=SEM.acquire_many(thread_limit as u32).await.unwrap();

    println!("elapsed={:?}", start.elapsed());
    Ok(())
}
@@ -1,16 +1,16 @@
use crate::window::Instruction;
use strafesnet_common::integer;
use strafesnet_common::instruction::TimedInstruction;
-use strafesnet_common::session::Time as SessionTime;
+use strafesnet_common::session::TimeInner as SessionTimeInner;

pub struct App<'a>{
    root_time:std::time::Instant,
-    window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>,
+    window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>,
}
impl<'a> App<'a>{
    pub fn new(
        root_time:std::time::Instant,
-        window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>,
+        window_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>,
    )->App<'a>{
        Self{
            root_time,
@@ -1,7 +1,7 @@
use std::io::Read;

#[cfg(any(feature="roblox",feature="source"))]
-use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
+use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};

#[allow(dead_code)]
#[derive(Debug)]
@@ -69,9 +69,13 @@ pub enum LoadError{
    ReadError(ReadError),
    File(std::io::Error),
    #[cfg(feature="roblox")]
-    LoadRoblox(strafesnet_rbx_loader::LoadError),
+    LoadRobloxMesh(strafesnet_rbx_loader::loader::MeshError),
+    #[cfg(feature="roblox")]
+    LoadRobloxTexture(strafesnet_rbx_loader::loader::TextureError),
    #[cfg(feature="source")]
-    LoadSource(strafesnet_bsp_loader::LoadError),
+    LoadSourceMesh(strafesnet_bsp_loader::loader::MeshError),
+    #[cfg(feature="source")]
+    LoadSourceTexture(strafesnet_bsp_loader::loader::TextureError),
}
impl std::fmt::Display for LoadError{
    fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -81,7 +85,7 @@ impl std::fmt::Display for LoadError{
impl std::error::Error for LoadError{}

pub enum LoadFormat{
-    #[cfg(any(feature="snf",feature="roblox",feature="source"))]
+    #[cfg(feature="snf")]
    Map(strafesnet_common::map::CompleteMap),
    #[cfg(feature="snf")]
    Bot(strafesnet_snf::bot::Segment),
@@ -99,13 +103,50 @@ pub fn load<P:AsRef<std::path::Path>>(path:P)->Result<LoadFormat,LoadError>{
        ReadFormat::Roblox(model)=>{
            let mut place=model.into_place();
            place.run_scripts();
-            Ok(LoadFormat::Map(
-                place.to_snf(LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRoblox)?
-            ))
+            let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
+            let mut mesh_deferred_loader=MeshDeferredLoader::new();
+
+            let map_step1=strafesnet_rbx_loader::convert(
+                place.as_ref(),
+                &mut texture_deferred_loader,
+                &mut mesh_deferred_loader,
+            );
+
+            let mut mesh_loader=strafesnet_rbx_loader::loader::MeshLoader::new();
+            let meshpart_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRobloxMesh)?;
+
+            let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(meshpart_meshes);
+
+            let mut texture_loader=strafesnet_rbx_loader::loader::TextureLoader::new();
+            let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadRobloxTexture)?;
+
+            let map=map_step2.add_render_configs_and_textures(render_configs);
+
+            Ok(LoadFormat::Map(map))
        },
        #[cfg(feature="source")]
-        ReadFormat::Source(bsp)=>Ok(LoadFormat::Map(
-            bsp.to_snf(LoadFailureMode::DefaultToNone,&[]).map_err(LoadError::LoadSource)?
-        )),
+        ReadFormat::Source(bsp)=>{
+            let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
+            let mut mesh_deferred_loader=MeshDeferredLoader::new();
+
+            let map_step1=strafesnet_bsp_loader::convert(
+                &bsp,
+                &mut texture_deferred_loader,
+                &mut mesh_deferred_loader,
+            );
+
+            let mut mesh_loader=strafesnet_bsp_loader::loader::MeshLoader::new(&bsp,&mut texture_deferred_loader);
+            let prop_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadSourceMesh)?;
+
+            let map_step2=map_step1.add_prop_meshes(prop_meshes);
+
+            let mut texture_loader=strafesnet_bsp_loader::loader::TextureLoader::new();
+            let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,LoadFailureMode::DefaultToNone).map_err(LoadError::LoadSourceTexture)?;
+
+            let map=map_step2.add_render_configs_and_textures(render_configs);
+
+            Ok(LoadFormat::Map(map))
+        },
    }
}
@@ -2,6 +2,7 @@ mod app;
mod file;
mod setup;
mod window;
+mod worker;
mod compat_worker;
mod physics_worker;
mod graphics_worker;
@@ -6,7 +6,7 @@ use strafesnet_session::session::{
};
use strafesnet_common::instruction::{TimedInstruction,InstructionConsumer};
use strafesnet_common::physics::Time as PhysicsTime;
-use strafesnet_common::session::Time as SessionTime;
+use strafesnet_common::session::{Time as SessionTime,TimeInner as SessionTimeInner};
use strafesnet_common::timer::Timer;

pub enum Instruction{
@@ -23,7 +23,7 @@ pub fn new<'a>(
    mut graphics_worker:crate::compat_worker::INWorker<'a,crate::graphics_worker::Instruction>,
    directories:Directories,
    user_settings:settings::UserSettings,
-)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>{
+)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>{
    let physics=strafesnet_physics::physics::PhysicsState::default();
    let timer=Timer::unpaused(SessionTime::ZERO,PhysicsTime::ZERO);
    let simulation=Simulation::new(timer,physics);
@@ -32,7 +32,7 @@ pub fn new<'a>(
        directories,
        simulation,
    );
-    crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTime>|{
+    crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTimeInner>|{
        // excruciating pain
        macro_rules! run_session_instruction{
            ($time:expr,$instruction:expr)=>{
@@ -1,5 +1,5 @@
use strafesnet_common::instruction::TimedInstruction;
-use strafesnet_common::session::Time as SessionTime;
+use strafesnet_common::session::{Time as SessionTime,TimeInner as SessionTimeInner};
use strafesnet_common::physics::{MiscInstruction,SetControlInstruction};
use crate::file::LoadFormat;
use crate::physics_worker::Instruction as PhysicsWorkerInstruction;
@@ -17,7 +17,7 @@ struct WindowContext<'a>{
    mouse_pos:glam::DVec2,
    screen_size:glam::UVec2,
    window:&'a winit::window::Window,
-    physics_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<PhysicsWorkerInstruction,SessionTime>>,
+    physics_thread:crate::compat_worker::QNWorker<'a,TimedInstruction<PhysicsWorkerInstruction,SessionTimeInner>>,
}

impl WindowContext<'_>{
@@ -223,7 +223,7 @@ impl WindowContext<'_>{
pub fn worker<'a>(
    window:&'a winit::window::Window,
    setup_context:crate::setup::SetupContext<'a>,
-)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTime>>{
+)->crate::compat_worker::QNWorker<'a,TimedInstruction<Instruction,SessionTimeInner>>{
    // WindowContextSetup::new
    #[cfg(feature="user-install")]
    let directories=Directories::user().unwrap();
@@ -252,7 +252,7 @@ pub fn worker<'a>(
    };

    //WindowContextSetup::into_worker
-    crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTime>|{
+    crate::compat_worker::QNWorker::new(move |ins:TimedInstruction<Instruction,SessionTimeInner>|{
        match ins.instruction{
            Instruction::WindowEvent(window_event)=>{
                window_context.window_event(ins.time,window_event);
@@ -1 +1 @@
-/run/media/quat/Files/Documents/map-files/strafesnet/maps/bhop_snfm
+/run/media/quat/Files/Documents/map-files/verify-scripts/maps/bhop_snfm
@@ -1 +1 @@
-/run/media/quat/Files/Documents/map-files/strafesnet/meshes
+/run/media/quat/Files/Documents/map-files/verify-scripts/meshes
@@ -1 +1 @@
-/run/media/quat/Files/Documents/map-files/strafesnet/replays
+/run/media/quat/Files/Documents/map-files/verify-scripts/replays
@@ -1 +1 @@
-/run/media/quat/Files/Documents/map-files/strafesnet/maps/surf_snfm
+/run/media/quat/Files/Documents/map-files/verify-scripts/maps/surf_snfm
@@ -1 +1 @@
-/run/media/quat/Files/Documents/map-files/strafesnet/textures
+/run/media/quat/Files/Documents/map-files/verify-scripts/textures
@@ -1 +0,0 @@
-/run/media/quat/Files/Documents/map-files/strafesnet/unions