forked from StrafesNET/map-tool
Compare commits
104 Commits
test-files
...
master
Author | SHA1 | Date | |
---|---|---|---|
a56c114d08 | |||
b6a5324ae7 | |||
6f5a3c5176 | |||
6bab31f3b3 | |||
9cdeed160f | |||
d0c59b51a4 | |||
451f3ccecb | |||
ed9701981d | |||
60e0197344 | |||
4d97a490c1 | |||
52ba44c6be | |||
95b6272b18 | |||
0172675b04 | |||
982b4aecac | |||
c1ddcdb0c5 | |||
c2d0a4487c | |||
dc9fd2c442 | |||
4199d41d3f | |||
7fbcb206ff | |||
a17901d473 | |||
b88c6b899a | |||
835d4bbecd | |||
b756dc979c | |||
1e888ebb01 | |||
b9dccb1af5 | |||
c6d293cc6b | |||
a386f90f51 | |||
43115cbac6 | |||
35b5aff9a7 | |||
36419af870 | |||
a7518bef46 | |||
6df1f41599 | |||
422d0a160d | |||
1727f9213c | |||
afa9e7447d | |||
ff85efa54f | |||
fa69c53cfc | |||
a57c228580 | |||
5dc69db885 | |||
e54400a436 | |||
e2a5edf8df | |||
d6dd1b8abd | |||
a2b793fcd3 | |||
9cb34f14c8 | |||
bd2e3aa2d3 | |||
07f6053839 | |||
0d5b918ea1 | |||
20a568220a | |||
d670d4129e | |||
de7b0bd5cc | |||
01524146c7 | |||
45e8e415d0 | |||
4417bafc5c | |||
8553625738 | |||
3a3749eaeb | |||
53539f290b | |||
479dd37f53 | |||
34b6a869f0 | |||
19a455ee5e | |||
9904b7a044 | |||
6efa811eb6 | |||
81e4a201bd | |||
8fd5618af2 | |||
54c26d6e1e | |||
110ec94a08 | |||
980da5a6a7 | |||
1cd77984d4 | |||
b0fe231388 | |||
5a4a39ab75 | |||
|
1b2324deeb | ||
4c485e76e4 | |||
7bbb9ca24f | |||
eff55af1b4 | |||
0d05cc9996 | |||
2a55ef90df | |||
1a6202ae66 | |||
|
742f7b4ec0 | ||
2cb346f49a | |||
e5cca9ed04 | |||
52d911a25a | |||
7ab20f36a7 | |||
a7554da1c5 | |||
37f0dad7a1 | |||
e309f15cb8 | |||
29374e4ff5 | |||
b7d04d1f40 | |||
432ec11ea6 | |||
01449b1850 | |||
327d0a4992 | |||
420dbaa022 | |||
cad29af4bb | |||
e0e8744bfd | |||
b434dce0f6 | |||
6ef8fd2f69 | |||
7234065bd8 | |||
41d8e700c5 | |||
4ca3d56f0f | |||
593b6902fd | |||
7523c4313a | |||
694440bd29 | |||
755e1d4d5b | |||
4334a6f330 | |||
553ad2cca5 | |||
3f15d2f5a8 |
1738
Cargo.lock
generated
1738
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
29
Cargo.toml
29
Cargo.toml
@ -1,11 +1,32 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "map-tool"
|
name = "map-tool"
|
||||||
version = "0.1.0"
|
version = "1.5.3"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
rbx_binary = "0.7.1"
|
anyhow = "1.0.75"
|
||||||
rbx_dom_weak = "2.5.0"
|
clap = { version = "4.4.2", features = ["derive"] }
|
||||||
rbx_reflection_database = "0.2.7"
|
flate2 = "1.0.27"
|
||||||
|
image = "0.24.7"
|
||||||
|
image_dds = "0.1.1"
|
||||||
|
lazy-regex = "3.1.0"
|
||||||
|
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
|
||||||
|
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
|
||||||
|
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
|
||||||
|
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
|
||||||
|
strafesnet_bsp_loader = { version = "0.1.3", registry = "strafesnet" }
|
||||||
|
strafesnet_deferred_loader = { version = "0.3.1", features = ["legacy"], registry = "strafesnet" }
|
||||||
|
strafesnet_rbx_loader = { version = "0.3.6", registry = "strafesnet" }
|
||||||
|
strafesnet_snf = { version = "0.1.0", registry = "strafesnet" }
|
||||||
|
vbsp = "0.5.0"
|
||||||
|
vmdl = "0.1.1"
|
||||||
|
vmt-parser = "0.1.1"
|
||||||
|
vpk = "0.2.0"
|
||||||
|
vtf = "0.2.1"
|
||||||
|
|
||||||
|
#[profile.release]
|
||||||
|
#lto = true
|
||||||
|
#strip = true
|
||||||
|
#codegen-units = 1
|
||||||
|
28
LICENSE
28
LICENSE
@ -1,9 +1,23 @@
|
|||||||
MIT License
|
Permission is hereby granted, free of charge, to any
|
||||||
|
person obtaining a copy of this software and associated
|
||||||
|
documentation files (the "Software"), to deal in the
|
||||||
|
Software without restriction, including without
|
||||||
|
limitation the rights to use, copy, modify, merge,
|
||||||
|
publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software
|
||||||
|
is furnished to do so, subject to the following
|
||||||
|
conditions:
|
||||||
|
|
||||||
Copyright (c) <year> <copyright holders>
|
The above copyright notice and this permission notice
|
||||||
|
shall be included in all copies or substantial portions
|
||||||
|
of the Software.
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||||
|
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
|
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||||
|
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||||
|
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||||
|
DEALINGS IN THE SOFTWARE.
|
851
src/main.rs
851
src/main.rs
@ -1,73 +1,820 @@
|
|||||||
fn class_is_a(class: &str, superclass: &str) -> bool {
|
use std::{collections::HashSet,io::{Read,Seek},path::PathBuf};
|
||||||
if class==superclass {
|
use clap::{Args,Parser,Subcommand};
|
||||||
return true
|
use anyhow::Result as AResult;
|
||||||
}
|
use rbx_dom_weak::Instance;
|
||||||
let class_descriptor=rbx_reflection_database::get().classes.get(class);
|
|
||||||
if let Some(descriptor) = &class_descriptor {
|
#[derive(Parser)]
|
||||||
if let Some(class_super) = &descriptor.superclass {
|
#[command(author, version, about, long_about = None)]
|
||||||
return class_is_a(&class_super, superclass)
|
#[command(propagate_version = true)]
|
||||||
}
|
struct Cli {
|
||||||
}
|
#[command(subcommand)]
|
||||||
return false
|
command: Commands,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn recursive_collect_scripts(scripts: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance){
|
#[derive(Subcommand)]
|
||||||
|
enum Commands {
|
||||||
|
RobloxToSNF(RobloxToSNFSubcommand),
|
||||||
|
SourceToSNF(SourceToSNFSubcommand),
|
||||||
|
DownloadTextures(DownloadTexturesSubcommand),
|
||||||
|
ExtractTextures(ExtractTexturesSubcommand),
|
||||||
|
ConvertTextures(ConvertTexturesSubcommand),
|
||||||
|
VPKContents(VPKContentsSubcommand),
|
||||||
|
BSPContents(BSPContentsSubcommand),
|
||||||
|
DownloadMeshes(DownloadMeshesSubcommand),
|
||||||
|
WriteAttributes(WriteAttributesSubcommand),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Args)]
|
||||||
|
struct RobloxToSNFSubcommand {
|
||||||
|
#[arg(long)]
|
||||||
|
output_folder:PathBuf,
|
||||||
|
#[arg(required=true)]
|
||||||
|
input_files:Vec<PathBuf>,
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct SourceToSNFSubcommand {
|
||||||
|
#[arg(long)]
|
||||||
|
output_folder:PathBuf,
|
||||||
|
#[arg(required=true)]
|
||||||
|
input_files:Vec<PathBuf>,
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct DownloadTexturesSubcommand {
|
||||||
|
#[arg(long,required=true)]
|
||||||
|
roblox_files:Vec<PathBuf>
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct ExtractTexturesSubcommand {
|
||||||
|
#[arg(long)]
|
||||||
|
bsp_file:PathBuf,
|
||||||
|
#[arg(long)]
|
||||||
|
vpk_dir_files:Vec<PathBuf>
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct ConvertTexturesSubcommand {
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct VPKContentsSubcommand {
|
||||||
|
#[arg(long)]
|
||||||
|
input_file:PathBuf,
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct BSPContentsSubcommand {
|
||||||
|
#[arg(long)]
|
||||||
|
input_file:PathBuf,
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct DownloadMeshesSubcommand {
|
||||||
|
#[arg(long,required=true)]
|
||||||
|
roblox_files:Vec<PathBuf>
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
struct WriteAttributesSubcommand {
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() -> AResult<()> {
|
||||||
|
let cli = Cli::parse();
|
||||||
|
match cli.command {
|
||||||
|
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder),
|
||||||
|
Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder),
|
||||||
|
Commands::DownloadTextures(subcommand)=>download_textures(subcommand.roblox_files),
|
||||||
|
Commands::ExtractTextures(subcommand)=>extract_textures(vec![subcommand.bsp_file],subcommand.vpk_dir_files),
|
||||||
|
Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
|
||||||
|
Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
|
||||||
|
Commands::ConvertTextures(_subcommand)=>convert_textures(),
|
||||||
|
Commands::DownloadMeshes(subcommand)=>download_meshes(subcommand.roblox_files),
|
||||||
|
Commands::WriteAttributes(_subcommand)=>write_attributes(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn recursive_collect_regex(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, regex: &lazy_regex::Lazy<lazy_regex::Regex>){
|
||||||
for &referent in instance.children() {
|
for &referent in instance.children() {
|
||||||
if let Some(c) = dom.get_by_ref(referent) {
|
if let Some(c) = dom.get_by_ref(referent) {
|
||||||
if class_is_a(c.class.as_str(), "LuaSourceContainer") {
|
if regex.captures(c.name.as_str()).is_some(){
|
||||||
scripts.push(c.referent());//copy ref
|
objects.push(c.referent());//copy ref
|
||||||
}
|
}
|
||||||
recursive_collect_scripts(scripts,dom,c);
|
recursive_collect_regex(objects,dom,c,regex);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
fn get_button_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
|
||||||
// Using buffered I/O is recommended with rbx_binary
|
let mut buttons = std::vec::Vec::new();
|
||||||
let input = std::io::BufReader::new(std::fs::File::open("map.rbxm")?);
|
recursive_collect_regex(&mut buttons, dom, dom.root(),lazy_regex::regex!(r"Button(\d+)$"));
|
||||||
|
buttons
|
||||||
let dom = rbx_binary::from_reader(input)?;
|
|
||||||
|
|
||||||
//Construct allowed scripts
|
|
||||||
let mut allowed = std::collections::HashSet::<String>::new();
|
|
||||||
for entry in std::fs::read_dir("allowed")? {
|
|
||||||
allowed.insert(std::fs::read_to_string(entry?.path())?);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut scripts = std::vec::Vec::<rbx_dom_weak::types::Ref>::new();
|
enum ReaderType<'a, R:Read+Seek>{
|
||||||
recursive_collect_scripts(&mut scripts, &dom, dom.root());
|
GZip(flate2::read::GzDecoder<&'a mut R>),
|
||||||
|
Raw(&'a mut R),
|
||||||
|
}
|
||||||
|
|
||||||
//check scribb
|
fn maybe_gzip_decode<R:Read+Seek>(input:&mut R)->AResult<ReaderType<R>>{
|
||||||
let mut any_failed=false;
|
let mut first_2=[0u8;2];
|
||||||
for (i,&referent) in scripts.iter().enumerate() {
|
if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_2),std::io::Seek::rewind(input)){
|
||||||
if let Some(script) = dom.get_by_ref(referent) {
|
match &first_2{
|
||||||
if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
|
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(input))),
|
||||||
if allowed.contains(s) {
|
_=>Ok(ReaderType::Raw(input)),
|
||||||
println!("pass");
|
|
||||||
}else{
|
|
||||||
println!("fail");
|
|
||||||
any_failed=true;
|
|
||||||
std::fs::write(format!("blocked/{}.lua",i),s)?;
|
|
||||||
}
|
}
|
||||||
}else{
|
}else{
|
||||||
println!("failed to get source");
|
Err(anyhow::Error::msg("failed to peek"))
|
||||||
any_failed=true;
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
|
||||||
|
let mut first_8=[0u8;8];
|
||||||
|
if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_8),std::io::Seek::rewind(input)){
|
||||||
|
match &first_8[0..4]{
|
||||||
|
b"<rob"=>{
|
||||||
|
match &first_8[4..8]{
|
||||||
|
b"lox!"=>rbx_binary::from_reader(input).map_err(anyhow::Error::msg),
|
||||||
|
b"lox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(anyhow::Error::msg),
|
||||||
|
other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_=>Err(anyhow::Error::msg("unsupported file type")),
|
||||||
}
|
}
|
||||||
}else{
|
}else{
|
||||||
println!("failed to deref script");
|
Err(anyhow::Error::msg("peek failed"))
|
||||||
any_failed=true;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if any_failed {
|
|
||||||
println!("One or more scripts are not allowed.");
|
fn get_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
|
||||||
return Ok(())//everything is not ok but idk how to return an error LMAO
|
match maybe_gzip_decode(input){
|
||||||
|
Ok(ReaderType::GZip(mut readable)) => {
|
||||||
|
//gzip
|
||||||
|
let mut extracted:Vec<u8>=Vec::new();
|
||||||
|
readable.read_to_end(&mut extracted)?;
|
||||||
|
Ok(load_dom(&mut std::io::Cursor::new(extracted))?)
|
||||||
|
},
|
||||||
|
Ok(ReaderType::Raw(readable)) => Ok(load_dom(readable)?),
|
||||||
|
Err(e) => Err(e)?,
|
||||||
}
|
}
|
||||||
println!("All scripts passed!");
|
}
|
||||||
// std::process::Command::new("rbxcompiler")
|
|
||||||
// .arg("--compile=false")
|
struct RobloxAssetId(u64);
|
||||||
// .arg("--group=6980477")
|
struct RobloxAssetIdParseErr;
|
||||||
// .arg("--asset=5692139100")
|
impl std::str::FromStr for RobloxAssetId {
|
||||||
// .arg("--input=map.rbxm")
|
type Err=RobloxAssetIdParseErr;
|
||||||
// .spawn()?;
|
fn from_str(s: &str) -> Result<Self, Self::Err>{
|
||||||
|
let regman=lazy_regex::regex!(r"(\d+)$");
|
||||||
|
if let Some(captures) = regman.captures(s) {
|
||||||
|
if captures.len()==2{//captures[0] is all captures concatenated, and then each individual capture
|
||||||
|
if let Ok(id) = captures[0].parse::<u64>() {
|
||||||
|
return Ok(Self(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(RobloxAssetIdParseErr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/* The ones I'm interested in:
|
||||||
|
Beam.Texture
|
||||||
|
Decal.Texture
|
||||||
|
FileMesh.MeshId
|
||||||
|
FileMesh.TextureId
|
||||||
|
MaterialVariant.ColorMap
|
||||||
|
MaterialVariant.MetalnessMap
|
||||||
|
MaterialVariant.NormalMap
|
||||||
|
MaterialVariant.RoughnessMap
|
||||||
|
MeshPart.MeshId
|
||||||
|
MeshPart.TextureID
|
||||||
|
ParticleEmitter.Texture
|
||||||
|
Sky.MoonTextureId
|
||||||
|
Sky.SkyboxBk
|
||||||
|
Sky.SkyboxDn
|
||||||
|
Sky.SkyboxFt
|
||||||
|
Sky.SkyboxLf
|
||||||
|
Sky.SkyboxRt
|
||||||
|
Sky.SkyboxUp
|
||||||
|
Sky.SunTextureId
|
||||||
|
SurfaceAppearance.ColorMap
|
||||||
|
SurfaceAppearance.MetalnessMap
|
||||||
|
SurfaceAppearance.NormalMap
|
||||||
|
SurfaceAppearance.RoughnessMap
|
||||||
|
SurfaceAppearance.TexturePack
|
||||||
|
*/
|
||||||
|
fn accumulate_content_id(content_list:&mut HashSet<u64>,object:&Instance,property:&str){
|
||||||
|
if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
|
||||||
|
if let Ok(asset_id)=AsRef::<str>::as_ref(content).parse::<RobloxAssetId>(){
|
||||||
|
content_list.insert(asset_id.0);
|
||||||
|
}else{
|
||||||
|
println!("Content failed to parse into AssetID: {:?}",content);
|
||||||
|
}
|
||||||
|
}else{
|
||||||
|
println!("property={} does not exist for class={}",object.class.as_str(),property);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn download_textures(paths:Vec<PathBuf>)->AResult<()>{
|
||||||
|
println!("Reading files, this could take a hot minute...");
|
||||||
|
let mut texture_list=HashSet::new();
|
||||||
|
for path in paths{
|
||||||
|
let file=match std::fs::File::open(path.as_path()){
|
||||||
|
Ok(file)=>file,
|
||||||
|
Err(e)=>{
|
||||||
|
println!("file error {e}");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let mut input=std::io::BufReader::new(file);
|
||||||
|
match get_dom(&mut input){
|
||||||
|
Ok(dom)=>{
|
||||||
|
for object in dom.into_raw().1.into_values(){
|
||||||
|
match object.class.as_str(){
|
||||||
|
"Beam"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
||||||
|
"Decal"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
||||||
|
"Texture"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
||||||
|
"FileMesh"=>accumulate_content_id(&mut texture_list,&object,"TextureId"),
|
||||||
|
"MeshPart"=>accumulate_content_id(&mut texture_list,&object,"TextureID"),
|
||||||
|
"ParticleEmitter"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
||||||
|
"Sky"=>{
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"MoonTextureId");
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"SkyboxBk");
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"SkyboxDn");
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"SkyboxFt");
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"SkyboxLf");
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"SkyboxRt");
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"SkyboxUp");
|
||||||
|
accumulate_content_id(&mut texture_list,&object,"SunTextureId");
|
||||||
|
},
|
||||||
|
_=>(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let texture_list_string=texture_list.into_iter().map(|id|id.to_string()).collect::<Vec<String>>();
|
||||||
|
println!("Texture list:{:?}",texture_list_string.join(" "));
|
||||||
|
std::fs::create_dir_all("textures/unprocessed")?;
|
||||||
|
let output=std::process::Command::new("asset-tool")
|
||||||
|
.args(["download","--cookie-literal","","--output-folder","textures/unprocessed/"])
|
||||||
|
.args(texture_list_string)
|
||||||
|
.spawn()?
|
||||||
|
.wait_with_output()?;
|
||||||
|
println!("Asset tool exit_success:{}",output.status.success());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
fn download_meshes(paths:Vec<PathBuf>)->AResult<()>{
|
||||||
|
println!("Reading files, this could take a hot minute...");
|
||||||
|
let mut mesh_list=HashSet::new();
|
||||||
|
for path in paths{
|
||||||
|
let file=match std::fs::File::open(path.as_path()){
|
||||||
|
Ok(file)=>file,
|
||||||
|
Err(e)=>{
|
||||||
|
println!("file error {e}");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let mut input=std::io::BufReader::new(file);
|
||||||
|
match get_dom(&mut input){
|
||||||
|
Ok(dom)=>{
|
||||||
|
for object in dom.into_raw().1.into_values(){
|
||||||
|
match object.class.as_str(){
|
||||||
|
"MeshPart"=>accumulate_content_id(&mut mesh_list,&object,"MeshId"),
|
||||||
|
"SpecialMesh"=>accumulate_content_id(&mut mesh_list,&object,"MeshId"),
|
||||||
|
_=>(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mesh_list_string=mesh_list.into_iter().map(|id|id.to_string()).collect::<Vec<String>>();
|
||||||
|
println!("Mesh list:{:?}",mesh_list_string.join(" "));
|
||||||
|
std::fs::create_dir_all("meshes/")?;
|
||||||
|
let output=std::process::Command::new("asset-tool")
|
||||||
|
.args(["download","--cookie-literal","","--output-folder","meshes/"])
|
||||||
|
.args(mesh_list_string)
|
||||||
|
.spawn()?
|
||||||
|
.wait_with_output()?;
|
||||||
|
println!("Asset tool exit_success:{}",output.status.success());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_image<R:Read+Seek+std::io::BufRead>(input:&mut R)->AResult<image::DynamicImage>{
|
||||||
|
let mut fourcc=[0u8;4];
|
||||||
|
input.read_exact(&mut fourcc)?;
|
||||||
|
input.rewind()?;
|
||||||
|
match &fourcc{
|
||||||
|
b"\x89PNG"=>Ok(image::load(input,image::ImageFormat::Png)?),
|
||||||
|
b"\xFF\xD8\xFF\xE0"=>Ok(image::load(input,image::ImageFormat::Jpeg)?),//JFIF
|
||||||
|
b"<rob"=>Err(anyhow::Error::msg("Roblox xml garbage is not supported yet")),
|
||||||
|
other=>Err(anyhow::Error::msg(format!("Unknown texture format {:?}",other))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convert(file_thing:std::fs::DirEntry) -> AResult<()>{
|
||||||
|
let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
|
||||||
|
|
||||||
|
let mut extracted_input=None;
|
||||||
|
let image=match maybe_gzip_decode(&mut input){
|
||||||
|
Ok(ReaderType::GZip(mut readable)) => {
|
||||||
|
//gzip
|
||||||
|
let mut extracted:Vec<u8>=Vec::new();
|
||||||
|
//read the entire thing to the end so that I can clone the data and write a png to processed images
|
||||||
|
readable.read_to_end(&mut extracted)?;
|
||||||
|
extracted_input=Some(extracted.clone());
|
||||||
|
load_image(&mut std::io::Cursor::new(extracted))
|
||||||
|
},
|
||||||
|
Ok(ReaderType::Raw(readable)) => load_image(readable),
|
||||||
|
Err(e) => Err(e)?,
|
||||||
|
}?.to_rgba8();//this sets a=255, arcane is actually supposed to look like that
|
||||||
|
|
||||||
|
let format=if image.width()%4!=0||image.height()%4!=0{
|
||||||
|
image_dds::ImageFormat::R8G8B8A8Srgb
|
||||||
|
}else{
|
||||||
|
image_dds::ImageFormat::BC7Srgb
|
||||||
|
};
|
||||||
|
//this fails if the image dimensions are not a multiple of 4
|
||||||
|
let dds = image_dds::dds_from_image(
|
||||||
|
&image,
|
||||||
|
format,
|
||||||
|
image_dds::Quality::Slow,
|
||||||
|
image_dds::Mipmaps::GeneratedAutomatic,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
//write dds
|
||||||
|
let mut dest=PathBuf::from("textures");
|
||||||
|
dest.push(file_thing.file_name());
|
||||||
|
dest.set_extension("dds");
|
||||||
|
let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
|
||||||
|
dds.write(&mut writer)?;
|
||||||
|
|
||||||
|
if let Some(mut extracted)=extracted_input{
|
||||||
|
//write extracted to processed
|
||||||
|
let mut dest=PathBuf::from("textures/processed");
|
||||||
|
dest.push(file_thing.file_name());
|
||||||
|
std::fs::write(dest, &mut extracted)?;
|
||||||
|
//delete ugly gzip file
|
||||||
|
std::fs::remove_file(file_thing.path())?;
|
||||||
|
}else{
|
||||||
|
//move file to processed
|
||||||
|
let mut dest=PathBuf::from("textures/processed");
|
||||||
|
dest.push(file_thing.file_name());
|
||||||
|
std::fs::rename(file_thing.path(), dest)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
fn convert_textures() -> AResult<()>{
|
||||||
|
std::fs::create_dir_all("textures/unprocessed")?;
|
||||||
|
std::fs::create_dir_all("textures/processed")?;
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
let mut threads=Vec::new();
|
||||||
|
for entry in std::fs::read_dir("textures/unprocessed")? {
|
||||||
|
let file_thing=entry?;
|
||||||
|
threads.push(std::thread::spawn(move ||{
|
||||||
|
let file_name=format!("{:?}",file_thing);
|
||||||
|
let result=convert(file_thing);
|
||||||
|
if let Err(e)=result{
|
||||||
|
println!("error processing file:{:?} error message:{:?}",file_name,e);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
let mut i=0;
|
||||||
|
let n_threads=threads.len();
|
||||||
|
for thread in threads{
|
||||||
|
i+=1;
|
||||||
|
if let Err(e)=thread.join(){
|
||||||
|
println!("thread error: {:?}",e);
|
||||||
|
}else{
|
||||||
|
println!("{}/{}",i,n_threads);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
println!("{:?}", start.elapsed());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_attributes() -> AResult<()>{
|
||||||
|
for entry in std::fs::read_dir("maps/unprocessed")? {
|
||||||
|
let file_thing=entry?;
|
||||||
|
println!("processing map={:?}",file_thing.file_name());
|
||||||
|
let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
|
||||||
|
let mut dom = get_dom(&mut input)?;
|
||||||
|
|
||||||
|
let button_refs = get_button_refs(&dom);
|
||||||
|
|
||||||
|
for &button_ref in &button_refs {
|
||||||
|
if let Some(button)=dom.get_by_ref_mut(button_ref){
|
||||||
|
match button.properties.get_mut("Attributes"){
|
||||||
|
Some(rbx_dom_weak::types::Variant::Attributes(attributes))=>{
|
||||||
|
println!("Appending Ref={} to existing attributes for {}",button_ref,button.name);
|
||||||
|
attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
|
||||||
|
},
|
||||||
|
None=>{
|
||||||
|
println!("Creating new attributes with Ref={} for {}",button_ref,button.name);
|
||||||
|
let mut attributes=rbx_dom_weak::types::Attributes::new();
|
||||||
|
attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
|
||||||
|
button.properties.insert("Attributes".to_string(),rbx_dom_weak::types::Variant::Attributes(attributes));
|
||||||
|
}
|
||||||
|
_=>unreachable!("Fetching attributes did not return attributes."),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut dest={
|
||||||
|
let mut dest=PathBuf::from("maps/attributes");
|
||||||
|
dest.push(file_thing.file_name());
|
||||||
|
let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
|
||||||
|
//write workspace:GetChildren()[1]
|
||||||
|
let workspace_children=dom.root().children();
|
||||||
|
if workspace_children.len()!=1{
|
||||||
|
return Err(anyhow::Error::msg("there can only be one model"));
|
||||||
|
}
|
||||||
|
rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
|
||||||
|
//move original to processed folder
|
||||||
|
PathBuf::from("maps/unaltered")
|
||||||
|
};
|
||||||
|
dest.push(file_thing.file_name());
|
||||||
|
std::fs::rename(file_thing.path(), dest)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
enum VMTContent{
|
||||||
|
VMT(String),
|
||||||
|
VTF(String),
|
||||||
|
Patch(vmt_parser::material::PatchMaterial),
|
||||||
|
Unsupported,//don't want to deal with whatever vmt variant
|
||||||
|
Unresolved,//could not locate a texture because of vmt content
|
||||||
|
}
|
||||||
|
impl VMTContent{
|
||||||
|
fn vtf(opt:Option<String>)->Self{
|
||||||
|
match opt{
|
||||||
|
Some(s)=>Self::VTF(s),
|
||||||
|
None=>Self::Unresolved,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_some_texture(material:vmt_parser::material::Material)->AResult<VMTContent>{
|
||||||
|
//just grab some texture from somewhere for now
|
||||||
|
Ok(match material{
|
||||||
|
vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||||
|
vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
|
||||||
|
vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
|
||||||
|
vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
|
||||||
|
vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
|
||||||
|
vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
|
||||||
|
vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||||
|
vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
|
||||||
|
vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
|
||||||
|
vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||||
|
vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
|
||||||
|
vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||||
|
vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
|
||||||
|
vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||||
|
vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||||
|
vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
||||||
|
vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
|
||||||
|
vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
|
||||||
|
_=>return Err(anyhow::Error::msg("vmt failed to parse")),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_vmt<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,search_name:String)->AResult<vmt_parser::material::Material>{
|
||||||
|
if let Some(stuff)=find_stuff(search_name)?{
|
||||||
|
//decode vmt and then write
|
||||||
|
let stuff=String::from_utf8(stuff)?;
|
||||||
|
let material=vmt_parser::from_str(stuff.as_str())?;
|
||||||
|
println!("vmt material={:?}",material);
|
||||||
|
return Ok(material);
|
||||||
|
}
|
||||||
|
Err(anyhow::Error::msg("vmt not found"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn recursive_vmt_loader<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,material:vmt_parser::material::Material)->AResult<Option<Vec<u8>>>{
|
||||||
|
match get_some_texture(material)?{
|
||||||
|
VMTContent::VMT(s)=>recursive_vmt_loader(find_stuff,get_vmt(find_stuff,s)?),
|
||||||
|
VMTContent::VTF(s)=>{
|
||||||
|
let mut texture_file_name=PathBuf::from("materials");
|
||||||
|
texture_file_name.push(s);
|
||||||
|
texture_file_name.set_extension("vtf");
|
||||||
|
find_stuff(texture_file_name.into_os_string().into_string().unwrap())
|
||||||
|
},
|
||||||
|
VMTContent::Patch(mat)=>recursive_vmt_loader(find_stuff,
|
||||||
|
mat.resolve(|search_name|{
|
||||||
|
match find_stuff(search_name.to_string())?{
|
||||||
|
Some(bytes)=>Ok(String::from_utf8(bytes)?),
|
||||||
|
None=>Err(anyhow::Error::msg("could not find vmt")),
|
||||||
|
}
|
||||||
|
})?
|
||||||
|
),
|
||||||
|
VMTContent::Unsupported=>{println!("Unsupported vmt");Ok(None)},//print and move on
|
||||||
|
VMTContent::Unresolved=>{println!("Unresolved vmt");Ok(None)},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
///Extract every texture referenced by the given .bsp maps into a local
///`textures/` directory, converting each VTF image to DDS.
///
///`paths` are the .bsp files to scan; `vpk_paths` are VPK archives searched as
///a fallback when a file is not found in a map's own embedded pak lump.
///Failures on individual textures/models are printed and skipped.
fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
	std::fs::create_dir_all("textures")?;
	//NOTE(review): expect() aborts the whole run on a missing/corrupt vpk — assumes callers pre-validated the paths
	let vpk_list:Vec<vpk::VPK>=vpk_paths.into_iter().map(|vpk_path|vpk::VPK::read(&vpk_path).expect("vpk file does not exist")).collect();
	for path in paths{
		//set of texture paths referenced by this map (world brushes + props)
		let mut deduplicate=std::collections::HashSet::new();
		let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
		for texture in bsp.textures(){
			deduplicate.insert(PathBuf::from(texture.name()));
		}
		//dedupe prop models
		let mut model_dedupe=std::collections::HashSet::new();
		for prop in bsp.static_props(){
			model_dedupe.insert(prop.model());
		}

		//grab texture names from props
		for model_name in model_dedupe{
			//.mdl, .vvd, .dx90.vtx
			//a model is three sibling files sharing a stem; derive the other two from the .mdl path
			let mut path=PathBuf::from(model_name);
			let file_name=PathBuf::from(path.file_stem().unwrap());
			path.pop();
			path.push(file_name);
			let mut vvd_path=path.clone();
			let mut vtx_path=path.clone();
			vvd_path.set_extension("vvd");
			vtx_path.set_extension("dx90.vtx");
			//all three parts must be present in the pak lump, otherwise skip the prop
			match (bsp.pack.get(model_name),bsp.pack.get(vvd_path.as_os_str().to_str().unwrap()),bsp.pack.get(vtx_path.as_os_str().to_str().unwrap())){
				(Ok(Some(mdl_file)),Ok(Some(vvd_file)),Ok(Some(vtx_file)))=>{
					match (vmdl::mdl::Mdl::read(mdl_file.as_ref()),vmdl::vvd::Vvd::read(vvd_file.as_ref()),vmdl::vtx::Vtx::read(vtx_file.as_ref())){
						(Ok(mdl),Ok(vvd),Ok(vtx))=>{
							let model=vmdl::Model::from_parts(mdl,vtx,vvd);
							//each model texture may live under several search paths; queue them all
							for texture in model.textures(){
								for search_path in &texture.search_paths{
									let mut path=PathBuf::from(search_path.as_str());
									path.push(texture.name.as_str());
									deduplicate.insert(path);
								}
							}
						},
						_=>println!("model_name={} error",model_name),
					}
				},
				_=>println!("no model name={}",model_name),
			}
		}

		//reborrow so the scoped threads below can capture by reference
		let pack=&bsp.pack;
		let vpk_list=&vpk_list;
		//scoped threads: decode/encode each texture concurrently while borrowing pack/vpk_list
		std::thread::scope(move|s|{
			let mut thread_handles=Vec::new();
			for texture_name in deduplicate{
				let mut found_texture=false;
				//LMAO imagine having to write type names
				//decode a VTF byte buffer and write it out as a DDS under textures/
				let write_image=|mut stuff,write_file_name|{
					let image=vtf::from_bytes(&mut stuff)?.highres_image.decode(0)?.to_rgba8();

					//BC7 requires dimensions divisible by 4; fall back to uncompressed RGBA otherwise
					let format=if image.width()%4!=0||image.height()%4!=0{
						image_dds::ImageFormat::R8G8B8A8Srgb
					}else{
						image_dds::ImageFormat::BC7Srgb
					};
					//this fails if the image dimensions are not a multiple of 4
					let dds = image_dds::dds_from_image(
						&image,
						format,
						image_dds::Quality::Slow,
						image_dds::Mipmaps::GeneratedAutomatic,
					)?;

					//write dds
					let mut dest=PathBuf::from("textures");
					dest.push(write_file_name);
					dest.set_extension("dds");
					std::fs::create_dir_all(dest.parent().unwrap())?;
					let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
					dds.write(&mut writer)?;
					Ok::<(),anyhow::Error>(())
				};
				//look a file up first in the map's pak lump, then in each vpk archive in order
				let find_stuff=|search_file_name:String|{
					println!("search_file_name={}",search_file_name);
					match pack.get(search_file_name.as_str())?{
						Some(file)=>return Ok(Some(file)),
						_=>(),
					}
					//search pak list
					for vpk_index in vpk_list{
						if let Some(vpk_entry)=vpk_index.tree.get(search_file_name.as_str()){
							return Ok(Some(match vpk_entry.get()?{
								std::borrow::Cow::Borrowed(bytes)=>bytes.to_vec(),
								std::borrow::Cow::Owned(bytes)=>bytes,
							}));
						}
					}
					Ok::<Option<Vec<u8>>,anyhow::Error>(None)
				};
				//resolve a texture name to raw VTF bytes, chasing .vmt indirection if needed
				let loader=|texture_name:String|{
					let mut texture_file_name=PathBuf::from("materials");
					//lower case
					let texture_file_name_lowercase=texture_name.to_lowercase();
					texture_file_name.push(texture_file_name_lowercase.clone());
					//remove stem and search for both vtf and vmt files
					let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
					texture_file_name.pop();
					texture_file_name.push(stem);
					//somehow search for both files
					let mut texture_file_name_vmt=texture_file_name.clone();
					texture_file_name.set_extension("vtf");
					texture_file_name_vmt.set_extension("vmt");
					//prefer a direct .vtf hit; otherwise parse the .vmt and recurse
					if let Some(stuff)=find_stuff(texture_file_name.to_string_lossy().to_string())?{
						return Ok(Some(stuff))
					}
					recursive_vmt_loader(&find_stuff,get_vmt(&find_stuff,texture_file_name_vmt.to_string_lossy().to_string())?)
				};
				if let Some(stuff)=loader(texture_name.to_string_lossy().to_string())?{
					found_texture=true;
					let texture_name=texture_name.clone();
					//hand the CPU-heavy encode off to a worker thread
					thread_handles.push(s.spawn(move||write_image(stuff,texture_name)));
				}
				if !found_texture{
					println!("no data");
				}
			}
			//collect workers; report but do not propagate per-texture failures
			for thread in thread_handles{
				match thread.join(){
					Ok(Err(e))=>println!("write error: {:?}",e),
					Err(e)=>println!("thread error: {:?}",e),
					Ok(_)=>(),
				}
			}
			Ok::<(),anyhow::Error>(())
		})?
	}
	Ok(())
}
|
||||||
|
|
||||||
|
fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
|
||||||
|
let vpk_index=vpk::VPK::read(&vpk_path)?;
|
||||||
|
for (label,entry) in vpk_index.tree.into_iter(){
|
||||||
|
println!("vpk label={} entry={:?}",label,entry);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn bsp_contents(path:PathBuf)->AResult<()>{
|
||||||
|
let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
|
||||||
|
for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
|
||||||
|
println!("file_name={:?}",file_name);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
///Aggregated error type for the map conversion pipeline
///(`roblox_to_snf` / `source_to_snf` worker threads).
#[derive(Debug)]
#[allow(dead_code)]
enum ConvertError{
	///Filesystem failure while reading the input or writing the output.
	IO(std::io::Error),
	///Failure while serializing the SNF map output.
	SNFMap(strafesnet_snf::map::Error),
	///Failure while parsing a Roblox place/model file.
	RbxLoader(strafesnet_rbx_loader::ReadError),
	///Failure while parsing a Source engine .bsp file.
	BspLoader(strafesnet_bsp_loader::ReadError),
}
|
||||||
|
impl std::fmt::Display for ConvertError{
|
||||||
|
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||||
|
write!(f,"{self:?}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
///Marker impl so ConvertError can participate in `?`/boxed-error contexts (e.g. anyhow).
impl std::error::Error for ConvertError{}
|
||||||
|
|
||||||
|
///Join handle for one per-map conversion worker thread.
type MapThread=std::thread::JoinHandle<Result<(),ConvertError>>;
|
||||||
|
|
||||||
|
///Convert each Roblox place/model file in `pathlist` to a .snfm map written
///into `output_folder` (same file stem, "snfm" extension).
///
///Conversions run on up to 32 worker threads; per-file failures are printed
///and do not abort the batch. Total elapsed time is printed at the end.
fn roblox_to_snf(pathlist:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
	let n_paths=pathlist.len();
	let start = std::time::Instant::now();
	let mut threads:std::collections::VecDeque<MapThread>=std::collections::VecDeque::new();
	//completed-count for the progress printout below
	let mut i=0;
	//join one worker: report either its panic payload or x/y progress
	let mut join_thread=|thread:MapThread|{
		i+=1;
		if let Err(e)=thread.join(){
			println!("thread error: {:?}",e);
		}else{
			println!("{}/{}",i,n_paths);
		}
	};
	for path in pathlist{
		//cap in-flight conversions at 32 by retiring the oldest first
		if 32<=threads.len(){
			join_thread(threads.pop_front().unwrap());
		}
		let output_folder=output_folder.clone();
		threads.push_back(std::thread::spawn(move ||{
			let mut dom=strafesnet_rbx_loader::read(
				std::fs::File::open(path.as_path())
				.map_err(ConvertError::IO)?
			).map_err(ConvertError::RbxLoader)?;

			dom.run_scripts();

			let mut loader=strafesnet_deferred_loader::roblox_legacy();

			let (texture_loader,mesh_loader)=loader.get_inner_mut();

			//pass 1: convert the DOM, registering texture/mesh ids for deferred loading
			let map_step1=strafesnet_rbx_loader::convert(
				&dom,
				|name|texture_loader.acquire_render_config_id(name),
				|name|mesh_loader.acquire_mesh_id(name),
			);

			let meshpart_meshes=mesh_loader.load_meshes().map_err(ConvertError::IO)?;

			//pass 2: attach the loaded MeshPart mesh data
			let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(
				meshpart_meshes.into_iter().map(|(mesh_id,loader_model)|
					(mesh_id,strafesnet_rbx_loader::data::RobloxMeshBytes::new(loader_model.get()))
				)
			);

			let (textures,render_configs)=loader.into_render_configs().map_err(ConvertError::IO)?.consume();

			//pass 3: attach render configs and unwrap textures to raw DDS bytes
			let map=map_step2.add_render_configs_and_textures(
				render_configs.into_iter(),
				textures.into_iter().map(|(texture_id,texture)|
					(texture_id,match texture{
						strafesnet_deferred_loader::texture::Texture::ImageDDS(data)=>data,
					})
				)
			);

			//output path: <output_folder>/<input stem>.snfm
			let mut dest=output_folder.clone();
			dest.push(path.file_stem().unwrap());
			dest.set_extension("snfm");
			let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;

			strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
			Ok(())
		}));
	}

	//drain the remaining workers
	for thread in threads{
		join_thread(thread);
	}
	println!("{:?}", start.elapsed());
	Ok(())
}
|
||||||
|
|
||||||
|
fn source_to_snf(pathlist:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
|
||||||
|
let n_paths=pathlist.len();
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
let mut threads:std::collections::VecDeque<MapThread>=std::collections::VecDeque::new();
|
||||||
|
let mut i=0;
|
||||||
|
let mut join_thread=|thread:MapThread|{
|
||||||
|
i+=1;
|
||||||
|
if let Err(e)=thread.join(){
|
||||||
|
println!("thread error: {:?}",e);
|
||||||
|
}else{
|
||||||
|
println!("{}/{}",i,n_paths);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
for path in pathlist{
|
||||||
|
if 32<=threads.len(){
|
||||||
|
join_thread(threads.pop_front().unwrap());
|
||||||
|
}
|
||||||
|
let output_folder=output_folder.clone();
|
||||||
|
threads.push_back(std::thread::spawn(move ||{
|
||||||
|
let bsp=strafesnet_bsp_loader::read(
|
||||||
|
std::fs::File::open(path.as_path())
|
||||||
|
.map_err(ConvertError::IO)?
|
||||||
|
).map_err(ConvertError::BspLoader)?;
|
||||||
|
let mut loader=strafesnet_deferred_loader::source_legacy();
|
||||||
|
|
||||||
|
let (texture_loader,mesh_loader)=loader.get_inner_mut();
|
||||||
|
|
||||||
|
let map_step1=strafesnet_bsp_loader::convert(
|
||||||
|
&bsp,
|
||||||
|
|name|texture_loader.acquire_render_config_id(name),
|
||||||
|
|name|mesh_loader.acquire_mesh_id(name),
|
||||||
|
);
|
||||||
|
|
||||||
|
let prop_meshes=mesh_loader.load_meshes(&bsp.as_ref());
|
||||||
|
|
||||||
|
let map_step2=map_step1.add_prop_meshes(
|
||||||
|
//the type conflagulator 9000
|
||||||
|
prop_meshes.into_iter().map(|(mesh_id,loader_model)|
|
||||||
|
(mesh_id,strafesnet_bsp_loader::data::ModelData{
|
||||||
|
mdl:strafesnet_bsp_loader::data::MdlData::new(loader_model.mdl.get()),
|
||||||
|
vtx:strafesnet_bsp_loader::data::VtxData::new(loader_model.vtx.get()),
|
||||||
|
vvd:strafesnet_bsp_loader::data::VvdData::new(loader_model.vvd.get()),
|
||||||
|
})
|
||||||
|
),
|
||||||
|
|name|texture_loader.acquire_render_config_id(name),
|
||||||
|
);
|
||||||
|
|
||||||
|
let (textures,render_configs)=loader.into_render_configs().map_err(ConvertError::IO)?.consume();
|
||||||
|
|
||||||
|
let map=map_step2.add_render_configs_and_textures(
|
||||||
|
render_configs.into_iter(),
|
||||||
|
textures.into_iter().map(|(texture_id,texture)|
|
||||||
|
(texture_id,match texture{
|
||||||
|
strafesnet_deferred_loader::texture::Texture::ImageDDS(data)=>data,
|
||||||
|
})
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut dest=output_folder.clone();
|
||||||
|
dest.push(path.file_stem().unwrap());
|
||||||
|
dest.set_extension("snfm");
|
||||||
|
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
|
||||||
|
|
||||||
|
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
|
||||||
|
Ok(())
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
for thread in threads{
|
||||||
|
join_thread(thread);
|
||||||
|
}
|
||||||
|
println!("{:?}", start.elapsed());
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user