forked from StrafesNET/map-tool
Compare commits
17 Commits
Author | SHA1 | Date | |
---|---|---|---|
205db9a0db | |||
ca50bf35c2 | |||
6522c255cd | |||
a5079f21d7 | |||
349cd9c233 | |||
d455cf4dc9 | |||
3227a6486a | |||
1ce51dd4da | |||
1ad9723905 | |||
41b28fa7d2 | |||
a2ab23097b | |||
602061b44c | |||
1989369956 | |||
a18aea828c | |||
b7000ee9af | |||
2b77ea5712 | |||
cf98f8e7bb |
2
.cargo/config.toml
Normal file
2
.cargo/config.toml
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
[registries.strafesnet]
|
||||||
|
index = "sparse+https://git.itzana.me/api/packages/strafesnet/cargo/"
|
2509
Cargo.lock
generated
2509
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
27
Cargo.toml
27
Cargo.toml
@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "map-tool"
|
name = "map-tool"
|
||||||
version = "1.5.3"
|
version = "1.7.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
@ -9,22 +9,27 @@ edition = "2021"
|
|||||||
anyhow = "1.0.75"
|
anyhow = "1.0.75"
|
||||||
clap = { version = "4.4.2", features = ["derive"] }
|
clap = { version = "4.4.2", features = ["derive"] }
|
||||||
flate2 = "1.0.27"
|
flate2 = "1.0.27"
|
||||||
image = "0.24.7"
|
futures = "0.3.31"
|
||||||
image_dds = "0.1.1"
|
image = "0.25.2"
|
||||||
|
image_dds = "0.7.1"
|
||||||
lazy-regex = "3.1.0"
|
lazy-regex = "3.1.0"
|
||||||
|
rbx_asset = { version = "0.2.5", registry = "strafesnet" }
|
||||||
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
|
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
|
||||||
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
|
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
|
||||||
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
|
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
|
||||||
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
|
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
|
||||||
strafesnet_bsp_loader = { version = "0.1.3", registry = "strafesnet" }
|
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
|
||||||
strafesnet_deferred_loader = { version = "0.3.1", features = ["legacy"], registry = "strafesnet" }
|
strafesnet_bsp_loader = { version = "0.3.0", registry = "strafesnet" }
|
||||||
strafesnet_rbx_loader = { version = "0.3.6", registry = "strafesnet" }
|
strafesnet_deferred_loader = { version = "0.5.0", registry = "strafesnet" }
|
||||||
strafesnet_snf = { version = "0.1.0", registry = "strafesnet" }
|
strafesnet_rbx_loader = { version = "0.6.0", registry = "strafesnet" }
|
||||||
vbsp = "0.5.0"
|
strafesnet_snf = { version = "0.3.0", registry = "strafesnet" }
|
||||||
vmdl = "0.1.1"
|
thiserror = "2.0.11"
|
||||||
vmt-parser = "0.1.1"
|
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
|
||||||
|
vbsp = "0.6.0"
|
||||||
|
vmdl = "0.2.0"
|
||||||
|
vmt-parser = "0.2.0"
|
||||||
vpk = "0.2.0"
|
vpk = "0.2.0"
|
||||||
vtf = "0.2.1"
|
vtf = "0.3.0"
|
||||||
|
|
||||||
#[profile.release]
|
#[profile.release]
|
||||||
#lto = true
|
#lto = true
|
||||||
|
820
src/main.rs
820
src/main.rs
@ -1,7 +1,8 @@
|
|||||||
use std::{collections::HashSet,io::{Read,Seek},path::PathBuf};
|
mod roblox;
|
||||||
use clap::{Args,Parser,Subcommand};
|
mod source;
|
||||||
|
|
||||||
|
use clap::{Parser,Subcommand};
|
||||||
use anyhow::Result as AResult;
|
use anyhow::Result as AResult;
|
||||||
use rbx_dom_weak::Instance;
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
#[command(author, version, about, long_about = None)]
|
#[command(author, version, about, long_about = None)]
|
||||||
@ -12,809 +13,18 @@ struct Cli {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Subcommand)]
|
#[derive(Subcommand)]
|
||||||
enum Commands {
|
enum Commands{
|
||||||
RobloxToSNF(RobloxToSNFSubcommand),
|
#[command(flatten)]
|
||||||
SourceToSNF(SourceToSNFSubcommand),
|
Roblox(roblox::Commands),
|
||||||
DownloadTextures(DownloadTexturesSubcommand),
|
#[command(flatten)]
|
||||||
ExtractTextures(ExtractTexturesSubcommand),
|
Source(source::Commands),
|
||||||
ConvertTextures(ConvertTexturesSubcommand),
|
|
||||||
VPKContents(VPKContentsSubcommand),
|
|
||||||
BSPContents(BSPContentsSubcommand),
|
|
||||||
DownloadMeshes(DownloadMeshesSubcommand),
|
|
||||||
WriteAttributes(WriteAttributesSubcommand),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Args)]
|
#[tokio::main]
|
||||||
struct RobloxToSNFSubcommand {
|
async fn main()->AResult<()>{
|
||||||
#[arg(long)]
|
let cli=Cli::parse();
|
||||||
output_folder:PathBuf,
|
match cli.command{
|
||||||
#[arg(required=true)]
|
Commands::Roblox(commands)=>commands.run().await,
|
||||||
input_files:Vec<PathBuf>,
|
Commands::Source(commands)=>commands.run().await,
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct SourceToSNFSubcommand {
|
|
||||||
#[arg(long)]
|
|
||||||
output_folder:PathBuf,
|
|
||||||
#[arg(required=true)]
|
|
||||||
input_files:Vec<PathBuf>,
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct DownloadTexturesSubcommand {
|
|
||||||
#[arg(long,required=true)]
|
|
||||||
roblox_files:Vec<PathBuf>
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct ExtractTexturesSubcommand {
|
|
||||||
#[arg(long)]
|
|
||||||
bsp_file:PathBuf,
|
|
||||||
#[arg(long)]
|
|
||||||
vpk_dir_files:Vec<PathBuf>
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct ConvertTexturesSubcommand {
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct VPKContentsSubcommand {
|
|
||||||
#[arg(long)]
|
|
||||||
input_file:PathBuf,
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct BSPContentsSubcommand {
|
|
||||||
#[arg(long)]
|
|
||||||
input_file:PathBuf,
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct DownloadMeshesSubcommand {
|
|
||||||
#[arg(long,required=true)]
|
|
||||||
roblox_files:Vec<PathBuf>
|
|
||||||
}
|
|
||||||
#[derive(Args)]
|
|
||||||
struct WriteAttributesSubcommand {
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() -> AResult<()> {
|
|
||||||
let cli = Cli::parse();
|
|
||||||
match cli.command {
|
|
||||||
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder),
|
|
||||||
Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder),
|
|
||||||
Commands::DownloadTextures(subcommand)=>download_textures(subcommand.roblox_files),
|
|
||||||
Commands::ExtractTextures(subcommand)=>extract_textures(vec![subcommand.bsp_file],subcommand.vpk_dir_files),
|
|
||||||
Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
|
|
||||||
Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
|
|
||||||
Commands::ConvertTextures(_subcommand)=>convert_textures(),
|
|
||||||
Commands::DownloadMeshes(subcommand)=>download_meshes(subcommand.roblox_files),
|
|
||||||
Commands::WriteAttributes(_subcommand)=>write_attributes(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn recursive_collect_regex(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, regex: &lazy_regex::Lazy<lazy_regex::Regex>){
|
|
||||||
for &referent in instance.children() {
|
|
||||||
if let Some(c) = dom.get_by_ref(referent) {
|
|
||||||
if regex.captures(c.name.as_str()).is_some(){
|
|
||||||
objects.push(c.referent());//copy ref
|
|
||||||
}
|
|
||||||
recursive_collect_regex(objects,dom,c,regex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_button_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
|
|
||||||
let mut buttons = std::vec::Vec::new();
|
|
||||||
recursive_collect_regex(&mut buttons, dom, dom.root(),lazy_regex::regex!(r"Button(\d+)$"));
|
|
||||||
buttons
|
|
||||||
}
|
|
||||||
|
|
||||||
enum ReaderType<'a, R:Read+Seek>{
|
|
||||||
GZip(flate2::read::GzDecoder<&'a mut R>),
|
|
||||||
Raw(&'a mut R),
|
|
||||||
}
|
|
||||||
|
|
||||||
fn maybe_gzip_decode<R:Read+Seek>(input:&mut R)->AResult<ReaderType<R>>{
|
|
||||||
let mut first_2=[0u8;2];
|
|
||||||
if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_2),std::io::Seek::rewind(input)){
|
|
||||||
match &first_2{
|
|
||||||
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(input))),
|
|
||||||
_=>Ok(ReaderType::Raw(input)),
|
|
||||||
}
|
|
||||||
}else{
|
|
||||||
Err(anyhow::Error::msg("failed to peek"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
|
|
||||||
let mut first_8=[0u8;8];
|
|
||||||
if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_8),std::io::Seek::rewind(input)){
|
|
||||||
match &first_8[0..4]{
|
|
||||||
b"<rob"=>{
|
|
||||||
match &first_8[4..8]{
|
|
||||||
b"lox!"=>rbx_binary::from_reader(input).map_err(anyhow::Error::msg),
|
|
||||||
b"lox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(anyhow::Error::msg),
|
|
||||||
other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_=>Err(anyhow::Error::msg("unsupported file type")),
|
|
||||||
}
|
|
||||||
}else{
|
|
||||||
Err(anyhow::Error::msg("peek failed"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
|
|
||||||
match maybe_gzip_decode(input){
|
|
||||||
Ok(ReaderType::GZip(mut readable)) => {
|
|
||||||
//gzip
|
|
||||||
let mut extracted:Vec<u8>=Vec::new();
|
|
||||||
readable.read_to_end(&mut extracted)?;
|
|
||||||
Ok(load_dom(&mut std::io::Cursor::new(extracted))?)
|
|
||||||
},
|
|
||||||
Ok(ReaderType::Raw(readable)) => Ok(load_dom(readable)?),
|
|
||||||
Err(e) => Err(e)?,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct RobloxAssetId(u64);
|
|
||||||
struct RobloxAssetIdParseErr;
|
|
||||||
impl std::str::FromStr for RobloxAssetId {
|
|
||||||
type Err=RobloxAssetIdParseErr;
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err>{
|
|
||||||
let regman=lazy_regex::regex!(r"(\d+)$");
|
|
||||||
if let Some(captures) = regman.captures(s) {
|
|
||||||
if captures.len()==2{//captures[0] is all captures concatenated, and then each individual capture
|
|
||||||
if let Ok(id) = captures[0].parse::<u64>() {
|
|
||||||
return Ok(Self(id));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(RobloxAssetIdParseErr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/* The ones I'm interested in:
|
|
||||||
Beam.Texture
|
|
||||||
Decal.Texture
|
|
||||||
FileMesh.MeshId
|
|
||||||
FileMesh.TextureId
|
|
||||||
MaterialVariant.ColorMap
|
|
||||||
MaterialVariant.MetalnessMap
|
|
||||||
MaterialVariant.NormalMap
|
|
||||||
MaterialVariant.RoughnessMap
|
|
||||||
MeshPart.MeshId
|
|
||||||
MeshPart.TextureID
|
|
||||||
ParticleEmitter.Texture
|
|
||||||
Sky.MoonTextureId
|
|
||||||
Sky.SkyboxBk
|
|
||||||
Sky.SkyboxDn
|
|
||||||
Sky.SkyboxFt
|
|
||||||
Sky.SkyboxLf
|
|
||||||
Sky.SkyboxRt
|
|
||||||
Sky.SkyboxUp
|
|
||||||
Sky.SunTextureId
|
|
||||||
SurfaceAppearance.ColorMap
|
|
||||||
SurfaceAppearance.MetalnessMap
|
|
||||||
SurfaceAppearance.NormalMap
|
|
||||||
SurfaceAppearance.RoughnessMap
|
|
||||||
SurfaceAppearance.TexturePack
|
|
||||||
*/
|
|
||||||
fn accumulate_content_id(content_list:&mut HashSet<u64>,object:&Instance,property:&str){
|
|
||||||
if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
|
|
||||||
if let Ok(asset_id)=AsRef::<str>::as_ref(content).parse::<RobloxAssetId>(){
|
|
||||||
content_list.insert(asset_id.0);
|
|
||||||
}else{
|
|
||||||
println!("Content failed to parse into AssetID: {:?}",content);
|
|
||||||
}
|
|
||||||
}else{
|
|
||||||
println!("property={} does not exist for class={}",object.class.as_str(),property);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fn download_textures(paths:Vec<PathBuf>)->AResult<()>{
|
|
||||||
println!("Reading files, this could take a hot minute...");
|
|
||||||
let mut texture_list=HashSet::new();
|
|
||||||
for path in paths{
|
|
||||||
let file=match std::fs::File::open(path.as_path()){
|
|
||||||
Ok(file)=>file,
|
|
||||||
Err(e)=>{
|
|
||||||
println!("file error {e}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let mut input=std::io::BufReader::new(file);
|
|
||||||
match get_dom(&mut input){
|
|
||||||
Ok(dom)=>{
|
|
||||||
for object in dom.into_raw().1.into_values(){
|
|
||||||
match object.class.as_str(){
|
|
||||||
"Beam"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
|
||||||
"Decal"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
|
||||||
"Texture"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
|
||||||
"FileMesh"=>accumulate_content_id(&mut texture_list,&object,"TextureId"),
|
|
||||||
"MeshPart"=>accumulate_content_id(&mut texture_list,&object,"TextureID"),
|
|
||||||
"ParticleEmitter"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
|
|
||||||
"Sky"=>{
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"MoonTextureId");
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"SkyboxBk");
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"SkyboxDn");
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"SkyboxFt");
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"SkyboxLf");
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"SkyboxRt");
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"SkyboxUp");
|
|
||||||
accumulate_content_id(&mut texture_list,&object,"SunTextureId");
|
|
||||||
},
|
|
||||||
_=>(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let texture_list_string=texture_list.into_iter().map(|id|id.to_string()).collect::<Vec<String>>();
|
|
||||||
println!("Texture list:{:?}",texture_list_string.join(" "));
|
|
||||||
std::fs::create_dir_all("textures/unprocessed")?;
|
|
||||||
let output=std::process::Command::new("asset-tool")
|
|
||||||
.args(["download","--cookie-literal","","--output-folder","textures/unprocessed/"])
|
|
||||||
.args(texture_list_string)
|
|
||||||
.spawn()?
|
|
||||||
.wait_with_output()?;
|
|
||||||
println!("Asset tool exit_success:{}",output.status.success());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
fn download_meshes(paths:Vec<PathBuf>)->AResult<()>{
|
|
||||||
println!("Reading files, this could take a hot minute...");
|
|
||||||
let mut mesh_list=HashSet::new();
|
|
||||||
for path in paths{
|
|
||||||
let file=match std::fs::File::open(path.as_path()){
|
|
||||||
Ok(file)=>file,
|
|
||||||
Err(e)=>{
|
|
||||||
println!("file error {e}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let mut input=std::io::BufReader::new(file);
|
|
||||||
match get_dom(&mut input){
|
|
||||||
Ok(dom)=>{
|
|
||||||
for object in dom.into_raw().1.into_values(){
|
|
||||||
match object.class.as_str(){
|
|
||||||
"MeshPart"=>accumulate_content_id(&mut mesh_list,&object,"MeshId"),
|
|
||||||
"SpecialMesh"=>accumulate_content_id(&mut mesh_list,&object,"MeshId"),
|
|
||||||
_=>(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let mesh_list_string=mesh_list.into_iter().map(|id|id.to_string()).collect::<Vec<String>>();
|
|
||||||
println!("Mesh list:{:?}",mesh_list_string.join(" "));
|
|
||||||
std::fs::create_dir_all("meshes/")?;
|
|
||||||
let output=std::process::Command::new("asset-tool")
|
|
||||||
.args(["download","--cookie-literal","","--output-folder","meshes/"])
|
|
||||||
.args(mesh_list_string)
|
|
||||||
.spawn()?
|
|
||||||
.wait_with_output()?;
|
|
||||||
println!("Asset tool exit_success:{}",output.status.success());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load_image<R:Read+Seek+std::io::BufRead>(input:&mut R)->AResult<image::DynamicImage>{
|
|
||||||
let mut fourcc=[0u8;4];
|
|
||||||
input.read_exact(&mut fourcc)?;
|
|
||||||
input.rewind()?;
|
|
||||||
match &fourcc{
|
|
||||||
b"\x89PNG"=>Ok(image::load(input,image::ImageFormat::Png)?),
|
|
||||||
b"\xFF\xD8\xFF\xE0"=>Ok(image::load(input,image::ImageFormat::Jpeg)?),//JFIF
|
|
||||||
b"<rob"=>Err(anyhow::Error::msg("Roblox xml garbage is not supported yet")),
|
|
||||||
other=>Err(anyhow::Error::msg(format!("Unknown texture format {:?}",other))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn convert(file_thing:std::fs::DirEntry) -> AResult<()>{
|
|
||||||
let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
|
|
||||||
|
|
||||||
let mut extracted_input=None;
|
|
||||||
let image=match maybe_gzip_decode(&mut input){
|
|
||||||
Ok(ReaderType::GZip(mut readable)) => {
|
|
||||||
//gzip
|
|
||||||
let mut extracted:Vec<u8>=Vec::new();
|
|
||||||
//read the entire thing to the end so that I can clone the data and write a png to processed images
|
|
||||||
readable.read_to_end(&mut extracted)?;
|
|
||||||
extracted_input=Some(extracted.clone());
|
|
||||||
load_image(&mut std::io::Cursor::new(extracted))
|
|
||||||
},
|
|
||||||
Ok(ReaderType::Raw(readable)) => load_image(readable),
|
|
||||||
Err(e) => Err(e)?,
|
|
||||||
}?.to_rgba8();//this sets a=255, arcane is actually supposed to look like that
|
|
||||||
|
|
||||||
let format=if image.width()%4!=0||image.height()%4!=0{
|
|
||||||
image_dds::ImageFormat::R8G8B8A8Srgb
|
|
||||||
}else{
|
|
||||||
image_dds::ImageFormat::BC7Srgb
|
|
||||||
};
|
|
||||||
//this fails if the image dimensions are not a multiple of 4
|
|
||||||
let dds = image_dds::dds_from_image(
|
|
||||||
&image,
|
|
||||||
format,
|
|
||||||
image_dds::Quality::Slow,
|
|
||||||
image_dds::Mipmaps::GeneratedAutomatic,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
//write dds
|
|
||||||
let mut dest=PathBuf::from("textures");
|
|
||||||
dest.push(file_thing.file_name());
|
|
||||||
dest.set_extension("dds");
|
|
||||||
let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
|
|
||||||
dds.write(&mut writer)?;
|
|
||||||
|
|
||||||
if let Some(mut extracted)=extracted_input{
|
|
||||||
//write extracted to processed
|
|
||||||
let mut dest=PathBuf::from("textures/processed");
|
|
||||||
dest.push(file_thing.file_name());
|
|
||||||
std::fs::write(dest, &mut extracted)?;
|
|
||||||
//delete ugly gzip file
|
|
||||||
std::fs::remove_file(file_thing.path())?;
|
|
||||||
}else{
|
|
||||||
//move file to processed
|
|
||||||
let mut dest=PathBuf::from("textures/processed");
|
|
||||||
dest.push(file_thing.file_name());
|
|
||||||
std::fs::rename(file_thing.path(), dest)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
fn convert_textures() -> AResult<()>{
|
|
||||||
std::fs::create_dir_all("textures/unprocessed")?;
|
|
||||||
std::fs::create_dir_all("textures/processed")?;
|
|
||||||
let start = std::time::Instant::now();
|
|
||||||
let mut threads=Vec::new();
|
|
||||||
for entry in std::fs::read_dir("textures/unprocessed")? {
|
|
||||||
let file_thing=entry?;
|
|
||||||
threads.push(std::thread::spawn(move ||{
|
|
||||||
let file_name=format!("{:?}",file_thing);
|
|
||||||
let result=convert(file_thing);
|
|
||||||
if let Err(e)=result{
|
|
||||||
println!("error processing file:{:?} error message:{:?}",file_name,e);
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
let mut i=0;
|
|
||||||
let n_threads=threads.len();
|
|
||||||
for thread in threads{
|
|
||||||
i+=1;
|
|
||||||
if let Err(e)=thread.join(){
|
|
||||||
println!("thread error: {:?}",e);
|
|
||||||
}else{
|
|
||||||
println!("{}/{}",i,n_threads);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
println!("{:?}", start.elapsed());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_attributes() -> AResult<()>{
|
|
||||||
for entry in std::fs::read_dir("maps/unprocessed")? {
|
|
||||||
let file_thing=entry?;
|
|
||||||
println!("processing map={:?}",file_thing.file_name());
|
|
||||||
let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
|
|
||||||
let mut dom = get_dom(&mut input)?;
|
|
||||||
|
|
||||||
let button_refs = get_button_refs(&dom);
|
|
||||||
|
|
||||||
for &button_ref in &button_refs {
|
|
||||||
if let Some(button)=dom.get_by_ref_mut(button_ref){
|
|
||||||
match button.properties.get_mut("Attributes"){
|
|
||||||
Some(rbx_dom_weak::types::Variant::Attributes(attributes))=>{
|
|
||||||
println!("Appending Ref={} to existing attributes for {}",button_ref,button.name);
|
|
||||||
attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
|
|
||||||
},
|
|
||||||
None=>{
|
|
||||||
println!("Creating new attributes with Ref={} for {}",button_ref,button.name);
|
|
||||||
let mut attributes=rbx_dom_weak::types::Attributes::new();
|
|
||||||
attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
|
|
||||||
button.properties.insert("Attributes".to_string(),rbx_dom_weak::types::Variant::Attributes(attributes));
|
|
||||||
}
|
|
||||||
_=>unreachable!("Fetching attributes did not return attributes."),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let mut dest={
|
|
||||||
let mut dest=PathBuf::from("maps/attributes");
|
|
||||||
dest.push(file_thing.file_name());
|
|
||||||
let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
|
|
||||||
//write workspace:GetChildren()[1]
|
|
||||||
let workspace_children=dom.root().children();
|
|
||||||
if workspace_children.len()!=1{
|
|
||||||
return Err(anyhow::Error::msg("there can only be one model"));
|
|
||||||
}
|
|
||||||
rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
|
|
||||||
//move original to processed folder
|
|
||||||
PathBuf::from("maps/unaltered")
|
|
||||||
};
|
|
||||||
dest.push(file_thing.file_name());
|
|
||||||
std::fs::rename(file_thing.path(), dest)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
enum VMTContent{
|
|
||||||
VMT(String),
|
|
||||||
VTF(String),
|
|
||||||
Patch(vmt_parser::material::PatchMaterial),
|
|
||||||
Unsupported,//don't want to deal with whatever vmt variant
|
|
||||||
Unresolved,//could not locate a texture because of vmt content
|
|
||||||
}
|
|
||||||
impl VMTContent{
|
|
||||||
fn vtf(opt:Option<String>)->Self{
|
|
||||||
match opt{
|
|
||||||
Some(s)=>Self::VTF(s),
|
|
||||||
None=>Self::Unresolved,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_some_texture(material:vmt_parser::material::Material)->AResult<VMTContent>{
|
|
||||||
//just grab some texture from somewhere for now
|
|
||||||
Ok(match material{
|
|
||||||
vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
|
||||||
vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
|
|
||||||
vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
|
|
||||||
vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
|
|
||||||
vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
|
|
||||||
vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
|
|
||||||
vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
|
||||||
vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
|
|
||||||
vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
|
|
||||||
vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
|
||||||
vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
|
|
||||||
vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
|
||||||
vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
|
|
||||||
vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
|
||||||
vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
|
||||||
vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
|
|
||||||
vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
|
|
||||||
vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
|
|
||||||
_=>return Err(anyhow::Error::msg("vmt failed to parse")),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_vmt<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,search_name:String)->AResult<vmt_parser::material::Material>{
|
|
||||||
if let Some(stuff)=find_stuff(search_name)?{
|
|
||||||
//decode vmt and then write
|
|
||||||
let stuff=String::from_utf8(stuff)?;
|
|
||||||
let material=vmt_parser::from_str(stuff.as_str())?;
|
|
||||||
println!("vmt material={:?}",material);
|
|
||||||
return Ok(material);
|
|
||||||
}
|
|
||||||
Err(anyhow::Error::msg("vmt not found"))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn recursive_vmt_loader<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,material:vmt_parser::material::Material)->AResult<Option<Vec<u8>>>{
|
|
||||||
match get_some_texture(material)?{
|
|
||||||
VMTContent::VMT(s)=>recursive_vmt_loader(find_stuff,get_vmt(find_stuff,s)?),
|
|
||||||
VMTContent::VTF(s)=>{
|
|
||||||
let mut texture_file_name=PathBuf::from("materials");
|
|
||||||
texture_file_name.push(s);
|
|
||||||
texture_file_name.set_extension("vtf");
|
|
||||||
find_stuff(texture_file_name.into_os_string().into_string().unwrap())
|
|
||||||
},
|
|
||||||
VMTContent::Patch(mat)=>recursive_vmt_loader(find_stuff,
|
|
||||||
mat.resolve(|search_name|{
|
|
||||||
match find_stuff(search_name.to_string())?{
|
|
||||||
Some(bytes)=>Ok(String::from_utf8(bytes)?),
|
|
||||||
None=>Err(anyhow::Error::msg("could not find vmt")),
|
|
||||||
}
|
|
||||||
})?
|
|
||||||
),
|
|
||||||
VMTContent::Unsupported=>{println!("Unsupported vmt");Ok(None)},//print and move on
|
|
||||||
VMTContent::Unresolved=>{println!("Unresolved vmt");Ok(None)},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
|
|
||||||
std::fs::create_dir_all("textures")?;
|
|
||||||
let vpk_list:Vec<vpk::VPK>=vpk_paths.into_iter().map(|vpk_path|vpk::VPK::read(&vpk_path).expect("vpk file does not exist")).collect();
|
|
||||||
for path in paths{
|
|
||||||
let mut deduplicate=std::collections::HashSet::new();
|
|
||||||
let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
|
|
||||||
for texture in bsp.textures(){
|
|
||||||
deduplicate.insert(PathBuf::from(texture.name()));
|
|
||||||
}
|
|
||||||
//dedupe prop models
|
|
||||||
let mut model_dedupe=std::collections::HashSet::new();
|
|
||||||
for prop in bsp.static_props(){
|
|
||||||
model_dedupe.insert(prop.model());
|
|
||||||
}
|
|
||||||
|
|
||||||
//grab texture names from props
|
|
||||||
for model_name in model_dedupe{
|
|
||||||
//.mdl, .vvd, .dx90.vtx
|
|
||||||
let mut path=PathBuf::from(model_name);
|
|
||||||
let file_name=PathBuf::from(path.file_stem().unwrap());
|
|
||||||
path.pop();
|
|
||||||
path.push(file_name);
|
|
||||||
let mut vvd_path=path.clone();
|
|
||||||
let mut vtx_path=path.clone();
|
|
||||||
vvd_path.set_extension("vvd");
|
|
||||||
vtx_path.set_extension("dx90.vtx");
|
|
||||||
match (bsp.pack.get(model_name),bsp.pack.get(vvd_path.as_os_str().to_str().unwrap()),bsp.pack.get(vtx_path.as_os_str().to_str().unwrap())){
|
|
||||||
(Ok(Some(mdl_file)),Ok(Some(vvd_file)),Ok(Some(vtx_file)))=>{
|
|
||||||
match (vmdl::mdl::Mdl::read(mdl_file.as_ref()),vmdl::vvd::Vvd::read(vvd_file.as_ref()),vmdl::vtx::Vtx::read(vtx_file.as_ref())){
|
|
||||||
(Ok(mdl),Ok(vvd),Ok(vtx))=>{
|
|
||||||
let model=vmdl::Model::from_parts(mdl,vtx,vvd);
|
|
||||||
for texture in model.textures(){
|
|
||||||
for search_path in &texture.search_paths{
|
|
||||||
let mut path=PathBuf::from(search_path.as_str());
|
|
||||||
path.push(texture.name.as_str());
|
|
||||||
deduplicate.insert(path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_=>println!("model_name={} error",model_name),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_=>println!("no model name={}",model_name),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let pack=&bsp.pack;
|
|
||||||
let vpk_list=&vpk_list;
|
|
||||||
std::thread::scope(move|s|{
|
|
||||||
let mut thread_handles=Vec::new();
|
|
||||||
for texture_name in deduplicate{
|
|
||||||
let mut found_texture=false;
|
|
||||||
//LMAO imagine having to write type names
|
|
||||||
let write_image=|mut stuff,write_file_name|{
|
|
||||||
let image=vtf::from_bytes(&mut stuff)?.highres_image.decode(0)?.to_rgba8();
|
|
||||||
|
|
||||||
let format=if image.width()%4!=0||image.height()%4!=0{
|
|
||||||
image_dds::ImageFormat::R8G8B8A8Srgb
|
|
||||||
}else{
|
|
||||||
image_dds::ImageFormat::BC7Srgb
|
|
||||||
};
|
|
||||||
//this fails if the image dimensions are not a multiple of 4
|
|
||||||
let dds = image_dds::dds_from_image(
|
|
||||||
&image,
|
|
||||||
format,
|
|
||||||
image_dds::Quality::Slow,
|
|
||||||
image_dds::Mipmaps::GeneratedAutomatic,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
//write dds
|
|
||||||
let mut dest=PathBuf::from("textures");
|
|
||||||
dest.push(write_file_name);
|
|
||||||
dest.set_extension("dds");
|
|
||||||
std::fs::create_dir_all(dest.parent().unwrap())?;
|
|
||||||
let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
|
|
||||||
dds.write(&mut writer)?;
|
|
||||||
Ok::<(),anyhow::Error>(())
|
|
||||||
};
|
|
||||||
let find_stuff=|search_file_name:String|{
|
|
||||||
println!("search_file_name={}",search_file_name);
|
|
||||||
match pack.get(search_file_name.as_str())?{
|
|
||||||
Some(file)=>return Ok(Some(file)),
|
|
||||||
_=>(),
|
|
||||||
}
|
|
||||||
//search pak list
|
|
||||||
for vpk_index in vpk_list{
|
|
||||||
if let Some(vpk_entry)=vpk_index.tree.get(search_file_name.as_str()){
|
|
||||||
return Ok(Some(match vpk_entry.get()?{
|
|
||||||
std::borrow::Cow::Borrowed(bytes)=>bytes.to_vec(),
|
|
||||||
std::borrow::Cow::Owned(bytes)=>bytes,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok::<Option<Vec<u8>>,anyhow::Error>(None)
|
|
||||||
};
|
|
||||||
let loader=|texture_name:String|{
|
|
||||||
let mut texture_file_name=PathBuf::from("materials");
|
|
||||||
//lower case
|
|
||||||
let texture_file_name_lowercase=texture_name.to_lowercase();
|
|
||||||
texture_file_name.push(texture_file_name_lowercase.clone());
|
|
||||||
//remove stem and search for both vtf and vmt files
|
|
||||||
let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
|
|
||||||
texture_file_name.pop();
|
|
||||||
texture_file_name.push(stem);
|
|
||||||
//somehow search for both files
|
|
||||||
let mut texture_file_name_vmt=texture_file_name.clone();
|
|
||||||
texture_file_name.set_extension("vtf");
|
|
||||||
texture_file_name_vmt.set_extension("vmt");
|
|
||||||
if let Some(stuff)=find_stuff(texture_file_name.to_string_lossy().to_string())?{
|
|
||||||
return Ok(Some(stuff))
|
|
||||||
}
|
|
||||||
recursive_vmt_loader(&find_stuff,get_vmt(&find_stuff,texture_file_name_vmt.to_string_lossy().to_string())?)
|
|
||||||
};
|
|
||||||
if let Some(stuff)=loader(texture_name.to_string_lossy().to_string())?{
|
|
||||||
found_texture=true;
|
|
||||||
let texture_name=texture_name.clone();
|
|
||||||
thread_handles.push(s.spawn(move||write_image(stuff,texture_name)));
|
|
||||||
}
|
|
||||||
if !found_texture{
|
|
||||||
println!("no data");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for thread in thread_handles{
|
|
||||||
match thread.join(){
|
|
||||||
Ok(Err(e))=>println!("write error: {:?}",e),
|
|
||||||
Err(e)=>println!("thread error: {:?}",e),
|
|
||||||
Ok(_)=>(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok::<(),anyhow::Error>(())
|
|
||||||
})?
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
|
|
||||||
let vpk_index=vpk::VPK::read(&vpk_path)?;
|
|
||||||
for (label,entry) in vpk_index.tree.into_iter(){
|
|
||||||
println!("vpk label={} entry={:?}",label,entry);
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bsp_contents(path:PathBuf)->AResult<()>{
|
|
||||||
let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
|
|
||||||
for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
|
|
||||||
println!("file_name={:?}",file_name);
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
#[allow(dead_code)]
|
|
||||||
enum ConvertError{
|
|
||||||
IO(std::io::Error),
|
|
||||||
SNFMap(strafesnet_snf::map::Error),
|
|
||||||
RbxLoader(strafesnet_rbx_loader::ReadError),
|
|
||||||
BspLoader(strafesnet_bsp_loader::ReadError),
|
|
||||||
}
|
|
||||||
impl std::fmt::Display for ConvertError{
|
|
||||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
|
||||||
write!(f,"{self:?}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl std::error::Error for ConvertError{}
|
|
||||||
|
|
||||||
type MapThread=std::thread::JoinHandle<Result<(),ConvertError>>;
|
|
||||||
|
|
||||||
fn roblox_to_snf(pathlist:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
|
|
||||||
let n_paths=pathlist.len();
|
|
||||||
let start = std::time::Instant::now();
|
|
||||||
let mut threads:std::collections::VecDeque<MapThread>=std::collections::VecDeque::new();
|
|
||||||
let mut i=0;
|
|
||||||
let mut join_thread=|thread:MapThread|{
|
|
||||||
i+=1;
|
|
||||||
if let Err(e)=thread.join(){
|
|
||||||
println!("thread error: {:?}",e);
|
|
||||||
}else{
|
|
||||||
println!("{}/{}",i,n_paths);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
for path in pathlist{
|
|
||||||
if 32<=threads.len(){
|
|
||||||
join_thread(threads.pop_front().unwrap());
|
|
||||||
}
|
|
||||||
let output_folder=output_folder.clone();
|
|
||||||
threads.push_back(std::thread::spawn(move ||{
|
|
||||||
let mut dom=strafesnet_rbx_loader::read(
|
|
||||||
std::fs::File::open(path.as_path())
|
|
||||||
.map_err(ConvertError::IO)?
|
|
||||||
).map_err(ConvertError::RbxLoader)?;
|
|
||||||
|
|
||||||
dom.run_scripts();
|
|
||||||
|
|
||||||
let mut loader=strafesnet_deferred_loader::roblox_legacy();
|
|
||||||
|
|
||||||
let (texture_loader,mesh_loader)=loader.get_inner_mut();
|
|
||||||
|
|
||||||
let map_step1=strafesnet_rbx_loader::convert(
|
|
||||||
&dom,
|
|
||||||
|name|texture_loader.acquire_render_config_id(name),
|
|
||||||
|name|mesh_loader.acquire_mesh_id(name),
|
|
||||||
);
|
|
||||||
|
|
||||||
let meshpart_meshes=mesh_loader.load_meshes().map_err(ConvertError::IO)?;
|
|
||||||
|
|
||||||
let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(
|
|
||||||
meshpart_meshes.into_iter().map(|(mesh_id,loader_model)|
|
|
||||||
(mesh_id,strafesnet_rbx_loader::data::RobloxMeshBytes::new(loader_model.get()))
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
let (textures,render_configs)=loader.into_render_configs().map_err(ConvertError::IO)?.consume();
|
|
||||||
|
|
||||||
let map=map_step2.add_render_configs_and_textures(
|
|
||||||
render_configs.into_iter(),
|
|
||||||
textures.into_iter().map(|(texture_id,texture)|
|
|
||||||
(texture_id,match texture{
|
|
||||||
strafesnet_deferred_loader::texture::Texture::ImageDDS(data)=>data,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut dest=output_folder.clone();
|
|
||||||
dest.push(path.file_stem().unwrap());
|
|
||||||
dest.set_extension("snfm");
|
|
||||||
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
|
|
||||||
|
|
||||||
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
|
|
||||||
Ok(())
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
for thread in threads{
|
|
||||||
join_thread(thread);
|
|
||||||
}
|
|
||||||
println!("{:?}", start.elapsed());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn source_to_snf(pathlist:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
|
|
||||||
let n_paths=pathlist.len();
|
|
||||||
let start = std::time::Instant::now();
|
|
||||||
let mut threads:std::collections::VecDeque<MapThread>=std::collections::VecDeque::new();
|
|
||||||
let mut i=0;
|
|
||||||
let mut join_thread=|thread:MapThread|{
|
|
||||||
i+=1;
|
|
||||||
if let Err(e)=thread.join(){
|
|
||||||
println!("thread error: {:?}",e);
|
|
||||||
}else{
|
|
||||||
println!("{}/{}",i,n_paths);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
for path in pathlist{
|
|
||||||
if 32<=threads.len(){
|
|
||||||
join_thread(threads.pop_front().unwrap());
|
|
||||||
}
|
|
||||||
let output_folder=output_folder.clone();
|
|
||||||
threads.push_back(std::thread::spawn(move ||{
|
|
||||||
let bsp=strafesnet_bsp_loader::read(
|
|
||||||
std::fs::File::open(path.as_path())
|
|
||||||
.map_err(ConvertError::IO)?
|
|
||||||
).map_err(ConvertError::BspLoader)?;
|
|
||||||
let mut loader=strafesnet_deferred_loader::source_legacy();
|
|
||||||
|
|
||||||
let (texture_loader,mesh_loader)=loader.get_inner_mut();
|
|
||||||
|
|
||||||
let map_step1=strafesnet_bsp_loader::convert(
|
|
||||||
&bsp,
|
|
||||||
|name|texture_loader.acquire_render_config_id(name),
|
|
||||||
|name|mesh_loader.acquire_mesh_id(name),
|
|
||||||
);
|
|
||||||
|
|
||||||
let prop_meshes=mesh_loader.load_meshes(&bsp.as_ref());
|
|
||||||
|
|
||||||
let map_step2=map_step1.add_prop_meshes(
|
|
||||||
//the type conflagulator 9000
|
|
||||||
prop_meshes.into_iter().map(|(mesh_id,loader_model)|
|
|
||||||
(mesh_id,strafesnet_bsp_loader::data::ModelData{
|
|
||||||
mdl:strafesnet_bsp_loader::data::MdlData::new(loader_model.mdl.get()),
|
|
||||||
vtx:strafesnet_bsp_loader::data::VtxData::new(loader_model.vtx.get()),
|
|
||||||
vvd:strafesnet_bsp_loader::data::VvdData::new(loader_model.vvd.get()),
|
|
||||||
})
|
|
||||||
),
|
|
||||||
|name|texture_loader.acquire_render_config_id(name),
|
|
||||||
);
|
|
||||||
|
|
||||||
let (textures,render_configs)=loader.into_render_configs().map_err(ConvertError::IO)?.consume();
|
|
||||||
|
|
||||||
let map=map_step2.add_render_configs_and_textures(
|
|
||||||
render_configs.into_iter(),
|
|
||||||
textures.into_iter().map(|(texture_id,texture)|
|
|
||||||
(texture_id,match texture{
|
|
||||||
strafesnet_deferred_loader::texture::Texture::ImageDDS(data)=>data,
|
|
||||||
})
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut dest=output_folder.clone();
|
|
||||||
dest.push(path.file_stem().unwrap());
|
|
||||||
dest.set_extension("snfm");
|
|
||||||
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
|
|
||||||
|
|
||||||
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
|
|
||||||
Ok(())
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
for thread in threads{
|
|
||||||
join_thread(thread);
|
|
||||||
}
|
|
||||||
println!("{:?}", start.elapsed());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
431
src/roblox.rs
Normal file
431
src/roblox.rs
Normal file
@ -0,0 +1,431 @@
|
|||||||
|
use std::path::{Path,PathBuf};
|
||||||
|
use std::io::{Cursor,Read,Seek};
|
||||||
|
use std::collections::HashSet;
|
||||||
|
use clap::{Args,Subcommand};
|
||||||
|
use anyhow::Result as AResult;
|
||||||
|
use rbx_dom_weak::Instance;
|
||||||
|
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
|
||||||
|
use rbxassetid::RobloxAssetId;
|
||||||
|
use tokio::io::AsyncReadExt;
|
||||||
|
|
||||||
|
const DOWNLOAD_LIMIT:usize=16;
|
||||||
|
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
pub enum Commands{
|
||||||
|
RobloxToSNF(RobloxToSNFSubcommand),
|
||||||
|
DownloadAssets(DownloadAssetsSubcommand),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Args)]
|
||||||
|
pub struct RobloxToSNFSubcommand {
|
||||||
|
#[arg(long)]
|
||||||
|
output_folder:PathBuf,
|
||||||
|
#[arg(required=true)]
|
||||||
|
input_files:Vec<PathBuf>,
|
||||||
|
}
|
||||||
|
#[derive(Args)]
|
||||||
|
pub struct DownloadAssetsSubcommand{
|
||||||
|
#[arg(required=true)]
|
||||||
|
roblox_files:Vec<PathBuf>,
|
||||||
|
// #[arg(long)]
|
||||||
|
// cookie_file:Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Commands{
|
||||||
|
pub async fn run(self)->AResult<()>{
|
||||||
|
match self{
|
||||||
|
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
|
||||||
|
Commands::DownloadAssets(subcommand)=>download_assets(
|
||||||
|
subcommand.roblox_files,
|
||||||
|
rbx_asset::cookie::Cookie::new("".to_string()),
|
||||||
|
).await,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
#[derive(Debug)]
|
||||||
|
enum LoadDomError{
|
||||||
|
IO(std::io::Error),
|
||||||
|
Binary(rbx_binary::DecodeError),
|
||||||
|
Xml(rbx_xml::DecodeError),
|
||||||
|
UnknownFormat,
|
||||||
|
}
|
||||||
|
fn load_dom<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{
|
||||||
|
let mut first_8=[0u8;8];
|
||||||
|
input.read_exact(&mut first_8).map_err(LoadDomError::IO)?;
|
||||||
|
input.rewind().map_err(LoadDomError::IO)?;
|
||||||
|
match &first_8{
|
||||||
|
b"<roblox!"=>rbx_binary::from_reader(input).map_err(LoadDomError::Binary),
|
||||||
|
b"<roblox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(LoadDomError::Xml),
|
||||||
|
_=>Err(LoadDomError::UnknownFormat),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* The ones I'm interested in:
|
||||||
|
Beam.Texture
|
||||||
|
Decal.Texture
|
||||||
|
FileMesh.MeshId
|
||||||
|
FileMesh.TextureId
|
||||||
|
MaterialVariant.ColorMap
|
||||||
|
MaterialVariant.MetalnessMap
|
||||||
|
MaterialVariant.NormalMap
|
||||||
|
MaterialVariant.RoughnessMap
|
||||||
|
MeshPart.MeshId
|
||||||
|
MeshPart.TextureID
|
||||||
|
ParticleEmitter.Texture
|
||||||
|
Sky.MoonTextureId
|
||||||
|
Sky.SkyboxBk
|
||||||
|
Sky.SkyboxDn
|
||||||
|
Sky.SkyboxFt
|
||||||
|
Sky.SkyboxLf
|
||||||
|
Sky.SkyboxRt
|
||||||
|
Sky.SkyboxUp
|
||||||
|
Sky.SunTextureId
|
||||||
|
SurfaceAppearance.ColorMap
|
||||||
|
SurfaceAppearance.MetalnessMap
|
||||||
|
SurfaceAppearance.NormalMap
|
||||||
|
SurfaceAppearance.RoughnessMap
|
||||||
|
SurfaceAppearance.TexturePack
|
||||||
|
*/
|
||||||
|
fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&str){
|
||||||
|
if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
|
||||||
|
let url:&str=content.as_ref();
|
||||||
|
if let Ok(asset_id)=url.parse(){
|
||||||
|
content_list.insert(asset_id);
|
||||||
|
}else{
|
||||||
|
println!("Content failed to parse into AssetID: {:?}",content);
|
||||||
|
}
|
||||||
|
}else{
|
||||||
|
println!("property={} does not exist for class={}",property,object.class.as_str());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
|
||||||
|
let mut file=tokio::fs::File::open(path).await?;
|
||||||
|
let mut data=Vec::new();
|
||||||
|
file.read_to_end(&mut data).await?;
|
||||||
|
Ok(Cursor::new(data))
|
||||||
|
}
|
||||||
|
#[derive(Default)]
|
||||||
|
struct UniqueAssets{
|
||||||
|
meshes:HashSet<RobloxAssetId>,
|
||||||
|
unions:HashSet<RobloxAssetId>,
|
||||||
|
textures:HashSet<RobloxAssetId>,
|
||||||
|
}
|
||||||
|
impl UniqueAssets{
|
||||||
|
fn collect(&mut self,object:&Instance){
|
||||||
|
match object.class.as_str(){
|
||||||
|
"Beam"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
||||||
|
"Decal"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
||||||
|
"Texture"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
||||||
|
"FileMesh"=>accumulate_content_id(&mut self.textures,object,"TextureId"),
|
||||||
|
"MeshPart"=>{
|
||||||
|
accumulate_content_id(&mut self.textures,object,"TextureID");
|
||||||
|
accumulate_content_id(&mut self.meshes,object,"MeshId");
|
||||||
|
},
|
||||||
|
"SpecialMesh"=>accumulate_content_id(&mut self.meshes,object,"MeshId"),
|
||||||
|
"ParticleEmitter"=>accumulate_content_id(&mut self.textures,object,"Texture"),
|
||||||
|
"Sky"=>{
|
||||||
|
accumulate_content_id(&mut self.textures,object,"MoonTextureId");
|
||||||
|
accumulate_content_id(&mut self.textures,object,"SkyboxBk");
|
||||||
|
accumulate_content_id(&mut self.textures,object,"SkyboxDn");
|
||||||
|
accumulate_content_id(&mut self.textures,object,"SkyboxFt");
|
||||||
|
accumulate_content_id(&mut self.textures,object,"SkyboxLf");
|
||||||
|
accumulate_content_id(&mut self.textures,object,"SkyboxRt");
|
||||||
|
accumulate_content_id(&mut self.textures,object,"SkyboxUp");
|
||||||
|
accumulate_content_id(&mut self.textures,object,"SunTextureId");
|
||||||
|
},
|
||||||
|
"UnionOperation"=>accumulate_content_id(&mut self.unions,object,"AssetId"),
|
||||||
|
_=>(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
#[derive(Debug)]
|
||||||
|
enum UniqueAssetError{
|
||||||
|
IO(std::io::Error),
|
||||||
|
LoadDom(LoadDomError),
|
||||||
|
}
|
||||||
|
async fn unique_assets(path:&Path)->Result<UniqueAssets,UniqueAssetError>{
|
||||||
|
// read entire file
|
||||||
|
let mut assets=UniqueAssets::default();
|
||||||
|
let data=read_entire_file(path).await.map_err(UniqueAssetError::IO)?;
|
||||||
|
let dom=load_dom(data).map_err(UniqueAssetError::LoadDom)?;
|
||||||
|
for object in dom.into_raw().1.into_values(){
|
||||||
|
assets.collect(&object);
|
||||||
|
}
|
||||||
|
Ok(assets)
|
||||||
|
}
|
||||||
|
enum DownloadType{
|
||||||
|
Texture(RobloxAssetId),
|
||||||
|
Mesh(RobloxAssetId),
|
||||||
|
Union(RobloxAssetId),
|
||||||
|
}
|
||||||
|
impl DownloadType{
|
||||||
|
fn path(&self)->PathBuf{
|
||||||
|
match self{
|
||||||
|
DownloadType::Texture(asset_id)=>format!("downloaded_textures/{}",asset_id.0.to_string()).into(),
|
||||||
|
DownloadType::Mesh(asset_id)=>format!("meshes/{}",asset_id.0.to_string()).into(),
|
||||||
|
DownloadType::Union(asset_id)=>format!("unions/{}",asset_id.0.to_string()).into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn asset_id(&self)->u64{
|
||||||
|
match self{
|
||||||
|
DownloadType::Texture(asset_id)=>asset_id.0,
|
||||||
|
DownloadType::Mesh(asset_id)=>asset_id.0,
|
||||||
|
DownloadType::Union(asset_id)=>asset_id.0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
enum DownloadResult{
|
||||||
|
Cached(PathBuf),
|
||||||
|
Data(Vec<u8>),
|
||||||
|
Failed,
|
||||||
|
}
|
||||||
|
#[derive(Default,Debug)]
|
||||||
|
struct Stats{
|
||||||
|
total_assets:u32,
|
||||||
|
cached_assets:u32,
|
||||||
|
downloaded_assets:u32,
|
||||||
|
failed_downloads:u32,
|
||||||
|
timed_out_downloads:u32,
|
||||||
|
}
|
||||||
|
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::CookieContext,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
|
||||||
|
stats.total_assets+=1;
|
||||||
|
let download_instruction=download_instruction;
|
||||||
|
// check if file exists on disk
|
||||||
|
let path=download_instruction.path();
|
||||||
|
if tokio::fs::try_exists(path.as_path()).await?{
|
||||||
|
stats.cached_assets+=1;
|
||||||
|
return Ok(DownloadResult::Cached(path));
|
||||||
|
}
|
||||||
|
let asset_id=download_instruction.asset_id();
|
||||||
|
// if not, download file
|
||||||
|
let mut retry=0;
|
||||||
|
const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
|
||||||
|
let mut backoff=1000f32;
|
||||||
|
loop{
|
||||||
|
let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{
|
||||||
|
asset_id,
|
||||||
|
version:None,
|
||||||
|
}).await;
|
||||||
|
match asset_result{
|
||||||
|
Ok(asset_result)=>{
|
||||||
|
stats.downloaded_assets+=1;
|
||||||
|
tokio::fs::write(path,&asset_result).await?;
|
||||||
|
break Ok(DownloadResult::Data(asset_result));
|
||||||
|
},
|
||||||
|
Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
|
||||||
|
if scwuab.status_code.as_u16()==429{
|
||||||
|
if retry==12{
|
||||||
|
println!("Giving up asset download {asset_id}");
|
||||||
|
stats.timed_out_downloads+=1;
|
||||||
|
break Ok(DownloadResult::Failed);
|
||||||
|
}
|
||||||
|
println!("Hit roblox rate limit, waiting {:.0}ms...",backoff);
|
||||||
|
tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
|
||||||
|
backoff*=BACKOFF_MUL;
|
||||||
|
retry+=1;
|
||||||
|
}else{
|
||||||
|
stats.failed_downloads+=1;
|
||||||
|
println!("weird scuwab error: {scwuab:?}");
|
||||||
|
break Ok(DownloadResult::Failed);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(e)=>{
|
||||||
|
stats.failed_downloads+=1;
|
||||||
|
println!("sadly error: {e}");
|
||||||
|
break Ok(DownloadResult::Failed);
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[derive(Debug,thiserror::Error)]
|
||||||
|
enum ConvertTextureError{
|
||||||
|
#[error("Io error {0:?}")]
|
||||||
|
Io(#[from]std::io::Error),
|
||||||
|
#[error("Image error {0:?}")]
|
||||||
|
Image(#[from]image::ImageError),
|
||||||
|
#[error("DDS create error {0:?}")]
|
||||||
|
DDS(#[from]image_dds::CreateDdsError),
|
||||||
|
#[error("DDS write error {0:?}")]
|
||||||
|
DDSWrite(#[from]image_dds::ddsfile::Error),
|
||||||
|
}
|
||||||
|
async fn convert_texture(asset_id:RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
|
||||||
|
let data=match download_result{
|
||||||
|
DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
|
||||||
|
DownloadResult::Data(data)=>data,
|
||||||
|
DownloadResult::Failed=>return Ok(()),
|
||||||
|
};
|
||||||
|
// image::ImageFormat::Png
|
||||||
|
// image::ImageFormat::Jpeg
|
||||||
|
let image=image::load_from_memory(&data)?.to_rgba8();
|
||||||
|
|
||||||
|
// pick format
|
||||||
|
let format=if image.width()%4!=0||image.height()%4!=0{
|
||||||
|
image_dds::ImageFormat::Rgba8UnormSrgb
|
||||||
|
}else{
|
||||||
|
image_dds::ImageFormat::BC7RgbaUnormSrgb
|
||||||
|
};
|
||||||
|
|
||||||
|
//this fails if the image dimensions are not a multiple of 4
|
||||||
|
let dds=image_dds::dds_from_image(
|
||||||
|
&image,
|
||||||
|
format,
|
||||||
|
image_dds::Quality::Slow,
|
||||||
|
image_dds::Mipmaps::GeneratedAutomatic,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let file_name=format!("textures/{}.dds",asset_id.0);
|
||||||
|
let mut file=std::fs::File::create(file_name)?;
|
||||||
|
dds.write(&mut file)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{
|
||||||
|
tokio::try_join!(
|
||||||
|
tokio::fs::create_dir_all("downloaded_textures"),
|
||||||
|
tokio::fs::create_dir_all("textures"),
|
||||||
|
tokio::fs::create_dir_all("meshes"),
|
||||||
|
tokio::fs::create_dir_all("unions"),
|
||||||
|
)?;
|
||||||
|
// use mpsc
|
||||||
|
let thread_limit=std::thread::available_parallelism()?.get();
|
||||||
|
let (send_assets,mut recv_assets)=tokio::sync::mpsc::channel(DOWNLOAD_LIMIT);
|
||||||
|
let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
|
||||||
|
// map decode dispatcher
|
||||||
|
// read files multithreaded
|
||||||
|
// produce UniqueAssetsResult per file
|
||||||
|
tokio::spawn(async move{
|
||||||
|
// move send so it gets dropped when all maps have been decoded
|
||||||
|
// closing the channel
|
||||||
|
let mut it=paths.into_iter();
|
||||||
|
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
|
||||||
|
SEM.add_permits(thread_limit);
|
||||||
|
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
|
||||||
|
let send=send_assets.clone();
|
||||||
|
tokio::spawn(async move{
|
||||||
|
let result=unique_assets(path.as_path()).await;
|
||||||
|
_=send.send(result).await;
|
||||||
|
drop(permit);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// download manager
|
||||||
|
// insert into global unique assets guy
|
||||||
|
// add to download queue if the asset is globally unique and does not already exist on disk
|
||||||
|
let mut stats=Stats::default();
|
||||||
|
let context=rbx_asset::cookie::CookieContext::new(cookie);
|
||||||
|
let mut globally_unique_assets=UniqueAssets::default();
|
||||||
|
// pop a job = retry_queue.pop_front() or ingest(recv.recv().await)
|
||||||
|
// SLOW MODE:
|
||||||
|
// acquire all permits
|
||||||
|
// drop all permits
|
||||||
|
// pop one job
|
||||||
|
// if it succeeds go into fast mode
|
||||||
|
// FAST MODE:
|
||||||
|
// acquire one permit
|
||||||
|
// pop a job
|
||||||
|
let download_thread=tokio::spawn(async move{
|
||||||
|
while let Some(result)=recv_assets.recv().await{
|
||||||
|
let unique_assets=match result{
|
||||||
|
Ok(unique_assets)=>unique_assets,
|
||||||
|
Err(e)=>{
|
||||||
|
println!("error: {e:?}");
|
||||||
|
continue;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
for texture_id in unique_assets.textures{
|
||||||
|
if globally_unique_assets.textures.insert(texture_id){
|
||||||
|
let data=download_retry(&mut stats,&context,DownloadType::Texture(texture_id)).await?;
|
||||||
|
send_texture.send((texture_id,data)).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for mesh_id in unique_assets.meshes{
|
||||||
|
if globally_unique_assets.meshes.insert(mesh_id){
|
||||||
|
download_retry(&mut stats,&context,DownloadType::Mesh(mesh_id)).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for union_id in unique_assets.unions{
|
||||||
|
if globally_unique_assets.unions.insert(union_id){
|
||||||
|
download_retry(&mut stats,&context,DownloadType::Union(union_id)).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
dbg!(stats);
|
||||||
|
Ok::<(),anyhow::Error>(())
|
||||||
|
});
|
||||||
|
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
|
||||||
|
SEM.add_permits(thread_limit);
|
||||||
|
while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){
|
||||||
|
tokio::spawn(async move{
|
||||||
|
let result=convert_texture(asset_id,download_result).await;
|
||||||
|
drop(permit);
|
||||||
|
result.unwrap();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
download_thread.await??;
|
||||||
|
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
#[allow(dead_code)]
|
||||||
|
enum ConvertError{
|
||||||
|
IO(std::io::Error),
|
||||||
|
SNFMap(strafesnet_snf::map::Error),
|
||||||
|
RobloxRead(strafesnet_rbx_loader::ReadError),
|
||||||
|
RobloxLoad(strafesnet_rbx_loader::LoadError),
|
||||||
|
}
|
||||||
|
impl std::fmt::Display for ConvertError{
|
||||||
|
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||||
|
write!(f,"{self:?}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl std::error::Error for ConvertError{}
|
||||||
|
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
|
||||||
|
let entire_file=tokio::fs::read(path).await?;
|
||||||
|
|
||||||
|
let model=strafesnet_rbx_loader::read(
|
||||||
|
std::io::Cursor::new(entire_file)
|
||||||
|
).map_err(ConvertError::RobloxRead)?;
|
||||||
|
|
||||||
|
let mut place=model.into_place();
|
||||||
|
place.run_scripts();
|
||||||
|
|
||||||
|
let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
|
||||||
|
|
||||||
|
let mut dest=output_folder;
|
||||||
|
dest.push(path.file_stem().unwrap());
|
||||||
|
dest.set_extension("snfm");
|
||||||
|
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
|
||||||
|
|
||||||
|
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
|
||||||
|
let start=std::time::Instant::now();
|
||||||
|
|
||||||
|
let thread_limit=std::thread::available_parallelism()?.get();
|
||||||
|
let mut it=paths.into_iter();
|
||||||
|
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
|
||||||
|
SEM.add_permits(thread_limit);
|
||||||
|
|
||||||
|
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
|
||||||
|
let output_folder=output_folder.clone();
|
||||||
|
tokio::spawn(async move{
|
||||||
|
let result=convert_to_snf(path.as_path(),output_folder).await;
|
||||||
|
drop(permit);
|
||||||
|
match result{
|
||||||
|
Ok(())=>(),
|
||||||
|
Err(e)=>println!("Convert error: {e:?}"),
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
|
||||||
|
|
||||||
|
println!("elapsed={:?}", start.elapsed());
|
||||||
|
Ok(())
|
||||||
|
}
|
428
src/source.rs
Normal file
428
src/source.rs
Normal file
@ -0,0 +1,428 @@
|
|||||||
|
use std::path::{Path,PathBuf};
use std::borrow::Cow;
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use strafesnet_bsp_loader::loader::BspFinder;
use strafesnet_deferred_loader::loader::Loader;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};

#[derive(Subcommand)]
pub enum Commands{
	SourceToSNF(SourceToSNFSubcommand),
	ExtractTextures(ExtractTexturesSubcommand),
	VPKContents(VPKContentsSubcommand),
	BSPContents(BSPContentsSubcommand),
}

#[derive(Args)]
pub struct SourceToSNFSubcommand {
	#[arg(long)]
	output_folder:PathBuf,
	#[arg(required=true)]
	input_files:Vec<PathBuf>,
	#[arg(long)]
	vpks:Vec<PathBuf>,
}
#[derive(Args)]
pub struct ExtractTexturesSubcommand{
	#[arg(required=true)]
	bsp_files:Vec<PathBuf>,
	#[arg(long)]
	vpks:Vec<PathBuf>,
}
#[derive(Args)]
pub struct VPKContentsSubcommand {
	#[arg(long)]
	input_file:PathBuf,
}
#[derive(Args)]
pub struct BSPContentsSubcommand {
	#[arg(long)]
	input_file:PathBuf,
}

impl Commands{
	pub async fn run(self)->AResult<()>{
		match self{
			Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder,subcommand.vpks).await,
			Commands::ExtractTextures(subcommand)=>extract_textures(subcommand.bsp_files,subcommand.vpks).await,
			Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
			Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
		}
	}
}

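// Example invocations (illustrative only: assumes clap's default kebab-case naming, that
// these subcommands are mounted directly on the map-tool binary, and placeholder file names):
//   map-tool source-to-snf --output-folder maps --vpks tf2_textures_dir.vpk bhop_example.bsp
//   map-tool extract-textures --vpks tf2_textures_dir.vpk bhop_example.bsp
//   map-tool vpk-contents --input-file tf2_textures_dir.vpk
//   map-tool bsp-contents --input-file bhop_example.bsp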
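
// Outcome of inspecting a parsed VMT material: a direct .vtf texture path, a reference to
// another .vmt (e.g. SubRect), a patch material that still needs resolving, a shader variant
// this tool does not handle, or a material with no usable texture.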
enum VMTContent{
	VMT(String),
	VTF(String),
	Patch(vmt_parser::material::PatchMaterial),
	Unsupported,//don't want to deal with whatever vmt variant
	Unresolved,//could not locate a texture because of vmt content
}
impl VMTContent{
	fn vtf(opt:Option<String>)->Self{
		match opt{
			Some(s)=>Self::VTF(s),
			None=>Self::Unresolved,
		}
	}
}

fn get_some_texture(material:vmt_parser::material::Material)->VMTContent{
	//just grab some texture from somewhere for now
	match material{
		vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
		vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
		vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
		vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
		vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
		vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
		_=>unreachable!(),
	}
}

#[derive(Debug,thiserror::Error)]
enum GetVMTError{
	#[error("Bsp error {0:?}")]
	Bsp(#[from]vbsp::BspError),
	#[error("Utf8 error {0:?}")]
	Utf8(#[from]std::str::Utf8Error),
	#[error("Vdf error {0:?}")]
	Vdf(#[from]vmt_parser::VdfError),
	#[error("Vmt not found")]
	NotFound,
}

fn get_vmt(finder:BspFinder,search_name:&str)->Result<vmt_parser::material::Material,GetVMTError>{
	let vmt_data=finder.find(search_name)?.ok_or(GetVMTError::NotFound)?;
	//decode the vmt as utf-8 text and parse it
	let vmt_str=core::str::from_utf8(&vmt_data)?;
	let material=vmt_parser::from_str(vmt_str)?;
	//println!("vmt material={:?}",material);
	Ok(material)
}

#[derive(Debug,thiserror::Error)]
enum LoadVMTError{
	#[error("Bsp error {0:?}")]
	Bsp(#[from]vbsp::BspError),
	#[error("GetVMT error {0:?}")]
	GetVMT(#[from]GetVMTError),
	#[error("FromUtf8 error {0:?}")]
	FromUtf8(#[from]std::string::FromUtf8Error),
	#[error("Vdf error {0:?}")]
	Vdf(#[from]vmt_parser::VdfError),
	#[error("Vmt unsupported")]
	Unsupported,
	#[error("Vmt unresolved")]
	Unresolved,
	#[error("Vmt not found")]
	NotFound,
}
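// Follow a parsed material to actual texture bytes: a VTF name is resolved relative to
// "materials/" with a .vtf extension, a VMT reference or patch material is loaded and the
// function recurses on the result, and unsupported/unresolved shaders surface as errors.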
fn recursive_vmt_loader<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,material:vmt_parser::material::Material)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
	where
		'bsp:'a,
		'vpk:'a,
{
	match get_some_texture(material){
		VMTContent::VMT(s)=>recursive_vmt_loader(finder,get_vmt(finder,s.as_str())?),
		VMTContent::VTF(s)=>{
			let mut texture_file_name=PathBuf::from("materials");
			texture_file_name.push(s);
			texture_file_name.set_extension("vtf");
			Ok(finder.find(texture_file_name.to_str().unwrap())?)
		},
		VMTContent::Patch(mat)=>recursive_vmt_loader(finder,
			mat.resolve(|search_name|
				match finder.find(search_name)?{
					Some(bytes)=>Ok(String::from_utf8(bytes.into_owned())?),
					None=>Err(LoadVMTError::NotFound),
				}
			)?
		),
		VMTContent::Unsupported=>Err(LoadVMTError::Unsupported),
		VMTContent::Unresolved=>Err(LoadVMTError::Unresolved),
	}
}
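// Resolve a texture name taken from the BSP/model data: lowercase it, strip any extension,
// try the bare path under "materials/" first, and if that misses, parse the matching .vmt
// and chase it through recursive_vmt_loader to the underlying .vtf bytes.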
fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
	where
		'bsp:'a,
		'vpk:'a,
{
	let mut texture_file_name=PathBuf::from("materials");
	//lower case
	let texture_file_name_lowercase=texture_name.to_lowercase();
	texture_file_name.push(texture_file_name_lowercase.clone());
	//remove stem and search for both vtf and vmt files
	let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
	texture_file_name.pop();
	texture_file_name.push(stem);
	if let Some(stuff)=finder.find(texture_file_name.to_str().unwrap())?{
		return Ok(Some(stuff))
	}
	//ideally search for both the .vtf and .vmt; for now resolve via the .vmt material
	let mut texture_file_name_vmt=texture_file_name.clone();
	texture_file_name.set_extension("vtf");
	texture_file_name_vmt.set_extension("vmt");
	recursive_vmt_loader(finder,get_vmt(finder,texture_file_name_vmt.to_str().unwrap())?)
}
#[derive(Debug,thiserror::Error)]
enum ExtractTextureError{
	#[error("Io error {0:?}")]
	Io(#[from]std::io::Error),
	#[error("Bsp error {0:?}")]
	Bsp(#[from]vbsp::BspError),
	#[error("MeshLoad error {0:?}")]
	MeshLoad(#[from]strafesnet_bsp_loader::loader::MeshError),
	#[error("Load VMT error {0:?}")]
	LoadVMT(#[from]LoadVMTError),
}
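// Per-map texture extraction: parse the BSP, queue every world texture name, queue every
// static prop model, load each model to discover the extra textures it references, then
// resolve each queued name through load_texture and hand the raw bytes to the converter
// over the channel.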
async fn gimme_them_textures(path:&Path,vpk_list:&[vpk::VPK],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{
	let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?;
	let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp);
	let bsp=loader_bsp.as_ref();

	let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
	for texture in bsp.textures(){
		texture_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(texture.name())));
	}

	let mut mesh_deferred_loader=MeshDeferredLoader::new();
	for prop in bsp.static_props(){
		mesh_deferred_loader.acquire_mesh_id(prop.model());
	}

	let finder=BspFinder{
		bsp:&loader_bsp,
		vpks:vpk_list
	};

	let mut mesh_loader=strafesnet_bsp_loader::loader::ModelLoader::new(finder);
	// load models and collect requested textures
	for model_path in mesh_deferred_loader.into_indices(){
		let model:vmdl::Model=match mesh_loader.load(model_path){
			Ok(model)=>model,
			Err(e)=>{
				println!("Model={model_path} Load model error: {e}");
				continue;
			},
		};
		for texture in model.textures(){
			for search_path in &texture.search_paths{
				let mut path=PathBuf::from(search_path.as_str());
				path.push(texture.name.as_str());
				let path=path.to_str().unwrap().to_owned();
				texture_deferred_loader.acquire_render_config_id(Some(Cow::Owned(path)));
			}
		}
	}

	for texture_path in texture_deferred_loader.into_indices(){
		match load_texture(finder,&texture_path){
			Ok(Some(texture))=>send_texture.send(
				(texture.into_owned(),texture_path.into_owned())
			).await.unwrap(),
			Ok(None)=>(),
			Err(e)=>println!("Texture={texture_path} Load error: {e}"),
		}
	}

	Ok(())
}


#[derive(Debug,thiserror::Error)]
enum ConvertTextureError{
	#[error("Bsp error {0:?}")]
	Bsp(#[from]vbsp::BspError),
	#[error("Vtf error {0:?}")]
	Vtf(#[from]vtf::Error),
	#[error("DDS create error {0:?}")]
	DDS(#[from]image_dds::CreateDdsError),
	#[error("DDS write error {0:?}")]
	DDSWrite(#[from]image_dds::ddsfile::Error),
	#[error("Io error {0:?}")]
	Io(#[from]std::io::Error),
}

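// Decode the VTF's high-resolution image to RGBA8 and re-encode it as a DDS under textures/.
// BC7 block compression needs dimensions that are a multiple of 4, so oddly sized images
// fall back to uncompressed RGBA8; both paths use sRGB formats and auto-generated mipmaps.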
async fn convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<Path>)->Result<(),ConvertTextureError>{
	let image=vtf::from_bytes(&texture)?.highres_image.decode(0)?.to_rgba8();

	let format=if image.width()%4!=0||image.height()%4!=0{
		image_dds::ImageFormat::Rgba8UnormSrgb
	}else{
		image_dds::ImageFormat::BC7RgbaUnormSrgb
	};
	//this fails if the image dimensions are not a multiple of 4
	let dds = image_dds::dds_from_image(
		&image,
		format,
		image_dds::Quality::Slow,
		image_dds::Mipmaps::GeneratedAutomatic,
	)?;

	//write dds
	let mut dest=PathBuf::from("textures");
	dest.push(write_file_name);
	dest.set_extension("dds");
	std::fs::create_dir_all(dest.parent().unwrap())?;
	let mut writer=std::io::BufWriter::new(std::fs::File::create(dest)?);
	dds.write(&mut writer)?;

	Ok(())
}

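// VPK indexes are parsed on blocking threads (vpk::VPK::read is synchronous file I/O) and
// collected concurrently, at most thread_limit at a time.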
async fn read_vpks(vpk_paths:Vec<PathBuf>,thread_limit:usize)->Vec<vpk::VPK>{
	futures::stream::iter(vpk_paths).map(|vpk_path|async{
		// propagating these errors out of the stream is awkward, and a missing VPK is fatal anyway, so unwrap
		tokio::task::spawn_blocking(move||vpk::VPK::read(&vpk_path)).await.unwrap().unwrap()
	})
	.buffer_unordered(thread_limit)
	.collect().await
}

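// Producer/consumer pipeline: one task walks the BSP files and sends (texture bytes, name)
// pairs through a bounded channel while this function converts them to DDS, with both sides
// capped at thread_limit concurrent tasks by static semaphores.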
async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
	tokio::try_join!(
		tokio::fs::create_dir_all("extracted_textures"),
		tokio::fs::create_dir_all("textures"),
		tokio::fs::create_dir_all("meshes"),
	)?;
	let thread_limit=std::thread::available_parallelism()?.get();

	// load vpk list
	let vpk_list=read_vpks(vpk_paths,thread_limit).await;

	// leak vpk_list for static lifetime?
	let vpk_list:&[vpk::VPK]=vpk_list.leak();

	let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
	let mut it=paths.into_iter();
	let extract_thread=tokio::spawn(async move{
		static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
		SEM.add_permits(thread_limit);
		while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
			let send=send_texture.clone();
			tokio::spawn(async move{
				let result=gimme_them_textures(&path,vpk_list,send).await;
				drop(permit);
				match result{
					Ok(())=>(),
					Err(e)=>println!("Map={path:?} Decode error: {e:?}"),
				}
			});
		}
	});

	// convert images
	static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
	SEM.add_permits(thread_limit);
	while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){
		// TODO: dedup dest?
		tokio::spawn(async move{
			let result=convert_texture(data,dest).await;
			drop(permit);
			match result{
				Ok(())=>(),
				Err(e)=>println!("Convert error: {e:?}"),
			}
		});
	}
	extract_thread.await?;
	_=SEM.acquire_many(thread_limit as u32).await?;
	Ok(())
}

fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
	let vpk_index=vpk::VPK::read(&vpk_path)?;
	for (label,entry) in vpk_index.tree.into_iter(){
		println!("vpk label={} entry={:?}",label,entry);
	}
	Ok(())
}

fn bsp_contents(path:PathBuf)->AResult<()>{
	let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
	for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
		println!("file_name={:?}",file_name);
	}
	Ok(())
}

#[derive(Debug)]
#[allow(dead_code)]
enum ConvertError{
	IO(std::io::Error),
	SNFMap(strafesnet_snf::map::Error),
	BspRead(strafesnet_bsp_loader::ReadError),
	BspLoad(strafesnet_bsp_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for ConvertError{}

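// BSP -> SNF pipeline: read the .bsp, convert it with the supplied VPKs available to the
// loader for texture and model lookups, and write the result into the output folder as
// <file_stem>.snfm.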
async fn convert_to_snf(path:&Path,vpk_list:&[vpk::VPK],output_folder:PathBuf)->AResult<()>{
	let entire_file=tokio::fs::read(path).await?;

	let bsp=strafesnet_bsp_loader::read(
		std::io::Cursor::new(entire_file)
	).map_err(ConvertError::BspRead)?;

	let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;

	let mut dest=output_folder;
	dest.push(path.file_stem().unwrap());
	dest.set_extension("snfm");
	let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;

	strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;

	Ok(())
}
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
	let start=std::time::Instant::now();

	let thread_limit=std::thread::available_parallelism()?.get();

	// load vpk list
	let vpk_list=read_vpks(vpk_paths,thread_limit).await;

	// leak vpk_list for static lifetime?
	let vpk_list:&[vpk::VPK]=vpk_list.leak();

	let mut it=paths.into_iter();
	static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
	SEM.add_permits(thread_limit);

	while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
		let output_folder=output_folder.clone();
		tokio::spawn(async move{
			let result=convert_to_snf(path.as_path(),vpk_list,output_folder).await;
			drop(permit);
			match result{
				Ok(())=>(),
				Err(e)=>println!("Convert error: {e:?}"),
			}
		});
	}
	_=SEM.acquire_many(thread_limit as u32).await.unwrap();

	println!("elapsed={:?}", start.elapsed());
	Ok(())
}