use std::{io::Read,path::PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;
use tokio::io::AsyncReadExt;
use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion};

type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;

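//the CLI surface: every operation below is declared as a subcommand of Commands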
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version = true)]
struct Cli{
	#[command(subcommand)]
	command:Commands,
}

#[derive(Subcommand)]
enum Commands{
	DownloadHistory(DownloadHistorySubcommand),
	Download(DownloadSubcommand),
	DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand),
	Create(CreateSubcommand),
	Upload(UploadSubcommand),
	Compile(CompileSubcommand),
	Decompile(DecompileSubcommand),
	DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
	DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
}

#[derive(Args)]
struct DownloadHistorySubcommand{
	#[arg(long)]
	asset_id:AssetID,
	#[arg(long)]
	cookie_type:CookieType,
	#[arg(long)]
	cookie:String,
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
	continue_from_versions:Option<bool>,
	#[arg(long)]
	start_version:Option<u64>,
	#[arg(long)]
	end_version:Option<u64>,
}
#[derive(Args)]
struct DownloadSubcommand{
	#[arg(long)]
	cookie_type:CookieType,
	#[arg(long)]
	cookie:String,
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(required=true)]
	asset_ids:Vec<AssetID>,
}
#[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{
	#[arg(long)]
	cookie_type:CookieType,
	#[arg(long)]
	cookie:String,
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
	group:u64,
}
#[derive(Args)]
struct CreateSubcommand{
	#[arg(long)]
	cookie_type:CookieType,
	#[arg(long)]
	cookie:String,
	#[arg(long)]
	model_name:String,
	#[arg(long)]
	description:Option<String>,
	#[arg(long)]
	input_file:PathBuf,
	#[arg(long)]
	group:Option<u64>,
	#[arg(long)]
	free_model:Option<bool>,
	#[arg(long)]
	allow_comments:Option<bool>,
}
#[derive(Args)]
struct UploadSubcommand{
	#[arg(long)]
	asset_id:AssetID,
	#[arg(long)]
	cookie_type:CookieType,
	#[arg(long)]
	cookie:String,
	#[arg(long)]
	input_file:PathBuf,
	#[arg(long)]
	group:Option<u64>,
}
#[derive(Args)]
struct CompileSubcommand{
	#[arg(long)]
	input_folder:PathBuf,
	#[arg(long)]
	output_file:PathBuf,
	#[arg(long)]
	style:Option<DecompileStyle>,
	#[arg(long)]
	template:Option<PathBuf>,
}
#[derive(Args)]
struct DecompileSubcommand{
	#[arg(long)]
	input_file:PathBuf,
	#[arg(long)]
	output_folder:PathBuf,
	#[arg(long)]
	style:DecompileStyle,
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
	write_models:Option<bool>,
	#[arg(long)]
	write_scripts:Option<bool>,
}
#[derive(Args)]
struct DecompileHistoryIntoGitSubcommand{
	#[arg(long)]
	input_folder:PathBuf,
	//currently the output folder must be the current folder due to git2 limitations
	//output_folder:cli.output.unwrap(),
	#[arg(long)]
	style:DecompileStyle,
	#[arg(long)]
	git_committer_name:String,
	#[arg(long)]
	git_committer_email:String,
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
	write_models:Option<bool>,
	#[arg(long)]
	write_scripts:Option<bool>,
}
#[derive(Args)]
struct DownloadAndDecompileHistoryIntoGitSubcommand{
	#[arg(long)]
	asset_id:AssetID,
	#[arg(long)]
	cookie_type:CookieType,
	#[arg(long)]
	cookie:String,
	//currently the output folder must be the current folder due to git2 limitations
	//output_folder:cli.output.unwrap(),
	#[arg(long)]
	style:DecompileStyle,
	#[arg(long)]
	git_committer_name:String,
	#[arg(long)]
	git_committer_email:String,
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
	write_models:Option<bool>,
	#[arg(long)]
	write_scripts:Option<bool>,
}

#[derive(Clone,clap::ValueEnum)]
enum CookieType{
	Literal,
	Environment,
	File,
}

#[derive(Clone,Copy,Debug,clap::ValueEnum)]
enum DecompileStyle{
	Rox,
	Rojo,
	RoxRojo,
}

#[tokio::main]
async fn main()->AResult<()>{
	let cli=Cli::parse();
	match cli.command{
		Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
			continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
			end_version:subcommand.end_version,
			start_version:subcommand.start_version.unwrap_or(0),
			output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			asset_id:subcommand.asset_id,
		}).await,
		Commands::Download(subcommand)=>{
			let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
			download_list(
				Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
				subcommand.asset_ids.into_iter().map(|asset_id|{
					let mut path=output_folder.clone();
					path.push(asset_id.to_string());
					(asset_id,path)
				}).collect()
			).await
		},
		Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
			Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			subcommand.group,
			subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
		).await,
		Commands::Create(subcommand)=>create(CreateConfig{
			cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			group:subcommand.group,
			input_file:subcommand.input_file,
			model_name:subcommand.model_name,
			description:subcommand.description.unwrap_or_default(),
			free_model:subcommand.free_model.unwrap_or(false),
			allow_comments:subcommand.allow_comments.unwrap_or(false),
		}).await,
		Commands::Upload(subcommand)=>upload_list(
			Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			subcommand.group,
			vec![(subcommand.asset_id,subcommand.input_file)]
		).await,
		Commands::Compile(subcommand)=>compile(CompileConfig{
			input_folder:subcommand.input_folder,
			output_file:subcommand.output_file,
			template:subcommand.template,
			style:subcommand.style,
		}).await,
		Commands::Decompile(subcommand)=>decompile(DecompileConfig{
			style:subcommand.style,
			input_file:subcommand.input_file,
			output_folder:subcommand.output_folder,
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
		Commands::DecompileHistoryIntoGit(subcommand)=>decompile_history_into_git(DecompileHistoryConfig{
			git_committer_name:subcommand.git_committer_name,
			git_committer_email:subcommand.git_committer_email,
			input_folder:subcommand.input_folder,
			output_folder:std::env::current_dir()?,
			style:subcommand.style,
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
		Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
			git_committer_name:subcommand.git_committer_name,
			git_committer_email:subcommand.git_committer_email,
			cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			asset_id:subcommand.asset_id,
			output_folder:std::env::current_dir()?,
			style:subcommand.style,
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
	}
}

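//the .ROBLOSECURITY cookie value can be passed literally, read from an
//environment variable, or read from a file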
struct Cookie(String);
impl Cookie{
	async fn from_type(cookie_type:CookieType,cookie_string:String)->AResult<Self>{
		Ok(Self(format!(".ROBLOSECURITY={}",match cookie_type{
			CookieType::Literal=>cookie_string,
			CookieType::Environment=>std::env::var(cookie_string)?,
			CookieType::File=>tokio::fs::read_to_string(cookie_string).await?,
		})))
	}
}

struct CreateConfig{
	cookie:String,
	model_name:String,
	description:String,
	input_file:PathBuf,
	group:Option<u64>,
	free_model:bool,
	allow_comments:bool,
}

async fn create(config:CreateConfig)->AResult<()>{
	let resp=RobloxContext::new(config.cookie)
	.create(rbx_asset::context::CreateRequest{
		name:config.model_name,
		description:config.description,
		ispublic:config.free_model,
		allowComments:config.allow_comments,
		groupId:config.group,
	},tokio::fs::read(config.input_file).await?).await?;
	println!("UploadResponse={:?}",resp);
	Ok(())
}

async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let context=RobloxContext::new(cookie);
	//map before constructing the stream so buffer_unordered receives a stream of futures
	futures::stream::iter(asset_id_file_map.into_iter()
	.map(|(asset_id,file)|{
		let context=&context;
		async move{
			Ok((asset_id,context.upload(rbx_asset::context::UploadRequest{
				assetid:asset_id,
				name:None,
				description:None,
				ispublic:None,
				allowComments:None,
				groupId:group,
			},tokio::fs::read(file).await?).await?))
		}
	}))
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((asset_id,body))=>{
				println!("asset_id={} UploadResponse={:?}",asset_id,body);
			},
			Err(e)=>eprintln!("ul error: {}",e),
		}
	}).await;
	Ok(())
}

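//download each asset to its paired file path, with up to CONCURRENT_REQUESTS
//downloads in flight at once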
async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let context=RobloxContext::new(cookie);
	futures::stream::iter(asset_id_file_map.into_iter()
	.map(|(asset_id,file)|{
		let context=&context;
		async move{
			Ok((file,context.download(rbx_asset::context::DownloadRequest{asset_id,version:None}).await?))
		}
	}))
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((dest,data))=>{
				if let Err(e)=tokio::fs::write(dest,data).await{
					eprintln!("fs error: {}",e);
				}
			},
			Err(e)=>eprintln!("dl error: {}",e),
		}
	}).await;
	Ok(())
}

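//follow the paged inventory listing for a group until nextPageCursor runs out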
async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<InventoryItem>>{
	let mut cursor:Option<String>=None;
	let mut asset_list=Vec::new();
	loop{
		let mut page=context.inventory_page(rbx_asset::context::InventoryPageRequest{group,cursor}).await?;
		asset_list.append(&mut page.data);
		if page.nextPageCursor.is_none(){
			break;
		}
		cursor=page.nextPageCursor;
	}
	Ok(asset_list)
}

async fn download_group_inventory_json(cookie:String,group:u64,output_folder:PathBuf)->AResult<()>{
	let context=RobloxContext::new(cookie);
	let item_list=get_inventory_pages(&context,group).await?;

	let mut path=output_folder.clone();
	//push, not set_file_name: versions.json goes inside the output folder
	path.push("versions.json");
	tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;

	Ok(())
}

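//page through every version of an asset, then sort ascending by version number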
async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
	let mut cursor:Option<String>=None;
	let mut asset_list=Vec::new();
	loop{
		let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id,cursor}).await?;
		asset_list.append(&mut page.data);
		if page.nextPageCursor.is_none(){
			break;
		}
		cursor=page.nextPageCursor;
	}
	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
	Ok(asset_list)
}

struct DownloadHistoryConfig{
	continue_from_versions:bool,
	end_version:Option<u64>,
	start_version:u64,
	output_folder:PathBuf,
	cookie:String,
	asset_id:AssetID,
}

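//download every version of an asset as {asset_id}_v{version}.rbxl files.
//with continue_from_versions, the existing versions.json is scanned: a gap
//below the highest version becomes end_version, and if there is no gap,
//start_version resumes just above the highest downloaded version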
async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
	let mut asset_list_contents=std::collections::HashSet::new();
	let mut asset_list:Vec<AssetVersion>=Vec::new();
	if config.end_version.is_none()&&config.continue_from_versions{
		//load the preexisting versions list
		let mut versions_path=config.output_folder.clone();
		versions_path.push("versions.json");
		match std::fs::File::open(versions_path){
			Ok(versions_file)=>asset_list.append(&mut serde_json::from_reader(versions_file)?),
			Err(e)=>match e.kind(){
				std::io::ErrorKind::NotFound=>Err(anyhow::Error::msg("Cannot continue from versions.json - file does not exist"))?,
				_=>Err(e)?,
			}
		}
		//note which versions are already contained
		for asset_version in &asset_list{
			asset_list_contents.insert(asset_version.assetVersionNumber);
		}
		//find the highest version number
		match asset_list.iter().map(|asset_version|asset_version.assetVersionNumber).max(){
			Some(max)=>{
				//count down contiguously until a number is missing
				for i in (1..=max).rev(){
					if !asset_list_contents.contains(&i){
						//that is end_version
						config.end_version=Some(i);
						break;
					}
				}
				//if all versions are contained, set start_version to the max + 1
				if config.end_version.is_none(){
					config.start_version=max+1;
				}
			},
			None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
		}
	}
	let context=RobloxContext::new(config.cookie);

	//limit concurrent downloads
	let mut join_set=tokio::task::JoinSet::new();

	//poll paged list of all asset versions
	let mut cursor:Option<String>=None;
	loop{
		let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id:config.asset_id,cursor}).await?;
		let context=&context;
		let output_folder=config.output_folder.clone();
		let data=&page.data;
		let asset_list_contents=&asset_list_contents;
		let join_set=&mut join_set;
		let error_catcher=||async move{
			let mut cancel_paging=false;
			for asset_version in data{
				let version_number=asset_version.assetVersionNumber;
				//skip assets beyond the specified end_version
				if config.end_version.is_some_and(|v|v<version_number){
					continue;
				}
				//skip assets lower than start_version and cancel paging asset versions
				if version_number<config.start_version{
					cancel_paging=true;
					continue;//don't trust the order roblox returns
				}
				//skip previously downloaded assets
				if asset_list_contents.contains(&version_number){
					continue;
				}
				while CONCURRENT_REQUESTS<=join_set.len(){
					join_set.join_next().await.unwrap()??;
				}
				let context=context.clone();
				let mut path=output_folder.clone();
				path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
				join_set.spawn(async move{
					let file=context.download(rbx_asset::context::DownloadRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
					tokio::fs::write(path,file).await?;
					Ok::<_,anyhow::Error>(())
				});
			}
			Ok::<_,anyhow::Error>(cancel_paging)
		};
		let cancel_paging=match error_catcher().await{
			Ok(cancel)=>cancel,
			Err(e)=>{
				println!("download error: {}",e);
				//cancel the download and write versions
				true
			},
		};
		if page.nextPageCursor.is_none()||cancel_paging{
			for asset_version in page.data.into_iter(){
				if !(asset_list_contents.contains(&asset_version.assetVersionNumber)
					||config.end_version.is_some_and(|v|v<asset_version.assetVersionNumber)
					||asset_version.assetVersionNumber<config.start_version){
					asset_list.push(asset_version);
				}
			}
			break;
		}else{
			asset_list.append(&mut page.data);
		}
		cursor=page.nextPageCursor;
	}

	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));

	//push, not set_file_name: write versions.json inside the output folder,
	//matching the path it is read from above
	let mut path=config.output_folder.clone();
	path.push("versions.json");
	tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;

	while let Some(result)=join_set.join_next().await{
		result??;
	}

	Ok(())
}

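//sniff the first 8 bytes: binary places begin with "<roblox!", xml places with "<roblox "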
fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
	let mut buf=std::io::BufReader::new(input);
	let peek=std::io::BufRead::fill_buf(&mut buf)?;
	//guard against truncated files before slicing the magic bytes
	if peek.len()<8{
		return Err(anyhow::Error::msg("file too short to identify"));
	}
	match &peek[0..4]{
		b"<rob"=>{
			match &peek[4..8]{
				b"lox!"=>rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
				b"lox "=>rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
				other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
			}
		},
		_=>Err(anyhow::Error::msg("unsupported file type")),
	}
}

#[derive(PartialEq)]
enum Class{
	Folder,
	ModuleScript,
	LocalScript,
	Script,
	Model,
}

struct TreeNode{
	name:String,
	referent:Ref,
	parent:Ref,
	class:Class,
	children:Vec<Ref>,
}
impl TreeNode{
	fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
		Self{
			name,
			referent,
			parent,
			class,
			children:Vec::new(),
		}
	}
}

enum TrimStackInstruction{
	Referent(Ref),
	IncrementScript,
	DecrementScript,
}

enum WriteStackInstruction<'a>{
	Node(&'a TreeNode,u32),//(Node,NameTally)
	PushFolder(String),
	PopFolder,
	Destroy(Ref),
}

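//properties that the file hierarchy cannot express are emitted as comment
//directives at the top of a decompiled script, e.g.:
//--!Properties.Name = "MyScript"
//--!Properties.ClassName = "LocalScript"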
#[derive(Default)]
struct PropertiesOverride{
	name:Option<String>,
	class:Option<String>,
}
impl PropertiesOverride{
	fn is_some(&self)->bool{
		self.name.is_some()
		||self.class.is_some()
	}
}
impl std::fmt::Display for PropertiesOverride{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		if let Some(name)=self.name.as_deref(){
			writeln!(f,"--!Properties.Name = \"{}\"",name)?;
		}
		if let Some(class)=self.class.as_deref(){
			writeln!(f,"--!Properties.ClassName = \"{}\"",class)?;
		}
		Ok(())
	}
}

fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
	//[A-Za-z], not [A-z]: the latter also matches the punctuation between Z and a
	lazy_regex::regex!(r"[^A-Za-z0-9.-]").replace_all(s,"_")
}

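//write a single tree node to disk: scripts as .lua files (with a
//style-dependent extension) and models as .rbxmx xml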
fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
	file.push(sanitize(node_name_override.as_str()).as_ref());
	match node.class{
		Class::Folder=>(),
		Class::ModuleScript|Class::LocalScript|Class::Script=>{
			if !write_scripts{
				return Ok(())
			}

			//set extension
			match style{
				DecompileStyle::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
				DecompileStyle::RoxRojo|DecompileStyle::Rojo=>{
					match properties.class.as_deref(){
						Some("LocalScript")=>{
							file.set_extension("client.lua");
							properties.class=None;
						},
						Some("Script")=>{
							file.set_extension("server.lua");
							properties.class=None;
						},
						// Some("ModuleScript")=>{
						// 	file.set_extension("module");
						// 	properties.class=None;
						// },
						None=>assert!(file.set_extension("lua"),"could not set extension"),
						Some(other)=>Err(anyhow::Error::msg(format!("Attempt to write a {} as a script",other)))?,
					}
				}
			}

			if let Some(item)=dom.get_by_ref(node.referent){
				//TODO: delete disabled scripts
				if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
					if properties.is_some(){
						//rox style: prepend the override directives to the source
						let source=properties.to_string()+source.as_str();
						std::fs::write(file,source)?;
					}else{
						std::fs::write(file,source)?;
					}
				}
			}
		},
		Class::Model=>{
			if !write_models{
				return Ok(())
			}
			assert!(file.set_extension("rbxmx"));
			let output=std::io::BufWriter::new(std::fs::File::create(file)?);
			rbx_xml::to_writer_default(output,dom,&[node.referent])?;
		},
	}
	Ok(())
}

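//the full dom plus the tree of nodes that will become files and folders on disk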
struct DecompiledContext{
	dom:rbx_dom_weak::WeakDom,
	tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
}

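//two passes: walk the dom to classify every instance, then trim empty
//folders and turn folders nested under scripts into models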
fn generate_decompiled_context<R:Read>(input:R)->AResult<DecompiledContext>{
	let dom=load_dom(input)?;

	let mut tree_refs=std::collections::HashMap::new();
	tree_refs.insert(dom.root_ref(),TreeNode::new(
		"src".to_owned(),
		dom.root_ref(),
		Ref::none(),
		Class::Folder
	));

	//run rules
	let mut stack=vec![dom.root()];
	while let Some(item)=stack.pop(){
		let class=match item.class.as_str(){
			"ModuleScript"=>Class::ModuleScript,
			"LocalScript"=>Class::LocalScript,
			"Script"=>Class::Script,
			"Model"=>Class::Model,
			_=>Class::Folder,
		};
		let skip=match class{
			Class::Model=>true,
			_=>false,
		};
		if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
			let referent=item.referent();
			let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
			parent_node.children.push(referent);
			tree_refs.insert(referent,node);
		}
		//look no further, turn this node and all its children into a model
		if skip{
			continue;
		}
		for &referent in item.children(){
			if let Some(c)=dom.get_by_ref(referent){
				stack.push(c);
			}
		}
	}

	//trim empty folders
	let mut script_count=0;
	let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
		.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
	while let Some(instruction)=stack.pop(){
		match instruction{
			TrimStackInstruction::IncrementScript=>script_count+=1,
			TrimStackInstruction::DecrementScript=>script_count-=1,
			TrimStackInstruction::Referent(referent)=>{
				let mut delete=None;
				if let Some(node)=tree_refs.get_mut(&referent){
					if node.class==Class::Folder&&script_count!=0{
						node.class=Class::Model
					}
					if node.class==Class::Folder&&node.children.len()==0{
						delete=Some(node.parent);
					}else{
						//how the hell do I do this better without recursion
						let is_script=match node.class{
							Class::ModuleScript|Class::LocalScript|Class::Script=>true,
							_=>false,
						};
						//stack is popped from back
						if is_script{
							stack.push(TrimStackInstruction::DecrementScript);
						}
						for &child_referent in &node.children{
							stack.push(TrimStackInstruction::Referent(child_referent));
						}
						if is_script{
							stack.push(TrimStackInstruction::IncrementScript);
						}
					}
				}
				//trim referent
				if let Some(parent_ref)=delete{
					let parent_node=tree_refs.get_mut(&parent_ref)
						.expect("parent_ref does not exist in tree_refs");
					parent_node.children.remove(
						parent_node.children.iter()
						.position(|&r|r==referent)
						.expect("parent.children does not contain referent")
					);
					tree_refs.remove(&referent);
				}
			},
		}
	}

	Ok(DecompiledContext{
		dom,
		tree_refs,
	})
}

struct WriteConfig{
	style:DecompileStyle,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

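//phase 1 walks the tree synchronously, creating folders and queueing file
//writes; phase 2 runs the queued writes in parallel on rayon. nodes written
//as files are destroyed afterwards so only the remainder lands in template.rbxlx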
async fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<()>{
	let mut write_queue=Vec::new();
	let mut destroy_queue=Vec::new();

	let mut name_tally=std::collections::HashMap::<String,u32>::new();
	let mut folder=config.output_folder.clone();
	let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)];
	while let Some(instruction)=stack.pop(){
		match instruction{
			WriteStackInstruction::PushFolder(component)=>folder.push(component),
			WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
			WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
			WriteStackInstruction::Node(node,name_count)=>{
				//track properties that must be overridden to compile the folder structure back into a place file
				let mut properties=PropertiesOverride::default();
				let has_children=node.children.len()!=0;
				match node.class{
					Class::Folder=>(),
					Class::ModuleScript=>(),//.lua files are ModuleScript by default
					Class::LocalScript=>properties.class=Some("LocalScript".to_owned()),
					Class::Script=>properties.class=Some("Script".to_owned()),
					Class::Model=>(),
				}
				let name_override=if 0<name_count{
					properties.name=Some(node.name.clone());
					format!("{}_{}",node.name,name_count)
				}else{
					node.name.clone()
				};

				if has_children{
					//push temp subfolder
					let mut subfolder=folder.clone();
					subfolder.push(sanitize(name_override.as_str()).as_ref());
					//make folder
					tokio::fs::create_dir(subfolder.clone()).await?;

					let name_final=match config.style{
						DecompileStyle::Rox
						|DecompileStyle::RoxRojo=>name_override.clone(),
						DecompileStyle::Rojo=>"init".to_owned(),
					};

					//write item in subfolder
					write_queue.push((subfolder,node,name_final,properties,config.style));
				}else{
					//write item
					write_queue.push((folder.clone(),node,name_override.clone(),properties,config.style));
				}
				//queue item to be deleted from the dom after child objects are handled (stack is popped from the back)
				match node.class{
					Class::Folder=>(),
					_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
				}
				if has_children{
					stack.push(WriteStackInstruction::PopFolder);
					name_tally.clear();
					for referent in &node.children{
						if let Some(c)=context.tree_refs.get(referent){
							let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
							stack.push(WriteStackInstruction::Node(c,*v));
						}
					}
					stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
				}
			},
		}
	}

	//run the queued writes in parallel
	{
		use rayon::iter::{IntoParallelIterator,ParallelIterator};
		let dom=&context.dom;
		let write_models=config.write_models;
		let write_scripts=config.write_scripts;
		let results:Vec<AResult<()>>=write_queue.into_par_iter().map(|(write_path,node,node_name_override,properties,style)|{
			write_item(dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
		}).collect();
		for result in results{
			result?;
		}
	}

	//run the destroy
	for destroy_ref in destroy_queue{
		context.dom.destroy(destroy_ref);
	}

	//write what remains to template.rbxlx
	if config.write_template{
		let mut file=config.output_folder.clone();
		file.push("template");
		assert!(file.set_extension("rbxlx"));
		let output=std::io::BufWriter::new(std::fs::File::create(file)?);
		rbx_xml::to_writer_default(output,&context.dom,context.dom.root().children())?;
	}

	Ok(())
}

struct DecompileConfig{
	style:DecompileStyle,
	input_file:PathBuf,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn decompile(config:DecompileConfig)->AResult<()>{
	//rules:
	//Class Script|LocalScript|ModuleScript->$Name.lua
	//Class Model->$Name.rbxmx
	//overrides.json per-folder [Override{name,class}]
	//Everything else goes into template.rbxlx

	//read file
	let context=generate_decompiled_context(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?;

	//generate folders, models, and scripts
	//delete models and scripts from dom
	write_files(WriteConfig{
		style:config.style,
		output_folder:config.output_folder,
		write_template:config.write_template,
		write_models:config.write_models,
		write_scripts:config.write_scripts,
	},context).await?;

	Ok(())
}

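//decompile one asset version into the working tree, stage everything, and
//commit it with the asset's original timestamp; versions that produce no
//diff against HEAD are skipped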
struct WriteCommitConfig{
	git_committer_name:String,
	git_committer_email:String,
	output_folder:PathBuf,
	style:DecompileStyle,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,DecompiledContext)>,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{
	let (asset_version,context)=b??;
	println!("writing files for version {}",asset_version.assetVersionNumber);

	//clean output dir
	if config.write_models||config.write_scripts{
		let mut src=config.output_folder.clone();
		src.push("src");
		match std::fs::remove_dir_all(src){
			Ok(())=>(),
			Err(e)=>println!("remove_dir_all src failed {}",e),
		}
	}
	if config.write_template{
		let mut template=config.output_folder.clone();
		template.push("template.rbxlx");
		match std::fs::remove_file(template){
			Ok(())=>(),
			Err(e)=>println!("remove_file template.rbxlx failed {}",e),
		}
	}

	//write files
	write_files(WriteConfig{
		style:config.style,
		output_folder:config.output_folder.clone(),
		write_template:config.write_template,
		write_models:config.write_models,
		write_scripts:config.write_scripts,
	},context).await?;

	let date=asset_version.created;
	//let sig=repo.signature()?; //this pulls the default name and email
	let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0))?;
	let tree_id={
		let mut tree_index=repo.index()?;
		match tree_index.add_all(std::iter::once(config.output_folder.as_path()),git2::IndexAddOption::DEFAULT,None){
			Ok(_)=>(),
			Err(e)=>println!("tree_index.add_all error: {}",e),
		}
		match tree_index.update_all(std::iter::once(config.output_folder.as_path()),None){
			Ok(_)=>(),
			Err(e)=>println!("tree_index.update_all error: {}",e),
		}
		tree_index.write()?;
		tree_index.write_tree()?
	};
	let tree=repo.find_tree(tree_id)?;

	let mut parents=Vec::new();

	match repo.head(){
		Ok(reference)=>{
			let commit=reference.peel_to_commit()?;

			//diff the new tree against the commit tree to see if there are any changes
			let commit_tree=commit.tree()?;
			let diff=repo.diff_tree_to_tree(Some(&commit_tree),Some(&tree),None)?;
			if diff.get_delta(0).is_none(){
				println!("no changes");
				return Ok(());
			}

			parents.push(commit);
		},
		Err(e)=>println!("repo head error {:?}",e),
	};

	repo.commit(
		Some("HEAD"),//update_ref
		&sig,//author
		&sig,//committer
		&format!("v{}",asset_version.assetVersionNumber),//message
		&tree,//tree (basically files)
		parents.iter().collect::<Vec<&git2::Commit<'_>>>().as_slice(),//parents
	)?;

	Ok(())
}

struct DecompileHistoryConfig{
	git_committer_name:String,
	git_committer_email:String,
	input_folder:PathBuf,
	style:DecompileStyle,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

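//decompile local {assetId}_v{version}.rbxl files listed in versions.json and
//commit each version, decoding on blocking threads CONCURRENT_DECODE at a time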
async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
	//use the preexisting versions list
	let mut versions_path=config.input_folder.clone();
	versions_path.push("versions.json");
	let asset_list:Vec<AssetVersion>=serde_json::from_reader(std::fs::File::open(versions_path)?)?;

	let repo=git2::Repository::init(config.output_folder.as_path())?;

	//decompile all versions
	futures::stream::iter(asset_list.into_iter()
	.map(|asset_version|{
		let mut file_path=config.input_folder.clone();
		tokio::task::spawn_blocking(move||{
			file_path.push(format!("{}_v{}.rbxl",asset_version.assetId,asset_version.assetVersionNumber));
			let file=std::fs::File::open(file_path)?;
			let contents=generate_decompiled_context(file)?;
			Ok::<_,anyhow::Error>((asset_version,contents))
		})
	}))
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			style:config.style,
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			Err(e)=>println!("decompile/write/commit error: {}",e),
		}
	}).await;
	Ok(())
}

struct DownloadAndDecompileHistoryConfig{
	cookie:String,
	asset_id:AssetID,
	git_committer_name:String,
	git_committer_email:String,
	style:DecompileStyle,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

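//same pipeline as decompile_history_into_git, but each version is fetched
//from roblox instead of read from a local folder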
async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
	let context=RobloxContext::new(config.cookie);

	//poll paged list of all asset versions
	let asset_list=get_version_history(&context,config.asset_id).await?;

	let repo=git2::Repository::init(config.output_folder.clone())?;

	//download all versions
	let asset_id=config.asset_id;
	futures::stream::iter(asset_list.into_iter()
	.map(|asset_version|{
		let context=context.clone();
		tokio::task::spawn(async move{
			let file=context.download(rbx_asset::context::DownloadRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
			Ok::<_,anyhow::Error>((asset_version,generate_decompiled_context(std::io::Cursor::new(file))?))
		})
	}))
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			style:config.style,
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			Err(e)=>println!("download/decompile/write/commit error: {}",e),
		}
	}).await;
	Ok(())
}

//holy smokes what am I doing lmao
//This giant machine is supposed to search for files according to style rules
//e.g. ScriptName.server.lua or init.lua
//Obviously I got carried away
//I could use an enum!
//I could use a struct!
//I could use a trait!
//I could use an error!
//I could use a match!
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
enum QueryResolveError{
	NotFound,//0 results
	Ambiguous,//>1 results
	JoinError(tokio::task::JoinError),
	IO(std::io::Error),
}
impl std::fmt::Display for QueryResolveError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for QueryResolveError{}

struct FileWithName{
	file:tokio::fs::File,
	name:String,
}

async fn get_file_async(mut path:PathBuf,file_name:impl AsRef<std::path::Path>)->Result<FileWithName,QueryResolveError>{
	let name=file_name.as_ref().to_str().unwrap().to_owned();
	path.push(file_name);
	match tokio::fs::File::open(path).await{
		Ok(file)=>Ok(FileWithName{file,name}),
		Err(e)=>match e.kind(){
			std::io::ErrorKind::NotFound=>Err(QueryResolveError::NotFound),
			_=>Err(QueryResolveError::IO(e)),
		},
	}
}
type QueryHintResult=Result<FileHint,QueryResolveError>;
trait Query{
	async fn resolve(self)->QueryHintResult;
}
type QueryHandle=tokio::task::JoinHandle<Result<FileWithName,QueryResolveError>>;
struct QuerySingle{
	script:QueryHandle,
}
impl QuerySingle{
	fn rox(search_path:&PathBuf,search_name:&str)->Self{
		Self{
			script:tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name)))
		}
	}
}
impl Query for QuerySingle{
	async fn resolve(self)->QueryHintResult{
		match self.script.await{
			Ok(Ok(file))=>Ok(FileHint{file,hint:ScriptHint::ModuleScript}),
			Ok(Err(e))=>Err(e),
			Err(e)=>Err(QueryResolveError::JoinError(e)),
		}
	}
}
struct QueryTriple{
	module:QueryHandle,
	server:QueryHandle,
	client:QueryHandle,
}
impl QueryTriple{
	fn rox_rojo(search_path:&PathBuf,search_name:&str,search_module:bool)->Self{
		//this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
		let module_name=if search_module{
			format!("{}.module.lua",search_name)
		}else{
			format!("{}.lua",search_name)
		};
		Self{
			module:tokio::spawn(get_file_async(search_path.clone(),module_name)),
			server:tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name))),
			client:tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name))),
		}
	}
	fn rojo(search_path:&PathBuf)->Self{
		QueryTriple::rox_rojo(search_path,"init",false)
	}
}
//these functions can be achieved with macros, but I have not learned that yet
fn mega_triple_join(query_triplet:(QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
	match query_triplet{
		//unambiguously locate file
		(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
		//multiple files located
		(Ok(_),Ok(_),Err(QueryResolveError::NotFound))
		|(Ok(_),Err(QueryResolveError::NotFound),Ok(_))
		|(Err(QueryResolveError::NotFound),Ok(_),Ok(_))
		|(Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
		//no files located
		(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
		//other error
		(Err(e),_,_)
		|(_,Err(e),_)
		|(_,_,Err(e))=>Err(e),
	}
}
//LETS GOOOOOOOOOOOOOOOO
|
|
|
|
fn mega_quadruple_join(query_quad:(QueryHintResult,QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
|
|
|
|
match query_quad{
|
|
|
|
//unambiguously locate file
|
|
|
|
(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|
|
|
|
|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|
|
|
|
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
|
|
|
|
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
|
|
|
|
//multiple files located
|
|
|
|
(Ok(_),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|
|
|
|
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|
|
|
|
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|
|
|
|
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|
|
|
|
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound))
|
|
|
|
|(Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_))
|
|
|
|
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Ok(_))
|
|
|
|
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|
|
|
|
|(Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|
|
|
|
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|
|
|
|
|(Ok(_),Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
|
|
|
|
//no files located
|
|
|
|
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
|
|
|
|
//other error
|
|
|
|
(Err(e),_,_,_)
|
|
|
|
|(_,Err(e),_,_)
|
|
|
|
|(_,_,Err(e),_)
|
|
|
|
|(_,_,_,Err(e))=>Err(e),
|
|
|
|
}
|
|
|
|
}
impl Query for QueryTriple{
	async fn resolve(self)->QueryHintResult{
		let (module,server,client)=tokio::join!(self.module,self.server,self.client);
		mega_triple_join((
			module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
			client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
		))
	}
}
struct QueryQuad{
	module_implicit:QueryHandle,
	module_explicit:QueryHandle,
	server:QueryHandle,
	client:QueryHandle,
}
impl QueryQuad{
	fn rox_rojo(search_path:&PathBuf,search_name:&str)->Self{
		let fill=QueryTriple::rox_rojo(search_path,search_name,true);
		Self{
			module_implicit:QuerySingle::rox(search_path,search_name).script,//Script.lua
			module_explicit:fill.module,//Script.module.lua
			server:fill.server,
			client:fill.client,
		}
	}
}
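//So QueryQuad::rox_rojo(path,"Thing") races four candidates:
//Thing.lua (implicit module), Thing.module.lua, Thing.server.lua and Thing.client.lua.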
impl Query for QueryQuad{
	async fn resolve(self)->QueryHintResult{
		let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit,self.module_explicit,self.server,self.client);
		mega_quadruple_join((
			module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
			client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
		))
	}
}

struct ScriptWithOverrides{
	overrides:PropertiesOverride,
	source:String,
}
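//Example header the parser below consumes (must be the very first lines of the file;
//parsing stops at the first non-matching line):
//--!Properties.Name = "SpecialName"
//--!Properties.ClassName = "LocalScript"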

fn extract_script_overrides(mut source:String)->AResult<ScriptWithOverrides>{
	let mut overrides=PropertiesOverride::default();
	let mut count=0;
	for line in source.lines(){
		//only string type properties are supported atm
		//[A-Za-z] (not [A-z], which also matches [\]^_`) keeps the first character alphabetic
		if let Some(captures)=lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-Za-z]\w*)\s*\=\s*"(\w+)"$"#)
		.captures(line){
			count+=line.len()+1;//+1 for the newline stripped by lines() (assumes LF line endings)
			match &captures[1]{
				"Name"=>overrides.name=Some(captures[2].to_owned()),
				"ClassName"=>overrides.class=Some(captures[2].to_owned()),
				other=>Err(anyhow::Error::msg(format!("Unimplemented property {other}")))?,
			}
		}else{
			break;
		}
	}
	//clamp in case the final header line has no trailing newline
	Ok(ScriptWithOverrides{overrides,source:source.split_off(count.min(source.len()))})
}
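//A minimal sanity check of the header parsing (assumes LF line endings, matching
//the +1 newline accounting above):
#[cfg(test)]
mod extract_overrides_tests{
	use super::*;
	#[test]
	fn splits_header_from_source(){
		let src="--!Properties.Name = \"Foo\"\nprint(1)\n".to_owned();
		let parsed=extract_script_overrides(src).unwrap();
		assert_eq!(parsed.overrides.name.as_deref(),Some("Foo"));
		assert_eq!(parsed.source,"print(1)\n");
	}
}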

async fn script_node(search_name:&str,mut file:FileWithName,hint:ScriptHint)->AResult<CompileNode>{
	//read entire file
	let mut buf=String::new();
	file.file.read_to_string(&mut buf).await?;
	//regex script according to Properties lines at the top
	let script_with_overrides=extract_script_overrides(buf)?;
	//script
	Ok(CompileNode{
		blacklist:Some(file.name),
		name:script_with_overrides.overrides.name.unwrap_or_else(||search_name.to_owned()),
		class:match (script_with_overrides.overrides.class.as_deref(),hint){
			(Some("ModuleScript"),_)
			|(None,ScriptHint::ModuleScript)=>CompileClass::ModuleScript(script_with_overrides.source),
			(Some("LocalScript"),_)
			|(None,ScriptHint::LocalScript)=>CompileClass::LocalScript(script_with_overrides.source),
			(Some("Script"),_)
			|(None,ScriptHint::Script)=>CompileClass::Script(script_with_overrides.source),
			other=>Err(anyhow::Error::msg(format!("Invalid hint or class {other:?}")))?,
		},
	})
}

async fn model_node(search_name:&str,mut file:FileWithName)->AResult<CompileNode>{
	//read entire file
	let mut buf=Vec::new();
	file.file.read_to_end(&mut buf).await?;
	//model
	Ok(CompileNode{
		blacklist:Some(file.name),
		name:search_name.to_owned(),//wrong but gets overwritten by internal model name
		class:CompileClass::Model(buf),
	})
}

async fn locate_override_file(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->AResult<CompileNode>{
	let contents_folder=entry.path();
	let file_name=entry.file_name();
	let search_name=file_name.to_str().unwrap();
	//scan inside the folder for an object to define the class of the folder
	let script_query=async {match style{
		Some(DecompileStyle::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
		Some(DecompileStyle::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
		Some(DecompileStyle::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
		//try all three and complain if there is ambiguity
		None=>mega_triple_join(tokio::join!(
			QuerySingle::rox(&contents_folder,search_name).resolve(),
			//true=search for module here to avoid ambiguity with QuerySingle::rox results
			QueryTriple::rox_rojo(&contents_folder,search_name,true).resolve(),
			QueryTriple::rojo(&contents_folder).resolve(),
		))
	}};
	//model files are rox & rox-rojo only, so it's a lot less work...
	let model_query=get_file_async(contents_folder.clone(),format!("{}.rbxmx",search_name));
	//model? script? both?
	Ok(match tokio::join!(script_query,model_query){
		(Ok(FileHint{file,hint}),Err(QueryResolveError::NotFound))=>script_node(search_name,file,hint).await?,
		(Err(QueryResolveError::NotFound),Ok(file))=>model_node(search_name,file).await?,
		(Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous)?,
		//neither
		(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>CompileNode{
			name:search_name.to_owned(),
			blacklist:None,
			class:CompileClass::Folder,
		},
		//other error
		(Err(e),_)
		|(_,Err(e))=>Err(e)?
	})
}
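//Illustrative rox-style layout this resolves: a directory src/Thing/ whose class is
//defined by src/Thing/Thing.lua (script) or src/Thing/Thing.rbxmx (model); with
//neither present it becomes a plain Folder.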


enum FileDiscernment{
	Model,
	Script(ScriptHint),
}

async fn discern_file(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->AResult<CompileNode>{
	let mut file_name=entry
		.file_name()
		.into_string()
		.map_err(|e|anyhow::Error::msg(format!("insane file name {e:?}")))?;
	//reject goobers
	let is_goober=match style{
		Some(DecompileStyle::Rojo)=>true,
		_=>false,
	};
	let (ext_len,file_discernment)={
		//dots are escaped so ".module.lua" cannot match an unrelated suffix like "xmodulezlua"
		if let Some(captures)=lazy_regex::regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
		.captures(file_name.as_str()){
			let ext=&captures[1];
			(ext.len(),match ext{
				".module.lua"=>{
					if is_goober{
						Err(anyhow::Error::msg(format!("File extension {ext} not supported in style {style:?}")))?;
					}
					FileDiscernment::Script(ScriptHint::ModuleScript)
				},
				".client.lua"=>FileDiscernment::Script(ScriptHint::LocalScript),
				".server.lua"=>FileDiscernment::Script(ScriptHint::Script),
				_=>panic!("Regex failed"),
			})
		}else if let Some(captures)=lazy_regex::regex!(r"^.*(\.rbxmx|\.lua)$")
		.captures(file_name.as_str()){
			let ext=&captures[1];
			(ext.len(),match ext{
				".rbxmx"=>{
					if is_goober{
						Err(anyhow::Error::msg(format!("File extension {ext} not supported in style {style:?}")))?;
					}
					FileDiscernment::Model
				},
				".lua"=>FileDiscernment::Script(ScriptHint::ModuleScript),
				_=>panic!("Regex failed"),
			})
		}else{
			return Err(anyhow::Error::msg("No file extension"));
		}
	};
	file_name.truncate(file_name.len()-ext_len);
	let file=tokio::fs::File::open(entry.path()).await?;
	Ok(match file_discernment{
		FileDiscernment::Model=>model_node(file_name.as_str(),FileWithName{file,name:file_name.clone()}).await?,
		FileDiscernment::Script(hint)=>script_node(file_name.as_str(),FileWithName{file,name:file_name.clone()},hint).await?,
	})
}
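//Extension recap for plain files: .lua and .module.lua -> ModuleScript,
//.client.lua -> LocalScript, .server.lua -> Script, .rbxmx -> Model;
//.module.lua and .rbxmx are rejected under the Rojo style.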

#[derive(Debug)]
enum ScriptHint{
	Script,
	LocalScript,
	ModuleScript,
}
struct FileHint{
	file:FileWithName,
	hint:ScriptHint,
}

enum PreparedData{
	Model(rbx_dom_weak::WeakDom),
	Builder(rbx_dom_weak::InstanceBuilder),
}

enum CompileClass{
	Folder,
	Script(String),
	LocalScript(String),
	ModuleScript(String),
	Model(Vec<u8>),
}

struct CompileNode{
	name:String,
	blacklist:Option<String>,
	class:CompileClass,
}

enum CompileStackInstruction{
	TraverseReferent(rbx_dom_weak::types::Ref,Option<String>),
	PopFolder,
}

struct CompileConfig{
	input_folder:PathBuf,
	output_file:PathBuf,
	template:Option<PathBuf>,
	style:Option<DecompileStyle>,
}

fn script_builder(class:&str,name:&str,source:String)->rbx_dom_weak::InstanceBuilder{
	let mut builder=rbx_dom_weak::InstanceBuilder::new(class);
	builder.set_name(name);
	builder.add_property("Source",rbx_dom_weak::types::Variant::String(source));
	builder
}
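//Usage is straightforward, e.g. script_builder("LocalScript","Main","print(\"hi\")".to_owned())
//yields a LocalScript instance named Main with its Source property set.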

enum TooComplicated<T>{
	Stop,
	Value(T),
	Skip,
}
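//Tri-state threaded through futures::stream::unfold below: Stop ends the stream,
//Skip yields a no-op item (name already taken), Value carries a DirEntry to compile.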

async fn compile(config:CompileConfig)->AResult<()>{
	//basically decompile in reverse order
	//load template dom
	let input={
		let template_path=config.template.unwrap_or_else(||{
			let mut template_path=config.input_folder.clone();
			template_path.push("template.rbxlx");
			template_path
		});
		//mr dom doesn't like tokio files
		std::io::BufReader::new(std::fs::File::open(template_path)?)
	};
	let mut dom=load_dom(input)?;
	//hack to traverse root folder as the root object
	dom.root_mut().name="src".to_owned();

	//add in scripts and models
	let mut folder=config.input_folder.clone();
	let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
	while let Some(instruction)=stack.pop(){
		match instruction{
			CompileStackInstruction::TraverseReferent(item_ref,blacklist)=>{
				let sans={
					let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
					sanitize(item.name.as_str()).to_string()
				};
				folder.push(sans.as_str());
				stack.push(CompileStackInstruction::PopFolder);
				//check if a folder exists with item.name
				if let Ok(dir)=tokio::fs::read_dir(folder.as_path()).await{
					let mut exist_names:std::collections::HashSet<String>={
						let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
						//push existing dom children objects onto stack (unrelated to exist_names)
						stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
						//get names of existing objects
						item.children().into_iter().map(|&child_ref|{
							let child=dom.get_by_ref(child_ref).ok_or(anyhow::Error::msg("null child ref"))?;
							Ok::<_,anyhow::Error>(sanitize(child.name.as_str()).to_string())
						}).collect::<AResult<_>>()?
					};
					if let Some(dont)=blacklist{
						exist_names.insert(dont);
					}
					//generate children from folder contents UNLESS! item already has a child of the same name

					let style=config.style;
					let exist_names=&exist_names;
					futures::stream::unfold(dir,|mut dir1|async{
						//thread the needle! follow the path that dir takes!
						let ret1={
							//capture a scoped mutable reference so we can forward dir to the next call even on an error
							let dir2=&mut dir1;
							(||async move{//error catcher so I can use ?
								let ret2=if let Some(entry)=dir2.next_entry().await?{
									//cull early even if supporting things with identical names is possible
									if exist_names.contains(entry.file_name().to_str().unwrap()){
										TooComplicated::Skip
									}else{
										TooComplicated::Value(entry)
									}
								}else{
									TooComplicated::Stop
								};
								Ok::<_,anyhow::Error>(ret2)
							})().await
						};
						match ret1{
							Ok(TooComplicated::Stop)=>None,
							Ok(TooComplicated::Skip)=>Some((Ok(None),dir1)),
							Ok(TooComplicated::Value(v))=>Some((Ok(Some(v)),dir1)),
							Err(e)=>Some((Err(e),dir1)),
						}
					})

					//gotta spawn off the worker threads (Model is slow)
					.then(|bog|async{
						match bog{
							Ok(Some(entry))=>tokio::spawn(async move{
								let met=entry.metadata().await?;
								//discern that bad boy
								let compile_class=match met.is_dir(){
									true=>locate_override_file(&entry,style).await?,
									false=>discern_file(&entry,style).await?,
								};
								//prepare data structure
								Ok(Some((compile_class.blacklist,match compile_class.class{
									CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
									CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
									CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
									CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
									CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf))?),
								})))
							}).await?,
							Ok(None)=>Ok(None),
							Err(e)=>Err(e),
						}
					})

					//is this even what I want?
					.map(|f|async{f}).buffer_unordered(32)

					//begin processing immediately
					.fold((&mut stack,&mut dom),|(stack,dom),bog:Result<_,anyhow::Error>|async{
						//push child objects onto dom serially as they arrive
						match bog{
							Ok(Some((blacklist,data)))=>{
								let referent=match data{
									PreparedData::Model(mut model_dom)=>{
										let referent=model_dom.root().children()[0];
										model_dom.transfer(referent,dom,item_ref);
										referent
									},
									PreparedData::Builder(script)=>dom.insert(item_ref,script),
								};
								//new children need to be traversed
								stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
							},
							Ok(None)=>(),
							Err(e)=>eprintln!("error lole {e:?}"),
						}
						(stack,dom)
					}).await;
				}
			},
			CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
		}
	}

	let mut output_place=config.output_file.clone();
	if output_place.extension().is_none()&&tokio::fs::try_exists(output_place.as_path()).await?{
		output_place.push("place.rbxl");
	}
	let output=std::io::BufWriter::new(std::fs::File::create(output_place)?);
	//write inner objects
	rbx_binary::to_writer(output,&dom,dom.root().children())?;
	Ok(())
}
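//Hypothetical invocation from an async context (field values are illustrative):
//compile(CompileConfig{
//	input_folder:PathBuf::from("src"),
//	output_file:PathBuf::from("build/place.rbxl"),
//	template:None,
//	style:Some(DecompileStyle::Rox),
//}).await?;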