use std::{io::Read,path::PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;
use tokio::io::AsyncReadExt;

type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;

#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
struct Cli{
	#[command(subcommand)]
	command:Commands,
}

#[derive(Subcommand)]
enum Commands{
	DownloadHistory(DownloadHistorySubcommand),
	Download(DownloadSubcommand),
	DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand),
	Create(CreateSubcommand),
	Upload(UploadSubcommand),
	Compile(CompileSubcommand),
	Decompile(DecompileSubcommand),
	DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
	DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
}

#[derive(Args)]
struct DownloadHistorySubcommand{
	#[arg(long)] asset_id:AssetID,
	#[arg(long)] cookie_type:CookieType,
	#[arg(long)] cookie:String,
	#[arg(long)] output_folder:Option<PathBuf>,
	#[arg(long)] continue_from_versions:Option<bool>,
	#[arg(long)] start_version:Option<u64>,
	#[arg(long)] end_version:Option<u64>,
}
#[derive(Args)]
struct DownloadSubcommand{
	#[arg(long)] cookie_type:CookieType,
	#[arg(long)] cookie:String,
	#[arg(long)] output_folder:Option<PathBuf>,
	#[arg(required=true)] asset_ids:Vec<AssetID>,
}
#[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{
	#[arg(long)] cookie_type:CookieType,
	#[arg(long)] cookie:String,
	#[arg(long)] output_folder:Option<PathBuf>,
	#[arg(long)] group:u64,
}
#[derive(Args)]
struct CreateSubcommand{
	#[arg(long)] cookie_type:CookieType,
	#[arg(long)] cookie:String,
	#[arg(long)] model_name:String,
	#[arg(long)] description:Option<String>,
	#[arg(long)] input_file:PathBuf,
	#[arg(long)] group:Option<u64>,
	#[arg(long)] free_model:Option<bool>,
	#[arg(long)] allow_comments:Option<bool>,
}
#[derive(Args)]
struct UploadSubcommand{
	#[arg(long)] asset_id:AssetID,
	#[arg(long)] cookie_type:CookieType,
	#[arg(long)] cookie:String,
	#[arg(long)] input_file:PathBuf,
	#[arg(long)] group:Option<u64>,
}
#[derive(Args)]
struct CompileSubcommand{
	#[arg(long)] input_folder:PathBuf,
	#[arg(long)] output_file:PathBuf,
	#[arg(long)] style:Option<DecompileStyle>,
	#[arg(long)] template:Option<PathBuf>,
}
#[derive(Args)]
struct DecompileSubcommand{
	#[arg(long)] input_file:PathBuf,
	#[arg(long)] output_folder:PathBuf,
	#[arg(long)] style:DecompileStyle,
	#[arg(long)] write_template:Option<bool>,
	#[arg(long)] write_models:Option<bool>,
	#[arg(long)] write_scripts:Option<bool>,
}
#[derive(Args)]
struct DecompileHistoryIntoGitSubcommand{
	#[arg(long)] input_folder:PathBuf,
	//currently output folder must be the current folder due to git2 limitations
	//output_folder:cli.output.unwrap(),
	#[arg(long)] style:DecompileStyle,
	#[arg(long)] git_committer_name:String,
	#[arg(long)] git_committer_email:String,
	#[arg(long)] write_template:Option<bool>,
	#[arg(long)] write_models:Option<bool>,
	#[arg(long)] write_scripts:Option<bool>,
}
#[derive(Args)]
struct DownloadAndDecompileHistoryIntoGitSubcommand{
	#[arg(long)] asset_id:AssetID,
	#[arg(long)] cookie_type:CookieType,
	#[arg(long)] cookie:String,
	//currently output folder must be the current folder due to git2 limitations
	//output_folder:cli.output.unwrap(),
	#[arg(long)] style:DecompileStyle,
	#[arg(long)] git_committer_name:String,
	#[arg(long)] git_committer_email:String,
	#[arg(long)] write_template:Option<bool>,
	#[arg(long)] write_models:Option<bool>,
	#[arg(long)] write_scripts:Option<bool>,
}

#[derive(Clone,clap::ValueEnum)]
enum CookieType{
	Literal,
	Environment,
	File,
}
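//example invocations, assuming clap's default kebab-case naming for subcommands, flags, and value enums
//(the binary name is illustrative):
//  <binary> download-history --asset-id 1234 --cookie-type environment --cookie RBXCOOKIE --output-folder history/
//  <binary> decompile --style rox --input-file place.rbxl --output-folder .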
#[derive(Clone,Copy,Debug,clap::ValueEnum)]
enum DecompileStyle{
	Rox,
	Rojo,
	RoxRojo,
}

#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
struct VersionPage{
	previousPageCursor:Option<String>,
	nextPageCursor:Option<String>,
	data:Vec<AssetVersion>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
struct AssetVersion{
	Id:u64,
	assetId:AssetID,
	assetVersionNumber:u64,
	creatorType:String,
	creatorTargetId:u64,
	creatingUniverseId:Option<u64>,
	created:chrono::DateTime<chrono::Utc>,
	isPublished:bool,
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
struct InventoryPage{
	totalResults:u64,//up to 50
	filteredKeyword:Option<String>,//""
	searchDebugInfo:Option<String>,//null
	spellCheckerResult:Option<String>,//null
	queryFacets:Option<String>,//null
	imageSearchStatus:Option<String>,//null
	previousPageCursor:Option<String>,
	nextPageCursor:Option<String>,
	data:Vec<InventoryItem>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
struct InventoryItem{
	id:u64,
	name:String,
}

#[tokio::main]
async fn main()->AResult<()>{
	let cli=Cli::parse();
	match cli.command{
		Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
			continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
			end_version:subcommand.end_version,
			start_version:subcommand.start_version.unwrap_or(0),
			output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			asset_id:subcommand.asset_id,
		}).await,
		Commands::Download(subcommand)=>{
			let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
			download_list(
				Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
				subcommand.asset_ids.into_iter().map(|asset_id|{
					let mut path=output_folder.clone();
					path.push(asset_id.to_string());
					(asset_id,path)
				}).collect()
			).await
		},
		Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
			Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			subcommand.group,
			subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
		).await,
		Commands::Create(subcommand)=>create(CreateConfig{
			cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			group:subcommand.group,
			input_file:subcommand.input_file,
			model_name:subcommand.model_name,
			description:subcommand.description.unwrap_or_default(),
			free_model:subcommand.free_model.unwrap_or(false),
			allow_comments:subcommand.allow_comments.unwrap_or(false),
		}).await,
		Commands::Upload(subcommand)=>upload_list(
			Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			subcommand.group,
			vec![(subcommand.asset_id,subcommand.input_file)]
		).await,
		Commands::Compile(subcommand)=>compile(CompileConfig{
			input_folder:subcommand.input_folder,
			output_file:subcommand.output_file,
			template:subcommand.template,
			style:subcommand.style,
		}).await,
		Commands::Decompile(subcommand)=>decompile(DecompileConfig{
			style:subcommand.style,
			input_file:subcommand.input_file,
			output_folder:subcommand.output_folder,
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
		Commands::DecompileHistoryIntoGit(subcommand)=>decompile_history_into_git(DecompileHistoryConfig{
			git_committer_name:subcommand.git_committer_name,
			git_committer_email:subcommand.git_committer_email,
			input_folder:subcommand.input_folder,
			output_folder:std::env::current_dir()?,
			style:subcommand.style,
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
		Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
			git_committer_name:subcommand.git_committer_name,
			git_committer_email:subcommand.git_committer_email,
			cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
			asset_id:subcommand.asset_id,
			output_folder:std::env::current_dir()?,
			style:subcommand.style,
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
	}
}
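//all three cookie sources below resolve to the same thing: the raw token, which gets
//wrapped into a ".ROBLOSECURITY=..." Cookie header value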
struct Cookie(String);
impl Cookie{
	async fn from_type(cookie_type:CookieType,cookie_string:String)->AResult<Self>{
		Ok(Self(format!(".ROBLOSECURITY={}",match cookie_type{
			CookieType::Literal=>cookie_string,
			CookieType::Environment=>std::env::var(cookie_string)?,
			CookieType::File=>tokio::fs::read_to_string(cookie_string).await?,
		})))
	}
}

enum ReaderType<R:Read>{
	GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
	Raw(std::io::BufReader<R>),
}

fn maybe_gzip_decode<R:Read>(input:R)->AResult<ReaderType<R>>{
	let mut buf=std::io::BufReader::new(input);
	let peek=std::io::BufRead::fill_buf(&mut buf)?;
	//gzip streams start with the two magic bytes 1f 8b
	match &peek[0..2]{
		b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
		_=>Ok(ReaderType::Raw(buf)),
	}
}

struct CreateConfig{
	cookie:String,
	model_name:String,
	description:String,
	input_file:PathBuf,
	group:Option<u64>,
	free_model:bool,
	allow_comments:bool,
}

async fn create(config:CreateConfig)->AResult<()>{
	let client=reqwest::Client::new();
	let client=&client;
	let cookie=config.cookie.as_str();
	let group=&config.group;
	let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
	//url borrow scope
	{
		let mut query=url.query_pairs_mut();//borrow here
		//archaic roblox api uses 0 for new asset
		query.append_pair("assetid","0");
		query.append_pair("name",config.model_name.as_str());
		query.append_pair("description",config.description.as_str());
		query.append_pair("ispublic",if config.free_model{"True"}else{"False"});
		query.append_pair("allowComments",if config.allow_comments{"True"}else{"False"});
		match group{
			Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
			None=>(),
		}
	}
	let body=tokio::fs::read(config.input_file).await?;
	let mut resp=client.post(url.clone())
		.header("Cookie",cookie)
		.body(body.clone())
		.send().await?;
	//This is called a CSRF challenge apparently: the first POST gets a 403 carrying the
	//token, and the request is replayed once with X-CSRF-Token attached
	if resp.status()==reqwest::StatusCode::FORBIDDEN{
		if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
			resp=client.post(url)
				.header("X-CSRF-Token",csrf_token)
				.header("Cookie",cookie)
				.body(body)
				.send().await?;
		}else{
			Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"))?;
		}
	}
	let body=match resp.status(){
		reqwest::StatusCode::OK=>Ok(resp.bytes().await?),
		other=>Err(anyhow::Error::msg(other)),
	};
	println!("response.body={:?}",body?);
	Ok(())
}
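//upload_list below uses the same 403+X-CSRF-Token retry dance as create(), but fans the
//uploads out as a stream and keeps at most CONCURRENT_REQUESTS of them in flight via
//buffer_unordered; results print in whatever order they complete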
async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let client=reqwest::Client::new();
	//this is calling map on the vec because the closure produces an iterator of futures
	futures::stream::iter(asset_id_file_map.into_iter()
		.map(|(asset_id,file)|{
			let client=&client;
			let cookie=cookie.as_str();
			let group=&group;
			async move{
				let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
				//url borrow scope
				{
					let mut query=url.query_pairs_mut();//borrow here
					query.append_pair("assetid",asset_id.to_string().as_str());
					match group{
						Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
						None=>(),
					}
				}
				let body=tokio::fs::read(file).await?;
				let mut resp=client.post(url.clone())
					.header("Cookie",cookie)
					.body(body.clone())
					.send().await?;
				//This is called a CSRF challenge apparently
				if resp.status()==reqwest::StatusCode::FORBIDDEN{
					if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
						resp=client.post(url)
							.header("X-CSRF-Token",csrf_token)
							.header("Cookie",cookie)
							.body(body)
							.send().await?;
					}else{
						Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"))?;
					}
				}
				match resp.status(){
					reqwest::StatusCode::OK=>Ok((asset_id,resp.bytes().await?)),
					other=>Err(anyhow::Error::msg(other)),
				}
			}
		}))
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((asset_id,body))=>{
				println!("asset_id={} response.body={:?}",asset_id,body);
			},
			Err(e)=>eprintln!("ul error: {}",e),
		}
	}).await;
	Ok(())
}

fn read_readable(mut readable:impl Read)->AResult<Vec<u8>>{
	let mut contents=Vec::new();
	readable.read_to_end(&mut contents)?;
	Ok(contents)
}

async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let client=reqwest::Client::new();
	futures::stream::iter(asset_id_file_map.into_iter()
		.map(|(asset_id,file)|{
			let client=&client;
			let cookie=cookie.as_str();
			async move{
				let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
					.header("Cookie",cookie)
					.send().await?;
				Ok((file,resp.bytes().await?))
			}
		}))
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((dest,body))=>{
				let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
					Ok(ReaderType::GZip(readable))=>read_readable(readable),
					Ok(ReaderType::Raw(readable))=>read_readable(readable),
					Err(e)=>Err(e),
				};
				match contents{
					Ok(data)=>match tokio::fs::write(dest,data).await{
						Err(e)=>eprintln!("fs error: {}",e),
						_=>(),
					},
					Err(e)=>eprintln!("gzip error: {}",e),
				};
			},
			Err(e)=>eprintln!("dl error: {}",e),
		}
	}).await;
	Ok(())
}

async fn download_inventory_page(client:&reqwest::Client,cookie:&str,group:u64,cursor:Option<String>)->AResult<InventoryPage>{
	let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",group).as_str())?;
	//url borrow scope
	{
		let mut query=url.query_pairs_mut();//borrow here
		match cursor.as_deref(){
			Some(next_page)=>{query.append_pair("cursor",next_page);}
			None=>(),
		}
	}
	println!("page url={}",url);
	let resp=client.get(url)
		.header("Cookie",cookie)
		.send().await?;
	Ok(resp.json::<InventoryPage>().await?)
}
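//the inventory endpoint pages with an opaque cursor: keep passing the previous
//response's nextPageCursor back in until it comes back None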
async fn get_inventory_pages(client:&reqwest::Client,cookie:&str,group:u64)->AResult<Vec<InventoryItem>>{
	let mut cursor:Option<String>=None;
	let mut asset_list=Vec::new();
	loop{
		let mut page=download_inventory_page(client,cookie,group,cursor).await?;
		asset_list.append(&mut page.data);
		if page.nextPageCursor.is_none(){
			break;
		}
		cursor=page.nextPageCursor;
	}
	Ok(asset_list)
}

async fn download_group_inventory_json(cookie:String,group:u64,output_folder:PathBuf)->AResult<()>{
	let client=reqwest::Client::new();
	let item_list=get_inventory_pages(&client,cookie.as_str(),group).await?;

	let mut path=output_folder.clone();
	path.push("versions.json");//push, not set_file_name, which would clobber the last folder component
	tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;

	Ok(())
}

async fn download_page(client:&reqwest::Client,cookie:&str,asset_id:AssetID,cursor:Option<String>)->AResult<VersionPage>{
	let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",asset_id).as_str())?;
	//url borrow scope
	{
		let mut query=url.query_pairs_mut();//borrow here
		//query.append_pair("sortOrder","Asc");
		//query.append_pair("limit","100");
		//query.append_pair("count","100");
		match cursor.as_deref(){
			Some(next_page)=>{query.append_pair("cursor",next_page);}
			None=>(),
		}
	}
	println!("page url={}",url);
	let resp=client.get(url)
		.header("Cookie",cookie)
		.send().await?;
	Ok(resp.json::<VersionPage>().await?)
}

async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
	let mut cursor:Option<String>=None;
	let mut asset_list=Vec::new();
	loop{
		let mut page=download_page(client,cookie,asset_id,cursor).await?;
		asset_list.append(&mut page.data);
		if page.nextPageCursor.is_none(){
			break;
		}
		cursor=page.nextPageCursor;
	}
	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
	Ok(asset_list)
}

async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str:&str,asset_version_str:&str)->AResult<reqwest::Response>{
	let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
	//url borrow scope
	{
		let mut query=url.query_pairs_mut();//borrow here
		query.append_pair("ID",asset_id_str);
		query.append_pair("version",asset_version_str);
	}
	println!("download url={}",url);
	for i in 0..8{
		let resp=client.get(url.clone())
			.header("Cookie",cookie)
			.send().await?;

		if !resp.status().is_success(){
			println!("request {} failed",i);
			continue;
		}

		return Ok(resp);
	}
	Err(anyhow::Error::msg("all requests failed"))
}

struct DownloadHistoryConfig{
	continue_from_versions:bool,
	end_version:Option<u64>,
	start_version:u64,
	output_folder:PathBuf,
	cookie:String,
	asset_id:AssetID,
}
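//resume support: versions.json records what was already downloaded. Scanning down from
//the highest recorded version, the first missing number becomes end_version (everything
//above it exists already); if nothing is missing, downloading resumes at max+1 instead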
async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
	let mut asset_list_contents=std::collections::HashSet::new();
	let mut asset_list:Vec<AssetVersion>=Vec::new();
	if config.end_version.is_none()&&config.continue_from_versions{
		//load preexisting versions list
		let mut versions_path=config.output_folder.clone();
		versions_path.push("versions.json");
		match std::fs::File::open(versions_path){
			Ok(versions_file)=>asset_list.append(&mut serde_json::from_reader(versions_file)?),
			Err(e)=>match e.kind(){
				std::io::ErrorKind::NotFound=>Err(anyhow::Error::msg("Cannot continue from versions.json - file does not exist"))?,
				_=>Err(e)?,
			}
		}
		//write down which versions are contained
		for asset_version in &asset_list{
			asset_list_contents.insert(asset_version.assetVersionNumber);
		}
		//find the highest number
		match asset_list.iter().map(|asset_version|asset_version.assetVersionNumber).max(){
			Some(max)=>{
				//count down contiguously until a number is missing
				for i in (1..=max).rev(){
					if !asset_list_contents.contains(&i){
						//that is end_version
						config.end_version=Some(i);
						break;
					}
				}
				//if all versions are contained, set start_version to the max + 1
				if config.end_version.is_none(){
					config.start_version=max+1;
				}
			},
			None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
		}
	}
	let client=reqwest::Client::new();

	let asset_id_string=config.asset_id.to_string();

	//limit concurrent downloads
	let mut join_set=tokio::task::JoinSet::new();

	//poll paged list of all asset versions
	let mut cursor:Option<String>=None;
	loop{
		let mut page=download_page(&client,config.cookie.as_str(),config.asset_id,cursor).await?;
		let client=&client;
		let cookie=config.cookie.clone();
		let asset_id_str=asset_id_string.clone();
		let output_folder=config.output_folder.clone();
		let data=&page.data;
		let asset_list_contents=&asset_list_contents;
		let join_set=&mut join_set;
		let error_catcher=||async move{
			let mut cancel_paging=false;
			for asset_version in data{
				let version_number=asset_version.assetVersionNumber;
				//skip assets beyond specified end_version
				if config.end_version.is_some_and(|v|v<version_number){
					continue;
				}
				//pages run newest-first, so dropping below start_version means the rest can be skipped
				if version_number<config.start_version{
					cancel_paging=true;
					continue;
				}
				//skip assets that are already downloaded
				if asset_list_contents.contains(&version_number){
					continue;
				}
				let client=client.clone();
				let cookie=cookie.clone();
				let asset_id_str=asset_id_str.clone();
				let mut path=output_folder.clone();
				path.push(format!("{}_v{}.rbxl",asset_id_str,version_number));
				join_set.spawn(async move{
					let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),version_number.to_string().as_str()).await?;
					let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
						ReaderType::GZip(readable)=>read_readable(readable)?,
						ReaderType::Raw(readable)=>read_readable(readable)?,
					};
					tokio::fs::write(path,contents).await?;
					Ok::<_,anyhow::Error>(())
				});
			}
			Ok::<_,anyhow::Error>(cancel_paging)
		};
		let cancel_paging=match error_catcher().await{
			Ok(cancel)=>cancel,
			Err(e)=>{
				println!("download error: {}",e);
				//cancel download and write versions
				true
			},
		};
		if page.nextPageCursor.is_none()||cancel_paging{
			for asset_version in page.data.into_iter(){
				if !(asset_list_contents.contains(&asset_version.assetVersionNumber)
					||config.end_version.is_some_and(|v|v<asset_version.assetVersionNumber)
					||asset_version.assetVersionNumber<config.start_version){
					asset_list.push(asset_version);
				}
			}
			break;
		}else{
			for asset_version in page.data.into_iter(){
				if !asset_list_contents.contains(&asset_version.assetVersionNumber){
					asset_list.push(asset_version);
				}
			}
			cursor=page.nextPageCursor;
		}
	}

	//sort versions and write out the updated versions.json
	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
	let mut versions_path=config.output_folder.clone();
	versions_path.push("versions.json");
	tokio::fs::write(versions_path,serde_json::to_string(&asset_list)?).await?;

	//wait for all downloads to complete
	while let Some(result)=join_set.join_next().await{
		result??;
	}

	Ok(())
}

fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
	let mut buf=std::io::BufReader::new(input);
	let peek=std::io::BufRead::fill_buf(&mut buf)?;
	//binary places start with "<roblox!", xml places with "<roblox "
	match &peek[0..4]{
		b"<rob"=>{
			match &peek[4..8]{
				b"lox!"=>return rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
				b"lox "=>return rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
				other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
			}
		},
		_=>Err(anyhow::Error::msg("unsupported file type")),
	}
}

#[derive(PartialEq)]
enum Class{
	Folder,
	ModuleScript,
	LocalScript,
	Script,
	Model,
}

struct TreeNode{
	name:String,
	referent:Ref,
	parent:Ref,
	class:Class,
	children:Vec<Ref>,
}
impl TreeNode{
	fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
		Self{
			name,
			referent,
			parent,
			class,
			children:Vec::new(),
		}
	}
}

enum TrimStackInstruction{
	Referent(Ref),
	IncrementScript,
	DecrementScript,
}

enum WriteStackInstruction<'a>{
	Node(&'a TreeNode,u32),//(Node,NameTally)
	PushFolder(String),
	PopFolder,
	Destroy(Ref),
}

#[derive(Default)]
struct PropertiesOverride{
	name:Option<String>,
	class:Option<String>,
}
impl PropertiesOverride{
	fn is_some(&self)->bool{
		self.name.is_some()
		||self.class.is_some()
	}
}
impl std::fmt::Display for PropertiesOverride{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		if let Some(name)=self.name.as_deref(){
			writeln!(f,"--!Properties.Name = \"{}\"",name)?;
		}
		if let Some(class)=self.class.as_deref(){
			writeln!(f,"--!Properties.ClassName = \"{}\"",class)?;
		}
		Ok(())
	}
}

fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
	lazy_regex::regex!(r"[^A-z0-9.-]").replace_all(s,"_")
}
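//overrides are serialized as a comment header at the top of the script, e.g.
//  --!Properties.Name = "SomeName"
//  --!Properties.ClassName = "LocalScript"
//extract_script_overrides() parses this exact header back out at compile time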
fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
	file.push(sanitize(node_name_override.as_str()).as_ref());
	match node.class{
		Class::Folder=>(),
		Class::ModuleScript|Class::LocalScript|Class::Script=>{
			if !write_scripts{
				return Ok(())
			}
			//set extension
			match style{
				DecompileStyle::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
				DecompileStyle::RoxRojo|DecompileStyle::Rojo=>{
					match properties.class.as_deref(){
						Some("LocalScript")=>{
							file.set_extension("client.lua");
							properties.class=None;
						},
						Some("Script")=>{
							file.set_extension("server.lua");
							properties.class=None;
						},
						// Some("ModuleScript")=>{
						// 	file.set_extension("module");
						// 	properties.class=None;
						// },
						None=>assert!(file.set_extension("lua"),"could not set extension"),
						Some(other)=>Err(anyhow::Error::msg(format!("Attempt to write a {} as a script",other)))?,
					}
				}
			}
			if let Some(item)=dom.get_by_ref(node.referent){
				//TODO: delete disabled scripts
				if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
					if properties.is_some(){
						//rox style
						let source=properties.to_string()+source.as_str();
						std::fs::write(file,source)?;
					}else{
						std::fs::write(file,source)?;
					}
				}
			}
		},
		Class::Model=>{
			if !write_models{
				return Ok(())
			}
			assert!(file.set_extension("rbxmx"));
			let output=std::io::BufWriter::new(std::fs::File::create(file)?);
			rbx_xml::to_writer_default(output,dom,&[node.referent])?;
		},
	}
	Ok(())
}
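//decompilation is two passes over the dom: classify every instance into a TreeNode,
//then trim - empty folders are deleted, and folders living inside a script subtree
//(script_count!=0) are rewritten as models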
struct DecompiledContext{
	dom:rbx_dom_weak::WeakDom,
	tree_refs:std::collections::HashMap<Ref,TreeNode>,
}

fn generate_decompiled_context<R:Read>(input:R)->AResult<DecompiledContext>{
	let dom=load_dom(input)?;

	let mut tree_refs=std::collections::HashMap::new();
	tree_refs.insert(dom.root_ref(),TreeNode::new(
		"src".to_owned(),
		dom.root_ref(),
		Ref::none(),
		Class::Folder
	));

	//run rules
	let mut stack=vec![dom.root()];
	while let Some(item)=stack.pop(){
		let class=match item.class.as_str(){
			"ModuleScript"=>Class::ModuleScript,
			"LocalScript"=>Class::LocalScript,
			"Script"=>Class::Script,
			"Model"=>Class::Model,
			_=>Class::Folder,
		};
		let skip=match class{
			Class::Model=>true,
			_=>false,
		};
		if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
			let referent=item.referent();
			let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
			parent_node.children.push(referent);
			tree_refs.insert(referent,node);
		}
		//look no further, turn this node and all its children into a model
		if skip{
			continue;
		}
		for &referent in item.children(){
			if let Some(c)=dom.get_by_ref(referent){
				stack.push(c);
			}
		}
	}

	//trim empty folders
	let mut script_count=0;
	let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
		.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
	while let Some(instruction)=stack.pop(){
		match instruction{
			TrimStackInstruction::IncrementScript=>script_count+=1,
			TrimStackInstruction::DecrementScript=>script_count-=1,
			TrimStackInstruction::Referent(referent)=>{
				let mut delete=None;
				if let Some(node)=tree_refs.get_mut(&referent){
					if node.class==Class::Folder&&script_count!=0{
						node.class=Class::Model
					}
					if node.class==Class::Folder&&node.children.len()==0{
						delete=Some(node.parent);
					}else{
						//how the hell do I do this better without recursion
						let is_script=match node.class{
							Class::ModuleScript|Class::LocalScript|Class::Script=>true,
							_=>false,
						};
						//stack is popped from back
						if is_script{
							stack.push(TrimStackInstruction::DecrementScript);
						}
						for &child_referent in &node.children{
							stack.push(TrimStackInstruction::Referent(child_referent));
						}
						if is_script{
							stack.push(TrimStackInstruction::IncrementScript);
						}
					}
				}
				//trim referent
				if let Some(parent_ref)=delete{
					let parent_node=tree_refs.get_mut(&parent_ref)
						.expect("parent_ref does not exist in tree_refs");
					parent_node.children.remove(
						parent_node.children.iter()
							.position(|&r|r==referent)
							.expect("parent.children does not contain referent")
					);
					tree_refs.remove(&referent);
				}
			},
		}
	}

	Ok(DecompiledContext{
		dom,
		tree_refs,
	})
}

struct WriteConfig{
	style:DecompileStyle,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<()>{
	let mut write_queue=Vec::new();
	let mut destroy_queue=Vec::new();

	let mut name_tally=std::collections::HashMap::<String,u32>::new();
	let mut folder=config.output_folder.clone();
	let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)];
	while let Some(instruction)=stack.pop(){
		match instruction{
			WriteStackInstruction::PushFolder(component)=>folder.push(component),
			WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
			WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
			WriteStackInstruction::Node(node,name_count)=>{
				//track properties that must be overridden to compile folder structure back into a place file
				let mut properties=PropertiesOverride::default();
				let has_children=node.children.len()!=0;
				match node.class{
					Class::Folder=>(),
					Class::ModuleScript=>(),//.lua files are ModuleScript by default
					Class::LocalScript=>properties.class=Some("LocalScript".to_owned()),
					Class::Script=>properties.class=Some("Script".to_owned()),
					Class::Model=>(),
				}
				//disambiguate duplicate sibling names with the name tally
				let name_override=if 0<name_count{
					format!("{}_{}",node.name,name_count)
				}else{
					node.name.clone()
				};
				if has_children{
					//write item in subfolder
					let mut subfolder=folder.clone();
					subfolder.push(sanitize(name_override.as_str()).as_ref());
					let name_final=match config.style{
						DecompileStyle::Rox
						|DecompileStyle::RoxRojo=>name_override.clone(),
						DecompileStyle::Rojo=>"init".to_owned(),
					};
					write_queue.push((subfolder,node,name_final,properties,config.style));
				}else{
					//write item
					write_queue.push((folder.clone(),node,name_override.clone(),properties,config.style));
				}
				//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
				match node.class{
					Class::Folder=>(),
					_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
				}
				if has_children{
					stack.push(WriteStackInstruction::PopFolder);
					name_tally.clear();
					for referent in &node.children{
						if let Some(c)=context.tree_refs.get(referent){
							let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
							stack.push(WriteStackInstruction::Node(c,*v));
						}
					}
					stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
				}
			},
		}
	}

	//run the write queue in parallel
	{
		let dom=&context.dom;
		let write_models=config.write_models;
		let write_scripts=config.write_scripts;
		let results:Vec<AResult<()>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
			write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
		}));
		for result in results{
			result?;
		}
	}

	//run the destroy
	for destroy_ref in destroy_queue{
		context.dom.destroy(destroy_ref);
	}

	//write what remains in template.rbxlx
	if config.write_template{
		let mut file=config.output_folder.clone();
		file.push("template");
		assert!(file.set_extension("rbxlx"));
		let output=std::io::BufWriter::new(std::fs::File::create(file)?);
		rbx_xml::to_writer_default(output,&context.dom,context.dom.root().children())?;
	}
	Ok(())
}
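//write_files drives the folder layout with an explicit stack instead of recursion:
//nodes with children become directories (Rojo style names the node's own file init.lua),
//and the queued file writes run in parallel under rayon while dom mutation stays serial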
struct DecompileConfig{
	style:DecompileStyle,
	input_file:PathBuf,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn decompile(config:DecompileConfig)->AResult<()>{
	//rules:
	//Class Script|LocalScript|ModuleScript->$Name.lua
	//Class Model->$Name.rbxmx
	//overrides.json per-folder [Override{name,class}]
	//Everything else goes into template.rbxlx

	//read file
	let context=generate_decompiled_context(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?;

	//generate folders, models, and scripts
	//delete models and scripts from dom
	write_files(WriteConfig{
		style:config.style,
		output_folder:config.output_folder,
		write_template:config.write_template,
		write_models:config.write_models,
		write_scripts:config.write_scripts,
	},context).await?;

	Ok(())
}
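//history-to-git: each asset version becomes one commit. The work tree is cleaned and
//rewritten per version, everything is staged with add_all/update_all, and the new tree
//is diffed against HEAD first so unchanged versions don't produce empty commits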
struct WriteCommitConfig{
	git_committer_name:String,
	git_committer_email:String,
	output_folder:PathBuf,
	style:DecompileStyle,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,DecompiledContext)>,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{
	let (asset_version,context)=b??;

	println!("writing files for version {}",asset_version.assetVersionNumber);

	//clean output dir
	if config.write_models||config.write_scripts{
		let mut src=config.output_folder.clone();
		src.push("src");
		match std::fs::remove_dir_all(src){
			Ok(())=>(),
			Err(e)=>println!("remove_dir_all src failed {}",e),
		}
	}
	if config.write_template{
		let mut template=config.output_folder.clone();
		template.push("template.rbxlx");
		match std::fs::remove_file(template){
			Ok(())=>(),
			Err(e)=>println!("remove_file template.rbxlx failed {}",e),
		}
	}

	//write files
	write_files(WriteConfig{
		style:config.style,
		output_folder:config.output_folder.clone(),
		write_template:config.write_template,
		write_models:config.write_models,
		write_scripts:config.write_scripts,
	},context).await?;

	let date=asset_version.created;
	//let sig=repo.signature()?;//this pulls default name and email
	let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0)).unwrap();
	let tree_id={
		let mut tree_index=repo.index()?;
		match tree_index.add_all(std::iter::once(config.output_folder.as_path()),git2::IndexAddOption::DEFAULT,None){
			Ok(_)=>(),
			Err(e)=>println!("tree_index.add_all error: {}",e),
		}
		match tree_index.update_all(std::iter::once(config.output_folder.as_path()),None){
			Ok(_)=>(),
			Err(e)=>println!("tree_index.update_all error: {}",e),
		}
		tree_index.write()?;
		tree_index.write_tree()?
	};
	let tree=repo.find_tree(tree_id)?;

	let mut parents=Vec::new();
	match repo.head(){
		Ok(reference)=>{
			let commit=reference.peel_to_commit()?;
			//test tree against commit tree to see if there are any changes
			let commit_tree=commit.tree()?;
			let diff=repo.diff_tree_to_tree(Some(&commit_tree),Some(&tree),None)?;
			if diff.get_delta(0).is_none(){
				println!("no changes");
				return Ok(());
			}
			parents.push(commit);
		},
		Err(e)=>println!("repo head error {:?}",e),
	};

	repo.commit(
		Some("HEAD"),//update_ref
		&sig,//author
		&sig,//committer
		&format!("v{}",asset_version.assetVersionNumber),//message
		&tree,//tree (basically files)
		parents.iter().collect::<Vec<&git2::Commit>>().as_slice(),//parents
	)?;

	Ok(())
}

struct DecompileHistoryConfig{
	git_committer_name:String,
	git_committer_email:String,
	input_folder:PathBuf,
	style:DecompileStyle,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
	//use preexisting versions list
	let mut versions_path=config.input_folder.clone();
	versions_path.push("versions.json");
	let asset_list:Vec<AssetVersion>=serde_json::from_reader(std::fs::File::open(versions_path)?)?;

	let repo=git2::Repository::init(config.output_folder.as_path())?;

	//decompile all versions
	futures::stream::iter(asset_list.into_iter()
		.map(|asset_version|{
			let mut file_path=config.input_folder.clone();
			tokio::task::spawn_blocking(move||{
				file_path.push(format!("{}_v{}.rbxl",asset_version.assetId,asset_version.assetVersionNumber));
				let file=std::fs::File::open(file_path)?;
				let contents=generate_decompiled_context(file)?;
				Ok::<_,anyhow::Error>((asset_version,contents))
			})
		}))
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			style:config.style,
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			Err(e)=>println!("decompile/write/commit error: {}",e),
		}
	}).await;

	Ok(())
}
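//same pipeline but fed from the network: decode work is spawned onto tasks, and
//.buffered (not buffer_unordered) preserves version order, which matters because each
//result becomes the next commit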
struct DownloadAndDecompileHistoryConfig{
	cookie:String,
	asset_id:AssetID,
	git_committer_name:String,
	git_committer_email:String,
	style:DecompileStyle,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
	let client=reqwest::Client::new();

	//poll paged list of all asset versions
	let asset_list=get_version_history(&client,config.cookie.as_str(),config.asset_id).await?;

	let repo=git2::Repository::init(config.output_folder.clone())?;

	//download all versions
	let asset_id_string=config.asset_id.to_string();
	futures::stream::iter(asset_list.into_iter()
		.map(|asset_version|{
			let client=client.clone();
			let cookie=config.cookie.clone();
			let asset_id_str=asset_id_string.clone();
			tokio::task::spawn(async move{
				let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),asset_version.assetVersionNumber.to_string().as_str()).await?;
				let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
					ReaderType::GZip(readable)=>generate_decompiled_context(readable)?,
					ReaderType::Raw(readable)=>generate_decompiled_context(readable)?,
				};
				Ok::<_,anyhow::Error>((asset_version,contents))
			})
		}))
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			style:config.style,
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			Err(e)=>println!("download/unzip/decompile/write/commit error: {}",e),
		}
	}).await;

	Ok(())
}

//holy smokes what am I doing lmao
//This giant machine is supposed to search for files according to style rules
//e.g. ScriptName.server.lua or init.lua
//Obviously I got carried away
//I could use an enum!
//I could use a struct!
//I could use a trait!
//I could use an error!
//I could use a match!
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
enum QueryResolveError{
	NotFound,//0 results
	Ambiguous,//>1 results
	JoinError(tokio::task::JoinError),
	IO(std::io::Error),
}
impl std::fmt::Display for QueryResolveError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for QueryResolveError{}

struct FileWithName{
	file:tokio::fs::File,
	name:String,
}

async fn get_file_async(mut path:PathBuf,file_name:impl AsRef<std::path::Path>)->Result<FileWithName,QueryResolveError>{
	let name=file_name.as_ref().to_str().unwrap().to_owned();
	path.push(file_name);
	match tokio::fs::File::open(path).await{
		Ok(file)=>Ok(FileWithName{file,name}),
		Err(e)=>match e.kind(){
			std::io::ErrorKind::NotFound=>Err(QueryResolveError::NotFound),
			_=>Err(QueryResolveError::IO(e)),
		},
	}
}

type QueryHintResult=Result<FileHint,QueryResolveError>;
trait Query{
	async fn resolve(self)->QueryHintResult;
}

type QueryHandle=tokio::task::JoinHandle<Result<FileWithName,QueryResolveError>>;

struct QuerySingle{
	script:QueryHandle,
}
impl QuerySingle{
	fn rox(search_path:&PathBuf,search_name:&str)->Self{
		Self{
			script:tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name)))
		}
	}
}
impl Query for QuerySingle{
	async fn resolve(self)->QueryHintResult{
		match self.script.await{
			Ok(Ok(file))=>Ok(FileHint{file,hint:ScriptHint::ModuleScript}),
			Ok(Err(e))=>Err(e),
			Err(e)=>Err(QueryResolveError::JoinError(e)),
		}
	}
}

struct QueryTriple{
	module:QueryHandle,
	server:QueryHandle,
	client:QueryHandle,
}
impl QueryTriple{
	fn rox_rojo(search_path:&PathBuf,search_name:&str,search_module:bool)->Self{
		//this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
		let module_name=if search_module{
			format!("{}.module.lua",search_name)
		}else{
			format!("{}.lua",search_name)
		};
		Self{
			module:tokio::spawn(get_file_async(search_path.clone(),module_name)),
			server:tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name))),
			client:tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name))),
		}
	}
	fn rojo(search_path:&PathBuf)->Self{
		QueryTriple::rox_rojo(search_path,"init",false)
	}
}
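//join semantics for the query resolvers below: exactly one Ok wins, more than one Ok is
//Ambiguous, all NotFound is NotFound, any other error propagates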
//these functions can be achieved with macros, but I have not learned that yet
fn mega_triple_join(query_triplet:(QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
	match query_triplet{
		//unambiguously locate file
		(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
		//multiple files located
		(Ok(_),Ok(_),Err(QueryResolveError::NotFound))
		|(Ok(_),Err(QueryResolveError::NotFound),Ok(_))
		|(Err(QueryResolveError::NotFound),Ok(_),Ok(_))
		|(Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
		//no files located
		(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
		//other error
		(Err(e),_,_)
		|(_,Err(e),_)
		|(_,_,Err(e))=>Err(e),
	}
}

//LETS GOOOOOOOOOOOOOOOO
fn mega_quadruple_join(query_quad:(QueryHintResult,QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
	match query_quad{
		//unambiguously locate file
		(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
		//multiple files located
		(Ok(_),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
		|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
		|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
		|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound))
		|(Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_))
		|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Ok(_))
		|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
		|(Ok(_),Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
		//no files located
		(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
		//other error
		(Err(e),_,_,_)
		|(_,Err(e),_,_)
		|(_,_,Err(e),_)
		|(_,_,_,Err(e))=>Err(e),
	}
}

impl Query for QueryTriple{
	async fn resolve(self)->QueryHintResult{
		let (module,server,client)=tokio::join!(self.module,self.server,self.client);
		mega_triple_join((
			module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
			client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
		))
	}
}

struct QueryQuad{
	module_implicit:QueryHandle,
	module_explicit:QueryHandle,
	server:QueryHandle,
	client:QueryHandle,
}
impl QueryQuad{
	fn rox_rojo(search_path:&PathBuf,search_name:&str)->Self{
		let fill=QueryTriple::rox_rojo(search_path,search_name,true);
		Self{
			module_implicit:QuerySingle::rox(search_path,search_name).script,//Script.lua
			module_explicit:fill.module,//Script.module.lua
			server:fill.server,
			client:fill.client,
		}
	}
}
impl Query for QueryQuad{
	async fn resolve(self)->QueryHintResult{
		let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit,self.module_explicit,self.server,self.client);
		mega_quadruple_join((
			module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
			client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
		))
	}
}
struct ScriptWithOverrides{
	overrides:PropertiesOverride,
	source:String,
}

fn extract_script_overrides(mut source:String)->AResult<ScriptWithOverrides>{
	let mut overrides=PropertiesOverride::default();
	let mut count=0;
	for line in source.lines(){
		//only string type properties are supported atm
		if let Some(captures)=lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-z]\w*)\s*\=\s*"(\w+)"$"#)
			.captures(line){
			count+=line.len()+1;//+1 for the newline that lines() strips
			match &captures[1]{
				"Name"=>overrides.name=Some(captures[2].to_owned()),
				"ClassName"=>overrides.class=Some(captures[2].to_owned()),
				other=>Err(anyhow::Error::msg(format!("Unimplemented property {other}")))?,
			}
		}else{
			break;
		}
	}
	Ok(ScriptWithOverrides{overrides,source:source.split_off(count)})
}

async fn script_node(search_name:&str,mut file:FileWithName,hint:ScriptHint)->AResult<CompileNode>{
	//read entire file
	let mut buf=String::new();
	file.file.read_to_string(&mut buf).await?;

	//regex script according to Properties lines at the top
	let script_with_overrides=extract_script_overrides(buf)?;

	//script
	Ok(CompileNode{
		blacklist:Some(file.name),
		name:script_with_overrides.overrides.name.unwrap_or_else(||search_name.to_owned()),
		class:match (script_with_overrides.overrides.class.as_deref(),hint){
			(Some("ModuleScript"),_)
			|(None,ScriptHint::ModuleScript)=>CompileClass::ModuleScript(script_with_overrides.source),
			(Some("LocalScript"),_)
			|(None,ScriptHint::LocalScript)=>CompileClass::LocalScript(script_with_overrides.source),
			(Some("Script"),_)
			|(None,ScriptHint::Script)=>CompileClass::Script(script_with_overrides.source),
			other=>Err(anyhow::Error::msg(format!("Invalid hint or class {other:?}")))?,
		},
	})
}

async fn model_node(search_name:&str,mut file:FileWithName)->AResult<CompileNode>{
	//read entire file
	let mut buf=Vec::new();
	file.file.read_to_end(&mut buf).await?;

	//model
	Ok(CompileNode{
		blacklist:Some(file.name),
		name:search_name.to_owned(),//wrong but gets overwritten by internal model name
		class:CompileClass::Model(buf),
	})
}
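//a folder's class is decided by a marker file inside it (Name.lua, Name.module.lua,
//init.lua, or Name.rbxmx depending on style); if no marker resolves it stays a plain Folder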
async fn locate_override_file(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->AResult<CompileNode>{
	let contents_folder=entry.path();
	let file_name=entry.file_name();
	let search_name=file_name.to_str().unwrap();
	//scan inside the folder for an object to define the class of the folder
	let script_query=async {match style{
		Some(DecompileStyle::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
		Some(DecompileStyle::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
		Some(DecompileStyle::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
		//try all three and complain if there is ambiguity
		None=>mega_triple_join(tokio::join!(
			QuerySingle::rox(&contents_folder,search_name).resolve(),
			//true=search for module here to avoid ambiguity with QuerySingle::rox results
			QueryTriple::rox_rojo(&contents_folder,search_name,true).resolve(),
			QueryTriple::rojo(&contents_folder).resolve(),
		))
	}};
	//model files are rox & rox-rojo only, so it's a lot less work...
	let model_query=get_file_async(contents_folder.clone(),format!("{}.rbxmx",search_name));
	//model? script? both?
	Ok(match tokio::join!(script_query,model_query){
		(Ok(FileHint{file,hint}),Err(QueryResolveError::NotFound))=>script_node(search_name,file,hint).await?,
		(Err(QueryResolveError::NotFound),Ok(file))=>model_node(search_name,file).await?,
		(Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous)?,
		//neither
		(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>CompileNode{
			name:search_name.to_owned(),
			blacklist:None,
			class:CompileClass::Folder,
		},
		//other error
		(Err(e),_)
		|(_,Err(e))=>Err(e)?
	})
}

enum FileDiscernment{
	Model,
	Script(ScriptHint),
}

async fn discern_file(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->AResult<CompileNode>{
	let mut file_name=entry
		.file_name()
		.into_string()
		.map_err(|e|anyhow::Error::msg(format!("insane file name {e:?}")))?;
	//reject goobers
	let is_goober=match style{
		Some(DecompileStyle::Rojo)=>true,
		_=>false,
	};
	let (ext_len,file_discernment)={
		//dots escaped so e.g. "xmodule.lua" does not match ".module.lua"
		if let Some(captures)=lazy_regex::regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
			.captures(file_name.as_str()){
			let ext=&captures[1];
			(ext.len(),match ext{
				".module.lua"=>{
					if is_goober{
						Err(anyhow::Error::msg(format!("File extension {ext} not supported in style {style:?}")))?;
					}
					FileDiscernment::Script(ScriptHint::ModuleScript)
				},
				".client.lua"=>FileDiscernment::Script(ScriptHint::LocalScript),
				".server.lua"=>FileDiscernment::Script(ScriptHint::Script),
				_=>panic!("Regex failed"),
			})
		}else if let Some(captures)=lazy_regex::regex!(r"^.*(\.rbxmx|\.lua)$")
			.captures(file_name.as_str()){
			let ext=&captures[1];
			(ext.len(),match ext{
				".rbxmx"=>{
					if is_goober{
						Err(anyhow::Error::msg(format!("File extension {ext} not supported in style {style:?}")))?;
					}
					FileDiscernment::Model
				},
				".lua"=>FileDiscernment::Script(ScriptHint::ModuleScript),
				_=>panic!("Regex failed"),
			})
		}else{
			return Err(anyhow::Error::msg("No file extension"));
		}
	};
	file_name.truncate(file_name.len()-ext_len);
	let file=tokio::fs::File::open(entry.path()).await?;
	Ok(match file_discernment{
		FileDiscernment::Model=>model_node(file_name.as_str(),FileWithName{file,name:file_name.clone()}).await?,
		FileDiscernment::Script(hint)=>script_node(file_name.as_str(),FileWithName{file,name:file_name.clone()},hint).await?,
	})
}

#[derive(Debug)]
enum ScriptHint{
	Script,
	LocalScript,
	ModuleScript,
}
struct FileHint{
	file:FileWithName,
	hint:ScriptHint,
}

enum PreparedData{
	Model(rbx_dom_weak::WeakDom),
	Builder(rbx_dom_weak::InstanceBuilder),
}

enum CompileClass{
	Folder,
	Script(String),
	LocalScript(String),
	ModuleScript(String),
	Model(Vec<u8>),
}

struct CompileNode{
	name:String,
	blacklist:Option<String>,
	class:CompileClass,
}

enum CompileStackInstruction{
	TraverseReferent(rbx_dom_weak::types::Ref,Option<String>),
	PopFolder,
}

struct CompileConfig{
	input_folder:PathBuf,
	output_file:PathBuf,
	template:Option<PathBuf>,
	style:Option<DecompileStyle>,
}

fn script_builder(class:&str,name:&str,source:String)->rbx_dom_weak::InstanceBuilder{
	let mut builder=rbx_dom_weak::InstanceBuilder::new(class);
	builder.set_name(name);
	builder.add_property("Source",rbx_dom_weak::types::Variant::String(source));
	builder
}

enum TooComplicated<T>{
	Stop,
	Value(T),
	Skip,
}
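//compile streams each directory alongside its dom counterpart: stream::unfold yields
//entries (TooComplicated is a three-way control value - stop the stream, skip an entry
//that already exists in the dom, or emit it), parsing is spawned onto tasks, and the
//final fold inserts results into the dom serially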
async fn compile(config:CompileConfig)->AResult<()>{
	//basically decompile in reverse order

	//load template dom
	let input={
		let template_path=config.template.unwrap_or_else(||{
			let mut template_path=config.input_folder.clone();
			template_path.push("template.rbxlx");
			template_path
		});
		//mr dom doesn't like tokio files
		std::io::BufReader::new(std::fs::File::open(template_path)?)
	};
	let mut dom=load_dom(input)?;

	//hack to traverse root folder as the root object
	dom.root_mut().name="src".to_owned();

	//add in scripts and models
	let mut folder=config.input_folder.clone();
	let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
	while let Some(instruction)=stack.pop(){
		match instruction{
			CompileStackInstruction::TraverseReferent(item_ref,blacklist)=>{
				let sans={
					let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
					sanitize(item.name.as_str()).to_string()
				};
				folder.push(sans.as_str());
				stack.push(CompileStackInstruction::PopFolder);
				//check if a folder exists with item.name
				if let Ok(dir)=tokio::fs::read_dir(folder.as_path()).await{
					let mut exist_names:std::collections::HashSet<String>={
						let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
						//push existing dom children objects onto stack (unrelated to exist_names)
						stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
						//get names of existing objects
						item.children().into_iter().map(|&child_ref|{
							let child=dom.get_by_ref(child_ref).ok_or(anyhow::Error::msg("null child ref"))?;
							Ok::<_,anyhow::Error>(sanitize(child.name.as_str()).to_string())
						}).collect::<AResult<std::collections::HashSet<String>>>()?
					};
					if let Some(dont)=blacklist{
						exist_names.insert(dont);
					}
					//generate children from folder contents UNLESS! item already has a child of the same name
					let style=config.style;
					let exist_names=&exist_names;
					futures::stream::unfold(dir,|mut dir1|async{
						//thread the needle! follow the path that dir takes!
						let ret1={
							//capture a scoped mutable reference so we can forward dir to the next call even on an error
							let dir2=&mut dir1;
							(||async move{//error catcher so I can use ?
								let ret2=if let Some(entry)=dir2.next_entry().await?{
									//cull early even if supporting things with identical names is possible
									if exist_names.contains(entry.file_name().to_str().unwrap()){
										TooComplicated::Skip
									}else{
										TooComplicated::Value(entry)
									}
								}else{
									TooComplicated::Stop
								};
								Ok::<_,anyhow::Error>(ret2)
							})().await
						};
						match ret1{
							Ok(TooComplicated::Stop)=>None,
							Ok(TooComplicated::Skip)=>Some((Ok(None),dir1)),
							Ok(TooComplicated::Value(v))=>Some((Ok(Some(v)),dir1)),
							Err(e)=>Some((Err(e),dir1)),
						}
					})
					//gotta spawn off the worker threads (Model is slow)
					.then(|bog|async{
						match bog{
							Ok(Some(entry))=>tokio::spawn(async move{
								let met=entry.metadata().await?;
								//discern that bad boy
								let compile_class=match met.is_dir(){
									true=>locate_override_file(&entry,style).await?,
									false=>discern_file(&entry,style).await?,
								};
								//prepare data structure
								Ok(Some((compile_class.blacklist,match compile_class.class{
									CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
									CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
									CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
									CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
									CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf))?),
								})))
							}).await?,
							Ok(None)=>Ok(None),
							Err(e)=>Err(e),
						}
					})
					//is this even what I want?
					.map(|f|async{f}).buffer_unordered(32)
					//begin processing immediately
					.fold((&mut stack,&mut dom),|(stack,dom),bog:Result<_,anyhow::Error>|async{
						//push child objects onto dom serially as they arrive
						match bog{
							Ok(Some((blacklist,data)))=>{
								let referent=match data{
									PreparedData::Model(mut model_dom)=>{
										let referent=model_dom.root().children()[0];
										model_dom.transfer(referent,dom,item_ref);
										referent
									},
									PreparedData::Builder(script)=>dom.insert(item_ref,script),
								};
								//new children need to be traversed
								stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
							},
							Ok(None)=>(),
							Err(e)=>println!("error lole {e:?}"),
						}
						(stack,dom)
					}).await;
				}
			},
			CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
		}
	}

	let mut output_place=config.output_file.clone();
	if output_place.extension().is_none()&&tokio::fs::try_exists(output_place.as_path()).await?{
		output_place.push("place.rbxl");
	}
	let output=std::io::BufWriter::new(std::fs::File::create(output_place)?);

	//write inner objects
	rbx_binary::to_writer(output,&dom,dom.root().children())?;

	Ok(())
}