use std::io::{Read,Seek};
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;

type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,std::path::PathBuf)>;
const CONCURRENT_REQUESTS:usize=8;

/// Parse a single key-value pair
fn parse_key_val<T,U>(s:&str)->AResult<(T,U)>
where
	T:std::str::FromStr,
	T::Err:std::error::Error+Send+Sync+'static,
	U:std::str::FromStr,
	U::Err:std::error::Error+Send+Sync+'static,
{
	let pos=s
		.find('=')
		.ok_or_else(||anyhow::Error::msg(format!("invalid KEY=value: no `=` found in `{s}`")))?;
	Ok((s[..pos].parse()?,s[pos+1..].parse()?))
}

#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version = true)]
struct Cli{
	#[arg(short,long)]
	group:Option<u64>,

	//idk how to do this better
	#[arg(long)]
	cookie_literal:Option<String>,
	#[arg(long)]
	cookie_env:Option<String>,
	#[arg(long)]
	cookie_file:Option<std::path::PathBuf>,

	#[arg(long,value_parser=parse_key_val::<AssetID,std::path::PathBuf>)]
	asset_id:Option<(AssetID,std::path::PathBuf)>,

	#[arg(short,long)]
	input:Option<std::path::PathBuf>,
	#[arg(short,long)]
	output:Option<std::path::PathBuf>,

	#[command(subcommand)]
	command:Commands,
}

#[derive(Subcommand)]
enum Commands{
	Download,
	Upload,
	Compile,
	Decompile,
}

#[derive(Args)]
struct PathBufList{
	paths:Vec<std::path::PathBuf>
}

#[tokio::main]
async fn main()->AResult<()>{
	let cli=Cli::parse();

	let cookie_enum={
		match (cli.cookie_literal,cli.cookie_env,cli.cookie_file){
			(Some(literal),None,None)=>Some(Cookie::Literal(literal)),
			(None,Some(env_var),None)=>Some(Cookie::Environment(env_var)),
			(None,None,Some(path))=>Some(Cookie::File(path)),
			(None,None,None)=>None,
			_=>return Err(anyhow::Error::msg("Cookie was specified multiple times.")),
		}
	};
	let cookie=match cookie_enum{
		Some(c)=>Some(format!(".ROBLOSECURITY={}",match c{
			Cookie::Literal(s)=>s,
			Cookie::Environment(var)=>std::env::var(var)?,
			Cookie::File(path)=>tokio::fs::read_to_string(path).await?,
		})),
		None=>None,
	};

	match cli.command{
		Commands::Download=>download_list(cookie.unwrap(),vec![cli.asset_id.unwrap()]).await,
		Commands::Upload=>upload_list(cookie.unwrap(),cli.group,vec![cli.asset_id.unwrap()]).await,
		Commands::Compile=>compile(cli.input.unwrap(),cli.output.unwrap()),
		Commands::Decompile=>decompile(cli.input.unwrap(),cli.output.unwrap()),
	}
}

enum Cookie{
	Literal(String),
	Environment(String),
	File(std::path::PathBuf),
}

enum ReaderType<'a,R:Read+Seek>{
	GZip(flate2::read::GzDecoder<&'a mut R>),
	Raw(&'a mut R),
}

fn maybe_gzip_decode<R:Read+Seek>(input:&mut R)->AResult<ReaderType<R>>{
	let mut first_2=[0u8;2];
	if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input,&mut first_2),std::io::Seek::rewind(input)){
		match &first_2{
			b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(input))),
			_=>Ok(ReaderType::Raw(input)),
		}
	}else{
		Err(anyhow::Error::msg("failed to peek"))
	}
}

async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let client=reqwest::Client::new();
	futures::stream::iter(asset_id_file_map)
	.map(|(asset_id,file)|{
		let client=&client;
		let cookie=cookie.as_str();
		let group=&group;
		async move{
			let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
			//url borrow scope
			{
				let mut query=url.query_pairs_mut();//borrow here
				query.append_pair("assetid",asset_id.to_string().as_str());
				match group{
					Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
					None=>(),
				}
			}

			let body=tokio::fs::read_to_string(file).await?;
			let mut resp=client.post(url.clone())
			.header("Cookie",cookie)
			.body(body.clone())
			.send().await?;

			//This is called a CSRF challenge apparently
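			//The endpoint responds 403 with an X-CSRF-Token header on the first POST;
			//the same request is retried once with that token attached.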
			if resp.status()==reqwest::StatusCode::FORBIDDEN{
				if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
					resp=client.post(url)
					.header("X-CSRF-Token",csrf_token)
					.header("Cookie",cookie)
					.body(body)
					.send().await?;
				}else{
					return Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"));
				}
			}

			Ok((asset_id,resp.bytes().await?))
		}
	})
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((asset_id,body))=>{
				println!("asset_id={} response.body={:?}",asset_id,body);
			},
			Err(e)=>eprintln!("ul error: {}",e),
		}
	}).await;
	Ok(())
}

fn read_readable(mut readable:impl Read)->AResult<Vec<u8>>{
	let mut contents=Vec::new();
	readable.read_to_end(&mut contents)?;
	Ok(contents)
}

async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let client=reqwest::Client::new();
	futures::stream::iter(asset_id_file_map)
	.map(|(asset_id,file)|{
		let client=&client;
		let cookie=cookie.as_str();
		async move{
			let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
			.header("Cookie",cookie)
			.send().await?;
			Ok((file,resp.bytes().await?))
		}
	})
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((dest,body))=>{
				let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
					Ok(ReaderType::GZip(readable))=>read_readable(readable),
					Ok(ReaderType::Raw(readable))=>read_readable(readable),
					Err(e)=>Err(e),
				};
				match contents{
					Ok(data)=>match tokio::fs::write(dest,data).await{
						Err(e)=>eprintln!("fs error: {}",e),
						_=>(),
					},
					Err(e)=>eprintln!("gzip error: {}",e),
				};
			},
			Err(e)=>eprintln!("dl error: {}",e),
		}
	}).await;
	Ok(())
}

fn load_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
	let mut first_8=[0u8;8];
	if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input,&mut first_8),std::io::Seek::rewind(input)){
		match &first_8[0..4]{
			b"<rob"=>{
				match &first_8[4..8]{
					b"lox!"=>return rbx_binary::from_reader(input).map_err(anyhow::Error::msg),
					b"lox "=>return rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(anyhow::Error::msg),
					other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
				}
			},
			_=>Err(anyhow::Error::msg("unsupported file type")),
		}
	}else{
		Err(anyhow::Error::msg("peek failed"))
	}
}

#[derive(PartialEq)]
enum Class{
	Folder,
	ModuleScript,
	LocalScript,
	Script,
	Model,
}

struct TreeNode{
	name:String,
	referent:Ref,
	parent:Ref,
	class:Class,
	children:Vec<Ref>,
}
impl TreeNode{
	fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
		Self{
			name,
			referent,
			parent,
			class,
			children:Vec::new(),
		}
	}
}

enum TrimStackInstruction{
	Referent(Ref),
	IncrementScript,
	DecrementScript,
}

enum WriteStackInstruction<'a>{
	Node(&'a TreeNode),
	PushFolder(String),
	PopFolder,
	Destroy(Ref),
}

#[derive(Default,serde::Deserialize,serde::Serialize)]
struct PropertiesOverride{
	//Name:Option<String>,
	ClassName:Option<String>,
}
impl PropertiesOverride{
	fn is_some(&self)->bool{
		self.ClassName.is_some()
	}
}

fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
	lazy_regex::regex!(r"[^a-zA-Z0-9._-]").replace_all(s,"_")
}

fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeNode)->AResult<()>{
	file.push(sanitize(node.name.as_str()).as_ref());
	match node.class{
		Class::Folder=>(),
		Class::ModuleScript|Class::LocalScript|Class::Script=>{
			assert!(file.set_extension("lua"),"could not set extension");
			assert!(dom.get_by_ref(node.referent).is_some_and(|item|{
				//TODO: delete disabled scripts
				if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
					std::fs::write(file,source).is_ok()
				}else{false}
			}),"some ting wong");
		},
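		//Models are not split into individual files; the whole subtree is serialized as one .rbxmx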
		Class::Model=>{
			assert!(file.set_extension("rbxmx"),"could not set extension");
			let output=std::io::BufWriter::new(std::fs::File::create(file)?);
			rbx_xml::to_writer_default(output,dom,&[node.referent])?;
		},
	}
	Ok(())
}

fn decompile(input_file:std::path::PathBuf,output_folder:std::path::PathBuf)->AResult<()>{
	//rules:
	//Class Script|LocalScript|ModuleScript->$Name.lua
	//Class Model->$Name.rbxmx
	//overrides.json per-folder [Override{name,class}]
	//Everything else goes into template.rbxlx

	//read file
	let mut input=std::io::BufReader::new(std::fs::File::open(input_file)?);
	let mut dom=load_dom(&mut input)?;

	let mut tree_refs=std::collections::HashMap::new();
	tree_refs.insert(dom.root_ref(),TreeNode::new(
		"src".to_string(),
		dom.root_ref(),
		Ref::none(),
		Class::Folder
	));

	//run rules
	let mut stack=vec![dom.root()];
	while let Some(item)=stack.pop(){
		let class=match item.class.as_str(){
			"ModuleScript"=>Class::ModuleScript,
			"LocalScript"=>Class::LocalScript,
			"Script"=>Class::Script,
			"Model"=>Class::Model,
			_=>Class::Folder,
		};
		let skip=match class{
			Class::Model=>true,
			_=>false,
		};
		if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
			let referent=item.referent();
			let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
			parent_node.children.push(referent);
			tree_refs.insert(referent,node);
		}
		if skip{
			continue;
		}
		for &referent in item.children(){
			if let Some(c)=dom.get_by_ref(referent){
				stack.push(c);
			}
		}
	}

	//trim empty folders
	let mut script_count=0;
	let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
		.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
	while let Some(instruction)=stack.pop(){
		match instruction{
			TrimStackInstruction::IncrementScript=>script_count+=1,
			TrimStackInstruction::DecrementScript=>script_count-=1,
			TrimStackInstruction::Referent(referent)=>{
				let mut delete=None;
				if let Some(node)=tree_refs.get_mut(&referent){
					if node.class==Class::Folder&&script_count!=0{
						node.class=Class::Model
					}
					if node.class==Class::Folder&&node.children.len()==0{
						delete=Some(node.parent);
					}else{
						//how the hell do I do this better without recursion
						let is_script=match node.class{
							Class::ModuleScript|Class::LocalScript|Class::Script=>true,
							_=>false,
						};
						//stack is popped from back
						if is_script{
							stack.push(TrimStackInstruction::DecrementScript);
						}
						for &child_referent in &node.children{
							stack.push(TrimStackInstruction::Referent(child_referent));
						}
						if is_script{
							stack.push(TrimStackInstruction::IncrementScript);
						}
					}
				}
				//trim referent
				if let Some(parent_ref)=delete{
					let parent_node=tree_refs.get_mut(&parent_ref)
						.expect("parent_ref does not exist in tree_refs");
					parent_node.children.remove(
						parent_node.children.iter()
						.position(|&r|r==referent)
						.expect("parent.children does not contain referent")
					);
					tree_refs.remove(&referent);
				}
			},
		}
	}

	//generate folders, models, and scripts
	//delete models and scripts from dom
	{
		let mut folder=output_folder.clone();
		let mut stack=vec![WriteStackInstruction::Node(tree_refs.get(&dom.root_ref()).unwrap())];
		while let Some(instruction)=stack.pop(){
			match instruction{
				WriteStackInstruction::PushFolder(component)=>folder.push(component),
				WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
				WriteStackInstruction::Destroy(referent)=>dom.destroy(referent),
				WriteStackInstruction::Node(node)=>{
					//properties.json to override class or other simple properties
					let mut properties=PropertiesOverride::default();
					let has_children=node.children.len()!=0;
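					//LocalScript and Script always get a ClassName override written to
					//properties.json, presumably so a later compile step can restore them;
					//a ModuleScript only needs one when it has children and therefore
					//becomes a subfolder of its own.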
					match node.class{
						Class::Folder=>(),
						Class::ModuleScript=>{
							//.lua files are ModuleScript by default
							if has_children{
								properties.ClassName=Some("ModuleScript".to_string())
							}
						},
						Class::LocalScript=>properties.ClassName=Some("LocalScript".to_string()),
						Class::Script=>properties.ClassName=Some("Script".to_string()),
						Class::Model=>(),
					}
					if has_children||properties.is_some(){
						//push temp subfolder
						let mut subfolder=folder.clone();
						subfolder.push(sanitize(node.name.as_str()).as_ref());
						//make folder
						std::fs::create_dir(subfolder.clone())?;
						//write properties
						if properties.is_some(){
							let mut file=subfolder.clone();
							file.push("properties");
							assert!(file.set_extension("json"),"could not set extension");
							std::fs::write(file,serde_json::to_string(&properties)?)?
						}
						//write item in subfolder
						write_item(&dom,subfolder,node)?;
					}else{
						//write item
						write_item(&dom,folder.clone(),node)?;
					}
					//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
					match node.class{
						Class::Folder=>(),
						_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
					}
					if has_children{
						stack.push(WriteStackInstruction::PopFolder);
						for referent in &node.children{
							if let Some(c)=tree_refs.get(referent){
								stack.push(WriteStackInstruction::Node(c));
							}
						}
						stack.push(WriteStackInstruction::PushFolder(sanitize(node.name.as_str()).to_string()));
					}
				},
			}
		}
	}

	//write what remains in template.rbxlx
	{
		let mut file=output_folder.clone();
		file.push("template");
		assert!(file.set_extension("rbxlx"),"could not set extension");
		let output=std::io::BufWriter::new(std::fs::File::create(file)?);
		rbx_xml::to_writer_default(output,&dom,&[dom.root_ref()])?;
	}
	Ok(())
}

fn compile(_folder:std::path::PathBuf,_file:std::path::PathBuf)->AResult<()>{
	Ok(())
}
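
//A minimal test sketch for the two pure helpers above (`parse_key_val` and `sanitize`);
//these tests are an addition, and the expected values are derived from the code as
//written rather than taken from the original project.
#[cfg(test)]
mod tests{
	use super::*;

	#[test]
	fn parse_key_val_splits_on_first_equals(){
		let (id,path):(AssetID,std::path::PathBuf)=parse_key_val("1234=assets/map.rbxmx").unwrap();
		assert_eq!(id,1234);
		assert_eq!(path,std::path::PathBuf::from("assets/map.rbxmx"));
		//a missing `=` is rejected
		assert!(parse_key_val::<AssetID,std::path::PathBuf>("1234").is_err());
	}

	#[test]
	fn sanitize_replaces_non_filename_characters(){
		//space and parentheses are outside [a-zA-Z0-9._-] and become underscores
		assert_eq!(sanitize("My Model (v2)"),"My_Model__v2_");
		//characters already in the allowed set pass through unchanged
		assert_eq!(sanitize("already-safe_name.lua"),"already-safe_name.lua");
	}
}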