diff --git a/src/main.rs b/src/main.rs
index 5c71a24..d67f5c8 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -233,21 +233,11 @@ async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<
 	Ok(())
 }
 
-struct DownloadHistoryConfig{
-	output_folder:std::path::PathBuf,
-	cookie:String,
-	asset_id:AssetID,
-}
-
-async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
-	let client=reqwest::Client::new();
-	let asset_id_string=config.asset_id.to_string();
-
-	//poll paged list of all asset versions
+async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
 	let mut cursor:Option<String>=None;
 	let mut asset_list=Vec::new();
 	loop{
-		let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str())?;
+		let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",asset_id).as_str())?;
 		//url borrow scope
 		{
 			let mut query=url.query_pairs_mut();//borrow here
@@ -261,7 +251,7 @@ async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
 		}
 		println!("page url={}",url);
 		let resp=client.get(url)
-			.header("Cookie",config.cookie.clone())
+			.header("Cookie",cookie)
 			.send().await?;
 		match resp.json::<VersionPage>().await{
 			Ok(mut page)=>{
@@ -275,11 +265,50 @@ async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
 			}
 		}
 	}
 	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
+	Ok(asset_list)
+}
+
+async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str:&str,asset_version_str:&str)->AResult<impl std::io::Read+std::io::Seek>{
+	let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
+	//url borrow scope
+	{
+		let mut query=url.query_pairs_mut();//borrow here
+		query.append_pair("ID",asset_id_str);
+		query.append_pair("version",asset_version_str);
+	}
+	println!("download url={}",url);
+	for i in 0..8{
+		let resp=client.get(url.clone())
+			.header("Cookie",cookie)
+			.send().await?;
+
+		if !resp.status().is_success(){
+			println!("request {} failed",i);
+			continue;
+		}
+
+		return Ok(std::io::Cursor::new(resp.bytes().await?));
+	}
+	Err(anyhow::Error::msg("all requests failed"))
+}
+
+struct DownloadHistoryConfig{
+	output_folder:std::path::PathBuf,
+	cookie:String,
+	asset_id:AssetID,
+}
+
+async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
+	let client=reqwest::Client::new();
+
+	//poll paged list of all asset versions
+	let asset_list=get_version_history(&client,&config.cookie.as_str(),config.asset_id).await?;
 	let mut path=config.output_folder.clone();
 	path.set_file_name("versions.json");
 	tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;
 
 	//download all versions
+	let asset_id_string=config.asset_id.to_string();
 	futures::stream::iter(asset_list)
 	.map(|asset_version|{
 		let client=&client;
@@ -287,49 +316,27 @@ async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
 		let asset_id_str=asset_id_string.as_str();
 		let output_folder=config.output_folder.clone();
 		async move{
-			let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
-			//url borrow scope
-			{
-				let mut query=url.query_pairs_mut();//borrow here
-				query.append_pair("ID",asset_id_str);
-				query.append_pair("version",asset_version.assetVersionNumber.to_string().as_str());
-			}
-			println!("download url={}",url);
-			let mut result=Err(anyhow::Error::msg("all requests failed"));
-			for i in 1..=8{
-				let resp=client.get(url.clone())
-					.header("Cookie",cookie)
-					.send().await?;
+			let mut readable=download_asset_version(client,cookie,asset_id_str,asset_version.assetVersionNumber.to_string().as_str()).await?;
 
-				if !resp.status().is_success(){
-					println!("request {} failed",i);
-					continue;
-				}
+			let contents=match maybe_gzip_decode(&mut readable)?{
+				ReaderType::GZip(readable)=>read_readable(readable)?,
+				ReaderType::Raw(readable)=>read_readable(readable)?,
+			};
 
-				let mut path=output_folder;
-				path.set_file_name(format!("{}_v{}.rbxl",config.asset_id,asset_version.assetVersionNumber));
-				result=Ok((path,resp.bytes().await?));
-				break;
-			}
-			result
+			let mut path=output_folder;
+			path.set_file_name(format!("{}_v{}.rbxl",config.asset_id,asset_version.assetVersionNumber));
+
+			Ok((path,contents))
 		}
 	})
 	.buffer_unordered(CONCURRENT_REQUESTS)
 	.for_each(|b:AResult<_>|async{
 		match b{
-			Ok((dest,body))=>{
-				let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
-					Ok(ReaderType::GZip(readable))=>read_readable(readable),
-					Ok(ReaderType::Raw(readable))=>read_readable(readable),
-					Err(e)=>Err(e),
-				};
-				match contents{
-					Ok(data)=>match tokio::fs::write(dest,data).await{
-						Err(e)=>eprintln!("fs error: {}",e),
-						_=>(),
-					},
-					Err(e)=>eprintln!("gzip error: {}",e),
-				};
+			Ok((dest,data))=>{
+				match tokio::fs::write(dest,data).await{
+					Err(e)=>eprintln!("fs error: {}",e),
+					_=>(),
+				}
 			},
 			Err(e)=>eprintln!("dl error: {}",e),
 		}
@@ -439,24 +446,13 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeN
 	Ok(())
 }
 
-struct DecompileConfig{
-	input_file:std::path::PathBuf,
-	output_folder:std::path::PathBuf,
-	write_template:bool,
-	write_models:bool,
-	write_scripts:bool,
+struct DecompiledContext{
+	dom:rbx_dom_weak::WeakDom,
+	tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
 }
 
-fn decompile(config:DecompileConfig)->AResult<()>{
-	//rules:
-	//Class Script|LocalScript|ModuleScript->$Name.lua
-	//Class Model->$Name.rbxmx
-	//overrides.json per-folder [Override{name,class}]
-	//Everything else goes into template.rbxlx
-
-	//read file
-	let mut input=std::io::BufReader::new(std::fs::File::open(config.input_file)?);
-	let mut dom=load_dom(&mut input)?;
+fn generate_decompiled_context<R:std::io::Read+std::io::Seek>(mut input:R)->AResult<DecompiledContext>{
+	let dom=load_dom(&mut input)?;
 
 	let mut tree_refs=std::collections::HashMap::new();
 	tree_refs.insert(dom.root_ref(),TreeNode::new(
@@ -546,17 +542,28 @@ fn decompile(config:DecompileConfig)->AResult<()>{
 		}
 	}
 
-	//generate folders, models, and scripts
-	//delete models and scripts from dom
+	Ok(DecompiledContext{
+		dom,
+		tree_refs,
+	})
+}
+struct WriteConfig{
+	output_folder:std::path::PathBuf,
+	write_template:bool,
+	write_models:bool,
+	write_scripts:bool,
+}
+
+fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<()>{
 
 	let mut name_tally=std::collections::HashMap::<String,u32>::new();
 	let mut folder=config.output_folder.clone();
-	let mut stack=vec![WriteStackInstruction::Node(tree_refs.get(&dom.root_ref()).unwrap(),0)];
+	let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)];
 	while let Some(instruction)=stack.pop(){
 		match instruction{
 			WriteStackInstruction::PushFolder(component)=>folder.push(component),
 			WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
-			WriteStackInstruction::Destroy(referent)=>dom.destroy(referent),
+			WriteStackInstruction::Destroy(referent)=>context.dom.destroy(referent),
 			WriteStackInstruction::Node(node,name_count)=>{
 				//properties.json to override class or other simple properties
 				let mut properties=PropertiesOverride::default();
@@ -592,10 +599,10 @@ fn decompile(config:DecompileConfig)->AResult<()>{
 						std::fs::write(file,serde_json::to_string(&properties)?)?
 					}
 					//write item in subfolder
-					write_item(&dom,subfolder,node,name_override.as_str(),config.write_models,config.write_scripts)?;
+					write_item(&context.dom,subfolder,node,name_override.as_str(),config.write_models,config.write_scripts)?;
 				}else{
 					//write item
-					write_item(&dom,folder.clone(),node,name_override.as_str(),config.write_models,config.write_scripts)?;
+					write_item(&context.dom,folder.clone(),node,name_override.as_str(),config.write_models,config.write_scripts)?;
 				}
 				//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
 				match node.class{
@@ -606,7 +613,7 @@ fn decompile(config:DecompileConfig)->AResult<()>{
 				stack.push(WriteStackInstruction::PopFolder);
 				name_tally.clear();
 				for referent in &node.children{
-					if let Some(c)=tree_refs.get(referent){
+					if let Some(c)=context.tree_refs.get(referent){
 						let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
 						stack.push(WriteStackInstruction::Node(c,*v));
 					}
@@ -623,12 +630,42 @@ fn decompile(config:DecompileConfig)->AResult<()>{
 		file.push("template");
 		assert!(file.set_extension("rbxlx"));
 		let output=std::io::BufWriter::new(std::fs::File::create(file)?);
-		rbx_xml::to_writer_default(output,&dom,&[dom.root_ref()])?;
+		rbx_xml::to_writer_default(output,&context.dom,&[context.dom.root_ref()])?;
 	}
 
 	Ok(())
 }
 
+struct DecompileConfig{
+	input_file:std::path::PathBuf,
+	output_folder:std::path::PathBuf,
+	write_template:bool,
+	write_models:bool,
+	write_scripts:bool,
+}
+
+fn decompile(config:DecompileConfig)->AResult<()>{
+	//rules:
+	//Class Script|LocalScript|ModuleScript->$Name.lua
+	//Class Model->$Name.rbxmx
+	//overrides.json per-folder [Override{name,class}]
+	//Everything else goes into template.rbxlx
+
+	//read file
+	let context=generate_decompiled_context(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?;
+
+	//generate folders, models, and scripts
+	//delete models and scripts from dom
+	write_files(WriteConfig{
+		output_folder:config.output_folder,
+		write_template:config.write_template,
+		write_models:config.write_models,
+		write_scripts:config.write_scripts,
+	},context)?;
+
+	Ok(())
+}
+
 fn compile(_folder:std::path::PathBuf,_file:std::path::PathBuf)->AResult<()>{
 	Ok(())
 }