use std::io::Read;
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;

type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,std::path::PathBuf)>;
//limits on in-flight futures for the buffered/buffer_unordered stream adapters below
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;

#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version = true)]
struct Cli{
	#[arg(short,long)]
	group:Option<u64>,
	//TODO: the three cookie sources are mutually exclusive,this could be a clap ArgGroup
	#[arg(long)]
	cookie_literal:Option<String>,
	#[arg(long)]
	cookie_env:Option<String>,
	#[arg(long)]
	cookie_file:Option<std::path::PathBuf>,

	#[arg(long)]
	no_models:Option<bool>,
	#[arg(long)]
	no_scripts:Option<bool>,
	#[arg(long)]
	no_template:Option<bool>,

	#[arg(long)]
	git_committer_name:Option<String>,
	#[arg(long)]
	git_committer_email:Option<String>,

	#[arg(long)]
	asset_id:Option<AssetID>,

	#[arg(short,long)]
	input:Option<std::path::PathBuf>,

	#[arg(short,long)]
	output:Option<std::path::PathBuf>,

	#[command(subcommand)]
	command:Commands,
}

#[derive(Subcommand)]
enum Commands{
	DownloadHistory,
	Download,
	Upload,
	Compile,
	Decompile,
	DecompileHistoryIntoGit,
	DownloadAndDecompileHistoryIntoGit,
}
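
//Example invocations,assuming the binary is named `asset-tool` and using
//clap's default kebab-case subcommand names (the asset id is hypothetical):
// asset-tool download-history --asset-id 1234 --cookie-file cookie.txt --output ./history
// asset-tool decompile --input place.rbxl --output ./project --no-models true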

#[derive(Args)]
struct PathBufList{
	paths:Vec<std::path::PathBuf>
}

//field names match the Roblox API json,hence the allow
#[allow(non_snake_case)]
#[derive(serde::Deserialize)]
struct VersionPage{
	previousPageCursor:Option<String>,
	nextPageCursor:Option<String>,
	data:Vec<AssetVersion>,
}
#[allow(non_snake_case)]
#[derive(serde::Deserialize,serde::Serialize)]
struct AssetVersion{
	Id:u64,
	assetId:AssetID,
	assetVersionNumber:u64,
	creatorType:String,
	creatorTargetId:u64,
	creatingUniverseId:Option<u64>,
	created:chrono::DateTime<chrono::Utc>,
	isPublished:bool,
}
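
//One element of VersionPage::data as serde sees it (values are illustrative,
//not real API output):
// {"Id":123,"assetId":1234,"assetVersionNumber":1,"creatorType":"User",
//  "creatorTargetId":42,"creatingUniverseId":null,
//  "created":"2024-01-01T00:00:00Z","isPublished":true}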

#[tokio::main]
async fn main()->AResult<()>{
	let cli=Cli::parse();

	let cookie_enum={
		match (cli.cookie_literal,cli.cookie_env,cli.cookie_file){
			(Some(literal),None,None)=>Some(Cookie::Literal(literal)),
			(None,Some(env_var),None)=>Some(Cookie::Environment(env_var)),
			(None,None,Some(path))=>Some(Cookie::File(path)),
			(None,None,None)=>None,
			_=>return Err(anyhow::Error::msg("Cookie was specified multiple times.")),
		}
	};
	let cookie=match cookie_enum{
		Some(c)=>Some(format!(".ROBLOSECURITY={}",match c{
			Cookie::Literal(s)=>s,
			Cookie::Environment(var)=>std::env::var(var)?,
			Cookie::File(path)=>tokio::fs::read_to_string(path).await?,
		})),
		None=>None,
	};

	match cli.command{
		Commands::DownloadHistory=>download_history(DownloadHistoryConfig{
			output_folder:cli.output.unwrap(),
			cookie:cookie.unwrap(),
			asset_id:cli.asset_id.unwrap(),
		}).await,
		Commands::Download=>download_list(cookie.unwrap(),vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
		Commands::Upload=>upload_list(cookie.unwrap(),cli.group,vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
		Commands::Compile=>compile(cli.input.unwrap(),cli.output.unwrap()),
		Commands::Decompile=>decompile(DecompileConfig{
			input_file:cli.input.unwrap(),
			output_folder:cli.output.unwrap(),
			write_template:!cli.no_template.unwrap_or(false),
			write_models:!cli.no_models.unwrap_or(false),
			write_scripts:!cli.no_scripts.unwrap_or(false),
		}).await,
		Commands::DecompileHistoryIntoGit=>decompile_history_into_git(DecompileHistoryConfig{
			git_committer_name:cli.git_committer_name.unwrap(),
			git_committer_email:cli.git_committer_email.unwrap(),
			input_folder:cli.input.unwrap(),
			output_folder:cli.output.unwrap(),
			write_template:!cli.no_template.unwrap_or(false),
			write_models:!cli.no_models.unwrap_or(false),
			write_scripts:!cli.no_scripts.unwrap_or(false),
		}).await,
		Commands::DownloadAndDecompileHistoryIntoGit=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
			git_committer_name:cli.git_committer_name.unwrap(),
			git_committer_email:cli.git_committer_email.unwrap(),
			cookie:cookie.unwrap(),
			asset_id:cli.asset_id.unwrap(),
			output_folder:cli.output.unwrap(),
			write_template:!cli.no_template.unwrap_or(false),
			write_models:!cli.no_models.unwrap_or(false),
			write_scripts:!cli.no_scripts.unwrap_or(false),
		}).await,
	}
}

enum Cookie{
	Literal(String),
	Environment(String),
	File(std::path::PathBuf),
}
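
//How the variants line up with the flags on Cli (only one cookie source may
//be given;the env var name and file name here are hypothetical):
// --cookie-literal <TOKEN>   -> Cookie::Literal
// --cookie-env ROBLOSECURITY -> Cookie::Environment (reads the named env var)
// --cookie-file cookie.txt   -> Cookie::File (reads the file contents)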

enum ReaderType<R:Read>{
	GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
	Raw(std::io::BufReader<R>),
}

fn maybe_gzip_decode<R:Read>(input:R)->AResult<ReaderType<R>>{
	let mut buf=std::io::BufReader::new(input);
	let peek=std::io::BufRead::fill_buf(&mut buf)?;
	//peek.get avoids a panic when the input is shorter than the 2-byte gzip magic
	match peek.get(0..2){
		Some(b"\x1f\x8b")=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
		_=>Ok(ReaderType::Raw(buf)),
	}
}
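
//A minimal in-memory check of the magic-byte sniffing (test-only,does not
//affect the binary;1f 8b is the standard gzip magic):
#[cfg(test)]
mod gzip_sniff_tests{
	use super::*;
	#[test]
	fn detects_gzip_magic(){
		match maybe_gzip_decode(std::io::Cursor::new(vec![0x1f,0x8b,0x08,0x00])).unwrap(){
			ReaderType::GZip(_)=>(),
			ReaderType::Raw(_)=>panic!("expected GZip"),
		}
		match maybe_gzip_decode(std::io::Cursor::new(b"<roblox!".to_vec())).unwrap(){
			ReaderType::Raw(_)=>(),
			ReaderType::GZip(_)=>panic!("expected Raw"),
		}
	}
}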

async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let client=reqwest::Client::new();
	futures::stream::iter(asset_id_file_map.into_iter()
	.map(|(asset_id,file)|{
		let client=&client;
		let cookie=cookie.as_str();
		let group=&group;
		async move{
			let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
			//url borrow scope
			{
				let mut query=url.query_pairs_mut();//borrow here
				query.append_pair("assetid",asset_id.to_string().as_str());
				match group{
					Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
					None=>(),
				}
			}

			let body=tokio::fs::read_to_string(file).await?;
			let mut resp=client.post(url.clone())
			.header("Cookie",cookie)
			.body(body.clone())
			.send().await?;

			//CSRF challenge:Roblox rejects the first write with 403 and hands back
			//a token;retry the same request once with X-CSRF-Token attached
			if resp.status()==reqwest::StatusCode::FORBIDDEN{
				if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
					resp=client.post(url)
					.header("X-CSRF-Token",csrf_token)
					.header("Cookie",cookie)
					.body(body)
					.send().await?;
				}else{
					return Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"));
				}
			}

			Ok((asset_id,resp.bytes().await?))
		}
	}))
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((asset_id,body))=>{
				println!("asset_id={} response.body={:?}",asset_id,body);
			},
			Err(e)=>eprintln!("ul error: {}",e),
		}
	}).await;
	Ok(())
}

fn read_readable(mut readable:impl Read)->AResult<Vec<u8>>{
	let mut contents=Vec::new();
	readable.read_to_end(&mut contents)?;
	Ok(contents)
}
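
//Typical pairing of the two helpers above (a sketch;`input` is any Read):
// let contents=match maybe_gzip_decode(input)?{
// 	ReaderType::GZip(readable)=>read_readable(readable)?,
// 	ReaderType::Raw(readable)=>read_readable(readable)?,
// };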

async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
	let client=reqwest::Client::new();
	futures::stream::iter(asset_id_file_map.into_iter()
	.map(|(asset_id,file)|{
		let client=&client;
		let cookie=cookie.as_str();
		async move{
			let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
			.header("Cookie",cookie)
			.send().await?;
			Ok((file,resp.bytes().await?))
		}
	}))
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((dest,body))=>{
				let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
					Ok(ReaderType::GZip(readable))=>read_readable(readable),
					Ok(ReaderType::Raw(readable))=>read_readable(readable),
					Err(e)=>Err(e),
				};
				match contents{
					Ok(data)=>match tokio::fs::write(dest,data).await{
						Err(e)=>eprintln!("fs error: {}",e),
						_=>(),
					},
					Err(e)=>eprintln!("gzip error: {}",e),
				};
			},
			Err(e)=>eprintln!("dl error: {}",e),
		}
	}).await;
	Ok(())
}

async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
	let mut cursor:Option<String>=None;
	let mut asset_list=Vec::new();
	loop{
		let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",asset_id).as_str())?;
		//url borrow scope
		{
			let mut query=url.query_pairs_mut();//borrow here
			//query.append_pair("sortOrder","Asc");
			//query.append_pair("limit","100");
			//query.append_pair("count","100");
			match &cursor{
				Some(next_page)=>{query.append_pair("cursor",next_page);}
				None=>(),
			}
		}
		println!("page url={}",url);
		let resp=client.get(url)
		.header("Cookie",cookie)
		.send().await?;
		match resp.json::<VersionPage>().await{
			Ok(mut page)=>{
				asset_list.append(&mut page.data);
				if page.nextPageCursor.is_none(){
					break;
				}
				cursor=page.nextPageCursor;
			},
			Err(e)=>return Err(e.into()),
		}
	}
	//history must be applied oldest-first,so sort ascending by version number
	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
	Ok(asset_list)
}

async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str:&str,asset_version_str:&str)->AResult<reqwest::Response>{
	let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
	//url borrow scope
	{
		let mut query=url.query_pairs_mut();//borrow here
		query.append_pair("ID",asset_id_str);
		query.append_pair("version",asset_version_str);
	}
	println!("download url={}",url);
	//retry up to 8 times on non-success statuses
	for i in 0..8{
		let resp=client.get(url.clone())
		.header("Cookie",cookie)
		.send().await?;

		if !resp.status().is_success(){
			println!("request {} failed",i);
			continue;
		}

		return Ok(resp);
	}
	Err(anyhow::Error::msg("all requests failed"))
}

struct DownloadHistoryConfig{
	output_folder:std::path::PathBuf,
	cookie:String,
	asset_id:AssetID,
}

async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
	let client=reqwest::Client::new();

	//poll paged list of all asset versions
	let asset_list=get_version_history(&client,config.cookie.as_str(),config.asset_id).await?;
	let mut path=config.output_folder.clone();
	path.set_file_name("versions.json");
	tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;

	//download all versions
	let asset_id_string=config.asset_id.to_string();
	futures::stream::iter(asset_list)
	.map(|asset_version|{
		let client=&client;
		let cookie=config.cookie.as_str();
		let asset_id_str=asset_id_string.as_str();
		let output_folder=config.output_folder.clone();
		async move{
			let resp=download_asset_version(client,cookie,asset_id_str,asset_version.assetVersionNumber.to_string().as_str()).await?;
			let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
				ReaderType::GZip(readable)=>read_readable(readable)?,
				ReaderType::Raw(readable)=>read_readable(readable)?,
			};

			let mut path=output_folder;
			path.set_file_name(format!("{}_v{}.rbxl",config.asset_id,asset_version.assetVersionNumber));

			Ok((path,contents))
		}
	})
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((dest,data))=>{
				match tokio::fs::write(dest,data).await{
					Err(e)=>eprintln!("fs error: {}",e),
					_=>(),
				}
			},
			Err(e)=>eprintln!("dl error: {}",e),
		}
	}).await;
	Ok(())
}

fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
	let mut buf=std::io::BufReader::new(input);
	let peek=std::io::BufRead::fill_buf(&mut buf)?;
	//peek.get avoids a panic when fewer than 8 header bytes are available;
	//binary places start with "<roblox!",xml places with "<roblox "
	match peek.get(0..8){
		Some(b"<roblox!")=>rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
		Some(b"<roblox ")=>rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
		Some(other)=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
		None=>Err(anyhow::Error::msg("file too short to identify")),
	}
}

#[derive(PartialEq)]
enum Class{
	Folder,
	ModuleScript,
	LocalScript,
	Script,
	Model,
}

struct TreeNode{
	name:String,
	referent:Ref,
	parent:Ref,
	class:Class,
	children:Vec<Ref>,
}
impl TreeNode{
	fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
		Self{
			name,
			referent,
			parent,
			class,
			children:Vec::new(),
		}
	}
}

enum TrimStackInstruction{
	Referent(Ref),
	IncrementScript,
	DecrementScript,
}
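
//The Increment/Decrement pair is how the trim pass below tracks "am I inside
//a script subtree" without recursion:before a script's children are pushed,
//DecrementScript is pushed,then the children,then IncrementScript;since the
//stack pops from the back,the counter is raised while the subtree is being
//processed and lowered on the way out.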

enum WriteStackInstruction<'a>{
	Node(&'a TreeNode,u32),//(Node,NameTally)
	PushFolder(String),
	PopFolder,
	Destroy(Ref),
}

#[allow(non_snake_case)]
#[derive(Default,serde::Deserialize,serde::Serialize)]
struct PropertiesOverride{
	//Name:Option<String>,
	ClassName:Option<String>,
}
impl PropertiesOverride{
	fn is_some(&self)->bool{
		self.ClassName.is_some()
	}
}
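
//What an emitted properties.json looks like for a Script node (illustrative):
// {"ClassName":"Script"}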

fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
	lazy_regex::regex!(r"[^a-zA-Z0-9._-]").replace_all(s,"_")
}
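
//Illustrative test of the sanitizer (test-only;the expected strings follow
//from the regex above:anything outside [a-zA-Z0-9._-] becomes an underscore):
#[cfg(test)]
mod sanitize_tests{
	use super::sanitize;
	#[test]
	fn replaces_invalid_characters(){
		assert_eq!(sanitize("Hello World!"),"Hello_World_");
		assert_eq!(sanitize("model.v2_final-copy"),"model.v2_final-copy");
	}
}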

fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeNode,node_name_override:String,write_models:bool,write_scripts:bool)->AResult<()>{
	file.push(sanitize(node_name_override.as_str()).as_ref());
	match node.class{
		Class::Folder=>(),
		Class::ModuleScript|Class::LocalScript|Class::Script=>{
			if !write_scripts{
				return Ok(())
			}
			assert!(file.set_extension("lua"),"could not set extension");
			if let Some(item)=dom.get_by_ref(node.referent){
				//TODO: delete disabled scripts
				if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
					std::fs::write(file,source)?;
				}
			}
		},
		Class::Model=>{
			if !write_models{
				return Ok(())
			}
			assert!(file.set_extension("rbxmx"),"could not set extension");
			let output=std::io::BufWriter::new(std::fs::File::create(file)?);
			rbx_xml::to_writer_default(output,dom,&[node.referent])?;
		},
	}
	Ok(())
}

struct DecompiledContext{
	dom:rbx_dom_weak::WeakDom,
	tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
}

fn generate_decompiled_context<R:Read>(input:R)->AResult<DecompiledContext>{
	let dom=load_dom(input)?;

	let mut tree_refs=std::collections::HashMap::new();
	tree_refs.insert(dom.root_ref(),TreeNode::new(
		"src".to_string(),
		dom.root_ref(),
		Ref::none(),
		Class::Folder
	));

	//run rules
	let mut stack=vec![dom.root()];
	while let Some(item)=stack.pop(){
		let class=match item.class.as_str(){
			"ModuleScript"=>Class::ModuleScript,
			"LocalScript"=>Class::LocalScript,
			"Script"=>Class::Script,
			"Model"=>Class::Model,
			_=>Class::Folder,
		};
		let skip=match class{
			Class::Model=>true,
			_=>false,
		};
		if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
			let referent=item.referent();
			let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
			parent_node.children.push(referent);
			tree_refs.insert(referent,node);
		}
		//look no further,turn this node and all its children into a model
		if skip{
			continue;
		}
		for &referent in item.children(){
			if let Some(c)=dom.get_by_ref(referent){
				stack.push(c);
			}
		}
	}

	//trim empty folders
	let mut script_count=0;
	let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
	.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
	while let Some(instruction)=stack.pop(){
		match instruction{
			TrimStackInstruction::IncrementScript=>script_count+=1,
			TrimStackInstruction::DecrementScript=>script_count-=1,
			TrimStackInstruction::Referent(referent)=>{
				let mut delete=None;
				if let Some(node)=tree_refs.get_mut(&referent){
					if node.class==Class::Folder&&script_count!=0{
						node.class=Class::Model
					}
					if node.class==Class::Folder&&node.children.len()==0{
						delete=Some(node.parent);
					}else{
						//how the hell do I do this better without recursion
						let is_script=match node.class{
							Class::ModuleScript|Class::LocalScript|Class::Script=>true,
							_=>false,
						};
						//stack is popped from back
						if is_script{
							stack.push(TrimStackInstruction::DecrementScript);
						}
						for &child_referent in &node.children{
							stack.push(TrimStackInstruction::Referent(child_referent));
						}
						if is_script{
							stack.push(TrimStackInstruction::IncrementScript);
						}
					}
				}
				//trim referent
				if let Some(parent_ref)=delete{
					let parent_node=tree_refs.get_mut(&parent_ref)
					.expect("parent_ref does not exist in tree_refs");
					parent_node.children.remove(
						parent_node.children.iter()
						.position(|&r|r==referent)
						.expect("parent.children does not contain referent")
					);
					tree_refs.remove(&referent);
				}
			},
		}
	}

	Ok(DecompiledContext{
		dom,
		tree_refs,
	})
}

struct WriteConfig{
	output_folder:std::path::PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<()>{
	let mut write_queue=Vec::new();
	let mut destroy_queue=Vec::new();

	let mut name_tally=std::collections::HashMap::<String,u32>::new();
	let mut folder=config.output_folder.clone();
	let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)];
	while let Some(instruction)=stack.pop(){
		match instruction{
			WriteStackInstruction::PushFolder(component)=>folder.push(component),
			WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
			WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
			WriteStackInstruction::Node(node,name_count)=>{
				//properties.json to override class or other simple properties
				let mut properties=PropertiesOverride::default();
				let has_children=node.children.len()!=0;
				match node.class{
					Class::Folder=>(),
					Class::ModuleScript=>{
						//.lua files are ModuleScript by default
						if has_children{
							properties.ClassName=Some("ModuleScript".to_string())
						}
					},
					Class::LocalScript=>properties.ClassName=Some("LocalScript".to_string()),
					Class::Script=>properties.ClassName=Some("Script".to_string()),
					Class::Model=>(),
				}
				let name_override=if 0<name_count{
					format!("{}_{}",node.name,name_count)
				}else{
					node.name.clone()
				};
				if has_children||properties.is_some(){
					//push temp subfolder
					let mut subfolder=folder.clone();
					subfolder.push(sanitize(name_override.as_str()).as_ref());
					//make folder
					tokio::fs::create_dir(subfolder.clone()).await?;
					//write properties
					if properties.is_some(){
						let mut file=subfolder.clone();
						file.push("properties");
						assert!(file.set_extension("json"),"could not set extension");
						tokio::fs::write(file,serde_json::to_string(&properties)?).await?;
					}
					//write item in subfolder
					write_queue.push((subfolder,node,name_override.clone()));
				}else{
					//write item
					write_queue.push((folder.clone(),node,name_override.clone()));
				}
				//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
				match node.class{
					Class::Folder=>(),
					_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
				}
				if has_children{
					stack.push(WriteStackInstruction::PopFolder);
					name_tally.clear();
					for referent in &node.children{
						if let Some(c)=context.tree_refs.get(referent){
							let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
							stack.push(WriteStackInstruction::Node(c,*v));
						}
					}
					stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
				}
			},
		}
	}

	//write the queued items in parallel on the rayon thread pool
	{
		let dom=&context.dom;
		let write_models=config.write_models;
		let write_scripts=config.write_scripts;
		let results:Vec<AResult<()>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override)|{
			write_item(&dom,write_path,node,node_name_override,write_models,write_scripts)
		}));
		for result in results{
			result?;
		}
	}

	//run the destroy
	for destroy_ref in destroy_queue{
		context.dom.destroy(destroy_ref);
	}

	//write what remains in template.rbxlx
	if config.write_template{
		let mut file=config.output_folder.clone();
		file.push("template");
		assert!(file.set_extension("rbxlx"),"could not set extension");
		let output=std::io::BufWriter::new(std::fs::File::create(file)?);
		rbx_xml::to_writer_default(output,&context.dom,&[context.dom.root_ref()])?;
	}

	Ok(())
}
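
//Resulting on-disk layout for a small place (illustrative;names are made up):
// output/
// 	template.rbxlx
// 	src/
// 		ReplicatedStorage/
// 			Util.lua            (ModuleScript,no children)
// 			Gui.rbxmx           (Model)
// 			Main/               (LocalScript with children)
// 				properties.json ({"ClassName":"LocalScript"})
// 				Main.lua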

struct DecompileConfig{
	input_file:std::path::PathBuf,
	output_folder:std::path::PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn decompile(config:DecompileConfig)->AResult<()>{
	//rules:
	//Class Script|LocalScript|ModuleScript->$Name.lua
	//Class Model->$Name.rbxmx
	//properties.json per-folder to override simple properties (currently ClassName)
	//Everything else goes into template.rbxlx

	//read file
	let context=generate_decompiled_context(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?;

	//generate folders,models,and scripts
	//delete models and scripts from dom
	write_files(WriteConfig{
		output_folder:config.output_folder,
		write_template:config.write_template,
		write_models:config.write_models,
		write_scripts:config.write_scripts,
	},context).await?;

	Ok(())
}

struct WriteCommitConfig{
	git_committer_name:String,
	git_committer_email:String,
	output_folder:std::path::PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,DecompiledContext)>,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{
	let (asset_version,context)=b??;
	println!("writing files for version {}",asset_version.assetVersionNumber);

	//clean output dir
	if config.write_models||config.write_scripts{
		let mut src=config.output_folder.clone();
		src.push("src");
		match std::fs::remove_dir_all(src){
			Ok(())=>(),
			Err(e)=>println!("remove_dir_all src failed {}",e),
		}
	}
	if config.write_template{
		let mut template=config.output_folder.clone();
		template.push("template.rbxlx");
		match std::fs::remove_file(template){
			Ok(())=>(),
			Err(e)=>println!("remove_file template.rbxlx failed {}",e),
		}
	}

	//write files
	write_files(WriteConfig{
		output_folder:config.output_folder.clone(),
		write_template:config.write_template,
		write_models:config.write_models,
		write_scripts:config.write_scripts,
	},context).await?;

	let date=asset_version.created;
	//let sig=repo.signature()?; //this pulls default name and email
	let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0))?;
	let tree_id={
		let mut tree_index=repo.index()?;
		tree_index.add_all([config.output_folder.as_path()].iter(),git2::IndexAddOption::DEFAULT,None)?;
		if tree_index.len()==0{
			println!("tree_index.len()==0");
			return Ok(());
		}else{
			tree_index.update_all([config.output_folder.as_path()].iter(),None)?;
		}
		tree_index.write()?;
		tree_index.write_tree()?
	};
	let tree=repo.find_tree(tree_id)?;

	let mut parents=Vec::new();

	match repo.head(){
		Ok(reference)=>{
			let commit=reference.peel_to_commit()?;

			//test tree against commit tree to see if there are any changes
			let commit_tree=commit.tree()?;
			let diff=repo.diff_tree_to_tree(Some(&commit_tree),Some(&tree),None)?;
			if diff.deltas().count()==0{
				println!("no changes");
				return Ok(());
			}

			parents.push(commit);
		},
		Err(e)=>println!("repo head error {:?}",e),
	};

	repo.commit(
		Some("HEAD"),//update_ref
		&sig,//author
		&sig,//committer
		&format!("v{}",asset_version.assetVersionNumber),//message
		&tree,//tree (basically files)
		parents.iter().collect::<Vec<&git2::Commit<'_>>>().as_slice(),//parents
	)?;

	Ok(())
}

struct DecompileHistoryConfig{
	git_committer_name:String,
	git_committer_email:String,
	input_folder:std::path::PathBuf,
	output_folder:std::path::PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
	//load the versions.json manifest written by download_history
	let mut versions_path=config.input_folder.clone();
	versions_path.push("versions.json");
	let asset_list:Vec<AssetVersion>=serde_json::from_reader(std::fs::File::open(versions_path)?)?;

	let repo=git2::Repository::init(config.output_folder.clone())?;

	//decompile all versions
	futures::stream::iter(asset_list.into_iter()
	.map(|asset_version|{
		let mut file_path=config.input_folder.clone();
		tokio::task::spawn(async move{
			file_path.push(format!("{}_v{}.rbxl",asset_version.assetId,asset_version.assetVersionNumber));
			let file=std::fs::File::open(file_path)?;
			let contents=generate_decompiled_context(file)?;
			Ok::<_,anyhow::Error>((asset_version,contents))
		})
	}))
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			Err(e)=>println!("decompile/write/commit error: {}",e),
		}
	}).await;
	Ok(())
}

struct DownloadAndDecompileHistoryConfig{
	cookie:String,
	asset_id:AssetID,
	git_committer_name:String,
	git_committer_email:String,
	output_folder:std::path::PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}

async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
	let client=reqwest::Client::new();

	//poll paged list of all asset versions
	let asset_list=get_version_history(&client,config.cookie.as_str(),config.asset_id).await?;

	let repo=git2::Repository::init(config.output_folder.clone())?;

	//download all versions
	let asset_id_string=config.asset_id.to_string();
	futures::stream::iter(asset_list.into_iter()
	.map(|asset_version|{
		let client=client.clone();
		let cookie=config.cookie.clone();
		let asset_id_str=asset_id_string.clone();
		tokio::task::spawn(async move{
			let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),asset_version.assetVersionNumber.to_string().as_str()).await?;
			let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
				ReaderType::GZip(readable)=>generate_decompiled_context(readable)?,
				ReaderType::Raw(readable)=>generate_decompiled_context(readable)?,
			};
			Ok::<_,anyhow::Error>((asset_version,contents))
		})
	}))
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			Err(e)=>println!("download/decompile/write/commit error: {}",e),
		}
	}).await;
	Ok(())
}

//compile is currently a stub;the intended inverse of decompile
fn compile(_folder:std::path::PathBuf,_file:std::path::PathBuf)->AResult<()>{
	Ok(())
}