//asset-tool/src/main.rs

use std::io::Read;
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;
use tokio::io::AsyncReadExt;
type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,std::path::PathBuf)>;
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version = true)]
struct Cli{
//asset options
#[arg(short,long)]
group:Option<u64>,
#[arg(long)]
asset_id:Option<AssetID>,
//idk how to do this better
#[arg(long)]
cookie_literal:Option<String>,
#[arg(long)]
cookie_env:Option<String>,
#[arg(long)]
cookie_file:Option<std::path::PathBuf>,
//TODO: read the versions.json file instead of doing this
//TODO: write file dates instead of versions.json
#[arg(long)]
start_version:Option<u64>,
#[arg(long)]
end_version:Option<u64>,
#[arg(long)]
r#continue:bool,
//decompile options
#[arg(long)]
no_models:Option<bool>,
#[arg(long)]
no_scripts:Option<bool>,
#[arg(long)]
no_template:Option<bool>,
#[arg(long)]
style:Option<String>,
//git options
#[arg(long)]
git_committer_name:Option<String>,
#[arg(long)]
git_committer_email:Option<String>,
#[arg(short,long)]
input:Option<std::path::PathBuf>,
#[arg(short,long)]
output:Option<std::path::PathBuf>,
#[command(subcommand)]
command:Commands,
}
#[derive(Subcommand)]
enum Commands{
DownloadHistory,
Download(AssetIDList),
Upload,
Compile,
Decompile,
DecompileHistoryIntoGit,
DownloadAndDecompileHistoryIntoGit,
}
#[derive(Clone,Copy)]
enum DecompileStyle{
Rox,
Rojo,
RoxRojo,
}
#[derive(Args)]
struct AssetIDList{
asset_ids:Vec<AssetID>
}
#[derive(Args)]
struct PathBufList{
paths:Vec<std::path::PathBuf>
}
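//these mirror the JSON returned by the Roblox saved-versions endpoint;
//the nonstandard_style allowances exist because field names must match the API's camelCase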
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
struct VersionPage{
previousPageCursor:Option<String>,
nextPageCursor:Option<String>,
data:Vec<AssetVersion>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
struct AssetVersion{
Id:u64,
assetId:AssetID,
assetVersionNumber:u64,
creatorType:String,
creatorTargetId:u64,
creatingUniverseId:Option<u64>,
created:chrono::DateTime<chrono::Utc>,
isPublished:bool,
}
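//example invocations, assuming a built binary named asset-tool and a made-up asset id
//(clap derives kebab-case names; these top-level flags go before the subcommand):
//  asset-tool --asset-id 1234 --cookie-env ROBLOSECURITY --output history download-history
//  asset-tool --style rox --input history/1234_v1.rbxl --output repo decompile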
#[tokio::main]
async fn main()->AResult<()>{
let cli=Cli::parse();
let cookie_enum={
match (cli.cookie_literal,cli.cookie_env,cli.cookie_file){
(Some(literal),None,None)=>Some(Cookie::Literal(literal)),
(None,Some(env_var),None)=>Some(Cookie::Environment(env_var)),
(None,None,Some(path))=>Some(Cookie::File(path)),
(None,None,None)=>None,
_=>return Err(anyhow::Error::msg("Cookie was specified multiple times.")),
}
};
let cookie=match cookie_enum{
Some(c)=>Some(format!(".ROBLOSECURITY={}",match c{
Cookie::Literal(s)=>s,
Cookie::Environment(var)=>std::env::var(var)?,
Cookie::File(path)=>tokio::fs::read_to_string(path).await?,
})),
None=>None,
};
let decompile_style=match cli.style.as_deref(){
Some("rox")
|Some("Rox")=>Some(DecompileStyle::Rox),
Some("rojo")
|Some("Rojo")=>Some(DecompileStyle::Rojo),
Some("rox-rojo")
|Some("rojo-rox")
|Some("roxrojo")
|Some("rojorox")
|Some("RoxRojo")
|Some("RojoRox")=>Some(DecompileStyle::RoxRojo),
None=>None,
_=>return Err(anyhow::Error::msg("Invalid style")),
};
match cli.command{
Commands::DownloadHistory=>download_history(DownloadHistoryConfig{
continue_from_versions:cli.r#continue,
end_version:cli.end_version,
start_version:cli.start_version.unwrap_or(0),
output_folder:cli.output.unwrap(),
cookie:cookie.unwrap(),
asset_id:cli.asset_id.unwrap(),
}).await,
Commands::Download(asset_id_list)=>download_list(
cookie.unwrap(),
asset_id_list.asset_ids.into_iter().map(|asset_id|{
let mut path=cli.output.clone().unwrap();
path.push(asset_id.to_string());
(asset_id,path)
}).collect()
).await,
Commands::Upload=>upload_list(cookie.unwrap(),cli.group,vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
Commands::Compile=>compile(CompileConfig{
input_folder:cli.input.unwrap(),
output_file:cli.output.unwrap(),
template:None,
}).await,
Commands::Decompile=>decompile(DecompileConfig{
style:decompile_style.unwrap(),
input_file:cli.input.unwrap(),
output_folder:cli.output.unwrap(),
write_template:!cli.no_template.unwrap_or(false),
write_models:!cli.no_models.unwrap_or(false),
write_scripts:!cli.no_scripts.unwrap_or(false),
}).await,
Commands::DecompileHistoryIntoGit=>decompile_history_into_git(DecompileHistoryConfig{
git_committer_name:cli.git_committer_name.unwrap(),
git_committer_email:cli.git_committer_email.unwrap(),
input_folder:cli.input.unwrap(),
output_folder:cli.output.unwrap(),
style:decompile_style.unwrap(),
write_template:!cli.no_template.unwrap_or(false),
write_models:!cli.no_models.unwrap_or(false),
write_scripts:!cli.no_scripts.unwrap_or(false),
}).await,
Commands::DownloadAndDecompileHistoryIntoGit=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
git_committer_name:cli.git_committer_name.unwrap(),
git_committer_email:cli.git_committer_email.unwrap(),
cookie:cookie.unwrap(),
asset_id:cli.asset_id.unwrap(),
output_folder:cli.output.unwrap(),
style:decompile_style.unwrap(),
write_template:!cli.no_template.unwrap_or(false),
write_models:!cli.no_models.unwrap_or(false),
write_scripts:!cli.no_scripts.unwrap_or(false),
}).await,
}
}
enum Cookie{
Literal(String),
Environment(String),
File(std::path::PathBuf),
}
enum ReaderType<R:Read>{
GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
Raw(std::io::BufReader<R>),
}
fn maybe_gzip_decode<R:Read>(input:R)->AResult<ReaderType<R>>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
//gzip streams start with the magic bytes 1f 8b; starts_with also tolerates short buffers
if peek.starts_with(b"\x1f\x8b"){
Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf)))
}else{
Ok(ReaderType::Raw(buf))
}
}
async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let client=reqwest::Client::new();
futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{
let client=&client;
let cookie=cookie.as_str();
let group=&group;
async move{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("assetid",asset_id.to_string().as_str());
if let Some(group_id)=group{
query.append_pair("groupId",group_id.to_string().as_str());
}
}
let body=tokio::fs::read_to_string(file).await?;
let mut resp=client.post(url.clone())
.header("Cookie",cookie)
.body(body.clone())
.send().await?;
//This is called a CSRF challenge apparently
if resp.status()==reqwest::StatusCode::FORBIDDEN{
if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
resp=client.post(url)
.header("X-CSRF-Token",csrf_token)
.header("Cookie",cookie)
.body(body)
.send().await?;
}else{
return Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"));
}
}
Ok((asset_id,resp.bytes().await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((asset_id,body))=>{
println!("asset_id={} response.body={:?}",asset_id,body);
},
Err(e)=>eprintln!("ul error: {}",e),
}
}).await;
Ok(())
}
fn read_readable(mut readable:impl Read)->AResult<Vec<u8>>{
let mut contents=Vec::new();
readable.read_to_end(&mut contents)?;
Ok(contents)
}
async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let client=reqwest::Client::new();
futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{
let client=&client;
let cookie=cookie.as_str();
async move{
let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
.header("Cookie",cookie)
.send().await?;
Ok((file,resp.bytes().await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((dest,body))=>{
//the asset delivery endpoint may or may not gzip the payload
let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
Ok(ReaderType::GZip(readable))=>read_readable(readable),
Ok(ReaderType::Raw(readable))=>read_readable(readable),
Err(e)=>Err(e),
};
match contents{
Ok(data)=>if let Err(e)=tokio::fs::write(dest,data).await{
eprintln!("fs error: {}",e);
},
Err(e)=>eprintln!("gzip error: {}",e),
};
},
Err(e)=>eprintln!("dl error: {}",e),
}
}).await;
Ok(())
}
async fn download_page(client:&reqwest::Client,cookie:&str,asset_id:AssetID,cursor:Option<String>)->AResult<VersionPage>{
let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",asset_id).as_str())?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//query.append_pair("sortOrder","Asc");
//query.append_pair("limit","100");
//query.append_pair("count","100");
match cursor.as_deref(){
Some(next_page)=>{query.append_pair("cursor",next_page);}
None=>(),
}
}
println!("page url={}",url);
let resp=client.get(url)
.header("Cookie",cookie)
.send().await?;
Ok(resp.json::<VersionPage>().await?)
}
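//walks the paged saved-versions endpoint by following nextPageCursor until it runs out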
async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=download_page(client,cookie,asset_id,cursor).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;
}
cursor=page.nextPageCursor;
}
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
Ok(asset_list)
}
async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str:&str,asset_version_str:&str)->AResult<reqwest::Response>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("ID",asset_id_str);
query.append_pair("version",asset_version_str);
}
println!("download url={}",url);
//naive retry: up to 8 attempts, no backoff
for i in 0..8{
let resp=client.get(url.clone())
.header("Cookie",cookie)
.send().await?;
if !resp.status().is_success(){
println!("request {} failed",i);
continue;
}
return Ok(resp);
}
Err(anyhow::Error::msg("all requests failed"))
}
struct DownloadHistoryConfig{
continue_from_versions:bool,
end_version:Option<u64>,
start_version:u64,
output_folder:std::path::PathBuf,
cookie:String,
asset_id:AssetID,
}
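//resume strategy for --continue: load versions.json, then count down from the
//highest known version to the first gap; that gap becomes end_version, and if
//there is no gap, downloading restarts at max+1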
async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
let mut asset_list_contents=std::collections::HashSet::new();
let mut asset_list:Vec<AssetVersion>=Vec::new();
if config.end_version.is_none()&&config.continue_from_versions{
//load preexisting versions list
let mut versions_path=config.output_folder.clone();
versions_path.push("versions.json");
match std::fs::File::open(versions_path){
Ok(versions_file)=>asset_list.append(&mut serde_json::from_reader(versions_file)?),
Err(e)=>match e.kind(){
std::io::ErrorKind::NotFound=>return Err(anyhow::Error::msg("Cannot continue from versions.json - file does not exist")),
_=>Err(e)?,
}
}
//write down which versions are contained
for asset_version in &asset_list{
asset_list_contents.insert(asset_version.assetVersionNumber);
}
//find the highest number
match asset_list.iter().map(|asset_version|asset_version.assetVersionNumber).max(){
Some(max)=>{
//count down contiguously until a number is missing
for i in (1..=max).rev(){
if !asset_list_contents.contains(&i){
//that is end_version
config.end_version=Some(i);
break;
}
}
//if all versions are contained, set start_version to the max + 1
if config.end_version.is_none(){
config.start_version=max+1;
}
},
None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
}
}
let client=reqwest::Client::new();
let asset_id_string=config.asset_id.to_string();
//limit concurrent downloads
let mut join_set=tokio::task::JoinSet::new();
//poll paged list of all asset versions
let mut cursor:Option<String>=None;
loop{
let mut page=download_page(&client,config.cookie.as_str(),config.asset_id,cursor).await?;
let client=&client;
let cookie=config.cookie.clone();
let asset_id_str=asset_id_string.clone();
let output_folder=config.output_folder.clone();
let data=&page.data;
let asset_list_contents=&asset_list_contents;
let join_set=&mut join_set;
let error_catcher=||async move{
let mut cancel_paging=false;
for asset_version in data{
let version_number=asset_version.assetVersionNumber;
//skip assets beyond specified end_version
if config.end_version.is_some_and(|v|v<version_number){
continue;
}
//skip assets lower than start_version and cancel paging asset versions
if version_number<config.start_version{
cancel_paging=true;
continue;//don't trust roblox returned order
}
//skip previously downloaded assets
if asset_list_contents.contains(&version_number){
continue;
}
//wait for a download slot before spawning another task
while CONCURRENT_REQUESTS<=join_set.len(){
join_set.join_next().await.unwrap()??;
}
let client=client.clone();
let cookie=cookie.clone();
let asset_id_str=asset_id_str.clone();
let mut path=output_folder.clone();
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
join_set.spawn(async move{
let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),version_number.to_string().as_str()).await?;
let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
ReaderType::GZip(readable)=>read_readable(readable)?,
ReaderType::Raw(readable)=>read_readable(readable)?,
};
tokio::fs::write(path,contents).await?;
Ok::<_,anyhow::Error>(())
});
}
Ok::<_,anyhow::Error>(cancel_paging)
};
let cancel_paging=match error_catcher().await{
Ok(cancel)=>cancel,
Err(e)=>{
println!("download error: {}",e);
//cancel download and write versions
true
},
};
if page.nextPageCursor.is_none()||cancel_paging{
for asset_version in page.data.into_iter(){
if !(asset_list_contents.contains(&asset_version.assetVersionNumber)
||config.end_version.is_some_and(|v|v<asset_version.assetVersionNumber)
||asset_version.assetVersionNumber<config.start_version){
asset_list.push(asset_version);
}
}
break;
}else{
asset_list.append(&mut page.data);
}
cursor=page.nextPageCursor;
}
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
//write versions.json inside the output folder, matching where --continue reads it from
let mut path=config.output_folder.clone();
path.push("versions.json");
tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;
while let Some(result)=join_set.join_next().await{
result??;
}
Ok(())
}

fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
//binary places start with "<roblox!", xml places start with "<roblox "
if peek.starts_with(b"<roblox!"){
rbx_binary::from_reader(buf).map_err(anyhow::Error::msg)
}else if peek.starts_with(b"<roblox "){
rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg)
}else{
Err(anyhow::Error::msg(format!("unsupported file type {:?}",&peek[0..peek.len().min(8)])))
}
}
#[derive(PartialEq)]
enum Class{
Folder,
ModuleScript,
LocalScript,
Script,
Model,
}
struct TreeNode{
name:String,
referent:Ref,
parent:Ref,
class:Class,
children:Vec<Ref>,
}
impl TreeNode{
fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
Self{
name,
referent,
parent,
class,
children:Vec::new(),
}
}
}
enum TrimStackInstruction{
Referent(Ref),
IncrementScript,
DecrementScript,
}
enum WriteStackInstruction<'a>{
Node(&'a TreeNode,u32),//(Node,NameTally)
PushFolder(String),
PopFolder,
Destroy(Ref),
}
#[derive(Default)]
struct PropertiesOverride{
name:Option<String>,
class_name:Option<String>,
}
impl PropertiesOverride{
fn is_some(&self)->bool{
self.name.is_some()
||self.class_name.is_some()
}
}
impl std::fmt::Display for PropertiesOverride{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
if let Some(name)=self.name.as_deref(){
writeln!(f,"--! Properties.Name=\"{}\"",name)?;
}
if let Some(class_name)=self.class_name.as_deref(){
writeln!(f,"--! Properties.ClassName=\"{}\"",class_name)?;
}
Ok(())
}
}

fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
lazy_regex::regex!(r"[^a-zA-Z0-9._-]").replace_all(s,"_")
}
fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
file.push(sanitize(node_name_override.as_str()).as_ref());
match node.class{
Class::Folder=>(),
Class::ModuleScript|Class::LocalScript|Class::Script=>{
if !write_scripts{
return Ok(())
}
//set extension
match style{
DecompileStyle::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
DecompileStyle::RoxRojo|DecompileStyle::Rojo=>{
match properties.class_name.as_deref(){
Some("LocalScript")=>{
file.set_extension("client.lua");
properties.class_name=None;
},
Some("Script")=>{
file.set_extension("server.lua");
properties.class_name=None;
},
// Some("ModuleScript")=>{
// file.set_extension("module");
// properties.class_name=None;
// },
None=>assert!(file.set_extension("lua"),"could not set extension"),
Some(other)=>return Err(anyhow::Error::msg(format!("Attempt to write a {} as a script",other))),
}
}
}
if let Some(item)=dom.get_by_ref(node.referent){
//TODO: delete disabled scripts
if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
if properties.is_some(){
//rox style: prepend property overrides as "--!" comment directives
let source=properties.to_string()+source.as_str();
std::fs::write(file,source)?;
}else{
std::fs::write(file,source)?;
}
}
}
},
Class::Model=>{
if !write_models{
return Ok(())
}
assert!(file.set_extension("rbxmx"));
let output=std::io::BufWriter::new(std::fs::File::create(file)?);
rbx_xml::to_writer_default(output,dom,&[node.referent])?;
},
}
Ok(())
}
struct DecompiledContext{
dom:rbx_dom_weak::WeakDom,
tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
}
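//first pass builds a TreeNode mirror of the dom; second pass trims empty
//folders and turns folders nested inside scripts into models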
fn generate_decompiled_context<R:Read>(input:R)->AResult<DecompiledContext>{
let dom=load_dom(input)?;
let mut tree_refs=std::collections::HashMap::new();
tree_refs.insert(dom.root_ref(),TreeNode::new(
"src".to_string(),
dom.root_ref(),
Ref::none(),
Class::Folder
));
//run rules
let mut stack=vec![dom.root()];
while let Some(item)=stack.pop(){
let class=match item.class.as_str(){
"ModuleScript"=>Class::ModuleScript,
"LocalScript"=>Class::LocalScript,
"Script"=>Class::Script,
"Model"=>Class::Model,
_=>Class::Folder,
};
let skip=match class{
Class::Model=>true,
_=>false,
};
if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
let referent=item.referent();
let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
parent_node.children.push(referent);
tree_refs.insert(referent,node);
}
//look no further, turn this node and all its children into a model
if skip{
continue;
}
for &referent in item.children(){
if let Some(c)=dom.get_by_ref(referent){
stack.push(c);
}
}
}
//trim empty folders
let mut script_count=0;
let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
while let Some(instruction)=stack.pop(){
match instruction{
TrimStackInstruction::IncrementScript=>script_count+=1,
TrimStackInstruction::DecrementScript=>script_count-=1,
TrimStackInstruction::Referent(referent)=>{
let mut delete=None;
if let Some(node)=tree_refs.get_mut(&referent){
if node.class==Class::Folder&&script_count!=0{
node.class=Class::Model
}
if node.class==Class::Folder&&node.children.len()==0{
delete=Some(node.parent);
}else{
//how the hell do I do this better without recursion
let is_script=match node.class{
Class::ModuleScript|Class::LocalScript|Class::Script=>true,
_=>false,
};
//stack is popped from back
if is_script{
stack.push(TrimStackInstruction::DecrementScript);
}
for &child_referent in &node.children{
stack.push(TrimStackInstruction::Referent(child_referent));
}
if is_script{
stack.push(TrimStackInstruction::IncrementScript);
}
}
}
//trim referent
if let Some(parent_ref)=delete{
let parent_node=tree_refs.get_mut(&parent_ref)
.expect("parent_ref does not exist in tree_refs");
parent_node.children.remove(
parent_node.children.iter()
.position(|&r|r==referent)
.expect("parent.children does not contain referent")
);
tree_refs.remove(&referent);
}
},
}
}
Ok(DecompiledContext{
dom,
tree_refs,
})
}
struct WriteConfig{
style:DecompileStyle,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<()>{
let mut write_queue=Vec::new();
let mut destroy_queue=Vec::new();
let mut name_tally=std::collections::HashMap::<String,u32>::new();
let mut folder=config.output_folder.clone();
let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)];
while let Some(instruction)=stack.pop(){
match instruction{
WriteStackInstruction::PushFolder(component)=>folder.push(component),
WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
WriteStackInstruction::Node(node,name_count)=>{
//track properties that must be overridden to compile the folder structure back into a place file
let mut properties=PropertiesOverride::default();
let has_children=node.children.len()!=0;
match node.class{
Class::Folder=>(),
Class::ModuleScript=>(),//.lua files are ModuleScript by default
Class::LocalScript=>properties.class_name=Some("LocalScript".to_string()),
Class::Script=>properties.class_name=Some("Script".to_string()),
Class::Model=>(),
}
let name_override=if 0<name_count{
properties.name=Some(node.name.clone());
format!("{}_{}",node.name,name_count)
}else{
node.name.clone()
};
if has_children{
//push temp subfolder
let mut subfolder=folder.clone();
subfolder.push(sanitize(name_override.as_str()).as_ref());
//make folder
tokio::fs::create_dir(subfolder.clone()).await?;
let name_final=match config.style{
DecompileStyle::Rox
|DecompileStyle::RoxRojo=>name_override.clone(),
DecompileStyle::Rojo=>"init".to_owned(),
};
//write item in subfolder
write_queue.push((subfolder,node,name_final,properties,config.style));
}else{
//write item
write_queue.push((folder.clone(),node,name_override.clone(),properties,config.style));
}
//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
match node.class{
Class::Folder=>(),
_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
}
if has_children{
stack.push(WriteStackInstruction::PopFolder);
name_tally.clear();
for referent in &node.children{
if let Some(c)=context.tree_refs.get(referent){
let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
stack.push(WriteStackInstruction::Node(c,*v));
}
}
stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
}
},
}
}
//write the queued items in parallel on the rayon thread pool
{
let dom=&context.dom;
let write_models=config.write_models;
let write_scripts=config.write_scripts;
use rayon::iter::{IntoParallelIterator,ParallelIterator};
let results:Vec<AResult<()>>=write_queue.into_par_iter()
.map(|(write_path,node,node_name_override,properties,style)|{
write_item(dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
}).collect();
for result in results{
result?;
}
}
//run the destroy
for destroy_ref in destroy_queue{
context.dom.destroy(destroy_ref);
}
//write what remains in template.rbxlx
if config.write_template{
let mut file=config.output_folder.clone();
file.push("template");
assert!(file.set_extension("rbxlx"));
let output=std::io::BufWriter::new(std::fs::File::create(file)?);
rbx_xml::to_writer_default(output,&context.dom,&[context.dom.root_ref()])?;
}
Ok(())
}
struct DecompileConfig{
style:DecompileStyle,
input_file:std::path::PathBuf,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn decompile(config:DecompileConfig)->AResult<()>{
//rules:
//Class Script|LocalScript|ModuleScript->$Name.lua
//Class Model->$Name.rbxmx
//overrides.json per-folder [Override{name,class}]
//Everything else goes into template.rbxlx
//read file
let context=generate_decompiled_context(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?;
//generate folders, models, and scripts
//delete models and scripts from dom
write_files(WriteConfig{
style:config.style,
output_folder:config.output_folder,
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
},context).await?;
Ok(())
}
struct WriteCommitConfig{
git_committer_name:String,
git_committer_email:String,
output_folder:std::path::PathBuf,
style:DecompileStyle,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,DecompiledContext)>,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{
let (asset_version,context)=b??;
println!("writing files for version {}",asset_version.assetVersionNumber);
//clean output dir
if config.write_models||config.write_scripts{
let mut src=config.output_folder.clone();
src.push("src");
match std::fs::remove_dir_all(src){
Ok(())=>(),
Err(e)=>println!("remove_dir_all src failed {}",e),
}
}
if config.write_template{
let mut template=config.output_folder.clone();
template.push("template.rbxlx");
match std::fs::remove_file(template){
Ok(())=>(),
Err(e)=>println!("remove_file template.rbxlx failed {}",e),
}
}
//write files
write_files(WriteConfig{
style:config.style,
output_folder:config.output_folder.clone(),
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
},context).await?;
let date=asset_version.created;
//let sig=repo.signature()?; //this pulls default name and email
let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0))?;
let tree_id={
let mut tree_index=repo.index()?;
tree_index.add_all([config.output_folder.as_path()].iter(),git2::IndexAddOption::DEFAULT,None)?;
match tree_index.update_all([config.output_folder.as_path()].iter(),None){
Ok(_)=>(),
Err(e)=>println!("tree_index.update_all error: {}",e),
}
tree_index.write()?;
tree_index.write_tree()?
};
let tree=repo.find_tree(tree_id)?;
let mut parents=Vec::new();
match repo.head(){
Ok(reference)=>{
let commit=reference.peel_to_commit()?;
//test tree against commit tree to see if there are any changes
let commit_tree=commit.tree()?;
let diff=repo.diff_tree_to_tree(Some(&commit_tree),Some(&tree),None)?;
if diff.deltas().count()==0{
println!("no changes");
return Ok(());
}
parents.push(commit);
},
Err(e)=>println!("repo head error {:?}",e),
};
repo.commit(
Some("HEAD"),//update_ref
&sig,//author
&sig,//committer
&format!("v{}", asset_version.assetVersionNumber),//message
&tree,//tree (basically files)
parents.iter().collect::<Vec<&git2::Commit<'_>>>().as_slice(),//parents
)?;
Ok(())
}
struct DecompileHistoryConfig{
git_committer_name:String,
git_committer_email:String,
input_folder:std::path::PathBuf,
style:DecompileStyle,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
//use preexisting versions list
let mut versions_path=config.input_folder.clone();
versions_path.push("versions.json");
let asset_list:Vec<AssetVersion>=serde_json::from_reader(std::fs::File::open(versions_path)?)?;
let repo=git2::Repository::init(config.output_folder.as_path())?;
//decompile all versions
futures::stream::iter(asset_list.into_iter()
.map(|asset_version|{
let mut file_path=config.input_folder.clone();
tokio::task::spawn_blocking(move||{
file_path.push(format!("{}_v{}.rbxl",asset_version.assetId,asset_version.assetVersionNumber));
let file=std::fs::File::open(file_path)?;
let contents=generate_decompiled_context(file)?;
Ok::<_,anyhow::Error>((asset_version,contents))
})
}))
.buffered(CONCURRENT_DECODE)
.for_each(|join_handle_result|async{
match write_commit(WriteCommitConfig{
git_committer_name:config.git_committer_name.clone(),
git_committer_email:config.git_committer_email.clone(),
style:config.style,
output_folder:config.output_folder.clone(),
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
},join_handle_result,&repo).await{
Ok(())=>(),
Err(e)=>println!("decompile/write/commit error: {}",e),
}
}).await;
Ok(())
}
struct DownloadAndDecompileHistoryConfig{
cookie:String,
asset_id:AssetID,
git_committer_name:String,
git_committer_email:String,
style:DecompileStyle,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
let client=reqwest::Client::new();
//poll paged list of all asset versions
let asset_list=get_version_history(&client,config.cookie.as_str(),config.asset_id).await?;
let repo=git2::Repository::init(config.output_folder.clone())?;
//download all versions
let asset_id_string=config.asset_id.to_string();
futures::stream::iter(asset_list.into_iter()
.map(|asset_version|{
let client=client.clone();
let cookie=config.cookie.clone();
let asset_id_str=asset_id_string.clone();
tokio::task::spawn(async move{
let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),asset_version.assetVersionNumber.to_string().as_str()).await?;
let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
ReaderType::GZip(readable)=>generate_decompiled_context(readable)?,
ReaderType::Raw(readable)=>generate_decompiled_context(readable)?,
};
Ok::<_,anyhow::Error>((asset_version,contents))
})
}))
.buffered(CONCURRENT_DECODE)
.for_each(|join_handle_result|async{
match write_commit(WriteCommitConfig{
style:config.style,
git_committer_name:config.git_committer_name.clone(),
git_committer_email:config.git_committer_email.clone(),
output_folder:config.output_folder.clone(),
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
},join_handle_result,&repo).await{
Ok(())=>(),
Err(e)=>println!("download/unzip/decompile/write/commit error: {}",e),
}
}).await;
Ok(())
}
//holy smokes what am I doing lmao
//This giant machine is supposed to search for files according to style rules
//e.g. ScriptName.server.lua or init.lua
//Obviously I got carried away
//I could use an enum!
//I could use a struct!
//I could use a trait!
//I could use an error!
//I could use a match!
//I could use a function!
//eventually:
#[derive(Debug)]
enum QueryResolveError{
NotFound,//0 results
Ambiguous,//>1 results
JoinError(tokio::task::JoinError),
IO(std::io::Error),
}
impl std::fmt::Display for QueryResolveError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for QueryResolveError{}
async fn get_file_async(mut path:std::path::PathBuf,file_name:impl AsRef<std::path::Path>)->Result<tokio::fs::File,QueryResolveError>{
path.push(file_name);
match tokio::fs::File::open(path).await{
Ok(file)=>Ok(file),
Err(e)=>match e.kind(){
std::io::ErrorKind::NotFound=>Err(QueryResolveError::NotFound),
_=>Err(QueryResolveError::IO(e)),
},
}
}
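//each query spawns concurrent file probes for every name a node could have
//been written as under a given style, then resolves to exactly one hit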
type QueryHintResult=Result<FileHint,QueryResolveError>;
trait Query{
async fn resolve(self)->QueryHintResult;
}
struct QuerySingle(tokio::task::JoinHandle<Result<tokio::fs::File,QueryResolveError>>);
impl QuerySingle{
fn rox(search_path:&std::path::PathBuf,search_name:&str)->Self{
Self(tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name))))
}
}
impl Query for QuerySingle{
async fn resolve(self)->QueryHintResult{
match self.0.await{
Ok(Ok(file))=>Ok(FileHint{file,hint:ScriptHint::Undetermined}),
Ok(Err(e))=>Err(e),
Err(e)=>Err(QueryResolveError::JoinError(e)),
}
}
}
struct QueryTriple{
module:QuerySingle,
server:QuerySingle,
client:QuerySingle,
}
impl QueryTriple{
fn rox_rojo(search_path:&std::path::PathBuf,search_name:&str,search_module:bool)->Self{
//this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
let module_name=if search_module{
format!("{}.module.lua",search_name)
}else{
format!("{}.lua",search_name)
};
Self{
module:QuerySingle(tokio::spawn(get_file_async(search_path.clone(),module_name))),
server:QuerySingle(tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name)))),
client:QuerySingle(tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name)))),
}
}
//rojo style always probes for init.lua regardless of the node's name
fn rojo(search_path:&std::path::PathBuf,_search_name:&str)->Self{
QueryTriple::rox_rojo(search_path,"init",false)
}
}
//these functions can be achieved with macros, but I have not learned that yet
fn mega_double_join(query_pair:(QueryHintResult,QueryHintResult))->QueryHintResult{
match query_pair{
//unambiguously locate file
(Ok(f),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
//multiple files located
(Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
//no files located
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
//other error
(Err(e),_)
|(_,Err(e))=>Err(e),
}
}
fn mega_triple_join(query_triplet:(QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
match query_triplet{
//unambiguously locate file
(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
//multiple files located
(Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
//no files located
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
//other error
(Err(e),_,_)
|(_,Err(e),_)
|(_,_,Err(e))=>Err(e),
}
}
//LETS GOOOOOOOOOOOOOOOO
fn mega_quadruple_join(query_quad:(QueryHintResult,QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
match query_quad{
//unambiguously locate file
(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
//multiple files located
(Ok(_),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
//no files located
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
//other error
(Err(e),_,_,_)
|(_,Err(e),_,_)
|(_,_,Err(e),_)
|(_,_,_,Err(e))=>Err(e),
}
}
impl Query for QueryTriple{
async fn resolve(self)->QueryHintResult{
let (module,server,client)=tokio::join!(self.module.0,self.server.0,self.client.0);
mega_triple_join((
module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
))
}
}
struct QueryQuad{
module_implicit:QuerySingle,
module_explicit:QuerySingle,
server:QuerySingle,
client:QuerySingle,
}
impl QueryQuad{
fn rox_rojo(search_path:&std::path::PathBuf,search_name:&str)->Self{
let fill=QueryTriple::rox_rojo(search_path,search_name,true);
Self{
module_implicit:QuerySingle::rox(search_path,search_name),//Script.lua
module_explicit:fill.module,//Script.module.lua
server:fill.server,
client:fill.client,
}
}
}
impl Query for QueryQuad{
async fn resolve(self)->QueryHintResult{
let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit.0,self.module_explicit.0,self.server.0,self.client.0);
mega_quadruple_join((
module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
))
}
}
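//for reference, the file names probed for a node named "Thing", by style:
//  rox:      Thing.lua
//  rox-rojo: Thing.lua, Thing.module.lua, Thing.server.lua, Thing.client.lua
//  rojo:     init.lua, init.server.lua, init.client.lua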
async fn discern_node(search_path:&std::path::PathBuf,search_name:&str,style:Option<DecompileStyle>)->AResult<Option<CompileNode>>{
let mut contents_folder=search_path.clone();
contents_folder.push(search_name);
//folder
Ok(if let Ok(dir)=tokio::fs::read_dir(contents_folder.as_path()).await{
//scan inside the folder for an object to define the class of the folder
let script_query=async {match style{
Some(DecompileStyle::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
Some(DecompileStyle::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
Some(DecompileStyle::Rojo)=>QueryTriple::rojo(&contents_folder,search_name).resolve().await,
//try all three and complain if there is ambiguity
None=>mega_triple_join(tokio::join!(
QuerySingle::rox(&contents_folder,search_name).resolve(),
//true=search for module here to avoid ambiguity with QuerySingle::rox results
QueryTriple::rox_rojo(&contents_folder,search_name,true).resolve(),
QueryTriple::rojo(&contents_folder,search_name).resolve(),
))
}};
//model files are rox & rox-rojo only, so it's a lot less work...
let model_query=get_file_async(contents_folder.clone(),format!("{}.rbxmx",search_name));
//model? script? both?
Some(match tokio::join!(script_query,model_query){
(Ok(FileHint{mut file,hint}),Err(QueryResolveError::NotFound))=>{
//read entire file
let mut buf=String::new();
file.read_to_string(&mut buf).await?;
//regex script according to Properties lines at the top
todo!("unimplemented");
//script
CompileNode{
class:CompileClass::Script(buf),
folder:Some(dir),
}
},
(Err(QueryResolveError::NotFound),Ok(mut file))=>{
//read entire file
let mut buf=Vec::new();
file.read_to_end(&mut buf).await?;
//model
CompileNode{
class:CompileClass::Model(buf),
folder:Some(dir),
}
},
(Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous)?,
//neither
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>CompileNode{
class:CompileClass::Folder,
folder:Some(dir),
},
//other error
(Err(e),_)
|(_,Err(e))=>Err(e)?
})
}else{
//a folder of this name does not exist
None
})
}
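//a minimal sketch of the header parse that the todo! above punts on, assuming
//the "--! Properties.Name=..." directives emitted by PropertiesOverride's
//Display impl; the function name is illustrative and not part of the original
fn parse_properties_override(source:&str)->PropertiesOverride{
let mut properties=PropertiesOverride::default();
for line in source.lines(){
//directives sit at the top of the file; stop at the first non-directive line
let Some(rest)=line.strip_prefix("--! Properties.") else{break};
if let Some(name)=rest.strip_prefix("Name=\"").and_then(|s|s.strip_suffix('"')){
properties.name=Some(name.to_string());
}else if let Some(class_name)=rest.strip_prefix("ClassName=\"").and_then(|s|s.strip_suffix('"')){
properties.class_name=Some(class_name.to_string());
}
}
properties
}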
enum ScriptHint{
Script,
LocalScript,
ModuleScript,
Undetermined,
}
struct FileHint{
file:tokio::fs::File,
hint:ScriptHint,
}
enum CompileClass{
Folder,
Script(String),
LocalScript(String),
ModuleScript(String),
Model(Vec<u8>),
}
struct CompileNode{
folder:Option<tokio::fs::ReadDir>,
class:CompileClass,
}
enum CompileStackInstruction{
Referent(rbx_dom_weak::types::Ref),
PushFolder(String),
PopFolder,
}
struct CompileConfig{
input_folder:std::path::PathBuf,
output_file:std::path::PathBuf,
template:Option<std::path::PathBuf>,
}
async fn compile(config:CompileConfig)->AResult<()>{
//basically decompile in reverse order
//load template dom
let input={
let template_path=config.template.unwrap_or_else(||{
let mut template_path=config.input_folder.clone();
template_path.push("template.rbxlx");
template_path
});
//mr dom doesn't like tokio files
std::io::BufReader::new(std::fs::File::open(template_path)?)
};
let mut dom=load_dom(input)?;
//add in scripts and models
let mut folder=config.input_folder.clone();
folder.push("src");
let mut stack:Vec<CompileStackInstruction>=dom.root().children().into_iter().map(|&referent|CompileStackInstruction::Referent(referent)).collect();
while let Some(instruction)=stack.pop(){
match instruction{
CompileStackInstruction::Referent(item_ref)=>{
let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
//check if item exists in folder or subfolder of same name
todo!("this is totally unnecessary,
only the folder needs to exist to determine if traversal should continue");
if let Some(obj)=discern_node(&folder,item.name.as_str(),None).await?{
//cool
}else{
//determine if this is ok
}
//push child objects onto dom
//push dom children objects onto stack
stack.push(CompileStackInstruction::PopFolder);
stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::Referent(referent)));
stack.push(CompileStackInstruction::PushFolder(sanitize(item.name.as_str()).to_string()));
},
CompileStackInstruction::PushFolder(component)=>folder.push(component),
CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
}
}
Ok(())
}