Compare commits

...

25 Commits

SHA1 Message Date
7325296813 use local repo 2024-01-24 01:48:52 -08:00
e5c7ed6b75 debug insanity 2024-01-24 01:44:18 -08:00
9d6780a0b0 write discern_file + tweaks 2024-01-24 00:39:01 -08:00
4fd7795457 write colossal code 2024-01-23 23:55:14 -08:00
1ea68d96ef mega_double_join unused 2024-01-23 20:50:51 -08:00
4ced7f6210 change return Err to Err()? 2024-01-23 20:37:03 -08:00
db2c760c49 extract_script_overrides 2024-01-23 20:33:27 -08:00
8ee041918b tweaks 2024-01-23 20:19:00 -08:00
5384bbcb3b support Script.module.lua properly 2024-01-23 18:47:15 -08:00
9f3bd80403 woah 2024-01-23 18:43:32 -08:00
7863137174 tweaking 2024-01-23 18:43:32 -08:00
15fd698a21 colossal fixes & tweaks 2024-01-23 18:43:32 -08:00
07f0b03d45 ok I wrote this I guess 2024-01-23 18:43:32 -08:00
7e27b378e9 wip compile 2024-01-22 20:28:24 -08:00
48ab23bae5 tabs 2024-01-22 20:28:24 -08:00
97286612b7 path 2024-01-22 12:28:32 -08:00
8e589f7f0f download asset list 2024-01-16 21:50:35 -08:00
cbb5805d54 fix download path 2024-01-14 18:43:03 -08:00
d6add611fb fix error message 2024-01-14 13:24:33 -08:00
ebd5bb526e force update 2024-01-14 11:12:57 -08:00
858ed8a89c todo 2024-01-14 11:07:33 -08:00
35657d4d51 versions not correctly updated 2024-01-13 23:08:30 -08:00
15e8c8208f add --continue feature for download history 2024-01-13 21:24:50 -08:00
5574c34045 end_version for download_history + catch download error and write versions.json 2024-01-13 20:49:06 -08:00
e5322ad711 todo 2024-01-13 20:32:54 -08:00
3 changed files with 575 additions and 56 deletions

Cargo.lock (generated, 12 lines changed)

@@ -1171,8 +1171,6 @@ dependencies = [
[[package]]
name = "rbx_binary"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad50c13afe91296dad6508ea7e29f4b665fa56cb664ad01eaf8fdbd3da69d5e1"
dependencies = [
"log",
"lz4",
@@ -1186,8 +1184,6 @@ dependencies = [
[[package]]
name = "rbx_dom_weak"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "843a2e0e1446623625943f7228d9d4b5cf3883017e3964733600682506864b34"
dependencies = [
"rbx_types",
"serde",
@@ -1196,8 +1192,6 @@ dependencies = [
[[package]]
name = "rbx_reflection"
version = "4.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41e762dfca3217d2d37da631de2fa0d1616edaa61a0a2633263d5d3305baf8c3"
dependencies = [
"rbx_types",
"serde",
@@ -1207,8 +1201,6 @@ dependencies = [
[[package]]
name = "rbx_reflection_database"
version = "0.2.9+roblox-596"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b18f088a2b4aa66324ec97b5b6ffacb53188aef19f3497d95d6a1d1dbb28e66"
dependencies = [
"lazy_static",
"rbx_reflection",
@@ -1219,8 +1211,6 @@ dependencies = [
[[package]]
name = "rbx_types"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a991523e3ad5f43a4d121cb4a1e5bc23f7826bb4a1db5aa51e94f1073150ec"
dependencies = [
"base64 0.13.1",
"bitflags 1.3.2",
@@ -1234,8 +1224,6 @@ dependencies = [
[[package]]
name = "rbx_xml"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc87343301303ff0510903fb7eb3dbd1c75bdb6ab780fea6091bdc3f58b5829f"
dependencies = [
"base64 0.13.1",
"log",

Cargo.toml

@@ -15,10 +15,10 @@ git2 = "0.18.1"
lazy-regex = "3.1.0"
pollster = "0.3.0"
rayon = "1.8.0"
rbx_binary = "0.7.1"
rbx_dom_weak = "2.5.0"
rbx_reflection_database = "0.2.7"
rbx_xml = "0.13.1"
rbx_binary = { path = "../rbx-dom/rbx_binary"}
rbx_dom_weak = { path = "../rbx-dom/rbx_dom_weak"}
rbx_reflection_database = { path = "../rbx-dom/rbx_reflection_database"}
rbx_xml = { path = "../rbx-dom/rbx_xml"}
reqwest = { version = "0.11.23", features = ["cookies", "json"] }
serde = { version = "1.0.195", features = ["derive"] }
serde_json = "1.0.111"
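
A side note on the path-dependency switch above: Cargo also offers `[patch.crates-io]`, which redirects where the registry packages are resolved from while the `[dependencies]` entries keep their version requirements. A sketch, assuming the same `../rbx-dom` checkout with matching crate versions (two of the four crates shown):

```toml
[dependencies]
rbx_binary = "0.7.3"
rbx_dom_weak = "2.6.0"

[patch.crates-io]
rbx_binary = { path = "../rbx-dom/rbx_binary" }
rbx_dom_weak = { path = "../rbx-dom/rbx_dom_weak" }
```

This keeps the manifest diff reversible: deleting the `[patch]` table restores the registry builds.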

src/main.rs

@@ -3,6 +3,7 @@ use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;
use tokio::io::AsyncReadExt;
type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,std::path::PathBuf)>;
@@ -25,8 +26,14 @@ struct Cli{
cookie_env:Option<String>,
#[arg(long)]
cookie_file:Option<std::path::PathBuf>,
//TODO: read the versions.json file instead of doing this
//TODO: write file dates instead of versions.json
#[arg(long)]
start_version:Option<u64>,
#[arg(long)]
end_version:Option<u64>,
#[arg(long)]
r#continue:bool,
//decompile options
#[arg(long)]
@@ -57,7 +64,7 @@ struct Cli{
#[derive(Subcommand)]
enum Commands{
DownloadHistory,
Download,
Download(AssetIDList),
Upload,
Compile,
Decompile,
@@ -72,6 +79,11 @@ enum DecompileStyle{
RoxRojo,
}
#[derive(Args)]
struct AssetIDList{
asset_ids:Vec<AssetID>
}
#[derive(Args)]
struct PathBufList{
paths:Vec<std::path::PathBuf>
@@ -107,7 +119,7 @@ async fn main()->AResult<()>{
(None,Some(env_var),None)=>Some(Cookie::Environment(env_var)),
(None,None,Some(path))=>Some(Cookie::File(path)),
(None,None,None)=>None,
_=>return Err(anyhow::Error::msg("Cookie was specified multiple times.")),
_=>Err(anyhow::Error::msg("Cookie was specified multiple times."))?,
}
};
let cookie=match cookie_enum{
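
Several hunks in this diff replace `return Err(x)` with `Err(x)?`. Both exit the function early; the `?` form is an expression, so it can occupy a match arm that must otherwise produce a value, and it routes the error through `From` conversion into the function's error type (`anyhow::Error` here). A minimal sketch of the pattern:

```rust
use anyhow::Result as AResult;

// The None arm must unify with u32, which `return Err(...)` also satisfies;
// `Err(...)?` additionally applies From-based error conversion on the way out.
fn pick(input: Option<u32>) -> AResult<u32> {
    let value = match input {
        Some(v) => v,
        None => Err(anyhow::Error::msg("no input"))?,
    };
    Ok(value + 1)
}
```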
@@ -131,19 +143,33 @@ async fn main()->AResult<()>{
|Some("RoxRojo")
|Some("RojoRox")=>Some(DecompileStyle::RoxRojo),
None=>None,
_=>return Err(anyhow::Error::msg("Invalid style")),
_=>Err(anyhow::Error::msg("Invalid style"))?,
};
match cli.command{
Commands::DownloadHistory=>download_history(DownloadHistoryConfig{
continue_from_versions:cli.r#continue,
end_version:cli.end_version,
start_version:cli.start_version.unwrap_or(0),
output_folder:cli.output.unwrap(),
cookie:cookie.unwrap(),
asset_id:cli.asset_id.unwrap(),
}).await,
Commands::Download=>download_list(cookie.unwrap(),vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
Commands::Download(asset_id_list)=>download_list(
cookie.unwrap(),
asset_id_list.asset_ids.into_iter().map(|asset_id|{
let mut path=cli.output.clone().unwrap();
path.push(asset_id.to_string());
(asset_id,path)
}).collect()
).await,
Commands::Upload=>upload_list(cookie.unwrap(),cli.group,vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
Commands::Compile=>compile(cli.input.unwrap(),cli.output.unwrap()),
Commands::Compile=>compile(CompileConfig{
input_folder:cli.input.unwrap(),
output_file:cli.output.unwrap(),
template:None,
style:None,
}).await,
Commands::Decompile=>decompile(DecompileConfig{
style:decompile_style.unwrap(),
input_file:cli.input.unwrap(),
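
On `cli.r#continue` above: `continue` is a Rust keyword, so the struct field needs the raw-identifier prefix, and (assuming clap 4's derive behavior of stripping the `r#` from the generated name) the flag is still plain `--continue`. A minimal sketch:

```rust
use clap::Parser;

#[derive(Parser)]
struct Flags {
    // Keyword escaped as a raw identifier; the derived flag is `--continue`.
    #[arg(long)]
    r#continue: bool,
}

fn main() {
    let flags = Flags::parse_from(["demo", "--continue"]);
    assert!(flags.r#continue);
}
```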
@@ -230,7 +256,7 @@ async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFi
.body(body)
.send().await?;
}else{
return Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"));
Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"))?;
}
}
@@ -351,13 +377,51 @@ async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str
}
struct DownloadHistoryConfig{
continue_from_versions:bool,
end_version:Option<u64>,
start_version:u64,
output_folder:std::path::PathBuf,
cookie:String,
asset_id:AssetID,
}
async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
let mut asset_list_contents=std::collections::HashSet::new();
let mut asset_list:Vec<AssetVersion>=Vec::new();
if config.end_version.is_none()&&config.continue_from_versions{
//load preexisting versions list
let mut versions_path=config.output_folder.clone();
versions_path.push("versions.json");
match std::fs::File::open(versions_path){
Ok(versions_file)=>asset_list.append(&mut serde_json::from_reader(versions_file)?),
Err(e)=>match e.kind(){
std::io::ErrorKind::NotFound=>Err(anyhow::Error::msg("Cannot continue from versions.json - file does not exist"))?,
_=>Err(e)?,
}
}
//write down which versions are contained
for asset_version in &asset_list{
asset_list_contents.insert(asset_version.assetVersionNumber);
}
//find the highest number
match asset_list.iter().map(|asset_version|asset_version.assetVersionNumber).max(){
Some(max)=>{
//count down contiguously until a number is missing
for i in (1..=max).rev(){
if !asset_list_contents.contains(&i){
//that is end_version
config.end_version=Some(i);
break;
}
}
//if all versions are contained, set start_version to the max + 1
if config.end_version.is_none(){
config.start_version=max+1;
}
},
None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
}
}
let client=reqwest::Client::new();
let asset_id_string=config.asset_id.to_string();
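
The `--continue` bookkeeping above walks down from the highest recorded version until it finds a hole; that hole becomes `end_version`, so the next run backfills the missing range before anything else. A standalone sketch of the same scan (illustrative helper, not shared code):

```rust
use std::collections::HashSet;

// Highest version number at or below `max` that is not yet downloaded.
fn first_gap_below(versions: &HashSet<u64>, max: u64) -> Option<u64> {
    (1..=max).rev().find(|v| !versions.contains(v))
}

fn main() {
    let have: HashSet<u64> = [1, 2, 3, 5, 6].into_iter().collect();
    // 6 and 5 are present, 4 is missing: end_version becomes Some(4).
    assert_eq!(first_gap_below(&have, 6), Some(4));
    // A contiguous history has no gap; the caller then resumes with
    // start_version = max + 1 instead.
    let full: HashSet<u64> = [1, 2, 3].into_iter().collect();
    assert_eq!(first_gap_below(&full, 3), None);
}
```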
@@ -367,23 +431,40 @@ async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
//poll paged list of all asset versions
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=download_page(&client,config.cookie.as_str(),config.asset_id,cursor).await?;
let client=&client;
let cookie=config.cookie.clone();
let asset_id_str=asset_id_string.clone();
let output_folder=config.output_folder.clone();
let data=&page.data;
let asset_list_contents=&asset_list_contents;
let join_set=&mut join_set;
let error_catcher=||async move{
let mut cancel_paging=false;
for asset_version in &page.data{
for asset_version in data{
let version_number=asset_version.assetVersionNumber;
//skip assets beyond specified end_version
if config.end_version.is_some_and(|v|v<version_number){
continue;
}
//skip assets lower than start_version and cancel paging asset versions
if version_number<config.start_version{
cancel_paging=true;
continue;//don't trust roblox returned order
}
//skip previously downloaded assets
if asset_list_contents.contains(&version_number){
continue;
}
while CONCURRENT_REQUESTS<=join_set.len(){
join_set.join_next().await.unwrap()??;
}
let client=client.clone();
let cookie=config.cookie.clone();
let asset_id_str=asset_id_string.clone();
let output_folder=config.output_folder.clone();
let cookie=cookie.clone();
let asset_id_str=asset_id_str.clone();
let mut path=output_folder.clone();
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
join_set.spawn(async move{
let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),version_number.to_string().as_str()).await?;
let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
@@ -391,17 +472,26 @@ async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
ReaderType::Raw(readable)=>read_readable(readable)?,
};
let mut path=output_folder;
path.set_file_name(format!("{}_v{}.rbxl",config.asset_id,version_number));
tokio::fs::write(path,contents).await?;
Ok::<_,anyhow::Error>(())
});
}
Ok::<_,anyhow::Error>(cancel_paging)
};
let cancel_paging=match error_catcher().await{
Ok(cancel)=>cancel,
Err(e)=>{
println!("download error: {}",e);
//cancel download and write versions
true
},
};
if page.nextPageCursor.is_none()||cancel_paging{
for asset_version in page.data.into_iter(){
if config.start_version<=asset_version.assetVersionNumber{
if !(asset_list_contents.contains(&asset_version.assetVersionNumber)
||config.end_version.is_some_and(|v|v<asset_version.assetVersionNumber)
||asset_version.assetVersionNumber<config.start_version){
asset_list.push(asset_version);
}
}
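
The `while CONCURRENT_REQUESTS<=join_set.len()` loop above throttles spawning by draining the `JoinSet` once it reaches the limit. A hedged alternative sketch using `tokio::sync::Semaphore`, where a permit is held for each in-flight download (the constant's value here is illustrative; the real one is defined elsewhere in this file):

```rust
use std::sync::Arc;
use tokio::sync::Semaphore;

const CONCURRENT_REQUESTS: usize = 8; // illustrative value

#[tokio::main]
async fn main() {
    let semaphore = Arc::new(Semaphore::new(CONCURRENT_REQUESTS));
    let mut join_set = tokio::task::JoinSet::new();
    for version in 1u64..=100 {
        // Waits here once CONCURRENT_REQUESTS permits are out.
        let permit = semaphore.clone().acquire_owned().await.unwrap();
        join_set.spawn(async move {
            let _permit = permit; // released when the task finishes
            // ... download `version` here ...
            version
        });
    }
    while join_set.join_next().await.is_some() {}
}
```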
@@ -484,28 +574,28 @@ enum WriteStackInstruction<'a>{
#[derive(Default)]
struct PropertiesOverride{
name:Option<String>,
class_name:Option<String>,
class:Option<String>,
}
impl PropertiesOverride{
fn is_some(&self)->bool{
self.name.is_some()
||self.class_name.is_some()
||self.class.is_some()
}
}
impl std::fmt::Display for PropertiesOverride{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
if let Some(name)=self.name.as_deref(){
writeln!(f,"--! Properties.Name=\"{}\"",name)?;
writeln!(f,"--!Properties.Name = \"{}\"",name)?;
}
if let Some(class_name)=self.class_name.as_deref(){
writeln!(f,"--! Properties.ClassName=\"{}\"",class_name)?;
if let Some(class)=self.class.as_deref(){
writeln!(f,"--!Properties.ClassName = \"{}\"",class)?;
}
Ok(())
}
}
}
fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
lazy_regex::regex!(r"[^a-zA-Z0-9._-]").replace_all(s,"_")
lazy_regex::regex!(r"[^A-z0-9.-]").replace_all(s,"_")
}
fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
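
The reworked `Display` impl above writes the exact header shape (`--!Properties.X = "Y"`) that `extract_script_overrides` later in this diff parses back. A round-trip sketch, which also flags a regex quirk: `[A-z]`, used in both the new `sanitize` pattern and the parser, is a byte range that additionally matches `[`, `\`, `]`, `^`, `_` and the backtick, where `[A-Za-z]` is presumably the intent:

```rust
// Assumes the lazy-regex crate already in Cargo.toml above.
fn main() {
    let header = r#"--!Properties.ClassName = "LocalScript""#;
    let re = lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-z]\w*)\s*\=\s*"(\w+)"$"#);
    let caps = re.captures(header).expect("Display output should match the parser");
    assert_eq!(&caps[1], "ClassName");
    assert_eq!(&caps[2], "LocalScript");
    // [A-z] quirk: a property starting with '_' is also accepted.
    assert!(re.captures(r#"--!Properties._Odd = "x""#).is_some());
}
```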
@@ -521,21 +611,21 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeN
match style{
DecompileStyle::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
DecompileStyle::RoxRojo|DecompileStyle::Rojo=>{
match properties.class_name.as_deref(){
match properties.class.as_deref(){
Some("LocalScript")=>{
file.set_extension("client.lua");
properties.class_name=None;
properties.class=None;
},
Some("Script")=>{
file.set_extension("server.lua");
properties.class_name=None;
properties.class=None;
},
// Some("ModuleScript")=>{
// file.set_extension("module");
// properties.class_name=None;
// properties.class=None;
// },
None=>assert!(file.set_extension("lua"),"could not set extension"),
Some(other)=>return Err(anyhow::Error::msg(format!("Attempt to write a {} as a script",other))),
Some(other)=>Err(anyhow::Error::msg(format!("Attempt to write a {} as a script",other)))?,
}
}
}
@@ -575,7 +665,7 @@ fn generate_decompiled_context<R:Read>(input:R)->AResult<DecompiledContext>{
let mut tree_refs=std::collections::HashMap::new();
tree_refs.insert(dom.root_ref(),TreeNode::new(
"src".to_string(),
"src".to_owned(),
dom.root_ref(),
Ref::none(),
Class::Folder
@@ -694,8 +784,8 @@ async fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<
match node.class{
Class::Folder=>(),
Class::ModuleScript=>(),//.lua files are ModuleScript by default
Class::LocalScript=>properties.class_name=Some("LocalScript".to_string()),
Class::Script=>properties.class_name=Some("Script".to_string()),
Class::LocalScript=>properties.class=Some("LocalScript".to_owned()),
Class::Script=>properties.class=Some("Script".to_owned()),
Class::Model=>(),
}
let name_override=if 0<name_count{
@@ -853,11 +943,9 @@ async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,De
let tree_id={
let mut tree_index = repo.index()?;
tree_index.add_all([config.output_folder.as_path()].iter(),git2::IndexAddOption::DEFAULT,None)?;
if tree_index.len()==0{
println!("tree_index.len()==0");
return Ok(());
}else{
tree_index.update_all([config.output_folder.as_path()].iter(),None)?;
match tree_index.update_all([config.output_folder.as_path()].iter(),None){
Ok(_)=>(),
Err(e)=>println!("tree_index.update_all error: {}",e),
}
tree_index.write()?;
tree_index.write_tree()?
@@ -908,12 +996,12 @@ struct DecompileHistoryConfig{
}
async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
//poll paged list of all asset versions
//use preexisting versions list
let mut versions_path=config.input_folder.clone();
versions_path.push("versions.json");
let asset_list:Vec<AssetVersion>=serde_json::from_reader(std::fs::File::open(versions_path)?)?;
let repo=git2::Repository::init(config.output_folder.clone())?;
let repo=git2::Repository::init(config.output_folder.as_path())?;
//decompile all versions
futures::stream::iter(asset_list.into_iter()
@@ -938,7 +1026,7 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
write_scripts:config.write_scripts,
},join_handle_result,&repo).await{
Ok(())=>(),
Err(e)=>println!("download/unzip/decompile/write/commit error: {}",e),
Err(e)=>println!("decompile/write/commit error: {}",e),
}
}).await;
Ok(())
@@ -998,6 +1086,449 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
Ok(())
}
fn compile(_folder:std::path::PathBuf,_file:std::path::PathBuf)->AResult<()>{
//holy smokes what am I doing lmao
//This giant machine is supposed to search for files according to style rules
//e.g. ScriptName.server.lua or init.lua
//Obviously I got carried away
//I could use an enum!
//I could use a struct!
//I could use a trait!
//I could use an error!
//I could use a match!
//I could use a function!
//eventually:
#[derive(Debug)]
enum QueryResolveError{
NotFound,//0 results
Ambiguous,//>1 results
JoinError(tokio::task::JoinError),
IO(std::io::Error),
}
impl std::fmt::Display for QueryResolveError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for QueryResolveError{}
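
The hand-written `Display` and `Error` impls above are exactly what the thiserror crate derives; a hedged equivalent (thiserror would be an added dependency, not present in this diff's Cargo.toml):

```rust
// Sketch only: same error surface, derived instead of hand-rolled.
#[derive(Debug, thiserror::Error)]
enum QueryResolveError {
    #[error("not found")] // 0 results
    NotFound,
    #[error("ambiguous")] // >1 results
    Ambiguous,
    #[error("join error: {0}")]
    JoinError(#[from] tokio::task::JoinError),
    #[error("io error: {0}")]
    IO(#[from] std::io::Error),
}
```

The `#[from]` attributes would also let the `?` operator replace the explicit `map_err(|e|QueryResolveError::JoinError(e))` calls further down.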
async fn get_file_async(mut path:std::path::PathBuf,file_name:impl AsRef<std::path::Path>)->Result<tokio::fs::File,QueryResolveError>{
path.push(file_name);
match tokio::fs::File::open(path).await{
Ok(file)=>Ok(file),
Err(e)=>match e.kind(){
std::io::ErrorKind::NotFound=>Err(QueryResolveError::NotFound),
_=>Err(QueryResolveError::IO(e)),
},
}
}
type QueryHintResult=Result<FileHint,QueryResolveError>;
trait Query{
async fn resolve(self)->QueryHintResult;
}
struct QuerySingle(tokio::task::JoinHandle<Result<tokio::fs::File,QueryResolveError>>);
impl QuerySingle{
fn rox(search_path:&std::path::PathBuf,search_name:&str)->Self{
Self(tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name))))
}
}
impl Query for QuerySingle{
async fn resolve(self)->QueryHintResult{
match self.0.await{
Ok(Ok(file))=>Ok(FileHint{file,hint:ScriptHint::Undetermined}),
Ok(Err(e))=>Err(e),
Err(e)=>Err(QueryResolveError::JoinError(e)),
}
}
}
struct QueryTriple{
module:QuerySingle,
server:QuerySingle,
client:QuerySingle,
}
impl QueryTriple{
fn rox_rojo(search_path:&std::path::PathBuf,search_name:&str,search_module:bool)->Self{
//this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
let module_name=if search_module{
format!("{}.module.lua",search_name)
}else{
format!("{}.lua",search_name)
};
Self{
module:QuerySingle(tokio::spawn(get_file_async(search_path.clone(),module_name))),
server:QuerySingle(tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name)))),
client:QuerySingle(tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name)))),
}
}
fn rojo(search_path:&std::path::PathBuf)->Self{
QueryTriple::rox_rojo(search_path,"init",false)
}
}
//these functions can be achieved with macros, but I have not learned that yet
fn mega_triple_join(query_triplet:(QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
match query_triplet{
//unambiguously locate file
(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
//multiple files located
(Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
//no files located
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
//other error
(Err(e),_,_)
|(_,Err(e),_)
|(_,_,Err(e))=>Err(e),
}
}
//LETS GOOOOOOOOOOOOOOOO
fn mega_quadruple_join(query_quad:(QueryHintResult,QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
match query_quad{
//unambiguously locate file
(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
//multiple files located
(Ok(_),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
//no files located
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
//other error
(Err(e),_,_,_)
|(_,Err(e),_,_)
|(_,_,Err(e),_)
|(_,_,_,Err(e))=>Err(e),
}
}
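
As the comment above concedes, `mega_triple_join` and `mega_quadruple_join` are macro candidates; the rule they encode is "exactly one hit wins, `NotFound` is ignored, anything else propagates". A loop-based sketch over the types defined above that covers any arity (one caveat: it reports `Ambiguous` as soon as a second hit appears, whereas the match versions would surface a later I/O error first):

```rust
fn mega_join(results: impl IntoIterator<Item = QueryHintResult>) -> QueryHintResult {
    let mut found: QueryHintResult = Err(QueryResolveError::NotFound);
    for result in results {
        match result {
            // a second located file makes the resolution ambiguous
            Ok(_) if found.is_ok() => return Err(QueryResolveError::Ambiguous),
            Ok(hit) => found = Ok(hit),
            // a query that found nothing is fine for the group as a whole
            Err(QueryResolveError::NotFound) => (),
            // join/IO errors propagate immediately
            Err(e) => return Err(e),
        }
    }
    found
}
```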
impl Query for QueryTriple{
async fn resolve(self)->QueryHintResult{
let (module,server,client)=tokio::join!(self.module.0,self.server.0,self.client.0);
mega_triple_join((
module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
))
}
}
struct QueryQuad{
module_implicit:QuerySingle,
module_explicit:QuerySingle,
server:QuerySingle,
client:QuerySingle,
}
impl QueryQuad{
fn rox_rojo(search_path:&std::path::PathBuf,search_name:&str)->Self{
let fill=QueryTriple::rox_rojo(search_path,search_name,true);
Self{
module_implicit:QuerySingle::rox(search_path,search_name),//Script.lua
module_explicit:fill.module,//Script.module.lua
server:fill.server,
client:fill.client,
}
}
}
impl Query for QueryQuad{
async fn resolve(self)->QueryHintResult{
let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit.0,self.module_explicit.0,self.server.0,self.client.0);
mega_quadruple_join((
module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
))
}
}
struct ScriptWithOverrides{
overrides:PropertiesOverride,
source:String,
}
fn extract_script_overrides(mut source:String)->AResult<ScriptWithOverrides>{
let mut overrides=PropertiesOverride::default();
let mut count=0;
for line in source.lines(){
//only string type properties are supported atm
if let Some(captures)=lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-z]\w*)\s*\=\s*"(\w+)"$"#)
.captures(line){
count+=line.len();
match &captures[1]{
"Name"=>overrides.name=Some(captures[2].to_owned()),
"ClassName"=>overrides.class=Some(captures[2].to_owned()),
other=>Err(anyhow::Error::msg(format!("Unimplemented property {other}")))?,
}
}else{
break;
}
}
Ok(ScriptWithOverrides{overrides,source:source.split_off(count)})
}
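
One subtlety in `extract_script_overrides`: `count` accumulates `line.len()`, but `lines()` strips the terminator, so `split_off(count)` leaves a stray leading `\n` in the returned source for a single header line and lands mid-line once there are two or more headers (CRLF endings drift further). A tiny demonstration of the off-by-one:

```rust
fn main() {
    let source = "--!Properties.Name = \"Main\"\nprint(1)\n".to_string();
    let header_len = source.lines().next().unwrap().len();
    // splitting at line.len() starts at the stripped '\n', not the script body:
    assert!(source[header_len..].starts_with('\n'));
    // counting the terminator too (line.len() + 1, assuming LF) fixes it:
    assert_eq!(&source[header_len + 1..], "print(1)\n");
}
```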
async fn script_node(search_name:&str,mut file:tokio::fs::File,hint:ScriptHint)->AResult<CompileNode>{
//read entire file
let mut buf=String::new();
file.read_to_string(&mut buf).await?;
//regex script according to Properties lines at the top
let script_with_overrides=extract_script_overrides(buf)?;
//script
Ok(CompileNode{
name:script_with_overrides.overrides.name.unwrap_or_else(||search_name.to_owned()),
class:match (script_with_overrides.overrides.class.as_deref(),hint){
(Some("ModuleScript"),_)
|(None,ScriptHint::ModuleScript|ScriptHint::Undetermined)=>CompileClass::ModuleScript(script_with_overrides.source),
(Some("LocalScript"),_)
|(None,ScriptHint::Script)=>CompileClass::LocalScript(script_with_overrides.source),
(Some("Script"),_)
|(None,ScriptHint::LocalScript)=>CompileClass::Script(script_with_overrides.source),
other=>Err(anyhow::Error::msg(format!("Invalid hint or class {other:?}")))?,
},
})
}
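
Worth a second look in `script_node`: the hints originate from file suffixes (`.server.lua` resolves to `ScriptHint::Script`, `.client.lua` to `ScriptHint::LocalScript`), yet the two `None` fallback arms map `ScriptHint::Script` to `CompileClass::LocalScript` and vice versa, inverting the decompile-side convention above (`Class::Script` writes `.server.lua`, `Class::LocalScript` writes `.client.lua`). If that reading is right, the intended arms would be:

```rust
// Hedged correction: fall back to the class the hint actually names.
(Some("LocalScript"), _) | (None, ScriptHint::LocalScript) =>
    CompileClass::LocalScript(script_with_overrides.source),
(Some("Script"), _) | (None, ScriptHint::Script) =>
    CompileClass::Script(script_with_overrides.source),
```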
async fn model_node(search_name:&str,mut file:tokio::fs::File)->AResult<CompileNode>{
//read entire file
let mut buf=Vec::new();
file.read_to_end(&mut buf).await?;
//model
Ok(CompileNode{
name:search_name.to_owned(),
class:CompileClass::Model(buf),
})
}
async fn discern_node(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->AResult<Option<CompileNode>>{
let contents_folder=entry.path();
let file_name=entry.file_name();
//is folder? else exit flow control
match tokio::fs::read_dir(contents_folder.as_path()).await{
Ok(_)=>(),//continue flow
Err(e)=>{println!("{:?}",e.raw_os_error());match e.raw_os_error(){
Some(0)//std::io::ErrorKind::NotFound
|Some(20)=>return Ok(None),//std::io::ErrorKind::NotADirectory (not allowed to be used but returns it anyways)
_=>Err(e)?,
}}
}
let search_name=file_name.to_str().unwrap();
//scan inside the folder for an object to define the class of the folder
let script_query=async {match style{
Some(DecompileStyle::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
Some(DecompileStyle::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
Some(DecompileStyle::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
//try all three and complain if there is ambiguity
None=>mega_triple_join(tokio::join!(
QuerySingle::rox(&contents_folder,search_name).resolve(),
//true=search for module here to avoid ambiguity with QuerySingle::rox results
QueryTriple::rox_rojo(&contents_folder,search_name,true).resolve(),
QueryTriple::rojo(&contents_folder).resolve(),
))
}};
//model files are rox & rox-rojo only, so it's a lot less work...
let model_query=get_file_async(contents_folder.clone(),format!("{}.rbxmx",search_name));
//model? script? both?
Ok(Some(match tokio::join!(script_query,model_query){
(Ok(FileHint{file,hint}),Err(QueryResolveError::NotFound))=>script_node(search_name,file,hint).await?,
(Err(QueryResolveError::NotFound),Ok(file))=>model_node(search_name,file).await?,
(Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous)?,
//neither
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>CompileNode{
name:search_name.to_owned(),
class:CompileClass::Folder,
},
//other error
(Err(e),_)
|(_,Err(e))=>Err(e)?
}))
}
enum FileDiscernment{
Model,
Script(ScriptHint),
}
async fn discern_file(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->AResult<Option<CompileNode>>{
let path=entry.path();
let file_discernment=match path.extension(){
Some(extension)=>match extension.to_str(){
Some("lua")=>FileDiscernment::Script(ScriptHint::Undetermined),
Some("module.lua")=>FileDiscernment::Script(ScriptHint::ModuleScript),
Some("client.lua")=>FileDiscernment::Script(ScriptHint::LocalScript),
Some("server.lua")=>FileDiscernment::Script(ScriptHint::Script),
Some("rbxmx")=>FileDiscernment::Model,
other=>Err(anyhow::Error::msg(format!("Weird file extension: {other:?}")))?,
},
None=>Err(anyhow::Error::msg("No file extension"))?,
};
//reject goobers
match (style,&file_discernment){
(Some(DecompileStyle::Rojo),FileDiscernment::Script(ScriptHint::ModuleScript))
|(Some(DecompileStyle::Rojo),FileDiscernment::Model)=>Err(anyhow::Error::msg("Invalid file extension for style"))?,
_=>(),
}
let search_name=path.to_str().unwrap();
let file=tokio::fs::File::open(path.as_path()).await?;
Ok(Some(match file_discernment{
FileDiscernment::Model=>model_node(search_name,file).await?,
FileDiscernment::Script(hint)=>script_node(search_name,file,hint).await?,
}))
}
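
A caveat on the extension match in `discern_file`: `std::path::Path::extension` returns only the part after the last dot, so a `Foo.server.lua` path yields `Some("lua")` and the `module.lua` / `client.lua` / `server.lua` arms can never match (nor is `search_name` trimmed; `path.to_str()` keeps the directory and suffix). A sketch that discerns on the full file name instead, using the `FileDiscernment` and `ScriptHint` types defined in this file:

```rust
// Illustrative: suffix-based discernment that also yields the bare name.
fn discern_by_name(file_name: &str) -> Option<(&str, FileDiscernment)> {
    // Longest suffixes first, since ".lua" terminates all the script forms.
    for (suffix, discernment) in [
        (".module.lua", FileDiscernment::Script(ScriptHint::ModuleScript)),
        (".client.lua", FileDiscernment::Script(ScriptHint::LocalScript)),
        (".server.lua", FileDiscernment::Script(ScriptHint::Script)),
        (".rbxmx", FileDiscernment::Model),
        (".lua", FileDiscernment::Script(ScriptHint::Undetermined)),
    ] {
        if let Some(stem) = file_name.strip_suffix(suffix) {
            return Some((stem, discernment));
        }
    }
    None
}
```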
#[derive(Debug)]
enum ScriptHint{
Script,
LocalScript,
ModuleScript,
Undetermined,
}
struct FileHint{
file:tokio::fs::File,
hint:ScriptHint,
}
enum PreparedData{
Model(rbx_dom_weak::WeakDom),
Builder(rbx_dom_weak::InstanceBuilder),
}
enum CompileClass{
Folder,
Script(String),
LocalScript(String),
ModuleScript(String),
Model(Vec<u8>),
}
struct CompileNode{
name:String,
class:CompileClass,
}
enum CompileStackInstruction{
TraverseReferent(rbx_dom_weak::types::Ref),
PopFolder,
}
struct CompileConfig{
input_folder:std::path::PathBuf,
output_file:std::path::PathBuf,
template:Option<std::path::PathBuf>,
style:Option<DecompileStyle>,
}
fn script_builder(class:&str,name:&str,source:String)->rbx_dom_weak::InstanceBuilder{
let mut builder=rbx_dom_weak::InstanceBuilder::new(class);
builder.set_name(name);
builder.add_property("Source",rbx_dom_weak::types::Variant::String(source));
builder
}
async fn compile(config:CompileConfig)->AResult<()>{
//basically decompile in reverse order
//load template dom
let input={
let template_path=config.template.unwrap_or_else(||{
let mut template_path=config.input_folder.clone();
template_path.push("template.rbxlx");
template_path
});
//mr dom doesn't like tokio files
std::io::BufReader::new(std::fs::File::open(template_path)?)
};
let mut dom=load_dom(input)?;
//add in scripts and models
let mut folder=config.input_folder.clone();
folder.push("src");
let mut stack:Vec<CompileStackInstruction>=dom.get_by_ref(dom.root().children()[0]).unwrap().children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent)).collect();
while let Some(instruction)=stack.pop(){
match instruction{
CompileStackInstruction::TraverseReferent(item_ref)=>{
let sans={
let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
sanitize(item.name.as_str()).to_string()
};
folder.push(sans.as_str());
stack.push(CompileStackInstruction::PopFolder);
//check if a folder exists with item.name
if let Ok(mut dir)=tokio::fs::read_dir(folder.as_path()).await{
let mut exist_names=std::collections::HashSet::new();
{
let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
for &child_ref in item.children(){
let child=dom.get_by_ref(child_ref).ok_or(anyhow::Error::msg("null child ref"))?;
let child_sans=sanitize(child.name.as_str()).to_string();
exist_names.insert(child_sans);
}
}
//generate children from folder contents UNLESS! item already has a child of the same name
let mut join_set=tokio::task::JoinSet::new();
//I wish I could make the join_next() loop begin processing immediately,
//but I don't know an ergonomic way to do that.
//this will probably be fine considering there won't be millions of files in the directories
while let Some(entry)=dir.next_entry().await?{
//cull early even if supporting things with identical names is possible
if !exist_names.contains(entry.file_name().to_str().unwrap()){
let style=config.style;
join_set.spawn(async move{
let met=entry.metadata().await?;
let scooby_doo=if met.is_dir(){
discern_node(&entry,style).await
}else{
discern_file(&entry,style).await
};
//discern that bad boy
Ok::<_,anyhow::Error>(match scooby_doo?{
Some(compile_class)=>{
//prepare data structure
Some(match compile_class.class{
CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf))?),
})
},
None=>None,
})
});
}
}
//this is only able to begin after dir iterator is exhausted
//push child objects onto dom
while let Some(goober)=join_set.join_next().await{
match goober??{
Some(PreparedData::Model(mut model_dom))=>model_dom.transfer(model_dom.root().children()[0],&mut dom,item_ref),
Some(PreparedData::Builder(script))=>{dom.insert(item_ref,script);},
None=>print!("There was a None"),
}
}
//push dom children objects onto stack
let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent)));
}
},
CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
}
}
let mut output_place=config.output_file.clone();
if output_place.extension().is_none()&&tokio::fs::try_exists(output_place.as_path()).await?{
output_place.push("place.rbxl");
}
let output=std::io::BufWriter::new(std::fs::File::create(output_place)?);
rbx_binary::to_writer(output,&dom,&[dom.root_ref()])?;
Ok(())
}
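
For readers new to the compile traversal above: pushing a `PopFolder` marker before a node's children means the `PathBuf` unwinds exactly when a subtree is exhausted, giving an iterative depth-first walk with no recursion. A toy-sized sketch of the same two-instruction stack pattern:

```rust
// Toy tree: index -> child indices.
enum Instruction {
    Visit(usize),
    PopFolder,
}

fn main() {
    let children: Vec<Vec<usize>> = vec![vec![1, 2], vec![], vec![3], vec![]];
    let mut path = std::path::PathBuf::from("src");
    let mut stack = vec![Instruction::Visit(0)];
    while let Some(instruction) = stack.pop() {
        match instruction {
            Instruction::Visit(node) => {
                path.push(node.to_string());
                println!("{}", path.display()); // src/0, src/0/2, src/0/2/3, src/0/1
                stack.push(Instruction::PopFolder);
                stack.extend(children[node].iter().map(|&c| Instruction::Visit(c)));
            }
            // A subtree is finished: drop its path component.
            Instruction::PopFolder => {
                path.pop();
            }
        }
    }
}
```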