commit dba7aa427f (parent 48c2a010d8)
Quaternions 2024-07-01 13:46:02 -07:00
7 changed files with 262 additions and 242 deletions

Cargo.lock (generated): 2 lines changed

@@ -118,7 +118,6 @@ dependencies = [
  "git2",
  "lazy-regex",
  "pollster",
- "rayon",
  "rbx_asset",
  "rbx_binary",
  "rbx_dom_weak",
@@ -1358,6 +1357,7 @@ version = "0.1.0"
 dependencies = [
  "futures",
  "lazy-regex",
+ "rayon",
  "rbx_dom_weak",
  "rbx_xml",
  "tokio",

Cargo.toml (main crate)

@@ -13,7 +13,6 @@ futures = "0.3.30"
 git2 = "0.18.1"
 lazy-regex = "3.1.0"
 pollster = "0.3.0"
-rayon = "1.8.0"
 rbx_asset = { path = "rbx_asset" }
 rbx_binary = "0.7.4"
 rbx_dom_weak = "2.7.0"

Cargo.toml (rox_compiler crate)

@@ -6,6 +6,7 @@ edition = "2021"
 [dependencies]
 futures = "0.3.30"
 lazy-regex = "3.1.0"
+rayon = "1.8.0"
 rbx_dom_weak = "2.7.0"
 rbx_xml = "0.13.3"
 tokio = { version = "1.35.1", features = ["fs"] }

rox_compiler: compile module

@@ -1,7 +1,8 @@
 use std::path::PathBuf;
+use futures::{StreamExt, TryStreamExt};
 use tokio::io::AsyncReadExt;
-use crate::types::{DecompileStyle,PropertiesOverride};
+use crate::types::{Style,PropertiesOverride};
 use crate::common::sanitize;
 //holy smokes what am I doing lmao
@@ -235,7 +236,7 @@ enum CompileNodeError{
	FileName(std::ffi::OsString),
	ExtensionNotSupportedInStyle{
		extension:String,
-		style:Option<DecompileStyle>,
+		style:Option<Style>,
	},
	NoExtension,
 }
@@ -279,15 +280,15 @@ impl CompileNode{
		})
	}
-	async fn from_folder(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->Result<Self,CompileNodeError>{
+	async fn from_folder(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
		let contents_folder=entry.path();
		let file_name=entry.file_name();
		let search_name=file_name.to_str().unwrap();
		//scan inside the folder for an object to define the class of the folder
		let script_query=async {match style{
-			Some(DecompileStyle::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
-			Some(DecompileStyle::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
-			Some(DecompileStyle::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
+			Some(Style::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
+			Some(Style::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
+			Some(Style::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
			//try all three and complain if there is ambiguity
			None=>mega_triple_join(tokio::join!(
				QuerySingle::rox(&contents_folder,search_name).resolve(),
@@ -315,14 +316,14 @@ impl CompileNode{
		})
	}
-	async fn from_file(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->Result<Self,CompileNodeError>{
+	async fn from_file(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
		let mut file_name=entry
			.file_name()
			.into_string()
			.map_err(CompileNodeError::FileName)?;
		//reject goobers
		let is_goober=match style{
-			Some(DecompileStyle::Rojo)=>true,
+			Some(Style::Rojo)=>true,
			_=>false,
		};
		let (ext_len,file_discernment)={
@@ -394,11 +395,20 @@ enum TooComplicated<T>{
	Skip,
 }
-enum CompileError{
-	NullChildRef,
-}
-async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<(),CompileError>{
+pub struct CompileConfig{
+	input_folder:PathBuf,
+	style:Option<Style>,
+}
+enum CompileError{
+	NullChildRef,
+	IO(std::io::Error),
+	CompileNode(CompileNodeError),
+	DecodeError(rbx_xml::DecodeError),
+	JoinError(tokio::task::JoinError),
+}
+async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
	//add in scripts and models
	let mut folder=config.input_folder.clone();
	let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
@@ -446,14 +456,14 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
						}else{
							TooComplicated::Stop
						};
-						Ok::<_,anyhow::Error>(ret2)
+						Ok::<_,std::io::Error>(ret2)
					})().await
				};
				match ret1{
					Ok(TooComplicated::Stop)=>None,
					Ok(TooComplicated::Skip)=>Some((Ok(None),dir1)),
					Ok(TooComplicated::Value(v))=>Some((Ok(Some(v)),dir1)),
-					Err(e)=>Some((Err(e),dir1)),
+					Err(e)=>Some((Err(CompileError::IO(e)),dir1)),
				}
			})
@@ -461,21 +471,21 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
			.then(|bog|async{
				match bog{
					Ok(Some(entry))=>tokio::spawn(async move{
-						let met=entry.metadata().await?;
+						let met=entry.metadata().await.map_err(CompileError::IO)?;
						//discern that bad boy
						let compile_class=match met.is_dir(){
-							true=>CompileNode::from_folder(&entry,style).await?,
-							false=>CompileNode::from_file(&entry,style).await?,
-						};
+							true=>CompileNode::from_folder(&entry,style).await,
+							false=>CompileNode::from_file(&entry,style).await,
+						}.map_err(CompileError::CompileNode)?;
						//prepare data structure
						Ok(Some((compile_class.blacklist,match compile_class.class{
							CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
							CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
							CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
							CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
-							CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf))?),
+							CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf)).map_err(CompileError::DecodeError)?),
						})))
-					}).await?,
+					}).await.map_err(CompileError::JoinError)?,
					Ok(None)=>Ok(None),
					Err(e)=>Err(e),
				}
@@ -485,10 +495,10 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
			.map(|f|async{f}).buffer_unordered(32)
			//begin processing immediately
-			.fold((&mut stack,&mut dom),|(stack,dom),bog:Result<_,anyhow::Error>|async{
+			.try_fold((&mut stack,&mut dom),|(stack,dom):(&mut Vec<CompileStackInstruction>,_),bog|async{
				//push child objects onto dom serially as they arrive
				match bog{
-					Ok(Some((blacklist,data)))=>{
+					Some((blacklist,data))=>{
						let referent=match data{
							PreparedData::Model(mut model_dom)=>{
								let referent=model_dom.root().children()[0];
@@ -500,14 +510,14 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
						//new children need to be traversed
						stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
					},
-					Ok(None)=>(),
-					Err(e)=>println!("error lole {e:?}"),
+					None=>(),
				}
-				(stack,dom)
-			}).await;
+				Ok((stack,dom))
+			}).await?;
			}
		},
		CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
		}
	}
+	unreachable!();
 }
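
For orientation, the new public entry point above is driven from the CLI's compile() later in this commit. The snippet below is an illustrative sketch only; the DOM setup and the "src" path are assumptions, not part of the diff.

	// Hypothetical call site mirroring the change made in the CLI further down.
	let mut dom=rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel"));
	dom.root_mut().name="src".to_owned();//hack to traverse root folder as the root object
	rox_compiler::compile::compile(rox_compiler::types::CompileConfig{
		input_folder:std::path::PathBuf::from("src"),//assumed input path
		style:None,//None tries all three styles and complains on ambiguity
	},&mut dom).await?;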

rox_compiler: decompile module

@@ -1,6 +1,6 @@
-use std::{io::Read, path::PathBuf};
+use std::path::PathBuf;
 use rbx_dom_weak::types::Ref;
-use crate::{common::sanitize, types::{DecompileStyle, PropertiesOverride}};
+use crate::{common::sanitize, types::{Style, PropertiesOverride}};
 #[derive(PartialEq)]
 enum Class{
@@ -43,7 +43,13 @@ enum WriteStackInstruction<'a>{
	Destroy(Ref),
 }
-fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
+enum WriteError{
+	ClassNotScript(String),
+	IO(std::io::Error),
+	EncodeError(rbx_xml::EncodeError),
+}
+fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:Style,write_models:bool,write_scripts:bool)->Result<(),WriteError>{
	file.push(sanitize(node_name_override.as_str()).as_ref());
	match node.class{
		Class::Folder=>(),
@@ -54,8 +60,8 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
			//set extension
			match style{
-				DecompileStyle::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
-				DecompileStyle::RoxRojo|DecompileStyle::Rojo=>{
+				Style::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
+				Style::RoxRojo|Style::Rojo=>{
					match properties.class.as_deref(){
						Some("LocalScript")=>{
							file.set_extension("client.lua");
@@ -70,7 +76,7 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
						// properties.class=None;
						// },
						None=>assert!(file.set_extension("lua"),"could not set extension"),
-						Some(other)=>Err(anyhow::Error::msg(format!("Attempt to write a {} as a script",other)))?,
+						Some(other)=>Err(WriteError::ClassNotScript(other.to_owned()))?,
					}
				}
			}
@@ -81,9 +87,9 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
			if properties.is_some(){
				//rox style
				let source=properties.to_string()+source.as_str();
-				std::fs::write(file,source)?;
+				std::fs::write(file,source).map_err(WriteError::IO)?;
			}else{
-				std::fs::write(file,source)?;
+				std::fs::write(file,source).map_err(WriteError::IO)?;
			}
		}
	}
@@ -93,216 +99,218 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
				return Ok(())
			}
			assert!(file.set_extension("rbxmx"));
-			let output=std::io::BufWriter::new(std::fs::File::create(file)?);
-			rbx_xml::to_writer_default(output,dom,&[node.referent])?;
+			let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
+			rbx_xml::to_writer_default(output,dom,&[node.referent]).map_err(WriteError::EncodeError)?;
		},
	}
	Ok(())
 }
-struct DecompiledContext{
-	dom:rbx_dom_weak::WeakDom,
-	tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
-}
-fn generate_decompiled_context(dom:rbx_dom_weak::WeakDom)->Result<DecompiledContext,DecompileError>{
-	let mut tree_refs=std::collections::HashMap::new();
-	tree_refs.insert(dom.root_ref(),TreeNode::new(
-		"src".to_owned(),
-		dom.root_ref(),
-		Ref::none(),
-		Class::Folder
-	));
-	//run rules
-	let mut stack=vec![dom.root()];
-	while let Some(item)=stack.pop(){
-		let class=match item.class.as_str(){
-			"ModuleScript"=>Class::ModuleScript,
-			"LocalScript"=>Class::LocalScript,
-			"Script"=>Class::Script,
-			"Model"=>Class::Model,
-			_=>Class::Folder,
-		};
-		let skip=match class{
-			Class::Model=>true,
-			_=>false,
-		};
-		if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
-			let referent=item.referent();
-			let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
-			parent_node.children.push(referent);
-			tree_refs.insert(referent,node);
-		}
-		//look no further, turn this node and all its children into a model
-		if skip{
-			continue;
-		}
-		for &referent in item.children(){
-			if let Some(c)=dom.get_by_ref(referent){
-				stack.push(c);
-			}
-		}
-	}
-	//trim empty folders
-	let mut script_count=0;
-	let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
-		.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
-	while let Some(instruction)=stack.pop(){
-		match instruction{
-			TrimStackInstruction::IncrementScript=>script_count+=1,
-			TrimStackInstruction::DecrementScript=>script_count-=1,
-			TrimStackInstruction::Referent(referent)=>{
-				let mut delete=None;
-				if let Some(node)=tree_refs.get_mut(&referent){
-					if node.class==Class::Folder&&script_count!=0{
-						node.class=Class::Model
-					}
-					if node.class==Class::Folder&&node.children.len()==0{
-						delete=Some(node.parent);
-					}else{
-						//how the hell do I do this better without recursion
-						let is_script=match node.class{
-							Class::ModuleScript|Class::LocalScript|Class::Script=>true,
-							_=>false,
-						};
-						//stack is popped from back
-						if is_script{
-							stack.push(TrimStackInstruction::DecrementScript);
-						}
-						for &child_referent in &node.children{
-							stack.push(TrimStackInstruction::Referent(child_referent));
-						}
-						if is_script{
-							stack.push(TrimStackInstruction::IncrementScript);
-						}
-					}
-				}
-				//trim referent
-				if let Some(parent_ref)=delete{
-					let parent_node=tree_refs.get_mut(&parent_ref)
-						.expect("parent_ref does not exist in tree_refs");
-					parent_node.children.remove(
-						parent_node.children.iter()
-							.position(|&r|r==referent)
-							.expect("parent.children does not contain referent")
-					);
-					tree_refs.remove(&referent);
-				}
-			},
-		}
-	}
-	Ok(DecompiledContext{
-		dom,
-		tree_refs,
-	})
-}
-struct WriteConfig{
-	style:DecompileStyle,
+pub struct WriteConfig{
+	style:Style,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
 }
-async fn write_files(config:WriteConfig,mut context:DecompiledContext)->Result<(),WriteError>{
-	let mut write_queue=Vec::new();
-	let mut destroy_queue=Vec::new();
-	let mut name_tally=std::collections::HashMap::<String,u32>::new();
-	let mut folder=config.output_folder.clone();
-	let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)];
-	while let Some(instruction)=stack.pop(){
-		match instruction{
-			WriteStackInstruction::PushFolder(component)=>folder.push(component),
-			WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
-			WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
-			WriteStackInstruction::Node(node,name_count)=>{
-				//track properties that must be overriden to compile folder structure back into a place file
-				let mut properties=PropertiesOverride::default();
-				let has_children=node.children.len()!=0;
-				match node.class{
-					Class::Folder=>(),
-					Class::ModuleScript=>(),//.lua files are ModuleScript by default
-					Class::LocalScript=>properties.class=Some("LocalScript".to_owned()),
-					Class::Script=>properties.class=Some("Script".to_owned()),
-					Class::Model=>(),
-				}
-				let name_override=if 0<name_count{
-					properties.name=Some(node.name.clone());
-					format!("{}_{}",node.name,name_count)
-				}else{
-					node.name.clone()
-				};
-				if has_children{
-					//push temp subfolder
-					let mut subfolder=folder.clone();
-					subfolder.push(sanitize(name_override.as_str()).as_ref());
-					//make folder
-					tokio::fs::create_dir(subfolder.clone()).await?;
-					let name_final=match config.style{
-						DecompileStyle::Rox
-						|DecompileStyle::RoxRojo=>name_override.clone(),
-						DecompileStyle::Rojo=>"init".to_owned(),
-					};
-					//write item in subfolder
-					write_queue.push((subfolder,node,name_final,properties,config.style));
-				}else{
-					//write item
-					write_queue.push((folder.clone(),node,name_override.clone(),properties,config.style));
-				}
-				//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
-				match node.class{
-					Class::Folder=>(),
-					_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
-				}
-				if has_children{
-					stack.push(WriteStackInstruction::PopFolder);
-					name_tally.clear();
-					for referent in &node.children{
-						if let Some(c)=context.tree_refs.get(referent){
-							let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
-							stack.push(WriteStackInstruction::Node(c,*v));
-						}
-					}
-					stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
-				}
-			},
-		}
-	}
-	//run the async
-	{
-		let dom=&context.dom;
-		let write_models=config.write_models;
-		let write_scripts=config.write_scripts;
-		let results:Vec<AResult<()>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
-			write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
-		}));
-		for result in results{
-			result?;
-		}
-	}
-	//run the destroy
-	for destroy_ref in destroy_queue{
-		context.dom.destroy(destroy_ref);
-	}
-	//write what remains in template.rbxlx
-	if config.write_template{
-		let mut file=config.output_folder.clone();
-		file.push("template");
-		assert!(file.set_extension("rbxlx"));
-		let output=std::io::BufWriter::new(std::fs::File::create(file)?);
-		rbx_xml::to_writer_default(output,&context.dom,context.dom.root().children())?;
-	}
-	Ok(())
+pub struct DecompiledContext{
+	dom:rbx_dom_weak::WeakDom,
+	tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
+}
+impl DecompiledContext{
+	/// Will panic on circular tree structure but otherwise infallible
+	pub fn from_dom(dom:rbx_dom_weak::WeakDom)->Self{
+		let mut tree_refs=std::collections::HashMap::new();
+		tree_refs.insert(dom.root_ref(),TreeNode::new(
+			"src".to_owned(),
+			dom.root_ref(),
+			Ref::none(),
+			Class::Folder
+		));
+		//run rules
+		let mut stack=vec![dom.root()];
+		while let Some(item)=stack.pop(){
+			let class=match item.class.as_str(){
+				"ModuleScript"=>Class::ModuleScript,
+				"LocalScript"=>Class::LocalScript,
+				"Script"=>Class::Script,
+				"Model"=>Class::Model,
+				_=>Class::Folder,
+			};
+			let skip=match class{
+				Class::Model=>true,
+				_=>false,
+			};
+			if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
+				let referent=item.referent();
+				let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
+				parent_node.children.push(referent);
+				tree_refs.insert(referent,node);
+			}
+			//look no further, turn this node and all its children into a model
+			if skip{
+				continue;
+			}
+			for &referent in item.children(){
+				if let Some(c)=dom.get_by_ref(referent){
+					stack.push(c);
+				}
+			}
+		}
+		//trim empty folders
+		let mut script_count=0;
+		let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
+			.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
+		while let Some(instruction)=stack.pop(){
+			match instruction{
+				TrimStackInstruction::IncrementScript=>script_count+=1,
+				TrimStackInstruction::DecrementScript=>script_count-=1,
+				TrimStackInstruction::Referent(referent)=>{
+					let mut delete=None;
+					if let Some(node)=tree_refs.get_mut(&referent){
+						if node.class==Class::Folder&&script_count!=0{
+							node.class=Class::Model
+						}
+						if node.class==Class::Folder&&node.children.len()==0{
+							delete=Some(node.parent);
+						}else{
+							//how the hell do I do this better without recursion
+							let is_script=match node.class{
+								Class::ModuleScript|Class::LocalScript|Class::Script=>true,
+								_=>false,
+							};
+							//stack is popped from back
+							if is_script{
+								stack.push(TrimStackInstruction::DecrementScript);
+							}
+							for &child_referent in &node.children{
+								stack.push(TrimStackInstruction::Referent(child_referent));
+							}
+							if is_script{
+								stack.push(TrimStackInstruction::IncrementScript);
+							}
+						}
+					}
+					//trim referent
+					if let Some(parent_ref)=delete{
+						let parent_node=tree_refs.get_mut(&parent_ref)
+							.expect("parent_ref does not exist in tree_refs");
+						parent_node.children.remove(
+							parent_node.children.iter()
+								.position(|&r|r==referent)
+								.expect("parent.children does not contain referent")
+						);
+						tree_refs.remove(&referent);
+					}
+				},
+			}
+		}
+		Self{
+			dom,
+			tree_refs,
+		}
+	}
+	pub async fn write_files(mut self,config:WriteConfig)->Result<(),WriteError>{
+		let mut write_queue=Vec::new();
+		let mut destroy_queue=Vec::new();
+		let mut name_tally=std::collections::HashMap::<String,u32>::new();
+		let mut folder=config.output_folder.clone();
+		let mut stack=vec![WriteStackInstruction::Node(self.tree_refs.get(&self.dom.root_ref()).unwrap(),0)];
+		while let Some(instruction)=stack.pop(){
+			match instruction{
+				WriteStackInstruction::PushFolder(component)=>folder.push(component),
+				WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
+				WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
+				WriteStackInstruction::Node(node,name_count)=>{
+					//track properties that must be overriden to compile folder structure back into a place file
+					let mut properties=PropertiesOverride::default();
+					let has_children=node.children.len()!=0;
+					match node.class{
+						Class::Folder=>(),
+						Class::ModuleScript=>(),//.lua files are ModuleScript by default
+						Class::LocalScript=>properties.class=Some("LocalScript".to_owned()),
+						Class::Script=>properties.class=Some("Script".to_owned()),
+						Class::Model=>(),
+					}
+					let name_override=if 0<name_count{
+						properties.name=Some(node.name.clone());
+						format!("{}_{}",node.name,name_count)
+					}else{
+						node.name.clone()
+					};
+					if has_children{
+						//push temp subfolder
+						let mut subfolder=folder.clone();
+						subfolder.push(sanitize(name_override.as_str()).as_ref());
+						//make folder
+						tokio::fs::create_dir(subfolder.clone()).await.map_err(WriteError::IO)?;
+						let name_final=match config.style{
+							Style::Rox
+							|Style::RoxRojo=>name_override.clone(),
+							Style::Rojo=>"init".to_owned(),
+						};
+						//write item in subfolder
+						write_queue.push((subfolder,node,name_final,properties,config.style));
+					}else{
+						//write item
+						write_queue.push((folder.clone(),node,name_override.clone(),properties,config.style));
+					}
+					//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
+					match node.class{
+						Class::Folder=>(),
+						_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
+					}
+					if has_children{
+						stack.push(WriteStackInstruction::PopFolder);
+						name_tally.clear();
+						for referent in &node.children{
+							if let Some(c)=self.tree_refs.get(referent){
+								let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
+								stack.push(WriteStackInstruction::Node(c,*v));
+							}
+						}
+						stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
+					}
+				},
+			}
+		}
+		//run the async
+		{
+			let dom=&self.dom;
+			let write_models=config.write_models;
+			let write_scripts=config.write_scripts;
+			let results:Vec<Result<(),WriteError>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
+				write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
+			}));
+			for result in results{
+				result?;
+			}
+		}
+		//run the destroy
+		for destroy_ref in destroy_queue{
+			self.dom.destroy(destroy_ref);
+		}
+		//write what remains in template.rbxlx
+		if config.write_template{
+			let mut file=config.output_folder.clone();
+			file.push("template");
+			assert!(file.set_extension("rbxlx"));
+			let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
+			rbx_xml::to_writer_default(output,&self.dom,self.dom.root().children()).map_err(WriteError::EncodeError)?;
+		}
+		Ok(())
+	}
 }
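
The restructuring above leaves the decompiler as a small public API: build a DecompiledContext from a WeakDom, then write it out. The sketch below is illustrative only; the output folder and flag values are assumptions, and the DOM is presumed to be loaded elsewhere (e.g. via rbx_xml).

	// Hypothetical caller of the new API.
	let context=DecompiledContext::from_dom(dom);
	context.write_files(WriteConfig{
		style:Style::Rox,//any of Rox, RoxRojo, Rojo
		output_folder:std::path::PathBuf::from("output"),//assumed path
		write_template:true,
		write_models:true,
		write_scripts:true,
	}).await?;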

rox_compiler: types module

@@ -1,5 +1,5 @@
 #[derive(Clone,Copy,Debug)]
-pub enum DecompileStyle{
+pub enum Style{
	Rox,
	Rojo,
	RoxRojo,

main crate: CLI module

@@ -1,7 +1,6 @@
 use std::{io::Read,path::PathBuf};
 use clap::{Args,Parser,Subcommand};
 use anyhow::Result as AResult;
-use futures::StreamExt;
 use rbx_dom_weak::types::Ref;
 use tokio::io::AsyncReadExt;
 use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion};
@@ -110,7 +109,7 @@ struct CompileSubcommand{
	#[arg(long)]
	output_file:PathBuf,
	#[arg(long)]
-	style:Option<DecompileStyle>,
+	style:Option<Style>,
	#[arg(long)]
	template:Option<PathBuf>,
 }
@@ -121,7 +120,7 @@ struct DecompileSubcommand{
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
-	style:DecompileStyle,
+	style:Style,
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
@@ -136,7 +135,7 @@ struct DecompileHistoryIntoGitSubcommand{
	//currently output folder must be the current folder due to git2 limitations
	//output_folder:cli.output.unwrap(),
	#[arg(long)]
-	style:DecompileStyle,
+	style:Style,
	#[arg(long)]
	git_committer_name:String,
	#[arg(long)]
@@ -159,7 +158,7 @@ struct DownloadAndDecompileHistoryIntoGitSubcommand{
	//currently output folder must be the current folder due to git2 limitations
	//output_folder:cli.output.unwrap(),
	#[arg(long)]
-	style:DecompileStyle,
+	style:Style,
	#[arg(long)]
	git_committer_name:String,
	#[arg(long)]
@@ -180,17 +179,17 @@ enum CookieType{
 }
 #[derive(Clone,Copy,Debug,clap::ValueEnum)]
-pub enum DecompileStyle{
+pub enum Style{
	Rox,
	Rojo,
	RoxRojo,
 }
-impl DecompileStyle{
-	fn rox(&self)->rox_compiler::types::DecompileStyle{
+impl Style{
+	fn rox(&self)->rox_compiler::types::Style{
		match self{
-			DecompileStyle::Rox=>rox_compiler::types::DecompileStyle::Rox,
-			DecompileStyle::Rojo=>rox_compiler::types::DecompileStyle::Rojo,
-			DecompileStyle::RoxRojo=>rox_compiler::types::DecompileStyle::RoxRojo,
+			Style::Rox=>rox_compiler::types::Style::Rox,
+			Style::Rojo=>rox_compiler::types::Style::Rojo,
+			Style::RoxRojo=>rox_compiler::types::Style::RoxRojo,
		}
	}
 }
@@ -548,7 +547,7 @@ fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
 struct DecompileConfig{
-	style:DecompileStyle,
+	style:Style,
	input_file:PathBuf,
	output_folder:PathBuf,
	write_template:bool,
@@ -583,7 +582,7 @@ struct WriteCommitConfig{
	git_committer_name:String,
	git_committer_email:String,
	output_folder:PathBuf,
-	style:DecompileStyle,
+	style:Style,
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
@@ -674,7 +673,7 @@ struct DecompileHistoryConfig{
	git_committer_name:String,
	git_committer_email:String,
	input_folder:PathBuf,
-	style:DecompileStyle,
+	style:Style,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
@@ -723,7 +722,7 @@ struct DownloadAndDecompileHistoryConfig{
	asset_id:AssetID,
	git_committer_name:String,
	git_committer_email:String,
-	style:DecompileStyle,
+	style:Style,
	output_folder:PathBuf,
	write_template:bool,
	write_models:bool,
@@ -770,7 +769,7 @@ struct CompileConfig{
	input_folder:PathBuf,
	output_file:PathBuf,
	template:Option<PathBuf>,
-	style:Option<DecompileStyle>,
+	style:Option<Style>,
 }

 async fn compile(config:CompileConfig)->AResult<()>{
@@ -784,7 +783,10 @@ async fn compile(config:CompileConfig)->AResult<()>{
	//hack to traverse root folder as the root object
	dom.root_mut().name="src".to_owned();
-	something_something_dom_write(&mut dom).await?;
+	rox_compiler::compile::compile(rox_compiler::types::CompileConfig{
+		input_folder:config.input_folder,
+		style:config.style,
+	},&mut dom).await?;
	let mut output_place=config.output_file.clone();
	if output_place.extension().is_none()&&tokio::fs::try_exists(output_place.as_path()).await?{