forked from StrafesNET/asset-tool
refactor rox_compiler into module
This commit is contained in:
parent 64ac70f946
commit 0f797356fb

14 Cargo.lock (generated)

@@ -118,12 +118,12 @@ dependencies = [
 "git2",
 "lazy-regex",
 "pollster",
 "rayon",
 "rbx_asset",
 "rbx_binary",
 "rbx_dom_weak",
 "rbx_reflection_database",
 "rbx_xml",
 "rox_compiler",
 "serde_json",
 "tokio",
]
@@ -1351,6 +1351,18 @@ dependencies = [
 "serde",
]

[[package]]
name = "rox_compiler"
version = "0.1.0"
dependencies = [
 "futures",
 "lazy-regex",
 "rayon",
 "rbx_dom_weak",
 "rbx_xml",
 "tokio",
]

[[package]]
name = "rustc-demangle"
version = "0.1.24"

Cargo.toml

@@ -1,4 +1,4 @@
-workspace = { members = ["rbx_asset"] }
+workspace = { members = ["rbx_asset", "rox_compiler"] }
[package]
name = "asset-tool"
version = "0.3.1"
@@ -13,12 +13,12 @@ futures = "0.3.30"
git2 = "0.18.1"
lazy-regex = "3.1.0"
pollster = "0.3.0"
rayon = "1.8.0"
rbx_asset = { path = "rbx_asset" }
rbx_binary = "0.7.4"
rbx_dom_weak = "2.7.0"
rbx_reflection_database = "0.2.10"
rbx_xml = "0.13.3"
rox_compiler = { path = "rox_compiler" }
serde_json = "1.0.111"
tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread", "fs"] }

12 rox_compiler/Cargo.toml (new file)

@@ -0,0 +1,12 @@
[package]
name = "rox_compiler"
version = "0.1.0"
edition = "2021"

[dependencies]
futures = "0.3.30"
lazy-regex = "3.1.0"
rayon = "1.8.0"
rbx_dom_weak = "2.7.0"
rbx_xml = "0.13.3"
tokio = { version = "1.35.1", features = ["fs"] }

33 rox_compiler/src/common.rs (new file)

@@ -0,0 +1,33 @@
#[derive(Clone,Copy,Debug)]
pub enum Style{
	Rox,
	Rojo,
	RoxRojo,
}

#[derive(Default)]
pub(crate) struct PropertiesOverride{
	pub name:Option<String>,
	pub class:Option<String>,
}
impl PropertiesOverride{
	pub fn is_some(&self)->bool{
		self.name.is_some()
		||self.class.is_some()
	}
}
impl std::fmt::Display for PropertiesOverride{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		if let Some(name)=self.name.as_deref(){
			writeln!(f,"--!Properties.Name = \"{}\"",name)?;
		}
		if let Some(class)=self.class.as_deref(){
			writeln!(f,"--!Properties.ClassName = \"{}\"",class)?;
		}
		Ok(())
	}
}

pub(crate) fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
	lazy_regex::regex!(r"[^A-z0-9.-]").replace_all(s,"_")
}
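
The Display impl above is the writer half of the override header; ScriptWithOverrides::from_source in compile.rs below strips the same `--!Properties.*` lines back off the top of a script. A minimal crate-internal sketch of the emitted format, using made-up property values:

let ovr=PropertiesOverride{
	name:Some("Spawn".to_owned()),
	class:Some("LocalScript".to_owned()),
};
//one header line per overridden property: Name first, then ClassName
assert_eq!(
	ovr.to_string(),
	"--!Properties.Name = \"Spawn\"\n--!Properties.ClassName = \"LocalScript\"\n"
);
//sanitize replaces anything outside its allowed character class with '_'
assert_eq!(sanitize("My Map!"),"My_Map_");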

548 rox_compiler/src/compile.rs (new file)

@@ -0,0 +1,548 @@
use std::path::PathBuf;
use futures::{StreamExt, TryStreamExt};
use tokio::io::AsyncReadExt;

use crate::common::{sanitize,Style,PropertiesOverride};

//holy smokes what am I doing lmao
//This giant machine is supposed to search for files according to style rules
//e.g. ScriptName.server.lua or init.lua
//Obviously I got carried away
//I could use an enum!
//I could use a struct!
//I could use a trait!
//I could use an error!
//I could use a match!
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
pub enum QueryResolveError{
	NotFound,//0 results
	Ambiguous,//>1 results
	JoinError(tokio::task::JoinError),
	IO(std::io::Error),
}
impl std::fmt::Display for QueryResolveError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for QueryResolveError{}

struct FileWithName{
	file:tokio::fs::File,
	name:String,
}

async fn get_file_async(mut path:PathBuf,file_name:impl AsRef<std::path::Path>)->Result<FileWithName,QueryResolveError>{
	let name=file_name.as_ref().to_str().unwrap().to_owned();
	path.push(file_name);
	match tokio::fs::File::open(path).await{
		Ok(file)=>Ok(FileWithName{file,name}),
		Err(e)=>match e.kind(){
			std::io::ErrorKind::NotFound=>Err(QueryResolveError::NotFound),
			_=>Err(QueryResolveError::IO(e)),
		},
	}
}
type QueryHintResult=Result<FileHint,QueryResolveError>;
trait Query{
	async fn resolve(self)->QueryHintResult;
}
type QueryHandle=tokio::task::JoinHandle<Result<FileWithName,QueryResolveError>>;
struct QuerySingle{
	script:QueryHandle,
}
impl QuerySingle{
	fn rox(search_path:&PathBuf,search_name:&str)->Self{
		Self{
			script:tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name)))
		}
	}
}
impl Query for QuerySingle{
	async fn resolve(self)->QueryHintResult{
		match self.script.await{
			Ok(Ok(file))=>Ok(FileHint{file,hint:ScriptHint::ModuleScript}),
			Ok(Err(e))=>Err(e),
			Err(e)=>Err(QueryResolveError::JoinError(e)),
		}
	}
}
struct QueryTriple{
	module:QueryHandle,
	server:QueryHandle,
	client:QueryHandle,
}
impl QueryTriple{
	fn rox_rojo(search_path:&PathBuf,search_name:&str,search_module:bool)->Self{
		//this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
		let module_name=if search_module{
			format!("{}.module.lua",search_name)
		}else{
			format!("{}.lua",search_name)
		};
		Self{
			module:tokio::spawn(get_file_async(search_path.clone(),module_name)),
			server:tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name))),
			client:tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name))),
		}
	}
	fn rojo(search_path:&PathBuf)->Self{
		QueryTriple::rox_rojo(search_path,"init",false)
	}
}
//these functions can be achieved with macros, but I have not learned that yet
fn mega_triple_join(query_triplet:(QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
	match query_triplet{
		//unambiguously locate file
		(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
		//multiple files located
		(Ok(_),Ok(_),Err(QueryResolveError::NotFound))
		|(Ok(_),Err(QueryResolveError::NotFound),Ok(_))
		|(Err(QueryResolveError::NotFound),Ok(_),Ok(_))
		|(Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
		//no files located
		(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
		//other error
		(Err(e),_,_)
		|(_,Err(e),_)
		|(_,_,Err(e))=>Err(e),
	}
}
//LETS GOOOOOOOOOOOOOOOO
fn mega_quadruple_join(query_quad:(QueryHintResult,QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
	match query_quad{
		//unambiguously locate file
		(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
		//multiple files located
		(Ok(_),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
		|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
		|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
		|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
		|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound))
		|(Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_))
		|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Ok(_))
		|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
		|(Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
		|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
		|(Ok(_),Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
		//no files located
		(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
		//other error
		(Err(e),_,_,_)
		|(_,Err(e),_,_)
		|(_,_,Err(e),_)
		|(_,_,_,Err(e))=>Err(e),
	}
}
impl Query for QueryTriple{
	async fn resolve(self)->QueryHintResult{
		let (module,server,client)=tokio::join!(self.module,self.server,self.client);
		mega_triple_join((
			module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
			client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
		))
	}
}
struct QueryQuad{
	module_implicit:QueryHandle,
	module_explicit:QueryHandle,
	server:QueryHandle,
	client:QueryHandle,
}
impl QueryQuad{
	fn rox_rojo(search_path:&PathBuf,search_name:&str)->Self{
		let fill=QueryTriple::rox_rojo(search_path,search_name,true);
		Self{
			module_implicit:QuerySingle::rox(search_path,search_name).script,//Script.lua
			module_explicit:fill.module,//Script.module.lua
			server:fill.server,
			client:fill.client,
		}
	}
}
impl Query for QueryQuad{
	async fn resolve(self)->QueryHintResult{
		let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit,self.module_explicit,self.server,self.client);
		mega_quadruple_join((
			module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
			server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
			client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
		))
	}
}

struct ScriptWithOverrides{
	overrides:PropertiesOverride,
	source:String,
}

#[derive(Debug)]
pub enum ScriptWithOverridesError{
	UnimplementedProperty(String),
}
impl std::fmt::Display for ScriptWithOverridesError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for ScriptWithOverridesError{}

impl ScriptWithOverrides{
	fn from_source(mut source:String)->Result<Self,ScriptWithOverridesError>{
		let mut overrides=PropertiesOverride::default();
		let mut count=0;
		for line in source.lines(){
			//only string type properties are supported atm
			if let Some(captures)=lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-z]\w*)\s*\=\s*"(\w+)"$"#)
			.captures(line){
				count+=line.len();
				match &captures[1]{
					"Name"=>overrides.name=Some(captures[2].to_owned()),
					"ClassName"=>overrides.class=Some(captures[2].to_owned()),
					other=>Err(ScriptWithOverridesError::UnimplementedProperty(other.to_owned()))?,
				}
			}else{
				break;
			}
		}
		Ok(ScriptWithOverrides{overrides,source:source.split_off(count)})
	}
}

enum CompileClass{
	Folder,
	Script(String),
	LocalScript(String),
	ModuleScript(String),
	Model(Vec<u8>),
}

struct CompileNode{
	name:String,
	blacklist:Option<String>,
	class:CompileClass,
}

#[derive(Debug)]
pub enum CompileNodeError{
	IO(std::io::Error),
	ScriptWithOverrides(ScriptWithOverridesError),
	InvalidClassOrHint{
		class:Option<String>,
		hint:ScriptHint
	},
	QueryResolveError(QueryResolveError),
	/// Conversion from OsString to String failed
	FileName(std::ffi::OsString),
	ExtensionNotSupportedInStyle{
		extension:String,
		style:Option<Style>,
	},
	NoExtension,
}
impl std::fmt::Display for CompileNodeError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for CompileNodeError{}

enum FileDiscernment{
	Model,
	Script(ScriptHint),
}

impl CompileNode{
	async fn script(search_name:&str,mut file:FileWithName,hint:ScriptHint)->Result<Self,CompileNodeError>{
		//read entire file
		let mut buf=String::new();
		file.file.read_to_string(&mut buf).await.map_err(CompileNodeError::IO)?;
		//regex script according to Properties lines at the top
		let script_with_overrides=ScriptWithOverrides::from_source(buf).map_err(CompileNodeError::ScriptWithOverrides)?;
		//script
		Ok(Self{
			blacklist:Some(file.name),
			name:script_with_overrides.overrides.name.unwrap_or_else(||search_name.to_owned()),
			class:match (script_with_overrides.overrides.class.as_deref(),hint){
				(Some("ModuleScript"),_)
				|(None,ScriptHint::ModuleScript)=>CompileClass::ModuleScript(script_with_overrides.source),
				(Some("LocalScript"),_)
				|(None,ScriptHint::LocalScript)=>CompileClass::LocalScript(script_with_overrides.source),
				(Some("Script"),_)
				|(None,ScriptHint::Script)=>CompileClass::Script(script_with_overrides.source),
				(class,hint)=>Err(CompileNodeError::InvalidClassOrHint{class:class.map(|s|s.to_owned()),hint})?,
			},
		})
	}
	async fn model(search_name:&str,mut file:FileWithName)->Result<Self,CompileNodeError>{
		//read entire file
		let mut buf=Vec::new();
		file.file.read_to_end(&mut buf).await.map_err(CompileNodeError::IO)?;
		//model
		Ok(Self{
			blacklist:Some(file.name),
			name:search_name.to_owned(),//wrong but gets overwritten by internal model name
			class:CompileClass::Model(buf),
		})
	}

	async fn from_folder(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
		let contents_folder=entry.path();
		let file_name=entry.file_name();
		let search_name=file_name.to_str().unwrap();
		//scan inside the folder for an object to define the class of the folder
		let script_query=async {match style{
			Some(Style::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
			Some(Style::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
			Some(Style::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
			//try all three and complain if there is ambiguity
			None=>mega_triple_join(tokio::join!(
				QuerySingle::rox(&contents_folder,search_name).resolve(),
				//true=search for module here to avoid ambiguity with QuerySingle::rox results
				QueryTriple::rox_rojo(&contents_folder,search_name,true).resolve(),
				QueryTriple::rojo(&contents_folder).resolve(),
			))
		}};
		//model files are rox & rox-rojo only, so it's a lot less work...
		let model_query=get_file_async(contents_folder.clone(),format!("{}.rbxmx",search_name));
		//model? script? both?
		Ok(match tokio::join!(script_query,model_query){
			(Ok(FileHint{file,hint}),Err(QueryResolveError::NotFound))=>Self::script(search_name,file,hint).await?,
			(Err(QueryResolveError::NotFound),Ok(file))=>Self::model(search_name,file).await?,
			(Ok(_),Ok(_))=>Err(CompileNodeError::QueryResolveError(QueryResolveError::Ambiguous))?,
			//neither
			(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Self{
				name:search_name.to_owned(),
				blacklist:None,
				class:CompileClass::Folder,
			},
			//other error
			(Err(e),_)
			|(_,Err(e))=>Err(CompileNodeError::QueryResolveError(e))?
		})
	}

	async fn from_file(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
		let mut file_name=entry
			.file_name()
			.into_string()
			.map_err(CompileNodeError::FileName)?;
		//reject goobers
		let is_goober=match style{
			Some(Style::Rojo)=>true,
			_=>false,
		};
		let (ext_len,file_discernment)={
			if let Some(captures)=lazy_regex::regex!(r"^.*(.module.lua|.client.lua|.server.lua|.rbxmx|.lua)$")
			.captures(file_name.as_str()){
				let ext=&captures[1];
				(ext.len(),match ext{
					".module.lua"=>{
						if is_goober{
							Err(CompileNodeError::ExtensionNotSupportedInStyle{extension:ext.to_owned(),style})?;
						}
						FileDiscernment::Script(ScriptHint::ModuleScript)
					},
					".client.lua"=>FileDiscernment::Script(ScriptHint::LocalScript),
					".server.lua"=>FileDiscernment::Script(ScriptHint::Script),
					".rbxmx"=>{
						if is_goober{
							Err(CompileNodeError::ExtensionNotSupportedInStyle{extension:ext.to_owned(),style})?;
						}
						FileDiscernment::Model
					},
					".lua"=>FileDiscernment::Script(ScriptHint::ModuleScript),
					_=>panic!("Regex failed"),
				})
			}else{
				return Err(CompileNodeError::NoExtension);
			}
		};
		file_name.truncate(file_name.len()-ext_len);
		let file=tokio::fs::File::open(entry.path()).await.map_err(CompileNodeError::IO)?;
		Ok(match file_discernment{
			FileDiscernment::Model=>Self::model(file_name.as_str(),FileWithName{file,name:file_name.clone()}).await?,
			FileDiscernment::Script(hint)=>Self::script(file_name.as_str(),FileWithName{file,name:file_name.clone()},hint).await?,
		})
	}
}

#[derive(Debug)]
pub enum ScriptHint{
	Script,
	LocalScript,
	ModuleScript,
}
struct FileHint{
	file:FileWithName,
	hint:ScriptHint,
}

enum PreparedData{
	Model(rbx_dom_weak::WeakDom),
	Builder(rbx_dom_weak::InstanceBuilder),
}

enum CompileStackInstruction{
	TraverseReferent(rbx_dom_weak::types::Ref,Option<String>),
	PopFolder,
}

fn script_builder(class:&str,name:&str,source:String)->rbx_dom_weak::InstanceBuilder{
	let mut builder=rbx_dom_weak::InstanceBuilder::new(class);
	builder.set_name(name);
	builder.add_property("Source",rbx_dom_weak::types::Variant::String(source));
	builder
}

enum TooComplicated<T>{
	Stop,
	Value(T),
	Skip,
}

pub struct CompileConfig{
	pub input_folder:PathBuf,
	pub style:Option<Style>,
}

#[derive(Debug)]
pub enum CompileError{
	NullChildRef,
	IO(std::io::Error),
	CompileNode(CompileNodeError),
	DecodeError(rbx_xml::DecodeError),
	JoinError(tokio::task::JoinError),
}
impl std::fmt::Display for CompileError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for CompileError{}

pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
	//hack to traverse root folder as the root object
	dom.root_mut().name="src".to_owned();
	//add in scripts and models
	let mut folder=config.input_folder.clone();
	let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
	while let Some(instruction)=stack.pop(){
		match instruction{
			CompileStackInstruction::TraverseReferent(item_ref,blacklist)=>{
				//scope to avoid holding item ref
				{
					let item=dom.get_by_ref(item_ref).ok_or(CompileError::NullChildRef)?;
					let folder_name=sanitize(item.name.as_str());
					folder.push(folder_name.as_ref());
					//drop item
				}
				stack.push(CompileStackInstruction::PopFolder);
				//check if a folder exists with item.name
				if let Ok(dir)=tokio::fs::read_dir(folder.as_path()).await{
					let mut exist_names:std::collections::HashSet<String>={
						let item=dom.get_by_ref(item_ref).ok_or(CompileError::NullChildRef)?;
						//push existing dom children objects onto stack (unrelated to exist_names)
						stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
						//get names of existing objects
						item.children().into_iter().map(|&child_ref|{
							let child=dom.get_by_ref(child_ref).ok_or(CompileError::NullChildRef)?;
							Ok::<_,CompileError>(sanitize(child.name.as_str()).to_string())
						}).collect::<Result<_,CompileError>>()?
					};
					if let Some(dont)=blacklist{
						exist_names.insert(dont);
					}
					//generate children from folder contents UNLESS! item already has a child of the same name

					let style=config.style;
					let exist_names=&exist_names;
					futures::stream::unfold(dir,|mut dir1|async{
						//thread the needle! follow the path that dir takes!
						let ret1={
							//capture a scoped mutable reference so we can forward dir to the next call even on an error
							let dir2=&mut dir1;
							(||async move{//error catcher so I can use ?
								let ret2=if let Some(entry)=dir2.next_entry().await?{
									//cull early even if supporting things with identical names is possible
									if exist_names.contains(entry.file_name().to_str().unwrap()){
										TooComplicated::Skip
									}else{
										TooComplicated::Value(entry)
									}
								}else{
									TooComplicated::Stop
								};
								Ok(ret2)
							})().await
						};
						match ret1{
							Ok(TooComplicated::Stop)=>None,
							Ok(TooComplicated::Skip)=>Some((Ok(None),dir1)),
							Ok(TooComplicated::Value(v))=>Some((Ok(Some(v)),dir1)),
							Err(e)=>Some((Err(CompileError::IO(e)),dir1)),
						}
					})

					//gotta spawn off the worker threads (Model is slow)
					.then(|bog|async{
						match bog{
							Ok(Some(entry))=>tokio::spawn(async move{
								let met=entry.metadata().await.map_err(CompileError::IO)?;
								//discern that bad boy
								let compile_class=match met.is_dir(){
									true=>CompileNode::from_folder(&entry,style).await,
									false=>CompileNode::from_file(&entry,style).await,
								}.map_err(CompileError::CompileNode)?;
								//prepare data structure
								Ok(Some((compile_class.blacklist,match compile_class.class{
									CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
									CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
									CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
									CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
									CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf)).map_err(CompileError::DecodeError)?),
								})))
							}).await.map_err(CompileError::JoinError)?,
							Ok(None)=>Ok(None),
							Err(e)=>Err(e),
						}
					})

					//is this even what I want?
					.map(|f|async{f}).buffer_unordered(32)

					//begin processing immediately
					//TODO: fix dom being &mut &mut inside the closure
					.try_fold((&mut stack,&mut dom),|(stack,dom),bog|async{
						//push child objects onto dom serially as they arrive
						if let Some((blacklist,data))=bog{
							let referent=match data{
								PreparedData::Model(mut model_dom)=>{
									let referent=model_dom.root().children()[0];
									model_dom.transfer(referent,dom,item_ref);
									referent
								},
								PreparedData::Builder(script)=>dom.insert(item_ref,script),
							};
							//new children need to be traversed
							stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
						}
						Ok((stack,dom))
					}).await?;
				}
			},
			CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
		}
	}
	Ok(())
}
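
For orientation, a minimal sketch (not part of this commit) of how a caller such as asset-tool's main.rs could drive this entry point through the re-exports in rox_compiler/src/lib.rs further down; the root class and input path are placeholders:

async fn build_dom()->Result<rbx_dom_weak::WeakDom,Box<dyn std::error::Error>>{
	//start from an empty root; the real tool would bring its own base place
	let mut dom=rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("Folder"));
	//style:None = autodetect: try the rox, rox-rojo and rojo queries and error on ambiguity
	rox_compiler::compile(rox_compiler::CompileConfig{
		input_folder:std::path::PathBuf::from("src"),
		style:None,
	},&mut dom).await?;
	Ok(dom)
}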

323 rox_compiler/src/decompile.rs (new file)

@@ -0,0 +1,323 @@
use std::path::PathBuf;
use rbx_dom_weak::types::Ref;
use crate::common::{sanitize,Style,PropertiesOverride};

#[derive(PartialEq)]
enum Class{
	Folder,
	ModuleScript,
	LocalScript,
	Script,
	Model,
}

struct TreeNode{
	name:String,
	referent:Ref,
	parent:Ref,
	class:Class,
	children:Vec<Ref>,
}
impl TreeNode{
	fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
		Self{
			name,
			referent,
			parent,
			class,
			children:Vec::new(),
		}
	}
}

enum TrimStackInstruction{
	Referent(Ref),
	IncrementScript,
	DecrementScript,
}

enum WriteStackInstruction<'a>{
	Node(&'a TreeNode,u32),//(Node,NameTally)
	PushFolder(String),
	PopFolder,
	Destroy(Ref),
}

#[derive(Debug)]
pub enum WriteError{
	ClassNotScript(String),
	IO(std::io::Error),
	EncodeError(rbx_xml::EncodeError),
}
impl std::fmt::Display for WriteError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for WriteError{}

fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:Style,write_models:bool,write_scripts:bool)->Result<(),WriteError>{
	file.push(sanitize(node_name_override.as_str()).as_ref());
	match node.class{
		Class::Folder=>(),
		Class::ModuleScript|Class::LocalScript|Class::Script=>{
			if !write_scripts{
				return Ok(())
			}

			//set extension
			match style{
				Style::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
				Style::RoxRojo|Style::Rojo=>{
					match properties.class.as_deref(){
						Some("LocalScript")=>{
							file.set_extension("client.lua");
							properties.class=None;
						},
						Some("Script")=>{
							file.set_extension("server.lua");
							properties.class=None;
						},
						// Some("ModuleScript")=>{
						// file.set_extension("module");
						// properties.class=None;
						// },
						None=>assert!(file.set_extension("lua"),"could not set extension"),
						Some(other)=>Err(WriteError::ClassNotScript(other.to_owned()))?,
					}
				}
			}

			if let Some(item)=dom.get_by_ref(node.referent){
				//TODO: delete disabled scripts
				if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
					if properties.is_some(){
						//rox style
						let source=properties.to_string()+source.as_str();
						std::fs::write(file,source).map_err(WriteError::IO)?;
					}else{
						std::fs::write(file,source).map_err(WriteError::IO)?;
					}
				}
			}
		},
		Class::Model=>{
			if !write_models{
				return Ok(())
			}
			assert!(file.set_extension("rbxmx"));
			let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
			rbx_xml::to_writer_default(output,dom,&[node.referent]).map_err(WriteError::EncodeError)?;
		},
	}
	Ok(())
}

pub struct WriteConfig{
	pub style:Style,
	pub output_folder:PathBuf,
	pub write_template:bool,
	pub write_models:bool,
	pub write_scripts:bool,
}

pub struct DecompiledContext{
	dom:rbx_dom_weak::WeakDom,
	tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
}

impl DecompiledContext{
	/// Will panic on circular tree structure but otherwise infallible
	pub fn from_dom(dom:rbx_dom_weak::WeakDom)->Self{
		let mut tree_refs=std::collections::HashMap::new();
		tree_refs.insert(dom.root_ref(),TreeNode::new(
			"src".to_owned(),
			dom.root_ref(),
			Ref::none(),
			Class::Folder
		));

		//run rules
		let mut stack=vec![dom.root()];
		while let Some(item)=stack.pop(){
			let class=match item.class.as_str(){
				"ModuleScript"=>Class::ModuleScript,
				"LocalScript"=>Class::LocalScript,
				"Script"=>Class::Script,
				"Model"=>Class::Model,
				_=>Class::Folder,
			};
			let skip=match class{
				Class::Model=>true,
				_=>false,
			};
			if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
				let referent=item.referent();
				let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
				parent_node.children.push(referent);
				tree_refs.insert(referent,node);
			}
			//look no further, turn this node and all its children into a model
			if skip{
				continue;
			}
			for &referent in item.children(){
				if let Some(c)=dom.get_by_ref(referent){
					stack.push(c);
				}
			}
		}

		//trim empty folders
		let mut script_count=0;
		let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
			.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
		while let Some(instruction)=stack.pop(){
			match instruction{
				TrimStackInstruction::IncrementScript=>script_count+=1,
				TrimStackInstruction::DecrementScript=>script_count-=1,
				TrimStackInstruction::Referent(referent)=>{
					let mut delete=None;
					if let Some(node)=tree_refs.get_mut(&referent){
						if node.class==Class::Folder&&script_count!=0{
							node.class=Class::Model
						}
						if node.class==Class::Folder&&node.children.len()==0{
							delete=Some(node.parent);
						}else{
							//how the hell do I do this better without recursion
							let is_script=match node.class{
								Class::ModuleScript|Class::LocalScript|Class::Script=>true,
								_=>false,
							};
							//stack is popped from back
							if is_script{
								stack.push(TrimStackInstruction::DecrementScript);
							}
							for &child_referent in &node.children{
								stack.push(TrimStackInstruction::Referent(child_referent));
							}
							if is_script{
								stack.push(TrimStackInstruction::IncrementScript);
							}
						}
					}
					//trim referent
					if let Some(parent_ref)=delete{
						let parent_node=tree_refs.get_mut(&parent_ref)
							.expect("parent_ref does not exist in tree_refs");
						parent_node.children.remove(
							parent_node.children.iter()
								.position(|&r|r==referent)
								.expect("parent.children does not contain referent")
						);
						tree_refs.remove(&referent);
					}
				},
			}
		}

		Self{
			dom,
			tree_refs,
		}
	}
	pub async fn write_files(mut self,config:WriteConfig)->Result<(),WriteError>{
		let mut write_queue=Vec::new();
		let mut destroy_queue=Vec::new();

		let mut name_tally=std::collections::HashMap::<String,u32>::new();
		let mut folder=config.output_folder.clone();
		let mut stack=vec![WriteStackInstruction::Node(self.tree_refs.get(&self.dom.root_ref()).unwrap(),0)];
		while let Some(instruction)=stack.pop(){
			match instruction{
				WriteStackInstruction::PushFolder(component)=>folder.push(component),
				WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
				WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
				WriteStackInstruction::Node(node,name_count)=>{
					//track properties that must be overriden to compile folder structure back into a place file
					let mut properties=PropertiesOverride::default();
					let has_children=node.children.len()!=0;
					match node.class{
						Class::Folder=>(),
						Class::ModuleScript=>(),//.lua files are ModuleScript by default
						Class::LocalScript=>properties.class=Some("LocalScript".to_owned()),
						Class::Script=>properties.class=Some("Script".to_owned()),
						Class::Model=>(),
					}
					let name_override=if 0<name_count{
						properties.name=Some(node.name.clone());
						format!("{}_{}",node.name,name_count)
					}else{
						node.name.clone()
					};

					if has_children{
						//push temp subfolder
						let mut subfolder=folder.clone();
						subfolder.push(sanitize(name_override.as_str()).as_ref());
						//make folder
						tokio::fs::create_dir(subfolder.clone()).await.map_err(WriteError::IO)?;

						let name_final=match config.style{
							Style::Rox
							|Style::RoxRojo=>name_override.clone(),
							Style::Rojo=>"init".to_owned(),
						};

						//write item in subfolder
						write_queue.push((subfolder,node,name_final,properties,config.style));
					}else{
						//write item
						write_queue.push((folder.clone(),node,name_override.clone(),properties,config.style));
					}
					//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
					match node.class{
						Class::Folder=>(),
						_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
					}
					if has_children{
						stack.push(WriteStackInstruction::PopFolder);
						name_tally.clear();
						for referent in &node.children{
							if let Some(c)=self.tree_refs.get(referent){
								let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
								stack.push(WriteStackInstruction::Node(c,*v));
							}
						}
						stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
					}
				},
			}
		}

		//run the async
		{
			let dom=&self.dom;
			let write_models=config.write_models;
			let write_scripts=config.write_scripts;
			let results:Vec<Result<(),WriteError>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
				write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
			}));
			for result in results{
				result?;
			}
		}

		//run the destroy
		for destroy_ref in destroy_queue{
			self.dom.destroy(destroy_ref);
		}

		//write what remains in template.rbxlx
		if config.write_template{
			let mut file=config.output_folder.clone();
			file.push("template");
			assert!(file.set_extension("rbxlx"));
			let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
			rbx_xml::to_writer_default(output,&self.dom,self.dom.root().children()).map_err(WriteError::EncodeError)?;
		}

		Ok(())
	}
}
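
The mirror-image call for the decompile half, again only a sketch against the exported surface with placeholder paths; from_dom builds and trims the tree, write_files spills scripts, models and the template to disk:

async fn dump_place(dom:rbx_dom_weak::WeakDom)->Result<(),Box<dyn std::error::Error>>{
	let context=rox_compiler::DecompiledContext::from_dom(dom);
	context.write_files(rox_compiler::WriteConfig{
		style:rox_compiler::Style::Rox,
		output_folder:std::path::PathBuf::from("out"),
		write_template:true,//whatever is left of the dom ends up in out/template.rbxlx
		write_models:true,
		write_scripts:true,
	}).await?;
	Ok(())
}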

9 rox_compiler/src/lib.rs (new file)

@@ -0,0 +1,9 @@
mod common;
mod compile;
mod decompile;
//export minimal interface
pub use common::Style;
pub use compile::CompileConfig;
pub use compile::compile;//cringe unstandardized interface
pub use decompile::WriteConfig;
pub use decompile::DecompiledContext;
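
Given these re-exports, the entire surface a downstream crate sees is the single import below (a sketch); the error enums (QueryResolveError, CompileError, WriteError) stay behind the private modules and are only reachable through their Display/Debug output or as boxed trait objects:

use rox_compiler::{compile,CompileConfig,DecompiledContext,Style,WriteConfig};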

880 src/main.rs (file diff suppressed because it is too large)