Quaternions 2024-07-01 13:46:02 -07:00
parent 48c2a010d8
commit dba7aa427f
7 changed files with 262 additions and 242 deletions

Cargo.lock (generated)

@@ -118,7 +118,6 @@ dependencies = [
  "git2",
  "lazy-regex",
  "pollster",
- "rayon",
  "rbx_asset",
  "rbx_binary",
  "rbx_dom_weak",
@@ -1358,6 +1357,7 @@ version = "0.1.0"
 dependencies = [
  "futures",
  "lazy-regex",
+ "rayon",
  "rbx_dom_weak",
  "rbx_xml",
  "tokio",


@@ -13,7 +13,6 @@ futures = "0.3.30"
 git2 = "0.18.1"
 lazy-regex = "3.1.0"
 pollster = "0.3.0"
-rayon = "1.8.0"
 rbx_asset = { path = "rbx_asset" }
 rbx_binary = "0.7.4"
 rbx_dom_weak = "2.7.0"


@@ -6,6 +6,7 @@ edition = "2021"
 [dependencies]
 futures = "0.3.30"
 lazy-regex = "3.1.0"
+rayon = "1.8.0"
 rbx_dom_weak = "2.7.0"
 rbx_xml = "0.13.3"
 tokio = { version = "1.35.1", features = ["fs"] }


@@ -1,7 +1,8 @@
 use std::path::PathBuf;
+use futures::{StreamExt, TryStreamExt};
 use tokio::io::AsyncReadExt;
-use crate::types::{DecompileStyle,PropertiesOverride};
+use crate::types::{Style,PropertiesOverride};
 use crate::common::sanitize;
 //holy smokes what am I doing lmao
@@ -235,7 +236,7 @@ enum CompileNodeError{
 FileName(std::ffi::OsString),
 ExtensionNotSupportedInStyle{
 extension:String,
-style:Option<DecompileStyle>,
+style:Option<Style>,
 },
 NoExtension,
 }
@@ -279,15 +280,15 @@ impl CompileNode{
 })
 }
-async fn from_folder(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->Result<Self,CompileNodeError>{
+async fn from_folder(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
 let contents_folder=entry.path();
 let file_name=entry.file_name();
 let search_name=file_name.to_str().unwrap();
 //scan inside the folder for an object to define the class of the folder
 let script_query=async {match style{
-Some(DecompileStyle::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
-Some(DecompileStyle::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
-Some(DecompileStyle::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
+Some(Style::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
+Some(Style::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
+Some(Style::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
 //try all three and complain if there is ambiguity
 None=>mega_triple_join(tokio::join!(
 QuerySingle::rox(&contents_folder,search_name).resolve(),
@@ -315,14 +316,14 @@ impl CompileNode{
 })
 }
-async fn from_file(entry:&tokio::fs::DirEntry,style:Option<DecompileStyle>)->Result<Self,CompileNodeError>{
+async fn from_file(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
 let mut file_name=entry
 .file_name()
 .into_string()
 .map_err(CompileNodeError::FileName)?;
 //reject goobers
 let is_goober=match style{
-Some(DecompileStyle::Rojo)=>true,
+Some(Style::Rojo)=>true,
 _=>false,
 };
 let (ext_len,file_discernment)={
@@ -394,11 +395,20 @@ enum TooComplicated<T>{
 Skip,
 }
-enum CompileError{
-NullChildRef,
-}
-async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<(),CompileError>{
+pub struct CompileConfig{
+input_folder:PathBuf,
+style:Option<Style>,
+}
+enum CompileError{
+NullChildRef,
+IO(std::io::Error),
+CompileNode(CompileNodeError),
+DecodeError(rbx_xml::DecodeError),
+JoinError(tokio::task::JoinError),
+}
+async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
 //add in scripts and models
 let mut folder=config.input_folder.clone();
 let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
@@ -446,14 +456,14 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
 }else{
 TooComplicated::Stop
 };
-Ok::<_,anyhow::Error>(ret2)
+Ok::<_,std::io::Error>(ret2)
 })().await
 };
 match ret1{
 Ok(TooComplicated::Stop)=>None,
 Ok(TooComplicated::Skip)=>Some((Ok(None),dir1)),
 Ok(TooComplicated::Value(v))=>Some((Ok(Some(v)),dir1)),
-Err(e)=>Some((Err(e),dir1)),
+Err(e)=>Some((Err(CompileError::IO(e)),dir1)),
 }
 })
@@ -461,21 +471,21 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
 .then(|bog|async{
 match bog{
 Ok(Some(entry))=>tokio::spawn(async move{
-let met=entry.metadata().await?;
+let met=entry.metadata().await.map_err(CompileError::IO)?;
 //discern that bad boy
 let compile_class=match met.is_dir(){
-true=>CompileNode::from_folder(&entry,style).await?,
-false=>CompileNode::from_file(&entry,style).await?,
-};
+true=>CompileNode::from_folder(&entry,style).await,
+false=>CompileNode::from_file(&entry,style).await,
+}.map_err(CompileError::CompileNode)?;
 //prepare data structure
 Ok(Some((compile_class.blacklist,match compile_class.class{
 CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
 CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
 CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
 CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
-CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf))?),
+CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf)).map_err(CompileError::DecodeError)?),
 })))
-}).await?,
+}).await.map_err(CompileError::JoinError)?,
 Ok(None)=>Ok(None),
 Err(e)=>Err(e),
 }
@@ -485,10 +495,10 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
 .map(|f|async{f}).buffer_unordered(32)
 //begin processing immediately
-.fold((&mut stack,&mut dom),|(stack,dom),bog:Result<_,anyhow::Error>|async{
+.try_fold((&mut stack,&mut dom),|(stack,dom):(&mut Vec<CompileStackInstruction>,_),bog|async{
 //push child objects onto dom serially as they arrive
 match bog{
-Ok(Some((blacklist,data)))=>{
+Some((blacklist,data))=>{
 let referent=match data{
 PreparedData::Model(mut model_dom)=>{
 let referent=model_dom.root().children()[0];
@@ -500,14 +510,14 @@ async fn compile(config:CompileConfig,&mut dom:rbx_dom_weak::WeakDom)->Result<()
 //new children need to be traversed
 stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
 },
-Ok(None)=>(),
-Err(e)=>println!("error lole {e:?}"),
+None=>(),
 }
-(stack,dom)
-}).await;
+Ok((stack,dom))
+}).await?;
 }
 },
 CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
 }
 }
+unreachable!();
 }
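The hunks above swap the compiler's anyhow-based plumbing for a dedicated CompileError enum and a public CompileConfig. A minimal sketch of the resulting call shape from inside the same module (the compile_src helper is hypothetical and not part of the commit; fields and signatures are taken from the diff above):

//hypothetical same-module helper illustrating the new entry point
async fn compile_src(dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
	let config=CompileConfig{
		input_folder:std::path::PathBuf::from("src"),
		//None = try the Rox, RoxRojo and Rojo layouts and complain on ambiguity
		style:None,
	};
	compile(config,dom).await
}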


@@ -1,6 +1,6 @@
-use std::{io::Read, path::PathBuf};
+use std::path::PathBuf;
 use rbx_dom_weak::types::Ref;
-use crate::{common::sanitize, types::{DecompileStyle, PropertiesOverride}};
+use crate::{common::sanitize, types::{Style, PropertiesOverride}};
 #[derive(PartialEq)]
 enum Class{
@@ -43,7 +43,13 @@ enum WriteStackInstruction<'a>{
 Destroy(Ref),
 }
-fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
+enum WriteError{
+ClassNotScript(String),
+IO(std::io::Error),
+EncodeError(rbx_xml::EncodeError),
+}
+fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:Style,write_models:bool,write_scripts:bool)->Result<(),WriteError>{
 file.push(sanitize(node_name_override.as_str()).as_ref());
 match node.class{
 Class::Folder=>(),
@@ -54,8 +60,8 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
 //set extension
 match style{
-DecompileStyle::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
-DecompileStyle::RoxRojo|DecompileStyle::Rojo=>{
+Style::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
+Style::RoxRojo|Style::Rojo=>{
 match properties.class.as_deref(){
 Some("LocalScript")=>{
 file.set_extension("client.lua");
@@ -70,7 +76,7 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
 // properties.class=None;
 // },
 None=>assert!(file.set_extension("lua"),"could not set extension"),
-Some(other)=>Err(anyhow::Error::msg(format!("Attempt to write a {} as a script",other)))?,
+Some(other)=>Err(WriteError::ClassNotScript(other.to_owned()))?,
 }
 }
 }
@@ -81,9 +87,9 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
 if properties.is_some(){
 //rox style
 let source=properties.to_string()+source.as_str();
-std::fs::write(file,source)?;
+std::fs::write(file,source).map_err(WriteError::IO)?;
 }else{
-std::fs::write(file,source)?;
+std::fs::write(file,source).map_err(WriteError::IO)?;
 }
 }
 }
@@ -93,19 +99,29 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
 return Ok(())
 }
 assert!(file.set_extension("rbxmx"));
-let output=std::io::BufWriter::new(std::fs::File::create(file)?);
-rbx_xml::to_writer_default(output,dom,&[node.referent])?;
+let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
+rbx_xml::to_writer_default(output,dom,&[node.referent]).map_err(WriteError::EncodeError)?;
 },
 }
 Ok(())
 }
-struct DecompiledContext{
+pub struct WriteConfig{
+style:Style,
+output_folder:PathBuf,
+write_template:bool,
+write_models:bool,
+write_scripts:bool,
+}
+pub struct DecompiledContext{
 dom:rbx_dom_weak::WeakDom,
 tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
 }
-fn generate_decompiled_context(dom:rbx_dom_weak::WeakDom)->Result<DecompiledContext,DecompileError>{
+impl DecompiledContext{
+/// Will panic on circular tree structure but otherwise infallible
+pub fn from_dom(dom:rbx_dom_weak::WeakDom)->Self{
 let mut tree_refs=std::collections::HashMap::new();
 tree_refs.insert(dom.root_ref(),TreeNode::new(
 "src".to_owned(),
@@ -194,27 +210,18 @@ fn generate_decompiled_context(dom:rbx_dom_weak::WeakDom)->Result<DecompiledCont
 }
 }
-Ok(DecompiledContext{
+Self{
 dom,
 tree_refs,
-})
+}
 }
-struct WriteConfig{
-style:DecompileStyle,
-output_folder:PathBuf,
-write_template:bool,
-write_models:bool,
-write_scripts:bool,
-}
-async fn write_files(config:WriteConfig,mut context:DecompiledContext)->Result<(),WriteError>{
+pub async fn write_files(mut self,config:WriteConfig)->Result<(),WriteError>{
 let mut write_queue=Vec::new();
 let mut destroy_queue=Vec::new();
 let mut name_tally=std::collections::HashMap::<String,u32>::new();
 let mut folder=config.output_folder.clone();
-let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)];
+let mut stack=vec![WriteStackInstruction::Node(self.tree_refs.get(&self.dom.root_ref()).unwrap(),0)];
 while let Some(instruction)=stack.pop(){
 match instruction{
 WriteStackInstruction::PushFolder(component)=>folder.push(component),
@@ -243,12 +250,12 @@ async fn write_files(config:WriteConfig,mut context:DecompiledContext)->Result<(
 let mut subfolder=folder.clone();
 subfolder.push(sanitize(name_override.as_str()).as_ref());
 //make folder
-tokio::fs::create_dir(subfolder.clone()).await?;
+tokio::fs::create_dir(subfolder.clone()).await.map_err(WriteError::IO)?;
 let name_final=match config.style{
-DecompileStyle::Rox
-|DecompileStyle::RoxRojo=>name_override.clone(),
-DecompileStyle::Rojo=>"init".to_owned(),
+Style::Rox
+|Style::RoxRojo=>name_override.clone(),
+Style::Rojo=>"init".to_owned(),
 };
 //write item in subfolder
@@ -266,7 +273,7 @@ async fn write_files(config:WriteConfig,mut context:DecompiledContext)->Result<(
 stack.push(WriteStackInstruction::PopFolder);
 name_tally.clear();
 for referent in &node.children{
-if let Some(c)=context.tree_refs.get(referent){
+if let Some(c)=self.tree_refs.get(referent){
 let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
 stack.push(WriteStackInstruction::Node(c,*v));
 }
@@ -279,10 +286,10 @@ async fn write_files(config:WriteConfig,mut context:DecompiledContext)->Result<(
 //run the async
 {
-let dom=&context.dom;
+let dom=&self.dom;
 let write_models=config.write_models;
 let write_scripts=config.write_scripts;
-let results:Vec<AResult<()>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
+let results:Vec<Result<(),WriteError>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
 write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
 }));
 for result in results{
@@ -292,7 +299,7 @@ async fn write_files(config:WriteConfig,mut context:DecompiledContext)->Result<(
 //run the destroy
 for destroy_ref in destroy_queue{
-context.dom.destroy(destroy_ref);
+self.dom.destroy(destroy_ref);
 }
 //write what remains in template.rbxlx
@@ -300,9 +307,10 @@ async fn write_files(config:WriteConfig,mut context:DecompiledContext)->Result<(
 let mut file=config.output_folder.clone();
 file.push("template");
 assert!(file.set_extension("rbxlx"));
-let output=std::io::BufWriter::new(std::fs::File::create(file)?);
-rbx_xml::to_writer_default(output,&context.dom,context.dom.root().children())?;
+let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
+rbx_xml::to_writer_default(output,&self.dom,self.dom.root().children()).map_err(WriteError::EncodeError)?;
 }
 Ok(())
+}
 }
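The decompiler hunks above fold the free generate_decompiled_context and write_files functions into methods on DecompiledContext and swap anyhow for a typed WriteError. A minimal sketch of the resulting call shape from inside the same module (decompile_to_folder is a hypothetical helper, not part of the commit; signatures and fields are taken from the diff above):

//hypothetical same-module helper illustrating the new method-based API
async fn decompile_to_folder(dom:rbx_dom_weak::WeakDom,output_folder:std::path::PathBuf)->Result<(),WriteError>{
	//from_dom is infallible; it only panics on a circular tree structure
	let context=DecompiledContext::from_dom(dom);
	context.write_files(WriteConfig{
		style:Style::Rox,
		output_folder,
		write_template:true,
		write_models:true,
		write_scripts:true,
	}).await
}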


@@ -1,5 +1,5 @@
 #[derive(Clone,Copy,Debug)]
-pub enum DecompileStyle{
+pub enum Style{
 Rox,
 Rojo,
 RoxRojo,


@@ -1,7 +1,6 @@
 use std::{io::Read,path::PathBuf};
 use clap::{Args,Parser,Subcommand};
 use anyhow::Result as AResult;
-use futures::StreamExt;
 use rbx_dom_weak::types::Ref;
 use tokio::io::AsyncReadExt;
 use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion};
@@ -110,7 +109,7 @@ struct CompileSubcommand{
 #[arg(long)]
 output_file:PathBuf,
 #[arg(long)]
-style:Option<DecompileStyle>,
+style:Option<Style>,
 #[arg(long)]
 template:Option<PathBuf>,
 }
@@ -121,7 +120,7 @@ struct DecompileSubcommand{
 #[arg(long)]
 output_folder:Option<PathBuf>,
 #[arg(long)]
-style:DecompileStyle,
+style:Style,
 #[arg(long)]
 write_template:Option<bool>,
 #[arg(long)]
@@ -136,7 +135,7 @@ struct DecompileHistoryIntoGitSubcommand{
 //currently output folder must be the current folder due to git2 limitations
 //output_folder:cli.output.unwrap(),
 #[arg(long)]
-style:DecompileStyle,
+style:Style,
 #[arg(long)]
 git_committer_name:String,
 #[arg(long)]
@@ -159,7 +158,7 @@ struct DownloadAndDecompileHistoryIntoGitSubcommand{
 //currently output folder must be the current folder due to git2 limitations
 //output_folder:cli.output.unwrap(),
 #[arg(long)]
-style:DecompileStyle,
+style:Style,
 #[arg(long)]
 git_committer_name:String,
 #[arg(long)]
@@ -180,17 +179,17 @@ enum CookieType{
 }
 #[derive(Clone,Copy,Debug,clap::ValueEnum)]
-pub enum DecompileStyle{
+pub enum Style{
 Rox,
 Rojo,
 RoxRojo,
 }
-impl DecompileStyle{
-fn rox(&self)->rox_compiler::types::DecompileStyle{
+impl Style{
+fn rox(&self)->rox_compiler::types::Style{
 match self{
-DecompileStyle::Rox=>rox_compiler::types::DecompileStyle::Rox,
-DecompileStyle::Rojo=>rox_compiler::types::DecompileStyle::Rojo,
-DecompileStyle::RoxRojo=>rox_compiler::types::DecompileStyle::RoxRojo,
+Style::Rox=>rox_compiler::types::Style::Rox,
+Style::Rojo=>rox_compiler::types::Style::Rojo,
+Style::RoxRojo=>rox_compiler::types::Style::RoxRojo,
 }
 }
 }
@@ -548,7 +547,7 @@ fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
 struct DecompileConfig{
-style:DecompileStyle,
+style:Style,
 input_file:PathBuf,
 output_folder:PathBuf,
 write_template:bool,
@@ -583,7 +582,7 @@ struct WriteCommitConfig{
 git_committer_name:String,
 git_committer_email:String,
 output_folder:PathBuf,
-style:DecompileStyle,
+style:Style,
 write_template:bool,
 write_models:bool,
 write_scripts:bool,
@@ -674,7 +673,7 @@ struct DecompileHistoryConfig{
 git_committer_name:String,
 git_committer_email:String,
 input_folder:PathBuf,
-style:DecompileStyle,
+style:Style,
 output_folder:PathBuf,
 write_template:bool,
 write_models:bool,
@@ -723,7 +722,7 @@ struct DownloadAndDecompileHistoryConfig{
 asset_id:AssetID,
 git_committer_name:String,
 git_committer_email:String,
-style:DecompileStyle,
+style:Style,
 output_folder:PathBuf,
 write_template:bool,
 write_models:bool,
@@ -770,7 +769,7 @@ struct CompileConfig{
 input_folder:PathBuf,
 output_file:PathBuf,
 template:Option<PathBuf>,
-style:Option<DecompileStyle>,
+style:Option<Style>,
 }
 async fn compile(config:CompileConfig)->AResult<()>{
@@ -784,7 +783,10 @@ async fn compile(config:CompileConfig)->AResult<()>{
 //hack to traverse root folder as the root object
 dom.root_mut().name="src".to_owned();
-something_something_dom_write(&mut dom).await?;
+rox_compiler::compile::compile(rox_compiler::types::CompileConfig{
+input_folder:config.input_folder,
+style:config.style,
+},&mut dom).await?;
 let mut output_place=config.output_file.clone();
 if output_place.extension().is_none()&&tokio::fs::try_exists(output_place.as_path()).await?{