Compare commits
2 Commits
feature/ta...quat-histo

Author | SHA1 | Date
---|---|---
 | 32a9d75a62 |
 | 1712e0c7e3 |

12  Cargo.lock  (generated)
@@ -969,9 +969,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.76"
+version = "1.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"
+checksum = "907a61bd0f64c2f29cd1cf1dc34d05176426a3f504a78010f08416ddb7b13708"
 dependencies = [
  "unicode-ident",
 ]
@@ -1286,18 +1286,18 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.195"
+version = "1.0.194"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02"
+checksum = "0b114498256798c94a0689e1a15fec6005dee8ac1f41de56404b67afc2a4b773"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.195"
+version = "1.0.194"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c"
+checksum = "a3385e45322e8f9931410f01b3031ec534c3947d0e94c18049af4d9f9907d4e0"
 dependencies = [
  "proc-macro2",
  "quote",
Cargo.toml
@@ -18,7 +18,7 @@ rbx_dom_weak = "2.5.0"
 rbx_reflection_database = "0.2.7"
 rbx_xml = "0.13.1"
 reqwest = { version = "0.11.23", features = ["cookies", "json"] }
-serde = { version = "1.0.195", features = ["derive"] }
+serde = { version = "1.0.194", features = ["derive"] }
 serde_json = "1.0.111"
 tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread", "fs"] }
 
553  src/main.rs
@@ -2,11 +2,25 @@ use std::io::{Read,Seek};
 use clap::{Args,Parser,Subcommand};
 use anyhow::Result as AResult;
 use futures::StreamExt;
-use rbx_dom_weak::types::Ref;
+use serde::Serialize;
 
 type AssetID=u64;
 type AssetIDFileMap=Vec<(AssetID,std::path::PathBuf)>;
-const CONCURRENT_REQUESTS:usize=32;
+const CONCURRENT_REQUESTS:usize=8;
 
+/// Parse a single key-value pair
+fn parse_key_val<T,U>(s:&str)->AResult<(T,U)>
+where
+T:std::str::FromStr,
+T::Err:std::error::Error+Send+Sync+'static,
+U:std::str::FromStr,
+U::Err:std::error::Error+Send+Sync+'static,
+{
+let pos=s
+.find('=')
+.ok_or_else(||anyhow::Error::msg(format!("invalid KEY=value: no `=` found in `{s}`")))?;
+Ok((s[..pos].parse()?,s[pos+1..].parse()?))
+}
+
 #[derive(Parser)]
 #[command(author,version,about,long_about=None)]
@@ -22,21 +36,8 @@ struct Cli{
 #[arg(long)]
 cookie_file:Option<std::path::PathBuf>,
 
-#[arg(long)]
-no_models:Option<bool>,
-#[arg(long)]
-no_scripts:Option<bool>,
-#[arg(long)]
-no_template:Option<bool>,
-
-#[arg(long)]
-asset_id:Option<AssetID>,
-
-#[arg(short,long)]
-input:Option<std::path::PathBuf>,
-
-#[arg(short,long)]
-output:Option<std::path::PathBuf>,
+#[arg(long,value_parser=parse_key_val::<AssetID,std::path::PathBuf>)]
+asset_id:(AssetID,std::path::PathBuf),
 
 #[command(subcommand)]
 command:Commands,
@@ -44,11 +45,8 @@ struct Cli{
 
 #[derive(Subcommand)]
 enum Commands{
-DownloadHistory,
 Download,
 Upload,
-Compile,
-Decompile,
 }
 
 #[derive(Args)]
@@ -80,39 +78,71 @@ async fn main()->AResult<()>{
 
 let cookie_enum={
 match (cli.cookie_literal,cli.cookie_env,cli.cookie_file){
-(Some(literal),None,None)=>Some(Cookie::Literal(literal)),
-(None,Some(env_var),None)=>Some(Cookie::Environment(env_var)),
-(None,None,Some(path))=>Some(Cookie::File(path)),
-(None,None,None)=>None,
-_=>return Err(anyhow::Error::msg("Cookie was specified multiple times.")),
+(Some(literal),None,None)=>Cookie::Literal(literal),
+(None,Some(env_var),None)=>Cookie::Environment(env_var),
+(None,None,Some(path))=>Cookie::File(path),
+_=>return Err(anyhow::Error::msg("Cookie was not specified or was specified multiple times.")),
 }
 };
-let cookie=match cookie_enum{
-Some(c)=>Some(format!(".ROBLOSECURITY={}",match c{
-Cookie::Literal(s)=>s,
-Cookie::Environment(var)=>std::env::var(var)?,
-Cookie::File(path)=>tokio::fs::read_to_string(path).await?,
-})),
-None=>None,
-};
+let cookie=format!(".ROBLOSECURITY={}",match cookie_enum{
+Cookie::Literal(s)=>s,
+Cookie::Environment(var)=>std::env::var(var)?,
+Cookie::File(path)=>tokio::fs::read_to_string(path).await?.trim().to_string(),
+});
 
-match cli.command{
-Commands::DownloadHistory=>download_history(DownloadHistoryConfig{
-output_folder:cli.output.unwrap(),
-cookie:cookie.unwrap(),
-asset_id:cli.asset_id.unwrap(),
-}).await,
-Commands::Download=>download_list(cookie.unwrap(),vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
-Commands::Upload=>upload_list(cookie.unwrap(),cli.group,vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
-Commands::Compile=>compile(cli.input.unwrap(),cli.output.unwrap()),
-Commands::Decompile=>decompile(DecompileConfig{
-input_file:cli.input.unwrap(),
-output_folder:cli.output.unwrap(),
-write_template:!cli.no_template.unwrap_or(false),
-write_models:!cli.no_models.unwrap_or(false),
-write_scripts:!cli.no_scripts.unwrap_or(false),
-}),
-}
+let group=match cli.group{
+Some(group_id)=>Owner::Group(group_id),
+None=>Owner::User,
+};
+{
+let client=reqwest::Client::new();
+let mut cursor:Option<String>=None;
+let mut asset_list=Vec::new();
+loop{
+let mut url=reqwest::Url::parse("https://develop.roblox.com/v1/assets/252877716/saved-versions")?;
+//url borrow scope
+{
+let mut query=url.query_pairs_mut();//borrow here
+//query.append_pair("sortOrder","Asc");
+//query.append_pair("limit","100");
+//query.append_pair("count","100");
+match &cursor{
+Some(next_page)=>{query.append_pair("cursor",next_page);}
+None=>(),
+}
+}
+println!("url={}",url);
+let resp=client.get(url)
+.header("Cookie",cookie.clone())
+.send().await?;
+println!("resp:{:?}",resp);
+match resp.json::<VersionPage>().await{
+Ok(mut page)=>{
+asset_list.append(&mut page.data);
+if page.nextPageCursor.is_none(){
+break;
+}
+cursor=page.nextPageCursor;
+},
+Err(e)=>panic!("error: {}",e),
+}
+}
+asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
+let mut path=std::path::PathBuf::new();
+path.set_file_name("versions.json");
+tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;
+};
+Ok(())
+
+// match cli.command{
+// Commands::Download=>download_list(cookie,vec![cli.asset_id]).await,
+// Commands::Upload=>upload_list(cookie,group,vec![cli.asset_id]).await,
+// }
 }
+
+enum Owner{
+Group(u64),
+User
+}
 
 enum Cookie{
@@ -138,22 +168,22 @@ fn maybe_gzip_decode<R:Read+Seek>(input:&mut R)->AResult<ReaderType<R>>{
 }
 }
 
-async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
+async fn upload_list(cookie:String,owner:Owner,asset_id_file_map:AssetIDFileMap)->AResult<()>{
 let client=reqwest::Client::new();
 futures::stream::iter(asset_id_file_map)
 .map(|(asset_id,file)|{
 let client=&client;
 let cookie=cookie.as_str();
-let group=&group;
+let owner=&owner;
 async move{
 let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
 //url borrow scope
 {
 let mut query=url.query_pairs_mut();//borrow here
 query.append_pair("assetid",asset_id.to_string().as_str());
-match group{
-Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
-None=>(),
+match owner{
+Owner::Group(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
+Owner::User=>(),
 }
 }
 
@@ -197,121 +227,30 @@ fn read_readable(mut readable:impl Read)->AResult<Vec<u8>>{
 Ok(contents)
 }
-
+const BHOP_PLACEID:u64=252877716;
+
 async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
 let client=reqwest::Client::new();
 futures::stream::iter(asset_id_file_map)
 .map(|(asset_id,file)|{
+futures::stream::iter(1..=1006)
+.map(|version_id|{
 let client=&client;
 let cookie=cookie.as_str();
 async move{
-let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
-.header("Cookie",cookie)
-.send().await?;
-Ok((file,resp.bytes().await?))
-}
-})
-.buffer_unordered(CONCURRENT_REQUESTS)
-.for_each(|b:AResult<_>|async{
-match b{
-Ok((dest,body))=>{
-let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
-Ok(ReaderType::GZip(readable))=>read_readable(readable),
-Ok(ReaderType::Raw(readable))=>read_readable(readable),
-Err(e)=>Err(e),
-};
-match contents{
-Ok(data)=>match tokio::fs::write(dest,data).await{
-Err(e)=>eprintln!("fs error: {}",e),
-_=>(),
-},
-Err(e)=>eprintln!("gzip error: {}",e),
-};
-},
-Err(e)=>eprintln!("dl error: {}",e),
-}
-}).await;
-Ok(())
-}
-
-struct DownloadHistoryConfig{
-output_folder:std::path::PathBuf,
-cookie:String,
-asset_id:AssetID,
-}
-
-async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
-let client=reqwest::Client::new();
-let asset_id_string=config.asset_id.to_string();
-
-//poll paged list of all asset versions
-let mut cursor:Option<String>=None;
-let mut asset_list=Vec::new();
-loop{
-let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str())?;
-//url borrow scope
-{
-let mut query=url.query_pairs_mut();//borrow here
-//query.append_pair("sortOrder","Asc");
-//query.append_pair("limit","100");
-//query.append_pair("count","100");
-match &cursor{
-Some(next_page)=>{query.append_pair("cursor",next_page);}
-None=>(),
-}
-}
-println!("page url={}",url);
-let resp=client.get(url)
-.header("Cookie",config.cookie.clone())
-.send().await?;
-match resp.json::<VersionPage>().await{
-Ok(mut page)=>{
-asset_list.append(&mut page.data);
-if page.nextPageCursor.is_none(){
-break;
-}
-cursor=page.nextPageCursor;
-},
-Err(e)=>panic!("error: {}",e),
-}
-}
-asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
-let mut path=config.output_folder.clone();
-path.set_file_name("versions.json");
-tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;
-
-//download all versions
-futures::stream::iter(asset_list)
-.map(|asset_version|{
-let client=&client;
-let cookie=config.cookie.as_str();
-let asset_id_str=asset_id_string.as_str();
-let output_folder=config.output_folder.clone();
-async move{
 let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
 //url borrow scope
 {
 let mut query=url.query_pairs_mut();//borrow here
-query.append_pair("ID",asset_id_str);
-query.append_pair("version",asset_version.assetVersionNumber.to_string().as_str());
+query.append_pair("ID",BHOP_PLACEID.to_string().as_str());
+query.append_pair("version",version_id.to_string().as_str());
 }
-println!("download url={}",url);
-let mut result=Err(anyhow::Error::msg("all requests failed"));
-for i in 1..=8{
-let resp=client.get(url.clone())
-.header("Cookie",cookie)
-.send().await?;
+println!("url={}",url);
+let resp=client.get(url)
+.header("Cookie",cookie)
+.send().await?;
-
-if !resp.status().is_success(){
-println!("request {} failed",i);
-continue;
-}
-
-let mut path=output_folder;
-path.set_file_name(format!("{}_v{}.rbxl",config.asset_id,asset_version.assetVersionNumber));
-result=Ok((path,resp.bytes().await?));
-break;
-}
-result
+let mut path=std::path::PathBuf::new();
+path.set_file_name(BHOP_PLACEID.to_string()+"_v"+version_id.to_string().as_str()+".rbxl");
+Ok((path,resp.bytes().await?))
 }
 })
 .buffer_unordered(CONCURRENT_REQUESTS)
@@ -336,299 +275,3 @@ async fn download_history(config:DownloadHistoryConfig)->AResult<()>{
 }).await;
 Ok(())
 }
-
-fn load_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
-let mut first_8=[0u8;8];
-if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_8),std::io::Seek::rewind(input)){
-match &first_8[0..4]{
-b"<rob"=>{
-match &first_8[4..8]{
-b"lox!"=>return rbx_binary::from_reader(input).map_err(anyhow::Error::msg),
-b"lox "=>return rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(anyhow::Error::msg),
-other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
-}
-},
-_=>Err(anyhow::Error::msg("unsupported file type")),
-}
-}else{
-Err(anyhow::Error::msg("peek failed"))
-}
-}
-
-#[derive(PartialEq)]
-enum Class{
-Folder,
-ModuleScript,
-LocalScript,
-Script,
-Model,
-}
-
-struct TreeNode{
-name:String,
-referent:Ref,
-parent:Ref,
-class:Class,
-children:Vec<Ref>,
-}
-impl TreeNode{
-fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
-Self{
-name,
-referent,
-parent,
-class,
-children:Vec::new(),
-}
-}
-}
-
-enum TrimStackInstruction{
-Referent(Ref),
-IncrementScript,
-DecrementScript,
-}
-
-enum WriteStackInstruction<'a>{
-Node(&'a TreeNode,u32),//(Node,NameTally)
-PushFolder(String),
-PopFolder,
-Destroy(Ref),
-}
-
-#[derive(Default,serde::Deserialize,serde::Serialize)]
-struct PropertiesOverride{
-//Name:Option<String>,
-ClassName:Option<String>,
-}
-impl PropertiesOverride{
-fn is_some(&self)->bool{
-self.ClassName.is_some()
-}
-}
-
-fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
-lazy_regex::regex!(r"[^a-zA-Z0-9._-]").replace_all(s,"_")
-}
-
-fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeNode,node_name_override:&str,write_models:bool,write_scripts:bool)->AResult<()>{
-file.push(sanitize(node_name_override).as_ref());
-match node.class{
-Class::Folder=>(),
-Class::ModuleScript|Class::LocalScript|Class::Script=>{
-if !write_scripts{
-return Ok(())
-}
-assert!(file.set_extension("lua"),"could not set extension");
-assert!(dom.get_by_ref(node.referent).is_some_and(|item|{
-//TODO: delete disabled scripts
-if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
-std::fs::write(file,source).is_ok()
-}else{false}
-}),"no string property or file failed to write");
-},
-Class::Model=>{
-if !write_models{
-return Ok(())
-}
-assert!(file.set_extension("rbxmx"));
-let output=std::io::BufWriter::new(std::fs::File::create(file)?);
-rbx_xml::to_writer_default(output,dom,&[node.referent])?;
-},
-}
-Ok(())
-}
-
-struct DecompileConfig{
-input_file:std::path::PathBuf,
-output_folder:std::path::PathBuf,
-write_template:bool,
-write_models:bool,
-write_scripts:bool,
-}
-
-fn decompile(config:DecompileConfig)->AResult<()>{
-//rules:
-//Class Script|LocalScript|ModuleScript->$Name.lua
-//Class Model->$Name.rbxmx
-//overrides.json per-folder [Override{name,class}]
-//Everything else goes into template.rbxlx
-
-//read file
-let mut input=std::io::BufReader::new(std::fs::File::open(config.input_file)?);
-let mut dom=load_dom(&mut input)?;
-
-let mut tree_refs=std::collections::HashMap::new();
-tree_refs.insert(dom.root_ref(),TreeNode::new(
-"src".to_string(),
-dom.root_ref(),
-Ref::none(),
-Class::Folder
-));
-
-//run rules
-let mut stack=vec![dom.root()];
-while let Some(item)=stack.pop(){
-let class=match item.class.as_str(){
-"ModuleScript"=>Class::ModuleScript,
-"LocalScript"=>Class::LocalScript,
-"Script"=>Class::Script,
-"Model"=>Class::Model,
-_=>Class::Folder,
-};
-let skip=match class{
-Class::Model=>true,
-_=>false,
-};
-if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
-let referent=item.referent();
-let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
-parent_node.children.push(referent);
-tree_refs.insert(referent,node);
-}
-//look no further, turn this node and all its children into a model
-if skip{
-continue;
-}
-for &referent in item.children(){
-if let Some(c)=dom.get_by_ref(referent){
-stack.push(c);
-}
-}
-}
-
-//trim empty folders
-let mut script_count=0;
-let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
-.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
-while let Some(instruction)=stack.pop(){
-match instruction{
-TrimStackInstruction::IncrementScript=>script_count+=1,
-TrimStackInstruction::DecrementScript=>script_count-=1,
-TrimStackInstruction::Referent(referent)=>{
-let mut delete=None;
-if let Some(node)=tree_refs.get_mut(&referent){
-if node.class==Class::Folder&&script_count!=0{
-node.class=Class::Model
-}
-if node.class==Class::Folder&&node.children.len()==0{
-delete=Some(node.parent);
-}else{
-//how the hell do I do this better without recursion
-let is_script=match node.class{
-Class::ModuleScript|Class::LocalScript|Class::Script=>true,
-_=>false,
-};
-//stack is popped from back
-if is_script{
-stack.push(TrimStackInstruction::DecrementScript);
-}
-for &child_referent in &node.children{
-stack.push(TrimStackInstruction::Referent(child_referent));
-}
-if is_script{
-stack.push(TrimStackInstruction::IncrementScript);
-}
-}
-}
-//trim referent
-if let Some(parent_ref)=delete{
-let parent_node=tree_refs.get_mut(&parent_ref)
-.expect("parent_ref does not exist in tree_refs");
-parent_node.children.remove(
-parent_node.children.iter()
-.position(|&r|r==referent)
-.expect("parent.children does not contain referent")
-);
-tree_refs.remove(&referent);
-}
-},
-}
-}
-
-//generate folders, models, and scripts
-//delete models and scripts from dom
-
-let mut name_tally=std::collections::HashMap::<String,u32>::new();
-let mut folder=config.output_folder.clone();
-let mut stack=vec![WriteStackInstruction::Node(tree_refs.get(&dom.root_ref()).unwrap(),0)];
-while let Some(instruction)=stack.pop(){
-match instruction{
-WriteStackInstruction::PushFolder(component)=>folder.push(component),
-WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
-WriteStackInstruction::Destroy(referent)=>dom.destroy(referent),
-WriteStackInstruction::Node(node,name_count)=>{
-//properties.json to override class or other simple properties
-let mut properties=PropertiesOverride::default();
-let has_children=node.children.len()!=0;
-match node.class{
-Class::Folder=>(),
-Class::ModuleScript=>{
-//.lua files are ModuleScript by default
-if has_children{
-properties.ClassName=Some("ModuleScript".to_string())
-}
-},
-Class::LocalScript=>properties.ClassName=Some("LocalScript".to_string()),
-Class::Script=>properties.ClassName=Some("Script".to_string()),
-Class::Model=>(),
-}
-let name_override=if 0<name_count{
-format!("{}_{}",node.name,name_count)
-}else{
-node.name.clone()
-};
-if has_children||properties.is_some(){
-//push temp subfolder
-let mut subfolder=folder.clone();
-subfolder.push(sanitize(name_override.as_str()).as_ref());
-//make folder
-std::fs::create_dir(subfolder.clone())?;
-//write properties
-if properties.is_some(){
-let mut file=subfolder.clone();
-file.push("properties");
-assert!(file.set_extension("json"),"could not set extension");
-std::fs::write(file,serde_json::to_string(&properties)?)?
-}
-//write item in subfolder
-write_item(&dom,subfolder,node,name_override.as_str(),config.write_models,config.write_scripts)?;
-}else{
-//write item
-write_item(&dom,folder.clone(),node,name_override.as_str(),config.write_models,config.write_scripts)?;
-}
-//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
-match node.class{
-Class::Folder=>(),
-_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
-}
-if has_children{
-stack.push(WriteStackInstruction::PopFolder);
-name_tally.clear();
-for referent in &node.children{
-if let Some(c)=tree_refs.get(referent){
-let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
-stack.push(WriteStackInstruction::Node(c,*v));
-}
-}
-stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
-}
-},
-}
-}
-
-//write what remains in template.rbxlx
-if config.write_template{
-let mut file=config.output_folder.clone();
-file.push("template");
-assert!(file.set_extension("rbxlx"));
-let output=std::io::BufWriter::new(std::fs::File::create(file)?);
-rbx_xml::to_writer_default(output,&dom,&[dom.root_ref()])?;
-}
-
-Ok(())
-}
-
-fn compile(_folder:std::path::PathBuf,_file:std::path::PathBuf)->AResult<()>{
-Ok(())
-}