29 Commits

Author SHA1 Message Date
40fc2229a3 don't exit on empty tree 2024-02-24 00:08:45 -08:00
4007b2cb0d update deps 2024-02-22 20:10:22 -08:00
d9f67c5d1a mess with git history code 2024-02-22 19:54:40 -08:00
9bcf34fb1c grobble grobble 2024-01-25 01:27:42 -08:00
79544c5a4b stop printing intended behaviour 2024-01-25 01:16:54 -08:00
759727d4e9 move file reading to worker threads 2024-01-25 01:16:54 -08:00
1164f8e12a execute order 66 2024-01-25 00:26:35 -08:00
44725f65df perform a perfect backflip
This replaces the join set with a futures stream but does not fulfill the goal of scanning ahead with a cap. (See the sketch after the commit list below.)
2024-01-25 00:16:14 -08:00
dfe899a7d8 rearrange code into iterator 2024-01-24 21:10:13 -08:00
d5b8c10264 fix override file duplication 2024-01-24 18:37:50 -08:00
fc4cebc862 -1 loc 2024-01-24 18:37:22 -08:00
658266aa89 fix decompile template generation & decompilation + write correct data to final file 2024-01-24 17:27:15 -08:00
56899fa7da enormous code for custom logic 2024-01-24 16:51:19 -08:00
d9531f1d4e custom file stem 2024-01-24 16:25:00 -08:00
994eb9c4be remove unnecessary Option 2024-01-24 14:58:27 -08:00
17bfbef482 misc tweaks + tweak Query objects + remove ScriptHint::Undetermined 2024-01-24 14:42:39 -08:00
e5c7ed6b75 debug insanity 2024-01-24 01:44:18 -08:00
9d6780a0b0 write discern_file + tweaks 2024-01-24 00:39:01 -08:00
4fd7795457 write colossal code 2024-01-23 23:55:14 -08:00
1ea68d96ef mega_double_join unused 2024-01-23 20:50:51 -08:00
4ced7f6210 change return Err to Err()? 2024-01-23 20:37:03 -08:00
db2c760c49 extract_script_overrides 2024-01-23 20:33:27 -08:00
8ee041918b tweaks 2024-01-23 20:19:00 -08:00
5384bbcb3b support Script.module.lua properly 2024-01-23 18:47:15 -08:00
9f3bd80403 woah 2024-01-23 18:43:32 -08:00
7863137174 tweaking 2024-01-23 18:43:32 -08:00
15fd698a21 colossal fixes & tweaks 2024-01-23 18:43:32 -08:00
07f0b03d45 ok I wrote this I guess 2024-01-23 18:43:32 -08:00
7e27b378e9 wip compile 2024-01-22 20:28:24 -08:00
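
The "perform a perfect backflip" commit above replaces a tokio::task::JoinSet with a futures stream. A minimal sketch of the stream shape, assuming only the futures and tokio crates pinned in the manifest below (the doubling closure is a stand-in for a real download, not code from this repository):

use futures::StreamExt;

#[tokio::main]
async fn main(){
    //futures::stream::iter is lazy: .buffered(8) drives at most 8 of
    //these futures at once and yields results in input order, the same
    //concurrency cap the JoinSet loop enforced by hand.
    let results:Vec<u64>=futures::stream::iter(1..=10u64)
        .map(|v|async move{v*2})//stand-in for the real download+write
        .buffered(8)
        .collect()
        .await;
    assert_eq!(results,(1..=10u64).map(|v|v*2).collect::<Vec<u64>>());
}

As the commit message notes, this caps in-flight work but does not scan ahead past the cap; a bounded look-ahead would need something extra, e.g. a channel feeding the stream.
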
7 changed files with 706 additions and 1052 deletions

708
Cargo.lock (generated)

File diff suppressed because it is too large

@@ -1,24 +1,26 @@
workspace = { members = ["rbx_asset"] }
[package]
name = "asset-tool"
version = "0.3.1"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.75"
chrono = { version = "0.4.31", features = ["serde"] }
clap = { version = "4.4.2", features = ["derive"] }
flate2 = "1.0.28"
futures = "0.3.30"
git2 = "0.18.1"
lazy-regex = "3.1.0"
pollster = "0.3.0"
rayon = "1.8.0"
rbx_asset = { path = "rbx_asset" }
rbx_binary = "0.7.4"
rbx_dom_weak = "2.7.0"
rbx_reflection_database = "0.2.10"
rbx_xml = "0.13.3"
reqwest = { version = "0.11.23", features = ["cookies", "json"] }
serde = { version = "1.0.195", features = ["derive"] }
serde_json = "1.0.111"
tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread", "fs"] }

28
LICENSE

@@ -1,23 +1,9 @@
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
MIT License
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
Copyright (c) 2023 Quaternions
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@@ -1,14 +0,0 @@
[package]
name = "rbx_asset"
version = "0.1.0"
edition = "2021"
publish = ["strafesnet"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
chrono = { version = "0.4.38", features = ["serde"] }
flate2 = "1.0.29"
reqwest = { version = "0.12.4", features = ["json"] }
serde = { version = "1.0.199", features = ["derive"] }
url = "2.5.0"

@@ -1,311 +0,0 @@
#[derive(Debug)]
pub enum PostError{
Reqwest(reqwest::Error),
CSRF,
}
impl std::fmt::Display for PostError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for PostError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct CreateRequest{
pub name:String,
pub description:String,
pub ispublic:bool,
pub allowComments:bool,
pub groupId:Option<u64>,
}
#[derive(Debug)]
pub enum CreateError{
ParseError(url::ParseError),
PostError(PostError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for CreateError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for CreateError{}
#[allow(nonstandard_style,dead_code)]
pub struct UploadRequest{
pub assetid:u64,
pub name:Option<String>,
pub description:Option<String>,
pub ispublic:Option<bool>,
pub allowComments:Option<bool>,
pub groupId:Option<u64>,
}
#[derive(Debug)]
pub enum UploadError{
ParseError(url::ParseError),
PostError(PostError),
Reqwest(reqwest::Error),
AssetIdIsZero,
}
impl std::fmt::Display for UploadError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for UploadError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct UploadResponse{
pub AssetId:u64,
pub AssetVersionId:u64,
}
#[allow(nonstandard_style,dead_code)]
pub struct DownloadRequest{
pub asset_id:u64,
pub version:Option<u64>,
}
#[derive(Debug)]
pub enum DownloadError{
ParseError(url::ParseError),
Reqwest(reqwest::Error),
IO(std::io::Error)
}
impl std::fmt::Display for DownloadError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for DownloadError{}
pub struct HistoryPageRequest{
pub asset_id:u64,
pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetVersion{
pub Id:u64,
pub assetId:u64,
pub assetVersionNumber:u64,
pub creatorType:String,
pub creatorTargetId:u64,
pub creatingUniverseId:Option<u64>,
pub created:chrono::DateTime<chrono::Utc>,
pub isPublished:bool,
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct HistoryPageResponse{
pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>,
pub data:Vec<AssetVersion>,
}
#[derive(Debug)]
pub enum HistoryPageError{
ParseError(url::ParseError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for HistoryPageError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for HistoryPageError{}
pub struct InventoryPageRequest{
pub group:u64,
pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct InventoryItem{
pub id:u64,
pub name:String,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct InventoryPageResponse{
pub totalResults:u64,//up to 50
pub filteredKeyword:Option<String>,//""
pub searchDebugInfo:Option<String>,//null
pub spellCheckerResult:Option<String>,//null
pub queryFacets:Option<String>,//null
pub imageSearchStatus:Option<String>,//null
pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>,
pub data:Vec<InventoryItem>,
}
#[derive(Debug)]
pub enum InventoryPageError{
ParseError(url::ParseError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for InventoryPageError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for InventoryPageError{}
//idk how to do this better
enum ReaderType<R:std::io::Read>{
GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
Raw(std::io::BufReader<R>),
}
fn maybe_gzip_decode<R:std::io::Read>(input:R)->std::io::Result<ReaderType<R>>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
match &peek[0..2]{
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
_=>Ok(ReaderType::Raw(buf)),
}
}
fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
let mut contents=Vec::new();
readable.read_to_end(&mut contents)?;
Ok(contents)
}
#[derive(Clone)]
pub struct RobloxContext{
pub cookie:String,
pub client:reqwest::Client,
}
impl RobloxContext{
pub fn new(cookie:String)->Self{
Self{
cookie,
client:reqwest::Client::new(),
}
}
async fn get(&self,url:impl reqwest::IntoUrl)->Result<reqwest::Response,reqwest::Error>{
self.client.get(url)
.header("Cookie",self.cookie.as_str())
.send().await
}
async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,PostError>{
let mut resp=self.client.post(url.clone())
.header("Cookie",self.cookie.as_str())
.body(body.clone())
.send().await.map_err(PostError::Reqwest)?;
//This is called a CSRF challenge apparently
if resp.status()==reqwest::StatusCode::FORBIDDEN{
if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
resp=self.client.post(url)
.header("X-CSRF-Token",csrf_token)
.header("Cookie",self.cookie.as_str())
.body(body)
.send().await.map_err(PostError::Reqwest)?;
}else{
Err(PostError::CSRF)?;
}
}
Ok(resp)
}
pub async fn create(&self,config:CreateRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,CreateError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(CreateError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//archaic roblox api uses 0 for new asset
query.append_pair("assetid","0");
query.append_pair("name",config.name.as_str());
query.append_pair("description",config.description.as_str());
query.append_pair("ispublic",if config.ispublic{"True"}else{"False"});
query.append_pair("allowComments",if config.allowComments{"True"}else{"False"});
match config.groupId{
Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
None=>(),
}
}
let resp=self.post(url,body).await.map_err(CreateError::PostError)?;
Ok(resp.json::<UploadResponse>().await.map_err(CreateError::Reqwest)?)
}
pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//archaic roblox api uses 0 for new asset
match config.assetid{
0=>return Err(UploadError::AssetIdIsZero),
assetid=>{query.append_pair("assetid",assetid.to_string().as_str());},
}
if let Some(name)=config.name.as_deref(){
query.append_pair("name",name);
}
if let Some(description)=config.description.as_deref(){
query.append_pair("description",description);
}
if let Some(ispublic)=config.ispublic{
query.append_pair("ispublic",if ispublic{"True"}else{"False"});
}
if let Some(allow_comments)=config.allowComments{
query.append_pair("allowComments",if allow_comments{"True"}else{"False"});
}
if let Some(group_id)=config.groupId{
query.append_pair("groupId",group_id.to_string().as_str());
}
}
let resp=self.post(url,body).await.map_err(UploadError::PostError)?;
Ok(resp.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
}
pub async fn download(&self,config:DownloadRequest)->Result<Vec<u8>,DownloadError>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(DownloadError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("ID",config.asset_id.to_string().as_str());
if let Some(version)=config.version{
query.append_pair("version",version.to_string().as_str());
}
}
let resp=self.get(url).await.map_err(DownloadError::Reqwest)?;
let body=resp.bytes().await.map_err(DownloadError::Reqwest)?;
match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
Ok(ReaderType::GZip(readable))=>read_readable(readable),
Ok(ReaderType::Raw(readable))=>read_readable(readable),
Err(e)=>Err(e),
}.map_err(DownloadError::IO)
}
pub async fn history_page(&self,config:HistoryPageRequest)->Result<HistoryPageResponse,HistoryPageError>{
let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(HistoryPageError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//query.append_pair("sortOrder","Asc");
//query.append_pair("limit","100");
//query.append_pair("count","100");
if let Some(cursor)=config.cursor.as_deref(){
query.append_pair("cursor",cursor);
}
}
Ok(self.get(url).await.map_err(HistoryPageError::Reqwest)?
.json::<HistoryPageResponse>().await.map_err(HistoryPageError::Reqwest)?)
}
pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
if let Some(cursor)=config.cursor.as_deref(){
query.append_pair("cursor",cursor);
}
}
Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
}
}
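
The post() method deleted above centralizes the one-shot CSRF retry. A standalone sketch of the same pattern, assuming only reqwest; post_with_csrf and the boxed error type are illustrative, not APIs from this repository:

use reqwest::StatusCode;

//POST once; if the server answers 403 with an X-CSRF-Token header,
//echo that token back in a single retry, as RobloxContext::post does.
async fn post_with_csrf(client:&reqwest::Client,url:reqwest::Url,cookie:&str,body:String)
->Result<reqwest::Response,Box<dyn std::error::Error>>{
    let resp=client.post(url.clone())
        .header("Cookie",cookie)
        .body(body.clone())
        .send().await?;
    if resp.status()==StatusCode::FORBIDDEN{
        if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
            //the 403 was a CSRF challenge, not a real denial
            return Ok(client.post(url)
                .header("X-CSRF-Token",csrf_token.clone())
                .header("Cookie",cookie)
                .body(body)
                .send().await?);
        }
        return Err("403 with no X-CSRF-Token header".into());
    }
    Ok(resp)
}
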

@@ -1 +0,0 @@
pub mod context;
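
Both the deleted context.rs above and the main.rs diff below walk cursor-paged endpoints with the same loop: request a page, append its data, and repeat until nextPageCursor comes back empty. A self-contained sketch of that loop (Page and fetch_page are hypothetical stand-ins for HistoryPageResponse and history_page):

struct Page{
    next_page_cursor:Option<String>,
    data:Vec<u64>,
}
//stand-in for one paged HTTP request such as history_page
async fn fetch_page(_cursor:Option<String>)->Page{
    Page{next_page_cursor:None,data:vec![1,2,3]}
}
//thread the cursor through successive requests, accumulating results
async fn fetch_all()->Vec<u64>{
    let mut cursor:Option<String>=None;
    let mut asset_list=Vec::new();
    loop{
        let mut page=fetch_page(cursor).await;
        asset_list.append(&mut page.data);
        if page.next_page_cursor.is_none(){
            break;
        }
        cursor=page.next_page_cursor;
    }
    asset_list
}
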

@@ -1,13 +1,12 @@
use std::{io::Read,path::PathBuf};
use std::io::Read;
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;
use tokio::io::AsyncReadExt;
use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion};
type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
type AssetIDFileMap=Vec<(AssetID,std::path::PathBuf)>;
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;
@@ -15,313 +14,260 @@ const CONCURRENT_REQUESTS:usize=32;
#[command(author,version,about,long_about=None)]
#[command(propagate_version = true)]
struct Cli{
//asset options
#[arg(short,long)]
group:Option<u64>,
#[arg(long)]
asset_id:Option<AssetID>,
//idk how to do this better
#[arg(long)]
cookie_literal:Option<String>,
#[arg(long)]
cookie_env:Option<String>,
#[arg(long)]
cookie_file:Option<std::path::PathBuf>,
//TODO: read the versions.json file instead of doing this
//TODO: write file dates instead of versions.json
#[arg(long)]
start_version:Option<u64>,
#[arg(long)]
end_version:Option<u64>,
#[arg(long)]
r#continue:bool,
//decompile options
#[arg(long)]
no_models:Option<bool>,
#[arg(long)]
no_scripts:Option<bool>,
#[arg(long)]
no_template:Option<bool>,
#[arg(long)]
style:Option<String>,
//git options
#[arg(long)]
git_committer_name:Option<String>,
#[arg(long)]
git_committer_email:Option<String>,
#[arg(short,long)]
input:Option<std::path::PathBuf>,
#[arg(short,long)]
output:Option<std::path::PathBuf>,
#[command(subcommand)]
command:Commands,
}
#[derive(Subcommand)]
enum Commands{
DownloadHistory(DownloadHistorySubcommand),
Download(DownloadSubcommand),
DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand),
Create(CreateSubcommand),
Upload(UploadSubcommand),
Compile(CompileSubcommand),
Decompile(DecompileSubcommand),
DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
DownloadHistory,
Download(AssetIDList),
Upload,
Compile,
Decompile,
DecompileHistoryIntoGit,
DownloadAndDecompileHistoryIntoGit,
}
#[derive(Args)]
struct DownloadHistorySubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]
continue_from_versions:Option<bool>,
#[arg(long)]
start_version:Option<u64>,
#[arg(long)]
end_version:Option<u64>,
}
#[derive(Args)]
struct DownloadSubcommand{
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(required=true)]
asset_ids:Vec<AssetID>,
}
#[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]
group:u64,
}
#[derive(Args)]
struct CreateSubcommand{
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
model_name:String,
#[arg(long)]
description:Option<String>,
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
group:Option<u64>,
#[arg(long)]
free_model:Option<bool>,
#[arg(long)]
allow_comments:Option<bool>,
}
#[derive(Args)]
struct UploadSubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
group:Option<u64>,
}
#[derive(Args)]
struct CompileSubcommand{
#[arg(long)]
input_folder:Option<PathBuf>,
#[arg(long)]
output_file:PathBuf,
#[arg(long)]
style:Option<DecompileStyle>,
#[arg(long)]
template:Option<PathBuf>,
}
#[derive(Args)]
struct DecompileSubcommand{
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]
style:DecompileStyle,
#[arg(long)]
write_template:Option<bool>,
#[arg(long)]
write_models:Option<bool>,
#[arg(long)]
write_scripts:Option<bool>,
}
#[derive(Args)]
struct DecompileHistoryIntoGitSubcommand{
#[arg(long)]
input_folder:PathBuf,
//currently output folder must be the current folder due to git2 limitations
//output_folder:cli.output.unwrap(),
#[arg(long)]
style:DecompileStyle,
#[arg(long)]
git_committer_name:String,
#[arg(long)]
git_committer_email:String,
#[arg(long)]
write_template:Option<bool>,
#[arg(long)]
write_models:Option<bool>,
#[arg(long)]
write_scripts:Option<bool>,
}
#[derive(Args)]
struct DownloadAndDecompileHistoryIntoGitSubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
//currently output folder must be the current folder due to git2 limitations
//output_folder:cli.output.unwrap(),
#[arg(long)]
style:DecompileStyle,
#[arg(long)]
git_committer_name:String,
#[arg(long)]
git_committer_email:String,
#[arg(long)]
write_template:Option<bool>,
#[arg(long)]
write_models:Option<bool>,
#[arg(long)]
write_scripts:Option<bool>,
}
#[derive(Clone,clap::ValueEnum)]
enum CookieType{
Literal,
Environment,
File,
}
#[derive(Clone,Copy,Debug,clap::ValueEnum)]
#[derive(Clone,Copy,Debug)]
enum DecompileStyle{
Rox,
Rojo,
RoxRojo,
}
#[derive(Args)]
struct AssetIDList{
asset_ids:Vec<AssetID>
}
#[derive(Args)]
struct PathBufList{
paths:Vec<std::path::PathBuf>
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
struct VersionPage{
previousPageCursor:Option<String>,
nextPageCursor:Option<String>,
data:Vec<AssetVersion>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
struct AssetVersion{
Id:u64,
assetId:AssetID,
assetVersionNumber:u64,
creatorType:String,
creatorTargetId:u64,
creatingUniverseId:Option<u64>,
created:chrono::DateTime<chrono::Utc>,
isPublished:bool,
}
#[tokio::main]
async fn main()->AResult<()>{
let cli=Cli::parse();
let cookie_enum={
match (cli.cookie_literal,cli.cookie_env,cli.cookie_file){
(Some(literal),None,None)=>Some(Cookie::Literal(literal)),
(None,Some(env_var),None)=>Some(Cookie::Environment(env_var)),
(None,None,Some(path))=>Some(Cookie::File(path)),
(None,None,None)=>None,
_=>Err(anyhow::Error::msg("Cookie was specified multiple times."))?,
}
};
let cookie=match cookie_enum{
Some(c)=>Some(format!(".ROBLOSECURITY={}",match c{
Cookie::Literal(s)=>s,
Cookie::Environment(var)=>std::env::var(var)?,
Cookie::File(path)=>tokio::fs::read_to_string(path).await?,
})),
None=>None,
};
let decompile_style=match cli.style.as_deref(){
Some("rox")
|Some("Rox")=>Some(DecompileStyle::Rox),
Some("rojo")
|Some("Rojo")=>Some(DecompileStyle::Rojo),
Some("rox-rojo")
|Some("rojo-rox")
|Some("roxrojo")
|Some("rojorox")
|Some("RoxRojo")
|Some("RojoRox")=>Some(DecompileStyle::RoxRojo),
None=>None,
_=>Err(anyhow::Error::msg("Invalid style"))?,
};
match cli.command{
Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
end_version:subcommand.end_version,
start_version:subcommand.start_version.unwrap_or(0),
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
asset_id:subcommand.asset_id,
Commands::DownloadHistory=>download_history(DownloadHistoryConfig{
continue_from_versions:cli.r#continue,
end_version:cli.end_version,
start_version:cli.start_version.unwrap_or(0),
output_folder:cli.output.unwrap(),
cookie:cookie.unwrap(),
asset_id:cli.asset_id.unwrap(),
}).await,
Commands::Download(subcommand)=>{
let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
download_list(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
subcommand.asset_ids.into_iter().map(|asset_id|{
let mut path=output_folder.clone();
path.push(asset_id.to_string());
(asset_id,path)
}).collect()
).await
},
Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
subcommand.group,
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
Commands::Download(asset_id_list)=>download_list(
cookie.unwrap(),
asset_id_list.asset_ids.into_iter().map(|asset_id|{
let mut path=cli.output.clone().unwrap();
path.push(asset_id.to_string());
(asset_id,path)
}).collect()
).await,
Commands::Create(subcommand)=>create(CreateConfig{
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
group:subcommand.group,
input_file:subcommand.input_file,
model_name:subcommand.model_name,
description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
free_model:subcommand.free_model.unwrap_or(false),
allow_comments:subcommand.allow_comments.unwrap_or(false),
Commands::Upload=>upload_list(cookie.unwrap(),cli.group,vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await,
Commands::Compile=>compile(CompileConfig{
input_folder:cli.input.unwrap(),
output_file:cli.output.unwrap(),
template:None,
style:None,
}).await,
Commands::Upload(subcommand)=>upload_list(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
subcommand.group,
vec![(subcommand.asset_id,subcommand.input_file)]
).await,
Commands::Compile(subcommand)=>compile(CompileConfig{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
output_file:subcommand.output_file,
template:subcommand.template,
style:subcommand.style,
Commands::Decompile=>decompile(DecompileConfig{
style:decompile_style.unwrap(),
input_file:cli.input.unwrap(),
output_folder:cli.output.unwrap(),
write_template:!cli.no_template.unwrap_or(false),
write_models:!cli.no_models.unwrap_or(false),
write_scripts:!cli.no_scripts.unwrap_or(false),
}).await,
Commands::Decompile(subcommand)=>decompile(DecompileConfig{
style:subcommand.style,
input_file:subcommand.input_file,
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
}).await,
Commands::DecompileHistoryIntoGit(subcommand)=>decompile_history_into_git(DecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email,
input_folder:subcommand.input_folder,
output_folder:std::env::current_dir()?,
style:subcommand.style,
write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
Commands::DecompileHistoryIntoGit=>decompile_history_into_git(DecompileHistoryConfig{
git_committer_name:cli.git_committer_name.unwrap(),
git_committer_email:cli.git_committer_email.unwrap(),
input_folder:cli.input.unwrap(),
output_folder:cli.output.unwrap(),
style:decompile_style.unwrap(),
write_template:!cli.no_template.unwrap_or(false),
write_models:!cli.no_models.unwrap_or(false),
write_scripts:!cli.no_scripts.unwrap_or(false),
}).await,
Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email,
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
asset_id:subcommand.asset_id,
output_folder:std::env::current_dir()?,
style:subcommand.style,
write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
Commands::DownloadAndDecompileHistoryIntoGit=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
git_committer_name:cli.git_committer_name.unwrap(),
git_committer_email:cli.git_committer_email.unwrap(),
cookie:cookie.unwrap(),
asset_id:cli.asset_id.unwrap(),
output_folder:cli.output.unwrap(),
style:decompile_style.unwrap(),
write_template:!cli.no_template.unwrap_or(false),
write_models:!cli.no_models.unwrap_or(false),
write_scripts:!cli.no_scripts.unwrap_or(false),
}).await,
}
}
struct Cookie(String);
impl Cookie{
async fn from_type(cookie_type:CookieType,cookie_string:String)->AResult<Self>{
Ok(Self(format!(".ROBLOSECURITY={}",match cookie_type{
CookieType::Literal=>cookie_string,
CookieType::Environment=>std::env::var(cookie_string)?,
CookieType::File=>tokio::fs::read_to_string(cookie_string).await?,
})))
enum Cookie{
Literal(String),
Environment(String),
File(std::path::PathBuf),
}
enum ReaderType<R:Read>{
GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
Raw(std::io::BufReader<R>),
}
fn maybe_gzip_decode<R:Read>(input:R)->AResult<ReaderType<R>>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
match &peek[0..2]{
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
_=>Ok(ReaderType::Raw(buf)),
}
}
struct CreateConfig{
cookie:String,
model_name:String,
description:String,
input_file:PathBuf,
group:Option<u64>,
free_model:bool,
allow_comments:bool,
}
async fn create(config:CreateConfig)->AResult<()>{
let resp=RobloxContext::new(config.cookie)
.create(rbx_asset::context::CreateRequest{
name:config.model_name,
description:config.description,
ispublic:config.free_model,
allowComments:config.allow_comments,
groupId:config.group,
},tokio::fs::read(config.input_file).await?).await?;
println!("UploadResponse={:?}",resp);
Ok(())
}
async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=RobloxContext::new(cookie);
//this is calling map on the vec because the closure produces an iterator of futures
let client=reqwest::Client::new();
futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{
let context=&context;
let client=&client;
let cookie=cookie.as_str();
let group=&group;
async move{
Ok((asset_id,context.upload(rbx_asset::context::UploadRequest{
assetid:asset_id,
name:None,
description:None,
ispublic:None,
allowComments:None,
groupId:group,
},tokio::fs::read(file).await?).await?))
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("assetid",asset_id.to_string().as_str());
match group{
Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
None=>(),
}
}
let body=tokio::fs::read_to_string(file).await?;
let mut resp=client.post(url.clone())
.header("Cookie",cookie)
.body(body.clone())
.send().await?;
//This is called a CSRF challenge apparently
if resp.status()==reqwest::StatusCode::FORBIDDEN{
if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
resp=client.post(url)
.header("X-CSRF-Token",csrf_token)
.header("Cookie",cookie)
.body(body)
.send().await?;
}else{
Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"))?;
}
}
Ok((asset_id,resp.bytes().await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((asset_id,body))=>{
println!("asset_id={} UploadResponse={:?}",asset_id,body);
println!("asset_id={} response.body={:?}",asset_id,body);
},
Err(e)=>eprintln!("ul error: {}",e),
}
@@ -329,29 +275,41 @@ async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFi
Ok(())
}
fn read_readable(mut readable:impl Read)->AResult<Vec<u8>>{
let mut contents=Vec::new();
readable.read_to_end(&mut contents)?;
Ok(contents)
}
async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=RobloxContext::new(cookie);
let client=reqwest::Client::new();
futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{
let context=&context;
let client=&client;
let cookie=cookie.as_str();
async move{
Ok((file,context.download(rbx_asset::context::DownloadRequest{asset_id,version:None}).await?))
let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
.header("Cookie",cookie)
.send().await?;
Ok((file,resp.bytes().await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((mut dest,data))=>{
//known file types
match &data[0..4]{
b"<rob"=>dest.set_extension("rbxm"),
b"\x89PNG"=>dest.set_extension("png"),
_=>false,
Ok((dest,body))=>{
let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
Ok(ReaderType::GZip(readable))=>read_readable(readable),
Ok(ReaderType::Raw(readable))=>read_readable(readable),
Err(e)=>Err(e),
};
match contents{
Ok(data)=>match tokio::fs::write(dest,data).await{
Err(e)=>eprintln!("fs error: {}",e),
_=>(),
},
Err(e)=>eprintln!("gzip error: {}",e),
};
match tokio::fs::write(dest,data).await{
Err(e)=>eprintln!("fs error: {}",e),
_=>(),
}
},
Err(e)=>eprintln!("dl error: {}",e),
}
@@ -359,36 +317,31 @@ async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<
Ok(())
}
async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<InventoryItem>>{
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=context.inventory_page(rbx_asset::context::InventoryPageRequest{group,cursor}).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;
async fn download_page(client:&reqwest::Client,cookie:&str,asset_id:AssetID,cursor:Option<String>)->AResult<VersionPage>{
let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",asset_id).as_str())?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//query.append_pair("sortOrder","Asc");
//query.append_pair("limit","100");
//query.append_pair("count","100");
match cursor.as_deref(){
Some(next_page)=>{query.append_pair("cursor",next_page);}
None=>(),
}
cursor=page.nextPageCursor;
}
Ok(asset_list)
println!("page url={}",url);
let resp=client.get(url)
.header("Cookie",cookie)
.send().await?;
Ok(resp.json::<VersionPage>().await?)
}
async fn download_group_inventory_json(cookie:String,group:u64,output_folder:PathBuf)->AResult<()>{
let context=RobloxContext::new(cookie);
let item_list=get_inventory_pages(&context,group).await?;
let mut path=output_folder.clone();
path.set_file_name("versions.json");
tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;
Ok(())
}
async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id,cursor}).await?;
let mut page=download_page(client,cookie,asset_id,cursor).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;
@@ -399,11 +352,35 @@ async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<V
Ok(asset_list)
}
async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str:&str,asset_version_str:&str)->AResult<reqwest::Response>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("ID",asset_id_str);
query.append_pair("version",asset_version_str);
}
println!("download url={}",url);
for i in 0..8{
let resp=client.get(url.clone())
.header("Cookie",cookie)
.send().await?;
if !resp.status().is_success(){
println!("request {} failed",i);
continue;
}
return Ok(resp);
}
Err(anyhow::Error::msg("all requests failed"))
}
struct DownloadHistoryConfig{
continue_from_versions:bool,
end_version:Option<u64>,
start_version:u64,
output_folder:PathBuf,
output_folder:std::path::PathBuf,
cookie:String,
asset_id:AssetID,
}
@@ -445,7 +422,9 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
}
}
let context=RobloxContext::new(config.cookie);
let client=reqwest::Client::new();
let asset_id_string=config.asset_id.to_string();
//limit concurrent downloads
let mut join_set=tokio::task::JoinSet::new();
@@ -453,8 +432,10 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
//poll paged list of all asset versions
let mut cursor:Option<String>=None;
loop{
let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id:config.asset_id,cursor}).await?;
let context=&context;
let mut page=download_page(&client,config.cookie.as_str(),config.asset_id,cursor).await?;
let client=&client;
let cookie=config.cookie.clone();
let asset_id_str=asset_id_string.clone();
let output_folder=config.output_folder.clone();
let data=&page.data;
let asset_list_contents=&asset_list_contents;
@@ -479,13 +460,19 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
while CONCURRENT_REQUESTS<=join_set.len(){
join_set.join_next().await.unwrap()??;
}
let context=context.clone();
let client=client.clone();
let cookie=cookie.clone();
let asset_id_str=asset_id_str.clone();
let mut path=output_folder.clone();
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
join_set.spawn(async move{
let file=context.download(rbx_asset::context::DownloadRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),version_number.to_string().as_str()).await?;
let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
ReaderType::GZip(readable)=>read_readable(readable)?,
ReaderType::Raw(readable)=>read_readable(readable)?,
};
tokio::fs::write(path,file).await?;
tokio::fs::write(path,contents).await?;
Ok::<_,anyhow::Error>(())
});
@@ -534,8 +521,8 @@ fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
match &peek[0..4]{
b"<rob"=>{
match &peek[4..8]{
b"lox!"=>rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
b"lox "=>rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
b"lox!"=>return rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
b"lox "=>return rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
}
},
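
load_dom above and maybe_gzip_decode earlier both branch on magic bytes peeked through a BufReader. A hypothetical sketch of that sniffing, using starts_with so a short file cannot panic the way the &peek[0..2] slice indexing can:

use std::io::{BufRead,BufReader,Read};

//peek the buffered bytes without consuming them, then pick a parser
fn sniff(input:impl Read)->std::io::Result<&'static str>{
    let mut buf=BufReader::new(input);
    let peek=buf.fill_buf()?;
    Ok(if peek.starts_with(b"\x1f\x8b"){
        "gzip"
    }else if peek.starts_with(b"<roblox!"){
        "roblox binary (rbxm/rbxl)"
    }else if peek.starts_with(b"<roblox "){
        "roblox xml (rbxmx/rbxlx)"
    }else{
        "unknown"
    })
}

fn main()->std::io::Result<()>{
    assert_eq!(sniff(std::io::Cursor::new(b"\x1f\x8b..."))?,"gzip");
    Ok(())
}
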
@@ -611,7 +598,7 @@ fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
lazy_regex::regex!(r"[^A-z0-9.-]").replace_all(s,"_")
}
fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:DecompileStyle,write_models:bool,write_scripts:bool)->AResult<()>{
file.push(sanitize(node_name_override.as_str()).as_ref());
match node.class{
Class::Folder=>(),
@@ -772,7 +759,7 @@ fn generate_decompiled_context<R:Read>(input:R)->AResult<DecompiledContext>{
struct WriteConfig{
style:DecompileStyle,
output_folder:PathBuf,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
@@ -879,8 +866,8 @@ async fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<
struct DecompileConfig{
style:DecompileStyle,
input_file:PathBuf,
output_folder:PathBuf,
input_file:std::path::PathBuf,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
@@ -912,7 +899,7 @@ async fn decompile(config:DecompileConfig)->AResult<()>{
struct WriteCommitConfig{
git_committer_name:String,
git_committer_email:String,
output_folder:PathBuf,
output_folder:std::path::PathBuf,
style:DecompileStyle,
write_template:bool,
write_models:bool,
@@ -1003,9 +990,9 @@ async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,De
struct DecompileHistoryConfig{
git_committer_name:String,
git_committer_email:String,
input_folder:PathBuf,
input_folder:std::path::PathBuf,
style:DecompileStyle,
output_folder:PathBuf,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
@@ -1054,28 +1041,34 @@ struct DownloadAndDecompileHistoryConfig{
git_committer_name:String,
git_committer_email:String,
style:DecompileStyle,
output_folder:PathBuf,
output_folder:std::path::PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
let context=RobloxContext::new(config.cookie);
let client=reqwest::Client::new();
//poll paged list of all asset versions
let asset_list=get_version_history(&context,config.asset_id).await?;
let asset_list=get_version_history(&client,&config.cookie.as_str(),config.asset_id).await?;
let repo=git2::Repository::init(config.output_folder.clone())?;
//download all versions
let asset_id=config.asset_id;
let asset_id_string=config.asset_id.to_string();
futures::stream::iter(asset_list.into_iter()
.map(|asset_version|{
let context=context.clone();
let client=client.clone();
let cookie=config.cookie.clone();
let asset_id_str=asset_id_string.clone();
tokio::task::spawn(async move{
let file=context.download(rbx_asset::context::DownloadRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
Ok::<_,anyhow::Error>((asset_version,generate_decompiled_context(std::io::Cursor::new(file))?))
let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),asset_version.assetVersionNumber.to_string().as_str()).await?;
let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
ReaderType::GZip(readable)=>generate_decompiled_context(readable)?,
ReaderType::Raw(readable)=>generate_decompiled_context(readable)?,
};
Ok::<_,anyhow::Error>((asset_version,contents))
})
}))
.buffered(CONCURRENT_DECODE)
@@ -1108,7 +1101,6 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
enum QueryResolveError{
NotFound,//0 results
Ambiguous,//>1 results
@@ -1127,7 +1119,7 @@ struct FileWithName{
name:String,
}
async fn get_file_async(mut path:PathBuf,file_name:impl AsRef<std::path::Path>)->Result<FileWithName,QueryResolveError>{
async fn get_file_async(mut path:std::path::PathBuf,file_name:impl AsRef<std::path::Path>)->Result<FileWithName,QueryResolveError>{
let name=file_name.as_ref().to_str().unwrap().to_owned();
path.push(file_name);
match tokio::fs::File::open(path).await{
@@ -1147,7 +1139,7 @@ struct QuerySingle{
script:QueryHandle,
}
impl QuerySingle{
fn rox(search_path:&PathBuf,search_name:&str)->Self{
fn rox(search_path:&std::path::PathBuf,search_name:&str)->Self{
Self{
script:tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name)))
}
@@ -1168,7 +1160,7 @@ struct QueryTriple{
client:QueryHandle,
}
impl QueryTriple{
fn rox_rojo(search_path:&PathBuf,search_name:&str,search_module:bool)->Self{
fn rox_rojo(search_path:&std::path::PathBuf,search_name:&str,search_module:bool)->Self{
//this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
let module_name=if search_module{
format!("{}.module.lua",search_name)
@@ -1181,7 +1173,7 @@ impl QueryTriple{
client:tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name))),
}
}
fn rojo(search_path:&PathBuf)->Self{
fn rojo(search_path:&std::path::PathBuf)->Self{
QueryTriple::rox_rojo(search_path,"init",false)
}
}
@@ -1251,7 +1243,7 @@ struct QueryQuad{
client:QueryHandle,
}
impl QueryQuad{
fn rox_rojo(search_path:&PathBuf,search_name:&str)->Self{
fn rox_rojo(search_path:&std::path::PathBuf,search_name:&str)->Self{
let fill=QueryTriple::rox_rojo(search_path,search_name,true);
Self{
module_implicit:QuerySingle::rox(search_path,search_name).script,//Script.lua
@@ -1312,9 +1304,9 @@ async fn script_node(search_name:&str,mut file:FileWithName,hint:ScriptHint)->AR
(Some("ModuleScript"),_)
|(None,ScriptHint::ModuleScript)=>CompileClass::ModuleScript(script_with_overrides.source),
(Some("LocalScript"),_)
|(None,ScriptHint::LocalScript)=>CompileClass::LocalScript(script_with_overrides.source),
|(None,ScriptHint::Script)=>CompileClass::LocalScript(script_with_overrides.source),
(Some("Script"),_)
|(None,ScriptHint::Script)=>CompileClass::Script(script_with_overrides.source),
|(None,ScriptHint::LocalScript)=>CompileClass::Script(script_with_overrides.source),
other=>Err(anyhow::Error::msg(format!("Invalid hint or class {other:?}")))?,
},
})
@@ -1460,9 +1452,9 @@ enum CompileStackInstruction{
}
struct CompileConfig{
input_folder:PathBuf,
output_file:PathBuf,
template:Option<PathBuf>,
input_folder:std::path::PathBuf,
output_file:std::path::PathBuf,
template:Option<std::path::PathBuf>,
style:Option<DecompileStyle>,
}
@@ -1482,17 +1474,21 @@ enum TooComplicated<T>{
async fn compile(config:CompileConfig)->AResult<()>{
//basically decompile in reverse order
//load template dom
let mut dom=match config.template{
let input={
let template_path=config.template.unwrap_or_else(||{
let mut template_path=config.input_folder.clone();
template_path.push("template.rbxlx");
template_path
});
//mr dom doesn't like tokio files
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
None=>rbx_dom_weak::WeakDom::default(),
std::io::BufReader::new(std::fs::File::open(template_path)?)
};
//hack to traverse root folder as the root object
dom.root_mut().name="src".to_owned();
let mut dom=load_dom(input)?;
//add in scripts and models
let mut folder=config.input_folder.clone();
let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
folder.push("src");
let mut stack:Vec<CompileStackInstruction>=dom.root().children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)).collect();
while let Some(instruction)=stack.pop(){
match instruction{
CompileStackInstruction::TraverseReferent(item_ref,blacklist)=>{