Compare commits

...

12 Commits

SHA1 Message Date
980d3cb05b no more cookie 2024-07-02 15:23:36 -07:00
d53efd7441 builds 2024-07-02 15:22:22 -07:00
cc7e445498 asd 2024-07-02 14:35:00 -07:00
7e4f96a19c wip 2024-07-02 14:35:00 -07:00
8a40ec3380 move type conversion to argument stuff 2024-07-02 14:34:54 -07:00
5ea1845555 v0.3.4 download-decompile 2024-07-01 20:42:16 -07:00
d5f3467ddd download-decompile 2024-07-01 20:41:52 -07:00
b988f59221 v0.3.3 compile-upload 2024-07-01 18:18:22 -07:00
89302d46fa compile-upload 2024-07-01 18:17:10 -07:00
ee034a93ee fix compiling with no template 2024-07-01 17:59:41 -07:00
9808c2ac0c v0.3.2 rox_compiler 2024-07-01 17:59:41 -07:00
e1710ff8bf refactor rox_compiler into module 2024-07-01 17:59:41 -07:00
10 changed files with 1523 additions and 1103 deletions

Cargo.lock (generated)

@@ -110,7 +110,7 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
 [[package]]
 name = "asset-tool"
-version = "0.3.1"
+version = "0.3.4"
 dependencies = [
  "anyhow",
  "clap",
@@ -118,12 +118,12 @@ dependencies = [
  "git2",
  "lazy-regex",
  "pollster",
- "rayon",
  "rbx_asset",
  "rbx_binary",
  "rbx_dom_weak",
  "rbx_reflection_database",
  "rbx_xml",
+ "rox_compiler",
  "serde_json",
  "tokio",
 ]
@@ -890,6 +890,16 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
 [[package]]
 name = "miniz_oxide"
 version = "0.7.4"
@@ -1156,12 +1166,13 @@ dependencies = [
 [[package]]
 name = "rbx_asset"
-version = "0.1.0"
+version = "0.2.0"
 dependencies = [
  "chrono",
  "flate2",
  "reqwest",
  "serde",
+ "serde_json",
  "url",
 ]
@@ -1294,6 +1305,7 @@ dependencies = [
  "js-sys",
  "log",
  "mime",
+ "mime_guess",
  "native-tls",
  "once_cell",
  "percent-encoding",
@@ -1351,6 +1363,18 @@ dependencies = [
  "serde",
 ]
+[[package]]
+name = "rox_compiler"
+version = "0.1.0"
+dependencies = [
+ "futures",
+ "lazy-regex",
+ "rayon",
+ "rbx_dom_weak",
+ "rbx_xml",
+ "tokio",
+]
 [[package]]
 name = "rustc-demangle"
 version = "0.1.24"
@@ -1733,6 +1757,15 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+[[package]]
+name = "unicase"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
+dependencies = [
+ "version_check",
+]
 [[package]]
 name = "unicode-bidi"
 version = "0.3.15"
@@ -1783,6 +1816,12 @@ version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 [[package]]
 name = "want"
 version = "0.3.1"

Cargo.toml

@@ -1,7 +1,7 @@
-workspace = { members = ["rbx_asset"] }
+workspace = { members = ["rbx_asset", "rox_compiler"] }
 [package]
 name = "asset-tool"
-version = "0.3.1"
+version = "0.3.4"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -13,12 +13,12 @@ futures = "0.3.30"
 git2 = "0.18.1"
 lazy-regex = "3.1.0"
 pollster = "0.3.0"
-rayon = "1.8.0"
 rbx_asset = { path = "rbx_asset" }
 rbx_binary = "0.7.4"
 rbx_dom_weak = "2.7.0"
 rbx_reflection_database = "0.2.10"
 rbx_xml = "0.13.3"
+rox_compiler = { path = "rox_compiler" }
 serde_json = "1.0.111"
 tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread", "fs"] }

rbx_asset/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "rbx_asset"
-version = "0.1.0"
+version = "0.2.0"
 edition = "2021"
 publish = ["strafesnet"]
@@ -9,6 +9,7 @@ publish = ["strafesnet"]
 [dependencies]
 chrono = { version = "0.4.38", features = ["serde"] }
 flate2 = "1.0.29"
-reqwest = { version = "0.12.4", features = ["json"] }
+reqwest = { version = "0.12.4", features = ["json","multipart"] }
 serde = { version = "1.0.199", features = ["derive"] }
+serde_json = "1.0.111"
 url = "2.5.0"

rbx_asset source

@@ -1,28 +1,22 @@
-#[derive(Debug)]
-pub enum PostError{
-	Reqwest(reqwest::Error),
-	CSRF,
-}
-impl std::fmt::Display for PostError{
-	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		write!(f,"{self:?}")
-	}
-}
-impl std::error::Error for PostError{}
 #[derive(Debug,serde::Deserialize,serde::Serialize)]
 #[allow(nonstandard_style,dead_code)]
-pub struct CreateRequest{
-	pub name:String,
+pub enum AssetType{
+	Audio,
+	Decal,
+	Model,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct CreateAssetRequest{
+	pub assetType:AssetType,
+	pub creationContext:CreationContext,
 	pub description:String,
-	pub ispublic:bool,
-	pub allowComments:bool,
-	pub groupId:Option<u64>,
+	pub displayName:String,
 }
 #[derive(Debug)]
 pub enum CreateError{
 	ParseError(url::ParseError),
-	PostError(PostError),
+	SerializeError(serde_json::Error),
 	Reqwest(reqwest::Error),
 }
 impl std::fmt::Display for CreateError{
@@ -32,54 +26,96 @@ impl std::fmt::Display for CreateError{
 }
 impl std::error::Error for CreateError{}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
 #[allow(nonstandard_style,dead_code)]
-pub struct UploadRequest{
-	pub assetid:u64,
-	pub name:Option<String>,
+pub struct UpdateAssetRequest{
+	pub assetId:u64,
+	pub displayName:Option<String>,
 	pub description:Option<String>,
-	pub ispublic:Option<bool>,
-	pub allowComments:Option<bool>,
-	pub groupId:Option<u64>,
 }
+//woo nested roblox stuff
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct Creator{
+	pub userId:u64,
+	pub groupId:u64,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct CreationContext{
+	pub creator:Creator,
+	pub expectedPrice:u64,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub enum ModerationResult{
+	MODERATION_STATE_REVIEWING,
+	MODERATION_STATE_REJECTED,
+	MODERATION_STATE_APPROVED,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct Preview{
+	pub asset:String,
+	pub altText:String,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct AssetResponse{
+	pub assetId:u64,
+	pub creationContext:CreationContext,
+	pub description:String,
+	pub displayName:String,
+	pub path:String,
+	pub revisionId:u64,
+	pub revisionCreateTime:chrono::DateTime<chrono::Utc>,
+	pub moderationResult:ModerationResult,
+	pub icon:String,
+	pub previews:Vec<Preview>,
+}
+#[allow(nonstandard_style,dead_code)]
+pub struct UpdatePlaceRequest{
+	pub universeId:u64,
+	pub placeId:u64,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct UpdatePlaceResponse{
+	pub versionNumber:u64,
+}
 #[derive(Debug)]
-pub enum UploadError{
+pub enum UpdateError{
 	ParseError(url::ParseError),
-	PostError(PostError),
+	SerializeError(serde_json::Error),
 	Reqwest(reqwest::Error),
-	AssetIdIsZero,
 }
-impl std::fmt::Display for UploadError{
+impl std::fmt::Display for UpdateError{
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		write!(f,"{self:?}")
 	}
 }
-impl std::error::Error for UploadError{}
-#[derive(Debug,serde::Deserialize,serde::Serialize)]
-#[allow(nonstandard_style,dead_code)]
-pub struct UploadResponse{
-	pub AssetId:u64,
-	pub AssetVersionId:u64,
-}
+impl std::error::Error for UpdateError{}
 #[allow(nonstandard_style,dead_code)]
-pub struct DownloadRequest{
+pub struct GetAssetRequest{
 	pub asset_id:u64,
 	pub version:Option<u64>,
 }
 #[derive(Debug)]
-pub enum DownloadError{
+pub enum GetError{
 	ParseError(url::ParseError),
 	Reqwest(reqwest::Error),
 	IO(std::io::Error)
 }
-impl std::fmt::Display for DownloadError{
+impl std::fmt::Display for GetError{
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		write!(f,"{self:?}")
 	}
 }
-impl std::error::Error for DownloadError{}
-pub struct HistoryPageRequest{
+impl std::error::Error for GetError{}
+pub struct AssetVersionsRequest{
 	pub asset_id:u64,
 	pub cursor:Option<String>,
 }
@@ -97,22 +133,22 @@ pub struct AssetVersion{
 }
 #[derive(serde::Deserialize)]
 #[allow(nonstandard_style,dead_code)]
-pub struct HistoryPageResponse{
+pub struct AssetVersionsResponse{
 	pub previousPageCursor:Option<String>,
 	pub nextPageCursor:Option<String>,
 	pub data:Vec<AssetVersion>,
 }
 #[derive(Debug)]
-pub enum HistoryPageError{
+pub enum AssetVersionsError{
 	ParseError(url::ParseError),
 	Reqwest(reqwest::Error),
 }
-impl std::fmt::Display for HistoryPageError{
+impl std::fmt::Display for AssetVersionsError{
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		write!(f,"{self:?}")
 	}
 }
-impl std::error::Error for HistoryPageError{}
+impl std::error::Error for AssetVersionsError{}
 pub struct InventoryPageRequest{
 	pub group:u64,
@@ -170,97 +206,69 @@ fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
 #[derive(Clone)]
 pub struct RobloxContext{
-	pub cookie:String,
+	pub api_key:String,
 	pub client:reqwest::Client,
 }
 impl RobloxContext{
-	pub fn new(cookie:String)->Self{
+	pub fn new(api_key:String)->Self{
 		Self{
-			cookie,
+			api_key,
 			client:reqwest::Client::new(),
 		}
 	}
 	async fn get(&self,url:impl reqwest::IntoUrl)->Result<reqwest::Response,reqwest::Error>{
 		self.client.get(url)
-		.header("Cookie",self.cookie.as_str())
+		.header("x-api-key",self.api_key.as_str())
 		.send().await
 	}
-	async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,PostError>{
-		let mut resp=self.client.post(url.clone())
-		.header("Cookie",self.cookie.as_str())
-		.body(body.clone())
-		.send().await.map_err(PostError::Reqwest)?;
-		//This is called a CSRF challenge apparently
-		if resp.status()==reqwest::StatusCode::FORBIDDEN{
-			if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
-				resp=self.client.post(url)
-				.header("X-CSRF-Token",csrf_token)
-				.header("Cookie",self.cookie.as_str())
-				.body(body)
-				.send().await.map_err(PostError::Reqwest)?;
-			}else{
-				Err(PostError::CSRF)?;
-			}
-		}
-		Ok(resp)
+	async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,reqwest::Error>{
+		self.client.post(url)
+		.header("x-api-key",self.api_key.as_str())
+		.body(body)
+		.send().await
 	}
-	pub async fn create(&self,config:CreateRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,CreateError>{
-		let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(CreateError::ParseError)?;
-		//url borrow scope
-		{
-			let mut query=url.query_pairs_mut();//borrow here
-			//archaic roblox api uses 0 for new asset
-			query.append_pair("assetid","0");
-			query.append_pair("name",config.name.as_str());
-			query.append_pair("description",config.description.as_str());
-			query.append_pair("ispublic",if config.ispublic{"True"}else{"False"});
-			query.append_pair("allowComments",if config.allowComments{"True"}else{"False"});
-			match config.groupId{
-				Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
-				None=>(),
-			}
-		}
-		let resp=self.post(url,body).await.map_err(CreateError::PostError)?;
-		Ok(resp.json::<UploadResponse>().await.map_err(CreateError::Reqwest)?)
-	}
-	pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
-		let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
-		//url borrow scope
-		{
-			let mut query=url.query_pairs_mut();//borrow here
-			//archaic roblox api uses 0 for new asset
-			match config.assetid{
-				0=>return Err(UploadError::AssetIdIsZero),
-				assetid=>{query.append_pair("assetid",assetid.to_string().as_str());},
-			}
-			if let Some(name)=config.name.as_deref(){
-				query.append_pair("name",name);
-			}
-			if let Some(description)=config.description.as_deref(){
-				query.append_pair("description",description);
-			}
-			if let Some(ispublic)=config.ispublic{
-				query.append_pair("ispublic",if ispublic{"True"}else{"False"});
-			}
-			if let Some(allow_comments)=config.allowComments{
-				query.append_pair("allowComments",if allow_comments{"True"}else{"False"});
-			}
-			if let Some(group_id)=config.groupId{
-				query.append_pair("groupId",group_id.to_string().as_str());
-			}
-		}
-		let resp=self.post(url,body).await.map_err(UploadError::PostError)?;
-		Ok(resp.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
+	async fn patch_form(&self,url:url::Url,form:reqwest::multipart::Form)->Result<reqwest::Response,reqwest::Error>{
+		self.client.patch(url)
+		.header("x-api-key",self.api_key.as_str())
+		.multipart(form)
+		.send().await
+	}
+	async fn post_form(&self,url:url::Url,form:reqwest::multipart::Form)->Result<reqwest::Response,reqwest::Error>{
+		self.client.post(url)
+		.header("x-api-key",self.api_key.as_str())
+		.multipart(form)
+		.send().await
+	}
+	pub async fn create_asset(&self,config:CreateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<AssetResponse,CreateError>{
+		let url=reqwest::Url::parse("https://apis.roblox.com/assets/v1/assets").map_err(CreateError::ParseError)?;
+		let request_config=serde_json::to_string(&config).map_err(CreateError::SerializeError)?;
+		let form=reqwest::multipart::Form::new()
+		.text("request",request_config)
+		.part("fileContent",reqwest::multipart::Part::bytes(body));
+		let resp=self.post_form(url,form).await.map_err(CreateError::Reqwest)?;
+		Ok(resp.json::<AssetResponse>().await.map_err(CreateError::Reqwest)?)
 	}
-	pub async fn download(&self,config:DownloadRequest)->Result<Vec<u8>,DownloadError>{
-		let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(DownloadError::ParseError)?;
+	pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<AssetResponse,UpdateError>{
+		let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId);
+		let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
+		let request_config=serde_json::to_string(&config).map_err(UpdateError::SerializeError)?;
+		let form=reqwest::multipart::Form::new()
+		.text("request",request_config)
+		.part("fileContent",reqwest::multipart::Part::bytes(body));
+		let resp=self.patch_form(url,form).await.map_err(UpdateError::Reqwest)?;
+		Ok(resp.json::<AssetResponse>().await.map_err(UpdateError::Reqwest)?)
+	}
+	pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
+		let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
 		//url borrow scope
 		{
 			let mut query=url.query_pairs_mut();//borrow here
@@ -269,31 +277,22 @@ impl RobloxContext{
 				query.append_pair("version",version.to_string().as_str());
 			}
 		}
-		let resp=self.get(url).await.map_err(DownloadError::Reqwest)?;
-		let body=resp.bytes().await.map_err(DownloadError::Reqwest)?;
+		let resp=self.get(url).await.map_err(GetError::Reqwest)?;
+		let body=resp.bytes().await.map_err(GetError::Reqwest)?;
 		match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
 			Ok(ReaderType::GZip(readable))=>read_readable(readable),
 			Ok(ReaderType::Raw(readable))=>read_readable(readable),
 			Err(e)=>Err(e),
-		}.map_err(DownloadError::IO)
+		}.map_err(GetError::IO)
 	}
-	pub async fn history_page(&self,config:HistoryPageRequest)->Result<HistoryPageResponse,HistoryPageError>{
-		let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(HistoryPageError::ParseError)?;
-		//url borrow scope
-		{
-			let mut query=url.query_pairs_mut();//borrow here
-			//query.append_pair("sortOrder","Asc");
-			//query.append_pair("limit","100");
-			//query.append_pair("count","100");
-			if let Some(cursor)=config.cursor.as_deref(){
-				query.append_pair("cursor",cursor);
-			}
-		}
-		Ok(self.get(url).await.map_err(HistoryPageError::Reqwest)?
-			.json::<HistoryPageResponse>().await.map_err(HistoryPageError::Reqwest)?)
+	pub async fn get_asset_versions(&self,config:AssetVersionsRequest)->Result<AssetVersionsResponse,AssetVersionsError>{
+		let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions",config.asset_id);
+		let url=reqwest::Url::parse(raw_url.as_str()).map_err(AssetVersionsError::ParseError)?;
+		Ok(self.get(url).await.map_err(AssetVersionsError::Reqwest)?
+			.json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)?)
 	}
 	pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
 		let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
@@ -308,4 +307,12 @@ impl RobloxContext{
 		Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
 			.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
 	}
+	pub async fn update_place(&self,config:UpdatePlaceRequest,body:impl Into<reqwest::Body>+Clone)->Result<UpdatePlaceResponse,UpdateError>{
+		let raw_url=format!("https://apis.roblox.com/universes/v1/{}/places/{}/versions",config.universeId,config.placeId);
+		let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
+		let resp=self.post(url,body).await.map_err(UpdateError::Reqwest)?;
+		Ok(resp.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)?)
+	}
 }
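For orientation, a minimal usage sketch of the api-key interface introduced above, built only from the signatures in this diff. The key value, user/group ids, display name, and file path are placeholders and are not part of the change.

//hypothetical caller of the new Open Cloud surface above (not part of this change)
//the api key, ids, names and file path are placeholders
async fn example_upload(api_key:String)->Result<(),Box<dyn std::error::Error>>{
	let context=RobloxContext::new(api_key);
	//read the serialized model that should become the asset contents
	let body=tokio::fs::read("model.rbxm").await?;
	let response=context.create_asset(CreateAssetRequest{
		assetType:AssetType::Model,
		displayName:"MyModel".to_owned(),
		description:"uploaded with asset-tool".to_owned(),
		creationContext:CreationContext{
			creator:Creator{userId:0,groupId:1234},
			expectedPrice:0,
		},
	},body).await?;
	println!("created assetId {}",response.assetId);
	Ok(())
}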

rox_compiler/Cargo.toml (new file)

@@ -0,0 +1,12 @@
[package]
name = "rox_compiler"
version = "0.1.0"
edition = "2021"
[dependencies]
futures = "0.3.30"
lazy-regex = "3.1.0"
rayon = "1.8.0"
rbx_dom_weak = "2.7.0"
rbx_xml = "0.13.3"
tokio = { version = "1.35.1", features = ["fs"] }

rox_compiler/src/common.rs (new file)

@@ -0,0 +1,33 @@
#[derive(Clone,Copy,Debug)]
pub enum Style{
Rox,
Rojo,
RoxRojo,
}
#[derive(Default)]
pub(crate) struct PropertiesOverride{
pub name:Option<String>,
pub class:Option<String>,
}
impl PropertiesOverride{
pub fn is_some(&self)->bool{
self.name.is_some()
||self.class.is_some()
}
}
impl std::fmt::Display for PropertiesOverride{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
if let Some(name)=self.name.as_deref(){
writeln!(f,"--!Properties.Name = \"{}\"",name)?;
}
if let Some(class)=self.class.as_deref(){
writeln!(f,"--!Properties.ClassName = \"{}\"",class)?;
}
Ok(())
}
}
pub(crate) fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
lazy_regex::regex!(r"[^A-z0-9.-]").replace_all(s,"_")
}

rox_compiler/src/compile.rs (new file)

@@ -0,0 +1,551 @@
use std::path::PathBuf;
use futures::{StreamExt, TryStreamExt};
use tokio::io::AsyncReadExt;
use crate::common::{sanitize,Style,PropertiesOverride};
//holy smokes what am I doing lmao
//This giant machine is supposed to search for files according to style rules
//e.g. ScriptName.server.lua or init.lua
//Obviously I got carried away
//I could use an enum!
//I could use a struct!
//I could use a trait!
//I could use an error!
//I could use a match!
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
pub enum QueryResolveError{
NotFound,//0 results
Ambiguous,//>1 results
JoinError(tokio::task::JoinError),
IO(std::io::Error),
}
impl std::fmt::Display for QueryResolveError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for QueryResolveError{}
struct FileWithName{
file:tokio::fs::File,
name:String,
}
async fn get_file_async(mut path:PathBuf,file_name:impl AsRef<std::path::Path>)->Result<FileWithName,QueryResolveError>{
let name=file_name.as_ref().to_str().unwrap().to_owned();
path.push(file_name);
match tokio::fs::File::open(path).await{
Ok(file)=>Ok(FileWithName{file,name}),
Err(e)=>match e.kind(){
std::io::ErrorKind::NotFound=>Err(QueryResolveError::NotFound),
_=>Err(QueryResolveError::IO(e)),
},
}
}
type QueryHintResult=Result<FileHint,QueryResolveError>;
trait Query{
async fn resolve(self)->QueryHintResult;
}
type QueryHandle=tokio::task::JoinHandle<Result<FileWithName,QueryResolveError>>;
struct QuerySingle{
script:QueryHandle,
}
impl QuerySingle{
fn rox(search_path:&PathBuf,search_name:&str)->Self{
Self{
script:tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name)))
}
}
}
impl Query for QuerySingle{
async fn resolve(self)->QueryHintResult{
match self.script.await{
Ok(Ok(file))=>Ok(FileHint{file,hint:ScriptHint::ModuleScript}),
Ok(Err(e))=>Err(e),
Err(e)=>Err(QueryResolveError::JoinError(e)),
}
}
}
struct QueryTriple{
module:QueryHandle,
server:QueryHandle,
client:QueryHandle,
}
impl QueryTriple{
fn rox_rojo(search_path:&PathBuf,search_name:&str,search_module:bool)->Self{
//this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
let module_name=if search_module{
format!("{}.module.lua",search_name)
}else{
format!("{}.lua",search_name)
};
Self{
module:tokio::spawn(get_file_async(search_path.clone(),module_name)),
server:tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name))),
client:tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name))),
}
}
fn rojo(search_path:&PathBuf)->Self{
QueryTriple::rox_rojo(search_path,"init",false)
}
}
//these functions can be achieved with macros, but I have not learned that yet
fn mega_triple_join(query_triplet:(QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
match query_triplet{
//unambiguously locate file
(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
//multiple files located
(Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
//no files located
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
//other error
(Err(e),_,_)
|(_,Err(e),_)
|(_,_,Err(e))=>Err(e),
}
}
//LETS GOOOOOOOOOOOOOOOO
fn mega_quadruple_join(query_quad:(QueryHintResult,QueryHintResult,QueryHintResult,QueryHintResult))->QueryHintResult{
match query_quad{
//unambiguously locate file
(Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(f))=>Ok(f),
//multiple files located
(Ok(_),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Ok(_),Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound))
|(Ok(_),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Ok(_))
|(Err(QueryResolveError::NotFound),Ok(_),Ok(_),Err(QueryResolveError::NotFound))
|(Err(QueryResolveError::NotFound),Ok(_),Err(QueryResolveError::NotFound),Ok(_))
|(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Ok(_),Ok(_))
|(Ok(_),Ok(_),Ok(_),Ok(_))=>Err(QueryResolveError::Ambiguous),
//no files located
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Err(QueryResolveError::NotFound),
//other error
(Err(e),_,_,_)
|(_,Err(e),_,_)
|(_,_,Err(e),_)
|(_,_,_,Err(e))=>Err(e),
}
}
impl Query for QueryTriple{
async fn resolve(self)->QueryHintResult{
let (module,server,client)=tokio::join!(self.module,self.server,self.client);
mega_triple_join((
module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
))
}
}
struct QueryQuad{
module_implicit:QueryHandle,
module_explicit:QueryHandle,
server:QueryHandle,
client:QueryHandle,
}
impl QueryQuad{
fn rox_rojo(search_path:&PathBuf,search_name:&str)->Self{
let fill=QueryTriple::rox_rojo(search_path,search_name,true);
Self{
module_implicit:QuerySingle::rox(search_path,search_name).script,//Script.lua
module_explicit:fill.module,//Script.module.lua
server:fill.server,
client:fill.client,
}
}
}
impl Query for QueryQuad{
async fn resolve(self)->QueryHintResult{
let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit,self.module_explicit,self.server,self.client);
mega_quadruple_join((
module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
))
}
}
struct ScriptWithOverrides{
overrides:PropertiesOverride,
source:String,
}
#[derive(Debug)]
pub enum ScriptWithOverridesError{
UnimplementedProperty(String),
}
impl std::fmt::Display for ScriptWithOverridesError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ScriptWithOverridesError{}
impl ScriptWithOverrides{
fn from_source(mut source:String)->Result<Self,ScriptWithOverridesError>{
let mut overrides=PropertiesOverride::default();
let mut count=0;
for line in source.lines(){
//only string type properties are supported atm
if let Some(captures)=lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-z]\w*)\s*\=\s*"(\w+)"$"#)
.captures(line){
count+=line.len();
match &captures[1]{
"Name"=>overrides.name=Some(captures[2].to_owned()),
"ClassName"=>overrides.class=Some(captures[2].to_owned()),
other=>Err(ScriptWithOverridesError::UnimplementedProperty(other.to_owned()))?,
}
}else{
break;
}
}
Ok(ScriptWithOverrides{overrides,source:source.split_off(count)})
}
}
enum CompileClass{
Folder,
Script(String),
LocalScript(String),
ModuleScript(String),
Model(Vec<u8>),
}
struct CompileNode{
name:String,
blacklist:Option<String>,
class:CompileClass,
}
#[derive(Debug)]
pub enum CompileNodeError{
IO(std::io::Error),
ScriptWithOverrides(ScriptWithOverridesError),
InvalidClassOrHint{
class:Option<String>,
hint:ScriptHint
},
QueryResolveError(QueryResolveError),
/// Conversion from OsString to String failed
FileName(std::ffi::OsString),
ExtensionNotSupportedInStyle{
extension:String,
style:Option<Style>,
},
NoExtension,
}
impl std::fmt::Display for CompileNodeError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for CompileNodeError{}
enum FileDiscernment{
Model,
Script(ScriptHint),
}
impl CompileNode{
async fn script(search_name:&str,mut file:FileWithName,hint:ScriptHint)->Result<Self,CompileNodeError>{
//read entire file
let mut buf=String::new();
file.file.read_to_string(&mut buf).await.map_err(CompileNodeError::IO)?;
//regex script according to Properties lines at the top
let script_with_overrides=ScriptWithOverrides::from_source(buf).map_err(CompileNodeError::ScriptWithOverrides)?;
//script
Ok(Self{
blacklist:Some(file.name),
name:script_with_overrides.overrides.name.unwrap_or_else(||search_name.to_owned()),
class:match (script_with_overrides.overrides.class.as_deref(),hint){
(Some("ModuleScript"),_)
|(None,ScriptHint::ModuleScript)=>CompileClass::ModuleScript(script_with_overrides.source),
(Some("LocalScript"),_)
|(None,ScriptHint::LocalScript)=>CompileClass::LocalScript(script_with_overrides.source),
(Some("Script"),_)
|(None,ScriptHint::Script)=>CompileClass::Script(script_with_overrides.source),
(class,hint)=>Err(CompileNodeError::InvalidClassOrHint{class:class.map(|s|s.to_owned()),hint})?,
},
})
}
async fn model(search_name:&str,mut file:FileWithName)->Result<Self,CompileNodeError>{
//read entire file
let mut buf=Vec::new();
file.file.read_to_end(&mut buf).await.map_err(CompileNodeError::IO)?;
//model
Ok(Self{
blacklist:Some(file.name),
name:search_name.to_owned(),//wrong but gets overwritten by internal model name
class:CompileClass::Model(buf),
})
}
async fn from_folder(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
let contents_folder=entry.path();
let file_name=entry.file_name();
let search_name=file_name.to_str().unwrap();
//scan inside the folder for an object to define the class of the folder
let script_query=async {match style{
Some(Style::Rox)=>QuerySingle::rox(&contents_folder,search_name).resolve().await,
Some(Style::RoxRojo)=>QueryQuad::rox_rojo(&contents_folder,search_name).resolve().await,
Some(Style::Rojo)=>QueryTriple::rojo(&contents_folder).resolve().await,
//try all three and complain if there is ambiguity
None=>mega_triple_join(tokio::join!(
QuerySingle::rox(&contents_folder,search_name).resolve(),
//true=search for module here to avoid ambiguity with QuerySingle::rox results
QueryTriple::rox_rojo(&contents_folder,search_name,true).resolve(),
QueryTriple::rojo(&contents_folder).resolve(),
))
}};
//model files are rox & rox-rojo only, so it's a lot less work...
let model_query=get_file_async(contents_folder.clone(),format!("{}.rbxmx",search_name));
//model? script? both?
Ok(match tokio::join!(script_query,model_query){
(Ok(FileHint{file,hint}),Err(QueryResolveError::NotFound))=>Self::script(search_name,file,hint).await?,
(Err(QueryResolveError::NotFound),Ok(file))=>Self::model(search_name,file).await?,
(Ok(_),Ok(_))=>Err(CompileNodeError::QueryResolveError(QueryResolveError::Ambiguous))?,
//neither
(Err(QueryResolveError::NotFound),Err(QueryResolveError::NotFound))=>Self{
name:search_name.to_owned(),
blacklist:None,
class:CompileClass::Folder,
},
//other error
(Err(e),_)
|(_,Err(e))=>Err(CompileNodeError::QueryResolveError(e))?
})
}
async fn from_file(entry:&tokio::fs::DirEntry,style:Option<Style>)->Result<Self,CompileNodeError>{
let mut file_name=entry
.file_name()
.into_string()
.map_err(CompileNodeError::FileName)?;
//reject goobers
let is_goober=match style{
Some(Style::Rojo)=>true,
_=>false,
};
let (ext_len,file_discernment)={
if let Some(captures)=lazy_regex::regex!(r"^.*(.module.lua|.client.lua|.server.lua|.rbxmx|.lua)$")
.captures(file_name.as_str()){
let ext=&captures[1];
(ext.len(),match ext{
".module.lua"=>{
if is_goober{
Err(CompileNodeError::ExtensionNotSupportedInStyle{extension:ext.to_owned(),style})?;
}
FileDiscernment::Script(ScriptHint::ModuleScript)
},
".client.lua"=>FileDiscernment::Script(ScriptHint::LocalScript),
".server.lua"=>FileDiscernment::Script(ScriptHint::Script),
".rbxmx"=>{
if is_goober{
Err(CompileNodeError::ExtensionNotSupportedInStyle{extension:ext.to_owned(),style})?;
}
FileDiscernment::Model
},
".lua"=>FileDiscernment::Script(ScriptHint::ModuleScript),
_=>panic!("Regex failed"),
})
}else{
return Err(CompileNodeError::NoExtension);
}
};
file_name.truncate(file_name.len()-ext_len);
let file=tokio::fs::File::open(entry.path()).await.map_err(CompileNodeError::IO)?;
Ok(match file_discernment{
FileDiscernment::Model=>Self::model(file_name.as_str(),FileWithName{file,name:file_name.clone()}).await?,
FileDiscernment::Script(hint)=>Self::script(file_name.as_str(),FileWithName{file,name:file_name.clone()},hint).await?,
})
}
}
#[derive(Debug)]
pub enum ScriptHint{
Script,
LocalScript,
ModuleScript,
}
struct FileHint{
file:FileWithName,
hint:ScriptHint,
}
enum PreparedData{
Model(rbx_dom_weak::WeakDom),
Builder(rbx_dom_weak::InstanceBuilder),
}
enum CompileStackInstruction{
TraverseReferent(rbx_dom_weak::types::Ref,Option<String>),
PopFolder,
}
fn script_builder(class:&str,name:&str,source:String)->rbx_dom_weak::InstanceBuilder{
let mut builder=rbx_dom_weak::InstanceBuilder::new(class);
builder.set_name(name);
builder.add_property("Source",rbx_dom_weak::types::Variant::String(source));
builder
}
enum TooComplicated<T>{
Stop,
Value(T),
Skip,
}
pub struct CompileConfig{
pub input_folder:PathBuf,
pub style:Option<Style>,
}
#[derive(Debug)]
pub enum CompileError{
NullChildRef,
IO(std::io::Error),
CompileNode(CompileNodeError),
DecodeError(rbx_xml::DecodeError),
JoinError(tokio::task::JoinError),
}
impl std::fmt::Display for CompileError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for CompileError{}
pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
//hack to traverse root folder as the root object
dom.root_mut().name="src".to_owned();
//add in scripts and models
let mut folder=config.input_folder.clone();
let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
while let Some(instruction)=stack.pop(){
match instruction{
CompileStackInstruction::TraverseReferent(item_ref,blacklist)=>{
//scope to avoid holding item ref
{
let item=dom.get_by_ref(item_ref).ok_or(CompileError::NullChildRef)?;
let folder_name=sanitize(item.name.as_str());
folder.push(folder_name.as_ref());
//drop item
}
stack.push(CompileStackInstruction::PopFolder);
//check if a folder exists with item.name
if let Ok(dir)=tokio::fs::read_dir(folder.as_path()).await{
let mut exist_names:std::collections::HashSet<String>={
let item=dom.get_by_ref(item_ref).ok_or(CompileError::NullChildRef)?;
//push existing dom children objects onto stack (unrelated to exist_names)
stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
//get names of existing objects
item.children().into_iter().map(|&child_ref|{
let child=dom.get_by_ref(child_ref).ok_or(CompileError::NullChildRef)?;
Ok::<_,CompileError>(sanitize(child.name.as_str()).to_string())
}).collect::<Result<_,CompileError>>()?
};
if let Some(dont)=blacklist{
exist_names.insert(dont);
}
//generate children from folder contents UNLESS! item already has a child of the same name
let style=config.style;
let exist_names=&exist_names;
futures::stream::unfold(dir,|mut dir1|async{
//thread the needle! follow the path that dir takes!
let ret1={
//capture a scoped mutable reference so we can forward dir to the next call even on an error
let dir2=&mut dir1;
(||async move{//error catcher so I can use ?
let ret2=if let Some(entry)=dir2.next_entry().await?{
//cull early even if supporting things with identical names is possible
if exist_names.contains(entry.file_name().to_str().unwrap()){
TooComplicated::Skip
}else{
TooComplicated::Value(entry)
}
}else{
TooComplicated::Stop
};
Ok(ret2)
})().await
};
match ret1{
Ok(TooComplicated::Stop)=>None,
Ok(TooComplicated::Skip)=>Some((Ok(None),dir1)),
Ok(TooComplicated::Value(v))=>Some((Ok(Some(v)),dir1)),
Err(e)=>Some((Err(CompileError::IO(e)),dir1)),
}
})
//gotta spawn off the worker threads (Model is slow)
.then(|bog|async{
match bog{
Ok(Some(entry))=>tokio::spawn(async move{
let met=entry.metadata().await.map_err(CompileError::IO)?;
//discern that bad boy
let compile_class=match met.is_dir(){
true=>CompileNode::from_folder(&entry,style).await,
false=>CompileNode::from_file(&entry,style).await,
}.map_err(CompileError::CompileNode)?;
//prepare data structure
Ok(Some((compile_class.blacklist,match compile_class.class{
CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf)).map_err(CompileError::DecodeError)?),
})))
}).await.map_err(CompileError::JoinError)?,
Ok(None)=>Ok(None),
Err(e)=>Err(e),
}
})
//is this even what I want?
.map(|f|async{f}).buffer_unordered(32)
//begin processing immediately
//TODO: fix dom being &mut &mut inside the closure
.try_fold((&mut stack,&mut dom),|(stack,dom),bog|async{
//push child objects onto dom serially as they arrive
match bog{
Some((blacklist,data))=>{
let referent=match data{
PreparedData::Model(mut model_dom)=>{
let referent=model_dom.root().children()[0];
model_dom.transfer(referent,dom,item_ref);
referent
},
PreparedData::Builder(script)=>dom.insert(item_ref,script),
};
//new children need to be traversed
stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
},
None=>(),
}
Ok((stack,dom))
}).await?;
}
},
CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
}
}
unreachable!();
}

rox_compiler/src/decompile.rs (new file)

@@ -0,0 +1,323 @@
use std::path::PathBuf;
use rbx_dom_weak::types::Ref;
use crate::common::{sanitize,Style,PropertiesOverride};
#[derive(PartialEq)]
enum Class{
Folder,
ModuleScript,
LocalScript,
Script,
Model,
}
struct TreeNode{
name:String,
referent:Ref,
parent:Ref,
class:Class,
children:Vec<Ref>,
}
impl TreeNode{
fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{
Self{
name,
referent,
parent,
class,
children:Vec::new(),
}
}
}
enum TrimStackInstruction{
Referent(Ref),
IncrementScript,
DecrementScript,
}
enum WriteStackInstruction<'a>{
Node(&'a TreeNode,u32),//(Node,NameTally)
PushFolder(String),
PopFolder,
Destroy(Ref),
}
#[derive(Debug)]
pub enum WriteError{
ClassNotScript(String),
IO(std::io::Error),
EncodeError(rbx_xml::EncodeError),
}
impl std::fmt::Display for WriteError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for WriteError{}
fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name_override:String,mut properties:PropertiesOverride,style:Style,write_models:bool,write_scripts:bool)->Result<(),WriteError>{
file.push(sanitize(node_name_override.as_str()).as_ref());
match node.class{
Class::Folder=>(),
Class::ModuleScript|Class::LocalScript|Class::Script=>{
if !write_scripts{
return Ok(())
}
//set extension
match style{
Style::Rox=>assert!(file.set_extension("lua"),"could not set extension"),
Style::RoxRojo|Style::Rojo=>{
match properties.class.as_deref(){
Some("LocalScript")=>{
file.set_extension("client.lua");
properties.class=None;
},
Some("Script")=>{
file.set_extension("server.lua");
properties.class=None;
},
// Some("ModuleScript")=>{
// file.set_extension("module");
// properties.class=None;
// },
None=>assert!(file.set_extension("lua"),"could not set extension"),
Some(other)=>Err(WriteError::ClassNotScript(other.to_owned()))?,
}
}
}
if let Some(item)=dom.get_by_ref(node.referent){
//TODO: delete disabled scripts
if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
if properties.is_some(){
//rox style
let source=properties.to_string()+source.as_str();
std::fs::write(file,source).map_err(WriteError::IO)?;
}else{
std::fs::write(file,source).map_err(WriteError::IO)?;
}
}
}
},
Class::Model=>{
if !write_models{
return Ok(())
}
assert!(file.set_extension("rbxmx"));
let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
rbx_xml::to_writer_default(output,dom,&[node.referent]).map_err(WriteError::EncodeError)?;
},
}
Ok(())
}
pub struct WriteConfig{
pub style:Style,
pub output_folder:PathBuf,
pub write_template:bool,
pub write_models:bool,
pub write_scripts:bool,
}
pub struct DecompiledContext{
dom:rbx_dom_weak::WeakDom,
tree_refs:std::collections::HashMap<rbx_dom_weak::types::Ref,TreeNode>,
}
impl DecompiledContext{
/// Will panic on circular tree structure but otherwise infallible
pub fn from_dom(dom:rbx_dom_weak::WeakDom)->Self{
let mut tree_refs=std::collections::HashMap::new();
tree_refs.insert(dom.root_ref(),TreeNode::new(
"src".to_owned(),
dom.root_ref(),
Ref::none(),
Class::Folder
));
//run rules
let mut stack=vec![dom.root()];
while let Some(item)=stack.pop(){
let class=match item.class.as_str(){
"ModuleScript"=>Class::ModuleScript,
"LocalScript"=>Class::LocalScript,
"Script"=>Class::Script,
"Model"=>Class::Model,
_=>Class::Folder,
};
let skip=match class{
Class::Model=>true,
_=>false,
};
if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
let referent=item.referent();
let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
parent_node.children.push(referent);
tree_refs.insert(referent,node);
}
//look no further, turn this node and all its children into a model
if skip{
continue;
}
for &referent in item.children(){
if let Some(c)=dom.get_by_ref(referent){
stack.push(c);
}
}
}
//trim empty folders
let mut script_count=0;
let mut stack:Vec<TrimStackInstruction>=tree_refs.get(&dom.root_ref()).unwrap().children
.iter().map(|&c|TrimStackInstruction::Referent(c)).collect();
while let Some(instruction)=stack.pop(){
match instruction{
TrimStackInstruction::IncrementScript=>script_count+=1,
TrimStackInstruction::DecrementScript=>script_count-=1,
TrimStackInstruction::Referent(referent)=>{
let mut delete=None;
if let Some(node)=tree_refs.get_mut(&referent){
if node.class==Class::Folder&&script_count!=0{
node.class=Class::Model
}
if node.class==Class::Folder&&node.children.len()==0{
delete=Some(node.parent);
}else{
//how the hell do I do this better without recursion
let is_script=match node.class{
Class::ModuleScript|Class::LocalScript|Class::Script=>true,
_=>false,
};
//stack is popped from back
if is_script{
stack.push(TrimStackInstruction::DecrementScript);
}
for &child_referent in &node.children{
stack.push(TrimStackInstruction::Referent(child_referent));
}
if is_script{
stack.push(TrimStackInstruction::IncrementScript);
}
}
}
//trim referent
if let Some(parent_ref)=delete{
let parent_node=tree_refs.get_mut(&parent_ref)
.expect("parent_ref does not exist in tree_refs");
parent_node.children.remove(
parent_node.children.iter()
.position(|&r|r==referent)
.expect("parent.children does not contain referent")
);
tree_refs.remove(&referent);
}
},
}
}
Self{
dom,
tree_refs,
}
}
pub async fn write_files(mut self,config:WriteConfig)->Result<(),WriteError>{
let mut write_queue=Vec::new();
let mut destroy_queue=Vec::new();
let mut name_tally=std::collections::HashMap::<String,u32>::new();
let mut folder=config.output_folder.clone();
let mut stack=vec![WriteStackInstruction::Node(self.tree_refs.get(&self.dom.root_ref()).unwrap(),0)];
while let Some(instruction)=stack.pop(){
match instruction{
WriteStackInstruction::PushFolder(component)=>folder.push(component),
WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"),
WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent),
WriteStackInstruction::Node(node,name_count)=>{
//track properties that must be overriden to compile folder structure back into a place file
let mut properties=PropertiesOverride::default();
let has_children=node.children.len()!=0;
match node.class{
Class::Folder=>(),
Class::ModuleScript=>(),//.lua files are ModuleScript by default
Class::LocalScript=>properties.class=Some("LocalScript".to_owned()),
Class::Script=>properties.class=Some("Script".to_owned()),
Class::Model=>(),
}
let name_override=if 0<name_count{
properties.name=Some(node.name.clone());
format!("{}_{}",node.name,name_count)
}else{
node.name.clone()
};
if has_children{
//push temp subfolder
let mut subfolder=folder.clone();
subfolder.push(sanitize(name_override.as_str()).as_ref());
//make folder
tokio::fs::create_dir(subfolder.clone()).await.map_err(WriteError::IO)?;
let name_final=match config.style{
Style::Rox
|Style::RoxRojo=>name_override.clone(),
Style::Rojo=>"init".to_owned(),
};
//write item in subfolder
write_queue.push((subfolder,node,name_final,properties,config.style));
}else{
//write item
write_queue.push((folder.clone(),node,name_override.clone(),properties,config.style));
}
//queue item to be deleted from dom after child objects are handled (stack is popped from the back)
match node.class{
Class::Folder=>(),
_=>stack.push(WriteStackInstruction::Destroy(node.referent)),
}
if has_children{
stack.push(WriteStackInstruction::PopFolder);
name_tally.clear();
for referent in &node.children{
if let Some(c)=self.tree_refs.get(referent){
let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default();
stack.push(WriteStackInstruction::Node(c,*v));
}
}
stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string()));
}
},
}
}
//run the async
{
let dom=&self.dom;
let write_models=config.write_models;
let write_scripts=config.write_scripts;
let results:Vec<Result<(),WriteError>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
}));
for result in results{
result?;
}
}
//run the destroy
for destroy_ref in destroy_queue{
self.dom.destroy(destroy_ref);
}
//write what remains in template.rbxlx
if config.write_template{
let mut file=config.output_folder.clone();
file.push("template");
assert!(file.set_extension("rbxlx"));
let output=std::io::BufWriter::new(std::fs::File::create(file).map_err(WriteError::IO)?);
rbx_xml::to_writer_default(output,&self.dom,self.dom.root().children()).map_err(WriteError::EncodeError)?;
}
Ok(())
}
}

rox_compiler/src/lib.rs (new file)

@@ -0,0 +1,9 @@
mod common;
mod compile;
mod decompile;
//export minimal interface
pub use common::Style;
pub use compile::CompileConfig;
pub use compile::compile;//cringe unstandardized interface
pub use decompile::WriteConfig;
pub use decompile::DecompiledContext;
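For orientation, a minimal sketch of driving the exported decompile interface from outside the crate, based only on the items re-exported above and the signatures in decompile.rs. The place file path and output folder are placeholders; rbx_xml is assumed to be available to the caller, as it already is elsewhere in this workspace.

//hypothetical caller of the exported interface above (not part of this change)
//the place file path and output folder are placeholders
async fn decompile_place()->Result<(),Box<dyn std::error::Error>>{
	//load a place file into a WeakDom with rbx_xml
	let input=std::io::BufReader::new(std::fs::File::open("template.rbxlx")?);
	let dom=rbx_xml::from_reader_default(input)?;
	//split the dom into a Rox-style folder tree on disk
	let context=rox_compiler::DecompiledContext::from_dom(dom);
	context.write_files(rox_compiler::WriteConfig{
		style:rox_compiler::Style::Rox,
		output_folder:"src".into(),
		write_template:false,
		write_models:true,
		write_scripts:true,
	}).await?;
	Ok(())
}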

File diff suppressed because it is too large.