Compare commits
22 Commits
rox_compil
...
container-
Commit SHA1s:
fe326c8457
0d92221a27
9c862717a5
11fee65354
c947691f75
cb984a9f20
e46ad7a6a8
4805f3bc08
9638672dde
c945036d60
f9bdfd0e00
d468113e51
b72bed638d
452c00d53e
b89a787af2
5085f6587f
c856301aa6
d38152600e
c08ff63033
6720f6213f
db34436d64
a6ae26a93e
.drone.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
+---
+kind: pipeline
+type: docker
+
+platform:
+  os: linux
+  arch: amd64
+
+steps:
+- name: image
+  image: plugins/docker
+  settings:
+    registry: git.itzana.me
+    repo: git.itzana.me/strafesnet/asset-tool
+    tags:
+    - latest
+    username:
+      from_secret: GIT_USER
+    password:
+      from_secret: GIT_PASS
+    dockerfile: Containerfile
+  when:
+    branch:
+    - master
Cargo.lock (generated, 31 lines changed)
@@ -110,7 +110,7 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
 
 [[package]]
 name = "asset-tool"
-version = "0.3.1"
+version = "0.4.1"
 dependencies = [
  "anyhow",
  "clap",
@@ -890,6 +890,16 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
 
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
 [[package]]
 name = "miniz_oxide"
 version = "0.7.4"
@@ -1156,12 +1166,13 @@ dependencies = [
 
 [[package]]
 name = "rbx_asset"
-version = "0.1.0"
+version = "0.2.1"
 dependencies = [
  "chrono",
  "flate2",
  "reqwest",
  "serde",
+ "serde_json",
  "url",
 ]
 
@@ -1294,6 +1305,7 @@ dependencies = [
 "js-sys",
 "log",
 "mime",
+"mime_guess",
 "native-tls",
 "once_cell",
 "percent-encoding",
@@ -1745,6 +1757,15 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
 
+[[package]]
+name = "unicase"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
+dependencies = [
+ "version_check",
+]
+
 [[package]]
 name = "unicode-bidi"
 version = "0.3.15"
@@ -1795,6 +1816,12 @@ version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
 
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
 [[package]]
 name = "want"
 version = "0.3.1"
Cargo.toml
@@ -1,7 +1,7 @@
 workspace = { members = ["rbx_asset", "rox_compiler"] }
 [package]
 name = "asset-tool"
-version = "0.3.1"
+version = "0.4.1"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
Containerfile
@@ -1,6 +1,6 @@
 # Using the `rust-musl-builder` as base image, instead of
 # the official Rust toolchain
-FROM clux/muslrust:stable AS chef
+FROM docker.io/clux/muslrust:stable AS chef
 USER root
 RUN cargo install cargo-chef
 WORKDIR /app
@@ -16,8 +16,8 @@ RUN cargo chef cook --release --target x86_64-unknown-linux-musl --recipe-path r
 COPY . .
 RUN cargo build --release --target x86_64-unknown-linux-musl --bin asset-tool
 
-FROM alpine AS runtime
+FROM docker.io/alpine:latest AS runtime
 RUN addgroup -S myuser && adduser -S myuser -G myuser
 COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/asset-tool /usr/local/bin/
 USER myuser
-CMD ["/usr/local/bin/asset-tool"]
+ENTRYPOINT ["/usr/local/bin/asset-tool"]
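A note on the runtime image changes above: fully qualifying the base images (`docker.io/...`) avoids relying on the container engine's default registry lookup, and switching from `CMD` to `ENTRYPOINT` means arguments passed at run time are appended to the `asset-tool` binary rather than replacing the command, e.g. `docker run <image> download --help` (the exact invocation is illustrative, not part of the diff).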
README.md
@@ -1,3 +1,5 @@
 # asset-tool
 
+[](https://ci.itzana.me/StrafesNET/asset-tool)
+
 For uploading and downloading roblox assets.
rbx_asset/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "rbx_asset"
-version = "0.1.0"
+version = "0.2.1"
 edition = "2021"
 publish = ["strafesnet"]
 
@@ -9,6 +9,7 @@ publish = ["strafesnet"]
 [dependencies]
 chrono = { version = "0.4.38", features = ["serde"] }
 flate2 = "1.0.29"
-reqwest = { version = "0.12.4", features = ["json"] }
+reqwest = { version = "0.12.4", features = ["json","multipart"] }
 serde = { version = "1.0.199", features = ["derive"] }
+serde_json = "1.0.111"
 url = "2.5.0"
rbx_asset/src/cloud.rs (new file, 335 lines)
@@ -0,0 +1,335 @@
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub enum AssetType{
+    Audio,
+    Decal,
+    Model,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct CreateAssetRequest{
+    pub assetType:AssetType,
+    pub creationContext:CreationContext,
+    pub description:String,
+    pub displayName:String,
+}
+#[derive(Debug)]
+pub enum CreateError{
+    ParseError(url::ParseError),
+    SerializeError(serde_json::Error),
+    Reqwest(reqwest::Error),
+}
+impl std::fmt::Display for CreateError{
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f,"{self:?}")
+    }
+}
+impl std::error::Error for CreateError{}
+
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct UpdateAssetRequest{
+    pub assetId:u64,
+    pub displayName:Option<String>,
+    pub description:Option<String>,
+}
+
+//woo nested roblox stuff
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct Creator{
+    pub userId:u64,
+    pub groupId:u64,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct CreationContext{
+    pub creator:Creator,
+    pub expectedPrice:u64,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub enum ModerationResult{
+    MODERATION_STATE_REVIEWING,
+    MODERATION_STATE_REJECTED,
+    MODERATION_STATE_APPROVED,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct Preview{
+    pub asset:String,
+    pub altText:String,
+}
+#[allow(nonstandard_style,dead_code)]
+pub struct UpdatePlaceRequest{
+    pub universeId:u64,
+    pub placeId:u64,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct UpdatePlaceResponse{
+    pub versionNumber:u64,
+}
+#[derive(Debug)]
+pub enum UpdateError{
+    ParseError(url::ParseError),
+    SerializeError(serde_json::Error),
+    Reqwest(reqwest::Error),
+}
+impl std::fmt::Display for UpdateError{
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f,"{self:?}")
+    }
+}
+impl std::error::Error for UpdateError{}
+
+#[allow(nonstandard_style,dead_code)]
+pub struct GetAssetRequest{
+    pub asset_id:u64,
+    pub version:Option<u64>,
+}
+#[derive(Debug)]
+pub enum GetError{
+    ParseError(url::ParseError),
+    Reqwest(reqwest::Error),
+    IO(std::io::Error)
+}
+impl std::fmt::Display for GetError{
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f,"{self:?}")
+    }
+}
+impl std::error::Error for GetError{}
+
+pub struct AssetVersionsRequest{
+    pub asset_id:u64,
+    pub cursor:Option<String>,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct AssetVersion{
+    pub Id:u64,
+    pub assetId:u64,
+    pub assetVersionNumber:u64,
+    pub creatorType:String,
+    pub creatorTargetId:u64,
+    pub creatingUniverseId:Option<u64>,
+    pub created:chrono::DateTime<chrono::Utc>,
+    pub isPublished:bool,
+}
+#[derive(Debug,serde::Deserialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct AssetVersionsResponse{
+    pub previousPageCursor:Option<String>,
+    pub nextPageCursor:Option<String>,
+    pub data:Vec<AssetVersion>,
+}
+#[derive(Debug)]
+pub enum AssetVersionsError{
+    ParseError(url::ParseError),
+    Reqwest(reqwest::Error),
+}
+impl std::fmt::Display for AssetVersionsError{
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f,"{self:?}")
+    }
+}
+impl std::error::Error for AssetVersionsError{}
+
+pub struct InventoryPageRequest{
+    pub group:u64,
+    pub cursor:Option<String>,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct InventoryItem{
+    pub id:u64,
+    pub name:String,
+}
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct InventoryPageResponse{
+    pub totalResults:u64,//up to 50
+    pub filteredKeyword:Option<String>,//""
+    pub searchDebugInfo:Option<String>,//null
+    pub spellCheckerResult:Option<String>,//null
+    pub queryFacets:Option<String>,//null
+    pub imageSearchStatus:Option<String>,//null
+    pub previousPageCursor:Option<String>,
+    pub nextPageCursor:Option<String>,
+    pub data:Vec<InventoryItem>,
+}
+#[derive(Debug)]
+pub enum InventoryPageError{
+    ParseError(url::ParseError),
+    Reqwest(reqwest::Error),
+}
+impl std::fmt::Display for InventoryPageError{
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f,"{self:?}")
+    }
+}
+impl std::error::Error for InventoryPageError{}
+
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct RobloxOperation{
+    pub path:Option<std::path::PathBuf>,
+    pub metadata:Option<String>,
+    pub done:Option<bool>,
+    pub error:Option<String>,
+    pub response:Option<String>,
+}
+
+//idk how to do this better
+enum ReaderType<R:std::io::Read>{
+    GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
+    Raw(std::io::BufReader<R>),
+}
+fn maybe_gzip_decode<R:std::io::Read>(input:R)->std::io::Result<ReaderType<R>>{
+    let mut buf=std::io::BufReader::new(input);
+    let peek=std::io::BufRead::fill_buf(&mut buf)?;
+    match &peek[0..2]{
+        b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
+        _=>Ok(ReaderType::Raw(buf)),
+    }
+}
+fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
+    let mut contents=Vec::new();
+    readable.read_to_end(&mut contents)?;
+    Ok(contents)
+}
+
+#[derive(Clone)]
+pub struct ApiKey(String);
+impl ApiKey{
+    pub fn new(api_key:String)->Self{
+        Self(api_key)
+    }
+    pub fn get(self)->String{
+        self.0
+    }
+}
+
+#[derive(Clone)]
+pub struct CloudContext{
+    pub api_key:String,
+    pub client:reqwest::Client,
+}
+
+impl CloudContext{
+    pub fn new(api_key:ApiKey)->Self{
+        Self{
+            api_key:api_key.get(),
+            client:reqwest::Client::new(),
+        }
+    }
+    async fn get(&self,url:impl reqwest::IntoUrl)->Result<reqwest::Response,reqwest::Error>{
+        self.client.get(url)
+            .header("x-api-key",self.api_key.as_str())
+            .send().await
+    }
+    async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,reqwest::Error>{
+        self.client.post(url)
+            .header("x-api-key",self.api_key.as_str())
+            .body(body)
+            .send().await
+    }
+    async fn patch_form(&self,url:url::Url,form:reqwest::multipart::Form)->Result<reqwest::Response,reqwest::Error>{
+        self.client.patch(url)
+            .header("x-api-key",self.api_key.as_str())
+            .multipart(form)
+            .send().await
+    }
+    async fn post_form(&self,url:url::Url,form:reqwest::multipart::Form)->Result<reqwest::Response,reqwest::Error>{
+        self.client.post(url)
+            .header("x-api-key",self.api_key.as_str())
+            .multipart(form)
+            .send().await
+    }
+    pub async fn create_asset(&self,config:CreateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,CreateError>{
+        let url=reqwest::Url::parse("https://apis.roblox.com/assets/v1/assets").map_err(CreateError::ParseError)?;
+
+        let request_config=serde_json::to_string(&config).map_err(CreateError::SerializeError)?;
+
+        let form=reqwest::multipart::Form::new()
+            .text("request",request_config)
+            .part("fileContent",reqwest::multipart::Part::bytes(body));
+
+        let resp=self.post_form(url,form).await.map_err(CreateError::Reqwest)?
+            .error_for_status().map_err(CreateError::Reqwest)?;
+
+        Ok(resp.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?)
+    }
+    pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,UpdateError>{
+        let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId);
+        let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
+
+        let request_config=serde_json::to_string(&config).map_err(UpdateError::SerializeError)?;
+
+        let form=reqwest::multipart::Form::new()
+            .text("request",request_config)
+            .part("fileContent",reqwest::multipart::Part::bytes(body));
+
+        let resp=self.patch_form(url,form).await
+            .map_err(UpdateError::Reqwest)?
+            //roblox api documentation is very poor, just give the status code and drop the json
+            .error_for_status().map_err(UpdateError::Reqwest)?;
+
+        Ok(resp.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?)
+    }
+    pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
+        let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
+        //url borrow scope
+        {
+            let mut query=url.query_pairs_mut();//borrow here
+            query.append_pair("ID",config.asset_id.to_string().as_str());
+            if let Some(version)=config.version{
+                query.append_pair("version",version.to_string().as_str());
+            }
+        }
+        let resp=self.get(url).await.map_err(GetError::Reqwest)?;
+
+        let body=resp.bytes().await.map_err(GetError::Reqwest)?;
+
+        match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
+            Ok(ReaderType::GZip(readable))=>read_readable(readable),
+            Ok(ReaderType::Raw(readable))=>read_readable(readable),
+            Err(e)=>Err(e),
+        }.map_err(GetError::IO)
+    }
+    pub async fn get_asset_versions(&self,config:AssetVersionsRequest)->Result<AssetVersionsResponse,AssetVersionsError>{
+        let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions",config.asset_id);
+        let url=reqwest::Url::parse(raw_url.as_str()).map_err(AssetVersionsError::ParseError)?;
+
+        Ok(self.get(url).await.map_err(AssetVersionsError::Reqwest)?
+            .json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)?)
+    }
+    pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
+        let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
+        //url borrow scope
+        {
+            let mut query=url.query_pairs_mut();//borrow here
+            if let Some(cursor)=config.cursor.as_deref(){
+                query.append_pair("cursor",cursor);
+            }
+        }
+
+        Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
+            .json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
+    }
+    pub async fn update_place(&self,config:UpdatePlaceRequest,body:impl Into<reqwest::Body>+Clone)->Result<UpdatePlaceResponse,UpdateError>{
+        let raw_url=format!("https://apis.roblox.com/universes/v1/{}/places/{}/versions",config.universeId,config.placeId);
+        let mut url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
+        //url borrow scope
+        {
+            let mut query=url.query_pairs_mut();//borrow here
+            query.append_pair("versionType","Published");
+        }
+
+        let resp=self.post(url,body).await.map_err(UpdateError::Reqwest)?
+            .error_for_status().map_err(UpdateError::Reqwest)?;
+
+        Ok(resp.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)?)
+    }
+}
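A minimal usage sketch for the new CloudContext API above, for orientation only (the environment variable name and asset id are illustrative assumptions, not part of this changeset):

    // Illustrative only: drives rbx_asset::cloud as introduced in this diff.
    use rbx_asset::cloud::{ApiKey,CloudContext,GetAssetRequest};

    #[tokio::main]
    async fn main()->Result<(),Box<dyn std::error::Error>>{
        // RBX_API_KEY is a placeholder name for wherever the Open Cloud key is stored
        let api_key=ApiKey::new(std::env::var("RBX_API_KEY")?);
        let context=CloudContext::new(api_key);
        // fetch the latest version of an asset through the asset delivery endpoint
        let data=context.get_asset(GetAssetRequest{asset_id:1234,version:None}).await?;
        println!("downloaded {} bytes",data.len());
        Ok(())
    }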
rbx_asset/src/cookie.rs (previously rbx_asset/src/context.rs)
@@ -62,24 +62,24 @@ pub struct UploadResponse{
 }
 
 #[allow(nonstandard_style,dead_code)]
-pub struct DownloadRequest{
+pub struct GetAssetRequest{
     pub asset_id:u64,
     pub version:Option<u64>,
 }
 #[derive(Debug)]
-pub enum DownloadError{
+pub enum GetError{
     ParseError(url::ParseError),
     Reqwest(reqwest::Error),
     IO(std::io::Error)
 }
-impl std::fmt::Display for DownloadError{
+impl std::fmt::Display for GetError{
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f,"{self:?}")
     }
 }
-impl std::error::Error for DownloadError{}
+impl std::error::Error for GetError{}
 
-pub struct HistoryPageRequest{
+pub struct AssetVersionsPageRequest{
     pub asset_id:u64,
     pub cursor:Option<String>,
 }
@@ -97,22 +97,22 @@ pub struct AssetVersion{
 }
 #[derive(serde::Deserialize)]
 #[allow(nonstandard_style,dead_code)]
-pub struct HistoryPageResponse{
+pub struct AssetVersionsPageResponse{
     pub previousPageCursor:Option<String>,
     pub nextPageCursor:Option<String>,
     pub data:Vec<AssetVersion>,
 }
 #[derive(Debug)]
-pub enum HistoryPageError{
+pub enum AssetVersionsPageError{
     ParseError(url::ParseError),
     Reqwest(reqwest::Error),
 }
-impl std::fmt::Display for HistoryPageError{
+impl std::fmt::Display for AssetVersionsPageError{
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f,"{self:?}")
     }
 }
-impl std::error::Error for HistoryPageError{}
+impl std::error::Error for AssetVersionsPageError{}
 
 pub struct InventoryPageRequest{
     pub group:u64,
@@ -169,15 +169,25 @@ fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
 }
 
 #[derive(Clone)]
-pub struct RobloxContext{
+pub struct Cookie(String);
+impl Cookie{
+    pub fn new(cookie:String)->Self{
+        Self(cookie)
+    }
+    pub fn get(self)->String{
+        self.0
+    }
+}
+#[derive(Clone)]
+pub struct CookieContext{
     pub cookie:String,
     pub client:reqwest::Client,
 }
 
-impl RobloxContext{
-    pub fn new(cookie:String)->Self{
+impl CookieContext{
+    pub fn new(cookie:Cookie)->Self{
         Self{
-            cookie,
+            cookie:cookie.get(),
             client:reqwest::Client::new(),
         }
     }
@@ -259,8 +269,8 @@ impl RobloxContext{
 
         Ok(resp.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
     }
-    pub async fn download(&self,config:DownloadRequest)->Result<Vec<u8>,DownloadError>{
-        let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(DownloadError::ParseError)?;
+    pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
+        let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
         //url borrow scope
         {
             let mut query=url.query_pairs_mut();//borrow here
@@ -269,18 +279,18 @@ impl RobloxContext{
                 query.append_pair("version",version.to_string().as_str());
             }
         }
-        let resp=self.get(url).await.map_err(DownloadError::Reqwest)?;
+        let resp=self.get(url).await.map_err(GetError::Reqwest)?;
 
-        let body=resp.bytes().await.map_err(DownloadError::Reqwest)?;
+        let body=resp.bytes().await.map_err(GetError::Reqwest)?;
 
         match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
            Ok(ReaderType::GZip(readable))=>read_readable(readable),
            Ok(ReaderType::Raw(readable))=>read_readable(readable),
            Err(e)=>Err(e),
-        }.map_err(DownloadError::IO)
+        }.map_err(GetError::IO)
     }
-    pub async fn history_page(&self,config:HistoryPageRequest)->Result<HistoryPageResponse,HistoryPageError>{
-        let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(HistoryPageError::ParseError)?;
+    pub async fn get_asset_versions_page(&self,config:AssetVersionsPageRequest)->Result<AssetVersionsPageResponse,AssetVersionsPageError>{
+        let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(AssetVersionsPageError::ParseError)?;
         //url borrow scope
         {
             let mut query=url.query_pairs_mut();//borrow here
@@ -292,10 +302,10 @@ impl RobloxContext{
             }
         }
 
-        Ok(self.get(url).await.map_err(HistoryPageError::Reqwest)?
-            .json::<HistoryPageResponse>().await.map_err(HistoryPageError::Reqwest)?)
+        Ok(self.get(url).await.map_err(AssetVersionsPageError::Reqwest)?
+            .json::<AssetVersionsPageResponse>().await.map_err(AssetVersionsPageError::Reqwest)?)
     }
-    pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
+    pub async fn get_inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
         let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
         //url borrow scope
         {
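For comparison, the cookie-authenticated client keeps the same call shape after the rename; a rough sketch under the same caveats (the cookie source shown is an assumption):

    // Illustrative only: RobloxContext is now CookieContext and download() is now get_asset().
    use rbx_asset::cookie::{Cookie,CookieContext,GetAssetRequest};

    async fn fetch(asset_id:u64)->Result<Vec<u8>,Box<dyn std::error::Error>>{
        // RBXCOOKIE is a placeholder environment variable name
        let cookie=Cookie::new(std::env::var("RBXCOOKIE")?);
        let context=CookieContext::new(cookie);
        Ok(context.get_asset(GetAssetRequest{asset_id,version:None}).await?)
    }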
rbx_asset/src/lib.rs
@@ -1 +1,2 @@
-pub mod context;
+pub mod cloud;
+pub mod cookie;
rox_compiler/src/compile.rs
@@ -19,15 +19,15 @@ impl PropertiesOverride{
 impl std::fmt::Display for PropertiesOverride{
     fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
         if let Some(name)=self.name.as_deref(){
-            writeln!(f,"--!Properties.Name = \"{}\"",name)?;
+            writeln!(f,"-- Properties.Name = \"{}\"",name)?;
         }
         if let Some(class)=self.class.as_deref(){
-            writeln!(f,"--!Properties.ClassName = \"{}\"",class)?;
+            writeln!(f,"-- Properties.ClassName = \"{}\"",class)?;
         }
         Ok(())
     }
 }
 
 pub(crate) fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
-    lazy_regex::regex!(r"[^A-z0-9.-]").replace_all(s,"_")
+    lazy_regex::regex!(r"[^A-Za-z0-9.-]").replace_all(s,"_")
 }
@@ -203,7 +203,7 @@ impl ScriptWithOverrides{
         let mut count=0;
         for line in source.lines(){
             //only string type properties are supported atm
-            if let Some(captures)=lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-z]\w*)\s*\=\s*"(\w+)"$"#)
+            if let Some(captures)=lazy_regex::regex!(r#"^\-\-\s*Properties\.([A-Za-z]\w*)\s*\=\s*"(\w+)"$"#)
             .captures(line){
                 count+=line.len();
                 match &captures[1]{
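Worth noting for existing sources: with the two hunks above, script property overrides are both written and parsed with a plain `--` comment prefix, e.g. `-- Properties.ClassName = "LocalScript"`, where the old format was `--!Properties.ClassName = "LocalScript"` (the example value is illustrative). The character-class fix from `[A-z]` to `[A-Za-z]` also stops the regexes from accepting the punctuation characters that sit between `Z` and `a` in ASCII.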
@@ -237,7 +237,10 @@ struct CompileNode{
 pub enum CompileNodeError{
     IO(std::io::Error),
     ScriptWithOverrides(ScriptWithOverridesError),
-    InvalidHintOrClass(Option<String>,ScriptHint),
+    InvalidClassOrHint{
+        class:Option<String>,
+        hint:ScriptHint
+    },
     QueryResolveError(QueryResolveError),
     /// Conversion from OsString to String failed
     FileName(std::ffi::OsString),
@@ -245,7 +248,7 @@ pub enum CompileNodeError{
         extension:String,
         style:Option<Style>,
     },
-    NoExtension,
+    UnknownExtension,
 }
 impl std::fmt::Display for CompileNodeError{
     fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -277,7 +280,7 @@ impl CompileNode{
             |(None,ScriptHint::LocalScript)=>CompileClass::LocalScript(script_with_overrides.source),
             (Some("Script"),_)
             |(None,ScriptHint::Script)=>CompileClass::Script(script_with_overrides.source),
-            other=>Err(CompileNodeError::InvalidHintOrClass(other.0.map(|s|s.to_owned()),other.1))?,
+            (class,hint)=>Err(CompileNodeError::InvalidClassOrHint{class:class.map(|s|s.to_owned()),hint})?,
         },
     })
 }
@@ -340,7 +343,7 @@ impl CompileNode{
             _=>false,
         };
         let (ext_len,file_discernment)={
-            if let Some(captures)=lazy_regex::regex!(r"^.*(.module.lua|.client.lua|.server.lua|.rbxmx|.lua)$")
+            if let Some(captures)=lazy_regex::regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
             .captures(file_name.as_str()){
                 let ext=&captures[1];
                 (ext.len(),match ext{
@@ -352,6 +355,12 @@ impl CompileNode{
                 },
                 ".client.lua"=>FileDiscernment::Script(ScriptHint::LocalScript),
                 ".server.lua"=>FileDiscernment::Script(ScriptHint::Script),
+                _=>panic!("Regex failed"),
+                })
+            }else if let Some(captures)=lazy_regex::regex!(r"^.*(\.rbxmx|\.lua)$")
+            .captures(file_name.as_str()){
+                let ext=&captures[1];
+                (ext.len(),match ext{
                 ".rbxmx"=>{
                     if is_goober{
                         Err(CompileNodeError::ExtensionNotSupportedInStyle{extension:ext.to_owned(),style})?;
@@ -362,7 +371,7 @@ impl CompileNode{
                 _=>panic!("Regex failed"),
                 })
             }else{
-                return Err(CompileNodeError::NoExtension);
+                return Err(CompileNodeError::UnknownExtension);
             }
         };
         file_name.truncate(file_name.len()-ext_len);
@@ -429,17 +438,21 @@ impl std::fmt::Display for CompileError{
 impl std::error::Error for CompileError{}
 
 pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
+    //hack to traverse root folder as the root object
+    dom.root_mut().name="src".to_owned();
     //add in scripts and models
     let mut folder=config.input_folder.clone();
     let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
     while let Some(instruction)=stack.pop(){
         match instruction{
             CompileStackInstruction::TraverseReferent(item_ref,blacklist)=>{
-                let sans={
+                //scope to avoid holding item ref
+                {
                     let item=dom.get_by_ref(item_ref).ok_or(CompileError::NullChildRef)?;
-                    sanitize(item.name.as_str()).to_string()
-                };
-                folder.push(sans.as_str());
+                    let folder_name=sanitize(item.name.as_str());
+                    folder.push(folder_name.as_ref());
+                    //drop item
+                }
                 stack.push(CompileStackInstruction::PopFolder);
                 //check if a folder exists with item.name
                 if let Ok(dir)=tokio::fs::read_dir(folder.as_path()).await{
@@ -476,7 +489,7 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
                     }else{
                         TooComplicated::Stop
                     };
-                    Ok::<_,std::io::Error>(ret2)
+                    Ok(ret2)
                 })().await
             };
             match ret1{
@@ -493,10 +506,19 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
                 Ok(Some(entry))=>tokio::spawn(async move{
                     let met=entry.metadata().await.map_err(CompileError::IO)?;
                     //discern that bad boy
-                    let compile_class=match met.is_dir(){
-                        true=>CompileNode::from_folder(&entry,style).await,
-                        false=>CompileNode::from_file(&entry,style).await,
-                    }.map_err(CompileError::CompileNode)?;
+                    let compile_class={
+                        let result=match met.is_dir(){
+                            true=>CompileNode::from_folder(&entry,style).await,
+                            false=>CompileNode::from_file(&entry,style).await,
+                        };
+                        match result{
+                            Ok(compile_class)=>compile_class,
+                            Err(e)=>{
+                                println!("Ignoring file {entry:?} due to error {e}");
+                                return Ok(None);
+                            },
+                        }
+                    };
                     //prepare data structure
                     Ok(Some((compile_class.blacklist,match compile_class.class{
                         CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
@@ -515,22 +537,20 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
             .map(|f|async{f}).buffer_unordered(32)
 
             //begin processing immediately
-            .try_fold((&mut stack,&mut dom),|(stack,dom):(&mut Vec<CompileStackInstruction>,_),bog|async{
+            //TODO: fix dom being &mut &mut inside the closure
+            .try_fold((&mut stack,&mut dom),|(stack,dom),bog|async{
                 //push child objects onto dom serially as they arrive
-                match bog{
-                    Some((blacklist,data))=>{
-                        let referent=match data{
-                            PreparedData::Model(mut model_dom)=>{
-                                let referent=model_dom.root().children()[0];
-                                model_dom.transfer(referent,dom,item_ref);
-                                referent
-                            },
-                            PreparedData::Builder(script)=>dom.insert(item_ref,script),
-                        };
-                        //new children need to be traversed
-                        stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
-                    },
-                    None=>(),
+                if let Some((blacklist,data))=bog{
+                    let referent=match data{
+                        PreparedData::Model(mut model_dom)=>{
+                            let referent=model_dom.root().children()[0];
+                            model_dom.transfer(referent,dom,item_ref);
+                            referent
+                        },
+                        PreparedData::Builder(script)=>dom.insert(item_ref,script),
+                    };
+                    //new children need to be traversed
+                    stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
                 }
                 Ok((stack,dom))
             }).await?;
@@ -539,5 +559,5 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
             CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
         }
     }
-    unreachable!();
+    Ok(())
 }
rox_compiler/src/lib.rs
@@ -1,9 +1,9 @@
 mod common;
 mod compile;
 mod decompile;
-//export specific types
+//export minimal interface
 pub use common::Style;
 pub use compile::CompileConfig;
-pub use compile::compile;//cringe non standardized interface
-pub use decompile::DecompiledContext;
+pub use compile::compile;//cringe unstandardized interface
 pub use decompile::WriteConfig;
+pub use decompile::DecompiledContext;
src/main.rs (539 lines changed)
@@ -2,7 +2,8 @@ use std::{io::Read,path::PathBuf};
 use clap::{Args,Parser,Subcommand};
 use anyhow::Result as AResult;
 use futures::StreamExt;
-use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion};
+use rbx_asset::cloud::{ApiKey,CloudContext};
+use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion,InventoryItem};
 
 type AssetID=u64;
 type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
@@ -19,25 +20,37 @@ struct Cli{
 
 #[derive(Subcommand)]
 enum Commands{
+    Info(InfoSubcommand),
     DownloadHistory(DownloadHistorySubcommand),
     Download(DownloadSubcommand),
+    DownloadDecompile(DownloadDecompileSubcommand),
     DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand),
-    Create(CreateSubcommand),
-    Upload(UploadSubcommand),
+    CreateAsset(CreateAssetSubcommand),
+    UploadAsset(UpdateAssetSubcommand),
+    UploadPlace(UpdatePlaceSubcommand),
     Compile(CompileSubcommand),
+    CompileUploadAsset(CompileUploadAssetSubcommand),
+    CompileUploadPlace(CompileUploadPlaceSubcommand),
     Decompile(DecompileSubcommand),
     DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
     DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
 }
+#[derive(Args)]
+struct InfoSubcommand{
+    #[arg(long)]
+    path:PathBuf,
+}
+
 #[derive(Args)]
 struct DownloadHistorySubcommand{
     #[arg(long)]
     asset_id:AssetID,
-    #[arg(long)]
-    cookie_type:CookieType,
-    #[arg(long)]
-    cookie:String,
+    #[arg(long,group="cookie",required=true)]
+    cookie_literal:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_envvar:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_file:Option<PathBuf>,
     #[arg(long)]
     output_folder:Option<PathBuf>,
     #[arg(long)]
@@ -49,10 +62,12 @@ struct DownloadHistorySubcommand{
 }
 #[derive(Args)]
 struct DownloadSubcommand{
-    #[arg(long)]
-    cookie_type:CookieType,
-    #[arg(long)]
-    cookie:String,
+    #[arg(long,group="api_key",required=true)]
+    api_key_literal:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_envvar:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_file:Option<PathBuf>,
     #[arg(long)]
     output_folder:Option<PathBuf>,
     #[arg(required=true)]
@@ -60,21 +75,25 @@ struct DownloadSubcommand{
 }
 #[derive(Args)]
 struct DownloadGroupInventoryJsonSubcommand{
-    #[arg(long)]
-    cookie_type:CookieType,
-    #[arg(long)]
-    cookie:String,
+    #[arg(long,group="cookie",required=true)]
+    cookie_literal:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_envvar:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_file:Option<PathBuf>,
     #[arg(long)]
     output_folder:Option<PathBuf>,
     #[arg(long)]
     group:u64,
 }
 #[derive(Args)]
-struct CreateSubcommand{
-    #[arg(long)]
-    cookie_type:CookieType,
-    #[arg(long)]
-    cookie:String,
+struct CreateAssetSubcommand{
+    #[arg(long,group="api_key",required=true)]
+    api_key_literal:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_envvar:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_file:Option<PathBuf>,
     #[arg(long)]
     model_name:String,
     #[arg(long)]
@@ -82,24 +101,37 @@ struct CreateSubcommand{
     #[arg(long)]
     input_file:PathBuf,
     #[arg(long)]
-    group:Option<u64>,
+    creator_user_id:u64,
     #[arg(long)]
-    free_model:Option<bool>,
-    #[arg(long)]
-    allow_comments:Option<bool>,
+    creator_group_id:Option<u64>,
 }
 #[derive(Args)]
-struct UploadSubcommand{
+struct UpdateAssetSubcommand{
     #[arg(long)]
     asset_id:AssetID,
-    #[arg(long)]
-    cookie_type:CookieType,
-    #[arg(long)]
-    cookie:String,
+    #[arg(long,group="api_key",required=true)]
+    api_key_literal:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_envvar:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_file:Option<PathBuf>,
     #[arg(long)]
     input_file:PathBuf,
+}
+#[derive(Args)]
+struct UpdatePlaceSubcommand{
     #[arg(long)]
-    group:Option<u64>,
+    place_id:u64,
+    #[arg(long)]
+    universe_id:u64,
+    #[arg(long,group="api_key",required=true)]
+    api_key_literal:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_envvar:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_file:Option<PathBuf>,
+    #[arg(long)]
+    input_file:PathBuf,
 }
 #[derive(Args)]
 struct CompileSubcommand{
@@ -113,6 +145,42 @@ struct CompileSubcommand{
     template:Option<PathBuf>,
 }
 #[derive(Args)]
+struct CompileUploadAssetSubcommand{
+    #[arg(long)]
+    asset_id:AssetID,
+    #[arg(long,group="api_key",required=true)]
+    api_key_literal:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_envvar:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_file:Option<PathBuf>,
+    #[arg(long)]
+    input_folder:Option<PathBuf>,
+    #[arg(long)]
+    style:Option<Style>,
+    #[arg(long)]
+    template:Option<PathBuf>,
+}
+#[derive(Args)]
+struct CompileUploadPlaceSubcommand{
+    #[arg(long)]
+    place_id:u64,
+    #[arg(long)]
+    universe_id:u64,
+    #[arg(long,group="api_key",required=true)]
+    api_key_literal:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_envvar:Option<String>,
+    #[arg(long,group="api_key",required=true)]
+    api_key_file:Option<PathBuf>,
+    #[arg(long)]
+    input_folder:Option<PathBuf>,
+    #[arg(long)]
+    style:Option<Style>,
+    #[arg(long)]
+    template:Option<PathBuf>,
+}
+#[derive(Args)]
 struct DecompileSubcommand{
     #[arg(long)]
     input_file:PathBuf,
@@ -128,6 +196,27 @@ struct DecompileSubcommand{
     write_scripts:Option<bool>,
 }
 #[derive(Args)]
+struct DownloadDecompileSubcommand{
+    #[arg(long,group="cookie",required=true)]
+    cookie_literal:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_envvar:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_file:Option<PathBuf>,
+    #[arg(long)]
+    output_folder:Option<PathBuf>,
+    #[arg(long)]
+    asset_id:AssetID,
+    #[arg(long)]
+    style:Style,
+    #[arg(long)]
+    write_template:Option<bool>,
+    #[arg(long)]
+    write_models:Option<bool>,
+    #[arg(long)]
+    write_scripts:Option<bool>,
+}
+#[derive(Args)]
 struct DecompileHistoryIntoGitSubcommand{
     #[arg(long)]
     input_folder:PathBuf,
@@ -150,10 +239,12 @@ struct DecompileHistoryIntoGitSubcommand{
 struct DownloadAndDecompileHistoryIntoGitSubcommand{
     #[arg(long)]
     asset_id:AssetID,
-    #[arg(long)]
-    cookie_type:CookieType,
-    #[arg(long)]
-    cookie:String,
+    #[arg(long,group="cookie",required=true)]
+    cookie_literal:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_envvar:Option<String>,
+    #[arg(long,group="cookie",required=true)]
+    cookie_file:Option<PathBuf>,
     //currently output folder must be the current folder due to git2 limitations
     //output_folder:cli.output.unwrap(),
     #[arg(long)]
@@ -170,15 +261,8 @@ struct DownloadAndDecompileHistoryIntoGitSubcommand{
     write_scripts:Option<bool>,
 }
 
-#[derive(Clone,clap::ValueEnum)]
-enum CookieType{
-    Literal,
-    Environment,
-    File,
-}
-
 #[derive(Clone,Copy,Debug,clap::ValueEnum)]
-pub enum Style{
+enum Style{
     Rox,
     Rojo,
     RoxRojo,
@@ -193,22 +277,42 @@ impl Style{
     }
 }
 
+async fn info(path:PathBuf)->AResult<()>{
+    let dir=std::env::current_dir().unwrap();
+    println!("pwd={:?}",dir);
+    println!("path={path:?}");
+    let mut read_dir=tokio::fs::read_dir(path).await?;
+    while let Some(entry)=read_dir.next_entry().await?{
+        println!("{:?}",entry);
+    }
+    Ok(())
+}
+
 #[tokio::main]
 async fn main()->AResult<()>{
     let cli=Cli::parse();
     match cli.command{
+        Commands::Info(subcommand)=>info(subcommand.path).await,
         Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
             continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
             end_version:subcommand.end_version,
             start_version:subcommand.start_version.unwrap_or(0),
             output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
-            cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
+            cookie:cookie_from_args(
+                subcommand.cookie_literal,
+                subcommand.cookie_envvar,
+                subcommand.cookie_file,
+            ).await?,
             asset_id:subcommand.asset_id,
         }).await,
         Commands::Download(subcommand)=>{
             let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
             download_list(
-                Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
+                api_key_from_args(
                    subcommand.api_key_literal,
                    subcommand.api_key_envvar,
                    subcommand.api_key_file,
                ).await?,
                subcommand.asset_ids.into_iter().map(|asset_id|{
                    let mut path=output_folder.clone();
                    path.push(asset_id.to_string());
@@ -216,33 +320,92 @@ async fn main()->AResult<()>{
                }).collect()
            ).await
        },
+        Commands::DownloadDecompile(subcommand)=>{
+            download_decompile(DownloadDecompileConfig{
+                cookie:cookie_from_args(
+                    subcommand.cookie_literal,
+                    subcommand.cookie_envvar,
+                    subcommand.cookie_file,
+                ).await?,
+                asset_id:subcommand.asset_id,
+                output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
+                style:subcommand.style.rox(),
+                write_template:subcommand.write_template.unwrap_or(false),
+                write_models:subcommand.write_models.unwrap_or(false),
+                write_scripts:subcommand.write_scripts.unwrap_or(true),
+            }).await
+        },
         Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
-            Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
+            cookie_from_args(
+                subcommand.cookie_literal,
+                subcommand.cookie_envvar,
+                subcommand.cookie_file,
+            ).await?,
            subcommand.group,
            subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
        ).await,
-        Commands::Create(subcommand)=>create(CreateConfig{
-            cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
-            group:subcommand.group,
+        Commands::CreateAsset(subcommand)=>create(CreateConfig{
+            api_key:api_key_from_args(
+                subcommand.api_key_literal,
+                subcommand.api_key_envvar,
+                subcommand.api_key_file,
+            ).await?,
+            creator_user_id:subcommand.creator_user_id,
+            creator_group_id:subcommand.creator_group_id,
            input_file:subcommand.input_file,
            model_name:subcommand.model_name,
            description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
-            free_model:subcommand.free_model.unwrap_or(false),
-            allow_comments:subcommand.allow_comments.unwrap_or(false),
        }).await,
-        Commands::Upload(subcommand)=>upload_list(
-            Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
-            subcommand.group,
-            vec![(subcommand.asset_id,subcommand.input_file)]
-        ).await,
+        Commands::UploadAsset(subcommand)=>upload_asset(UploadAssetConfig{
+            api_key:api_key_from_args(
+                subcommand.api_key_literal,
+                subcommand.api_key_envvar,
+                subcommand.api_key_file,
+            ).await?,
+            asset_id:subcommand.asset_id,
+            input_file:subcommand.input_file,
+        }).await,
+        Commands::UploadPlace(subcommand)=>upload_place(UploadPlaceConfig{
+            api_key:api_key_from_args(
+                subcommand.api_key_literal,
+                subcommand.api_key_envvar,
+                subcommand.api_key_file,
+            ).await?,
+            place_id:subcommand.place_id,
+            universe_id:subcommand.universe_id,
+            input_file:subcommand.input_file,
+        }).await,
         Commands::Compile(subcommand)=>compile(CompileConfig{
            input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
            output_file:subcommand.output_file,
            template:subcommand.template,
-            style:subcommand.style,
+            style:subcommand.style.map(|s|s.rox()),
+        }).await,
+        Commands::CompileUploadAsset(subcommand)=>compile_upload_asset(CompileUploadAssetConfig{
+            input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
+            template:subcommand.template,
+            style:subcommand.style.map(|s|s.rox()),
+            api_key:api_key_from_args(
+                subcommand.api_key_literal,
+                subcommand.api_key_envvar,
+                subcommand.api_key_file,
+            ).await?,
+            asset_id:subcommand.asset_id,
+        }).await,
+        Commands::CompileUploadPlace(subcommand)=>compile_upload_place(CompileUploadPlaceConfig{
+            input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
+            template:subcommand.template,
+            style:subcommand.style.map(|s|s.rox()),
+            api_key:api_key_from_args(
+                subcommand.api_key_literal,
+                subcommand.api_key_envvar,
+                subcommand.api_key_file,
+            ).await?,
+            place_id:subcommand.place_id,
+            universe_id:subcommand.universe_id,
        }).await,
         Commands::Decompile(subcommand)=>decompile(DecompileConfig{
-            style:subcommand.style,
+            style:subcommand.style.rox(),
            input_file:subcommand.input_file,
            output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
            write_template:subcommand.write_template.unwrap_or(false),
@@ -254,7 +417,7 @@ async fn main()->AResult<()>{
            git_committer_email:subcommand.git_committer_email,
            input_folder:subcommand.input_folder,
            output_folder:std::env::current_dir()?,
-            style:subcommand.style,
+            style:subcommand.style.rox(),
            write_template:subcommand.write_template.unwrap_or(false),
|
||||||
write_models:subcommand.write_models.unwrap_or(false),
|
write_models:subcommand.write_models.unwrap_or(false),
|
||||||
write_scripts:subcommand.write_scripts.unwrap_or(true),
|
write_scripts:subcommand.write_scripts.unwrap_or(true),
|
||||||
@ -262,10 +425,14 @@ async fn main()->AResult<()>{
|
|||||||
Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
|
Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
|
||||||
git_committer_name:subcommand.git_committer_name,
|
git_committer_name:subcommand.git_committer_name,
|
||||||
git_committer_email:subcommand.git_committer_email,
|
git_committer_email:subcommand.git_committer_email,
|
||||||
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
|
cookie:cookie_from_args(
|
||||||
|
subcommand.cookie_literal,
|
||||||
|
subcommand.cookie_envvar,
|
||||||
|
subcommand.cookie_file,
|
||||||
|
).await?,
|
||||||
asset_id:subcommand.asset_id,
|
asset_id:subcommand.asset_id,
|
||||||
output_folder:std::env::current_dir()?,
|
output_folder:std::env::current_dir()?,
|
||||||
style:subcommand.style,
|
style:subcommand.style.rox(),
|
||||||
write_template:subcommand.write_template.unwrap_or(false),
|
write_template:subcommand.write_template.unwrap_or(false),
|
||||||
write_models:subcommand.write_models.unwrap_or(false),
|
write_models:subcommand.write_models.unwrap_or(false),
|
||||||
write_scripts:subcommand.write_scripts.unwrap_or(true),
|
write_scripts:subcommand.write_scripts.unwrap_or(true),
|
||||||
@ -273,76 +440,91 @@ async fn main()->AResult<()>{
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
-struct Cookie(String);
-impl Cookie{
-async fn from_type(cookie_type:CookieType,cookie_string:String)->AResult<Self>{
-Ok(Self(format!(".ROBLOSECURITY={}",match cookie_type{
-CookieType::Literal=>cookie_string,
-CookieType::Environment=>std::env::var(cookie_string)?,
-CookieType::File=>tokio::fs::read_to_string(cookie_string).await?,
-})))
+async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<Cookie>{
+let cookie=match (literal,environment,file){
+(Some(cookie_literal),None,None)=>cookie_literal,
+(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
+(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
+_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
+};
+Ok(Cookie::new(format!(".ROBLOSECURITY={cookie}")))
 }
+async fn api_key_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<ApiKey>{
+let api_key=match (literal,environment,file){
+(Some(api_key_literal),None,None)=>api_key_literal,
+(None,Some(api_key_environment),None)=>std::env::var(api_key_environment)?,
+(None,None,Some(api_key_file))=>tokio::fs::read_to_string(api_key_file).await?,
+_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
+};
+Ok(ApiKey::new(api_key))
 }

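The cookie_from_args and api_key_from_args helpers above accept exactly one of a literal value, an environment variable name, or a file path, and fall through to the "Illegal api key argument triple" error otherwise. Since the tool already depends on clap, the same exclusivity could also be enforced at parse time with an argument group. The sketch below is only an illustration with hypothetical flag and struct names (it assumes clap 4 with the derive feature); it is not the tool's actual CLI definition.

use std::path::PathBuf;
use clap::{Args, Parser};

// Hypothetical credential flags; exactly one source must be supplied.
#[derive(Args)]
#[group(required = true, multiple = false)]
struct ApiKeySource {
    /// Pass the API key directly on the command line.
    #[arg(long)]
    api_key_literal: Option<String>,
    /// Name of an environment variable holding the API key.
    #[arg(long)]
    api_key_envvar: Option<String>,
    /// Path to a file containing the API key.
    #[arg(long)]
    api_key_file: Option<PathBuf>,
}

#[derive(Parser)]
struct ExampleCli {
    #[command(flatten)]
    api_key: ApiKeySource,
}

fn main() {
    // Exactly one flag: accepted.
    let ok = ExampleCli::try_parse_from(["example", "--api-key-envvar", "RBX_API_KEY"]);
    assert!(ok.is_ok());
    // Two flags at once: rejected by clap before the runtime
    // "Illegal api key argument triple" check is ever reached.
    let err = ExampleCli::try_parse_from(["example", "--api-key-literal", "k", "--api-key-file", "key.txt"]);
    assert!(err.is_err());
}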
 struct CreateConfig{
-cookie:String,
+api_key:ApiKey,
 model_name:String,
 description:String,
 input_file:PathBuf,
-group:Option<u64>,
-free_model:bool,
-allow_comments:bool,
+creator_user_id:u64,
+creator_group_id:Option<u64>,
 }
 
+///This is hardcoded to create models atm
 async fn create(config:CreateConfig)->AResult<()>{
-let resp=RobloxContext::new(config.cookie)
-.create(rbx_asset::context::CreateRequest{
-name:config.model_name,
+let resp=CloudContext::new(config.api_key)
+.create_asset(rbx_asset::cloud::CreateAssetRequest{
+assetType:rbx_asset::cloud::AssetType::Model,
+displayName:config.model_name,
 description:config.description,
-ispublic:config.free_model,
-allowComments:config.allow_comments,
-groupId:config.group,
+creationContext:rbx_asset::cloud::CreationContext{
+creator:rbx_asset::cloud::Creator{
+userId:config.creator_user_id,
+groupId:config.creator_group_id.unwrap_or(0),
+},
+expectedPrice:0,
+}
+},tokio::fs::read(config.input_file).await?).await?;
+println!("CreateResponse={:?}",resp);
+Ok(())
+}
+
+struct UploadAssetConfig{
+api_key:ApiKey,
+asset_id:u64,
+input_file:PathBuf,
+}
+async fn upload_asset(config:UploadAssetConfig)->AResult<()>{
+let context=CloudContext::new(config.api_key);
+let resp=context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
+assetId:config.asset_id,
+displayName:None,
+description:None,
 },tokio::fs::read(config.input_file).await?).await?;
 println!("UploadResponse={:?}",resp);
 Ok(())
 }
 
-async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
-let context=RobloxContext::new(cookie);
-//this is calling map on the vec because the closure produces an iterator of futures
-futures::stream::iter(asset_id_file_map.into_iter()
-.map(|(asset_id,file)|{
-let context=&context;
-async move{
-Ok((asset_id,context.upload(rbx_asset::context::UploadRequest{
-assetid:asset_id,
-name:None,
-description:None,
-ispublic:None,
-allowComments:None,
-groupId:group,
-},tokio::fs::read(file).await?).await?))
-}
-}))
-.buffer_unordered(CONCURRENT_REQUESTS)
-.for_each(|b:AResult<_>|async{
-match b{
-Ok((asset_id,body))=>{
-println!("asset_id={} UploadResponse={:?}",asset_id,body);
-},
-Err(e)=>eprintln!("ul error: {}",e),
-}
-}).await;
+struct UploadPlaceConfig{
+api_key:ApiKey,
+place_id:u64,
+universe_id:u64,
+input_file:PathBuf,
+}
+async fn upload_place(config:UploadPlaceConfig)->AResult<()>{
+let context=CloudContext::new(config.api_key);
+context.update_place(rbx_asset::cloud::UpdatePlaceRequest{
+placeId:config.place_id,
+universeId:config.universe_id,
+},tokio::fs::read(config.input_file).await?).await?;
 Ok(())
 }
 
-async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
-let context=RobloxContext::new(cookie);
+async fn download_list(api_key:ApiKey,asset_id_file_map:AssetIDFileMap)->AResult<()>{
+let context=CloudContext::new(api_key);
 futures::stream::iter(asset_id_file_map.into_iter()
 .map(|(asset_id,file)|{
 let context=&context;
 async move{
-Ok((file,context.download(rbx_asset::context::DownloadRequest{asset_id,version:None}).await?))
+Ok((file,context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id,version:None}).await?))
 }
 }))
 .buffer_unordered(CONCURRENT_REQUESTS)
@@ -360,11 +542,11 @@ async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<
 Ok(())
 }

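download_list issues its requests through futures::stream::iter(...).buffer_unordered(CONCURRENT_REQUESTS), which caps how many downloads are in flight at any moment while still letting them complete out of order. A self-contained sketch of that pattern, with a stand-in fetch function instead of this crate's CloudContext::get_asset:

use futures::StreamExt; // for buffer_unordered / for_each

const CONCURRENT_REQUESTS: usize = 8; // illustrative limit

// Stand-in for a network call such as get_asset.
async fn fetch(asset_id: u64) -> Result<Vec<u8>, anyhow::Error> {
    Ok(asset_id.to_le_bytes().to_vec())
}

#[tokio::main]
async fn main() {
    let asset_ids = vec![1u64, 2, 3, 4, 5];
    futures::stream::iter(asset_ids.into_iter().map(|asset_id| async move {
        Ok::<_, anyhow::Error>((asset_id, fetch(asset_id).await?))
    }))
    // At most CONCURRENT_REQUESTS futures are polled concurrently;
    // results are yielded in completion order, not submission order.
    .buffer_unordered(CONCURRENT_REQUESTS)
    .for_each(|result| async {
        match result {
            Ok((asset_id, bytes)) => println!("asset_id={} bytes={}", asset_id, bytes.len()),
            Err(e) => eprintln!("download error: {}", e),
        }
    })
    .await;
}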
-async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<InventoryItem>>{
+async fn get_inventory_pages(context:&CookieContext,group:u64)->AResult<Vec<InventoryItem>>{
 let mut cursor:Option<String>=None;
 let mut asset_list=Vec::new();
 loop{
-let mut page=context.inventory_page(rbx_asset::context::InventoryPageRequest{group,cursor}).await?;
+let mut page=context.get_inventory_page(rbx_asset::cookie::InventoryPageRequest{group,cursor}).await?;
 asset_list.append(&mut page.data);
 if page.nextPageCursor.is_none(){
 break;
@@ -374,8 +556,8 @@ async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<Inve
 Ok(asset_list)
 }

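get_inventory_pages and get_version_history share the same cursor-pagination shape: request a page, append page.data, stop when nextPageCursor is empty, otherwise continue from the returned cursor. A generic sketch of that loop against a hypothetical paged API (the types and names are placeholders, not rbx_asset's):

// Hypothetical page shape mirroring the endpoints used above.
struct Page<T> {
    data: Vec<T>,
    next_page_cursor: Option<String>,
}

// Stand-in for a call like get_inventory_page.
async fn fetch_page(cursor: Option<String>) -> anyhow::Result<Page<u64>> {
    // Return one extra page after the first request, then stop.
    Ok(match cursor {
        None => Page { data: vec![1, 2], next_page_cursor: Some("cursor-1".to_string()) },
        Some(_) => Page { data: vec![3], next_page_cursor: None },
    })
}

async fn fetch_all() -> anyhow::Result<Vec<u64>> {
    let mut cursor: Option<String> = None;
    let mut items = Vec::new();
    loop {
        let mut page = fetch_page(cursor).await?;
        items.append(&mut page.data);
        // An absent cursor marks the last page.
        if page.next_page_cursor.is_none() {
            break;
        }
        cursor = page.next_page_cursor;
    }
    Ok(items)
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    assert_eq!(fetch_all().await?, vec![1, 2, 3]);
    Ok(())
}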
-async fn download_group_inventory_json(cookie:String,group:u64,output_folder:PathBuf)->AResult<()>{
-let context=RobloxContext::new(cookie);
+async fn download_group_inventory_json(cookie:Cookie,group:u64,output_folder:PathBuf)->AResult<()>{
+let context=CookieContext::new(cookie);
 let item_list=get_inventory_pages(&context,group).await?;
 
 let mut path=output_folder.clone();
@@ -385,11 +567,11 @@ async fn download_group_inventory_json(cookie:String,group:u64,output_folder:Pat
 Ok(())
 }
 
-async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
+async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
 let mut cursor:Option<String>=None;
 let mut asset_list=Vec::new();
 loop{
-let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id,cursor}).await?;
+let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id,cursor}).await?;
 asset_list.append(&mut page.data);
 if page.nextPageCursor.is_none(){
 break;
@@ -405,7 +587,7 @@ struct DownloadHistoryConfig{
 end_version:Option<u64>,
 start_version:u64,
 output_folder:PathBuf,
-cookie:String,
+cookie:Cookie,
 asset_id:AssetID,
 }
 
@@ -446,7 +628,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
 }
 }
-let context=RobloxContext::new(config.cookie);
+let context=CookieContext::new(config.cookie);
 
 //limit concurrent downloads
 let mut join_set=tokio::task::JoinSet::new();
@@ -454,7 +636,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 //poll paged list of all asset versions
 let mut cursor:Option<String>=None;
 loop{
-let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id:config.asset_id,cursor}).await?;
+let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id:config.asset_id,cursor}).await?;
 let context=&context;
 let output_folder=config.output_folder.clone();
 let data=&page.data;
@@ -484,7 +666,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 let mut path=output_folder.clone();
 path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
 join_set.spawn(async move{
-let file=context.download(rbx_asset::context::DownloadRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
+let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
 
 tokio::fs::write(path,file).await?;

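download_history spawns each versioned download onto a tokio::task::JoinSet, so the per-version fetches and file writes run concurrently while the paging loop keeps going. A stripped-down sketch of that pattern, with placeholder work in place of the real get_asset call:

use tokio::task::JoinSet;

// Stand-in for downloading one asset version and writing it to disk.
async fn download_version(version: u64) -> anyhow::Result<u64> {
    Ok(version)
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let mut join_set = JoinSet::new();
    for version in 1..=10u64 {
        // Each spawned task runs independently of the loop that spawned it.
        join_set.spawn(download_version(version));
    }
    // Drain results as tasks finish; a panic/JoinError or a download error surfaces here.
    while let Some(joined) = join_set.join_next().await {
        let version = joined??;
        println!("finished version {version}");
    }
    Ok(())
}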
@@ -546,7 +728,7 @@ fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
 
 
 struct DecompileConfig{
-style:Style,
+style:rox_compiler::Style,
 input_file:PathBuf,
 output_folder:PathBuf,
 write_template:bool,
@@ -568,7 +750,35 @@ async fn decompile(config:DecompileConfig)->AResult<()>{
 //generate folders, models, and scripts
 //delete models and scripts from dom
 context.write_files(rox_compiler::WriteConfig{
-style:config.style.rox(),
+style:config.style,
+output_folder:config.output_folder,
+write_template:config.write_template,
+write_models:config.write_models,
+write_scripts:config.write_scripts,
+}).await?;
+
+Ok(())
+}
+
+struct DownloadDecompileConfig{
+cookie:Cookie,
+asset_id:AssetID,
+style:rox_compiler::Style,
+output_folder:PathBuf,
+write_template:bool,
+write_models:bool,
+write_scripts:bool,
+}
+
+async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
+let context=CookieContext::new(config.cookie);
+let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
+
+let dom=load_dom(std::io::Cursor::new(file))?;
+let context=rox_compiler::DecompiledContext::from_dom(dom);
+
+context.write_files(rox_compiler::WriteConfig{
+style:config.style,
 output_folder:config.output_folder,
 write_template:config.write_template,
 write_models:config.write_models,
@@ -582,7 +792,7 @@ struct WriteCommitConfig{
 git_committer_name:String,
 git_committer_email:String,
 output_folder:PathBuf,
-style:Style,
+style:rox_compiler::Style,
 write_template:bool,
 write_models:bool,
 write_scripts:bool,
@@ -612,7 +822,7 @@ async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,ro
 
 //write files
 context.write_files(rox_compiler::WriteConfig{
-style:config.style.rox(),
+style:config.style,
 output_folder:config.output_folder.clone(),
 write_template:config.write_template,
 write_models:config.write_models,
@@ -673,7 +883,7 @@ struct DecompileHistoryConfig{
 git_committer_name:String,
 git_committer_email:String,
 input_folder:PathBuf,
-style:Style,
+style:rox_compiler::Style,
 output_folder:PathBuf,
 write_template:bool,
 write_models:bool,
@@ -719,11 +929,11 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
 }
 
 struct DownloadAndDecompileHistoryConfig{
-cookie:String,
+cookie:Cookie,
 asset_id:AssetID,
 git_committer_name:String,
 git_committer_email:String,
-style:Style,
+style:rox_compiler::Style,
 output_folder:PathBuf,
 write_template:bool,
 write_models:bool,
@@ -731,7 +941,7 @@ struct DownloadAndDecompileHistoryConfig{
 }
 
 async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
-let context=RobloxContext::new(config.cookie);
+let context=CookieContext::new(config.cookie);
 
 //poll paged list of all asset versions
 let asset_list=get_version_history(&context,config.asset_id).await?;
@@ -744,7 +954,7 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
 .map(|asset_version|{
 let context=context.clone();
 tokio::task::spawn(async move{
-let file=context.download(rbx_asset::context::DownloadRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
+let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
 let dom=load_dom(std::io::Cursor::new(file))?;
 Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
 })
@@ -771,7 +981,7 @@ struct CompileConfig{
 input_folder:PathBuf,
 output_file:PathBuf,
 template:Option<PathBuf>,
-style:Option<Style>,
+style:Option<rox_compiler::Style>,
 }
 
 async fn compile(config:CompileConfig)->AResult<()>{
@@ -780,14 +990,12 @@ async fn compile(config:CompileConfig)->AResult<()>{
 let mut dom=match config.template{
 //mr dom doesn't like tokio files
 Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
-None=>rbx_dom_weak::WeakDom::default(),
+None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
 };
-//hack to traverse root folder as the root object
-dom.root_mut().name="src".to_owned();
 
 rox_compiler::compile(rox_compiler::CompileConfig{
 input_folder:config.input_folder,
-style:config.style.map(|s|s.rox()),
+style:config.style,
 },&mut dom).await?;
 
 let mut output_place=config.output_file.clone();
@@ -799,3 +1007,70 @@ async fn compile(config:CompileConfig)->AResult<()>{
 rbx_binary::to_writer(output,&dom,dom.root().children())?;
 Ok(())
 }

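compile now seeds an empty DOM with an explicit DataModel root instead of WeakDom::default() plus the removed rename-to-"src" hack, and the new compile_upload_* functions below serialize the compiled DOM into an in-memory buffer before uploading it. A minimal sketch of those two pieces in isolation, using rbx_dom_weak and rbx_binary directly and skipping the rox_compiler step; the Workspace child is purely illustrative:

use rbx_dom_weak::{InstanceBuilder, WeakDom};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Explicit DataModel root, as in the updated compile().
    let dom = WeakDom::new(
        InstanceBuilder::new("DataModel").with_child(InstanceBuilder::new("Workspace")),
    );

    // Serialize the root's children into an in-memory rbxl buffer,
    // mirroring the compile_upload_* functions below.
    let mut data = Vec::new();
    rbx_binary::to_writer(std::io::Cursor::new(&mut data), &dom, dom.root().children())?;

    println!("serialized {} bytes", data.len());
    Ok(())
}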
+struct CompileUploadAssetConfig{
+input_folder:PathBuf,
+template:Option<PathBuf>,
+style:Option<rox_compiler::Style>,
+api_key:ApiKey,
+asset_id:AssetID,
+}
+async fn compile_upload_asset(config:CompileUploadAssetConfig)->AResult<()>{
+let mut dom=match config.template{
+//mr dom doesn't like tokio files
+Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
+None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
+};
+
+rox_compiler::compile(rox_compiler::CompileConfig{
+input_folder:config.input_folder,
+style:config.style,
+},&mut dom).await?;
+
+//make a binary file in a buffer in memory
+let mut data=Vec::new();
+rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;
+
+//upload it
+let context=CloudContext::new(config.api_key);
+let resp=context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
+assetId:config.asset_id,
+displayName:None,
+description:None,
+},data).await?;
+println!("UploadResponse={:?}",resp);
+Ok(())
+}
+
+struct CompileUploadPlaceConfig{
+input_folder:PathBuf,
+template:Option<PathBuf>,
+style:Option<rox_compiler::Style>,
+api_key:ApiKey,
+place_id:u64,
+universe_id:u64,
+}
+async fn compile_upload_place(config:CompileUploadPlaceConfig)->AResult<()>{
+let mut dom=match config.template{
+//mr dom doesn't like tokio files
+Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
+None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
+};
+
+rox_compiler::compile(rox_compiler::CompileConfig{
+input_folder:config.input_folder,
+style:config.style,
+},&mut dom).await?;
+
+//make a binary file in a buffer in memory
+let mut data=Vec::new();
+rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;
+
+//upload it
+let context=CloudContext::new(config.api_key);
+context.update_place(rbx_asset::cloud::UpdatePlaceRequest{
+universeId:config.universe_id,
+placeId:config.place_id,
+},data).await?;
+Ok(())
+}