41 Commits

Author SHA1 Message Date
173ad65c5c Merge pull request 'Fix regex' (#27) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #27
2025-12-20 23:56:07 +00:00
e10cfcfcb5 rox_compiler: fix regex
All checks were successful
continuous-integration/drone/push Build is passing
2025-12-20 15:54:07 -08:00
9080c20227 Merge pull request 'v0.5.1 - Faster Serialize' (#26) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #26
2025-12-13 22:36:44 +00:00
3ec2b659f3 v0.5.1
All checks were successful
continuous-integration/drone/push Build is passing
2025-12-13 14:35:32 -08:00
f2db341277 Merge pull request 'Update deps + drop lazy_regex' (#25) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #25
2025-12-13 22:33:19 +00:00
5a95ccc633 remove unused lints
All checks were successful
continuous-integration/drone/push Build is passing
2025-12-09 14:32:12 -08:00
68a0c7113e use expect instead of allow 2025-12-09 14:31:58 -08:00
2088256e29 update deps
All checks were successful
continuous-integration/drone/push Build is passing
2025-11-27 15:30:53 -08:00
d50e86f809 update deps
All checks were successful
continuous-integration/drone/push Build is passing
2025-11-09 06:06:00 -08:00
b37b08d564 drop lazy_regex dep 2025-11-09 06:04:36 -08:00
316200ead4 Merge pull request 'fix deploy' (#24) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #24
2025-08-28 11:06:55 +00:00
d66f786aca fix deploy
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-28 04:06:18 -07:00
4d36c3b9b8 Merge pull request 'put git behind feature flag to prevent openssl woes' (#23) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
Reviewed-on: #23
2025-08-26 04:03:59 +00:00
e05c1ddabf put git behind feature flag to prevent openssl woes
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-25 21:03:38 -07:00
d8a65d9d91 Merge pull request 'asset-tool: use rustls' (#22) from staging into master
Some checks failed
continuous-integration/drone/push Build is failing
Reviewed-on: #22
2025-08-26 03:59:29 +00:00
a2b4980bf3 asset-tool: use rustls
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-25 20:59:05 -07:00
395eee9a19 Merge pull request 'fix deploy' (#21) from staging into master
Some checks failed
continuous-integration/drone/push Build is failing
Reviewed-on: #21
2025-08-26 03:56:43 +00:00
ad3c446b43 fix deploy
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-25 20:54:56 -07:00
cec7307acc Merge pull request 'fix deploy' (#20) from staging into master
Some checks failed
continuous-integration/drone/push Build is failing
Reviewed-on: #20
2025-08-26 03:53:26 +00:00
6cc9ded572 fix deploy
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-25 20:53:04 -07:00
7800a70b80 Merge pull request 'Deploy Updates' (#19) from staging into master
Some checks failed
continuous-integration/drone/push Build is failing
Reviewed-on: #19
2025-08-26 03:44:23 +00:00
2faddb741f asset-tool v0.5.0 new commands + minor breaking changes
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-25 20:43:10 -07:00
f59a2e0b6a rbx_asset v0.5.0 minor breaking changes 2025-08-25 20:41:29 -07:00
e5e75ef3cb DownloadCreationsHistory
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-25 19:47:16 -07:00
fdf0c14309 detect resume files correctly in download_creations_pages_from_checkpoint 2025-08-25 19:47:16 -07:00
8885eb744b propagate error in download_creations_pages_from_checkpoint 2025-08-25 19:47:14 -07:00
c0ded31a6a delete cursor when completed 2025-08-25 19:47:12 -07:00
d9bf50e1c4 fix get_creations_pages 2025-08-25 19:47:10 -07:00
10a50948fc Merge pull request 'asset tool v0.4.12 update rbx-dom' (#16) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #16
2025-05-01 00:26:28 +00:00
2987a6b321 Merge pull request 'v0.4.4 api tweaks' (#15) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #15
2025-04-11 01:29:34 +00:00
ad8e1865f3 Merge pull request 'rbx_asset: cloud: implement new asset-delivery-api' (#14) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #14
2025-04-06 22:23:12 +00:00
27deef3dd6 Merge pull request 'UploadResponse.AssetVersion + add continue to DownloadUserInventoryJson' (#13) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #13
2025-04-04 20:56:56 +00:00
89a478eaac Merge pull request 'asset-tool: add AssetDetails + DownloadVersionV2' (#11) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #11
2025-04-03 22:54:41 +00:00
d6adc1da45 Merge pull request 'v0.4.9 roblox error in body' (#10) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #10
2025-02-11 21:54:11 +00:00
e89edf287f Merge pull request 'v0.4.8 better errors' (#9) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #9
2025-01-17 10:59:21 +00:00
4ffeaa5784 Merge pull request 'v0.4.7 user inventory + git fix' (#8) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
Reviewed-on: #8
2024-10-01 20:10:30 +00:00
607f964928 Merge pull request 'add documentation, update dependencies' (#6) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #6
2024-09-17 04:44:30 +00:00
8dc7c96f2d Merge pull request 'Create multiple assets concurrently' (#5) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
Reviewed-on: #5
2024-08-17 18:00:27 +00:00
68d751f81f Merge pull request 'use old api for download, error on http status' (#4) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #4
2024-07-16 18:21:06 +00:00
c2052be036 Merge pull request 'use old api for compile-upload-asset' (#3) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
Reviewed-on: #3
2024-07-10 17:18:05 +00:00
2e9485dea6 Merge pull request 'add old asset upload api' (#2) from staging into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #2
2024-07-10 16:45:17 +00:00
11 changed files with 760 additions and 571 deletions

View File

@@ -7,6 +7,17 @@ platform:
arch: amd64
steps:
- name: build
image: clux/muslrust:1.89.0-stable
commands:
- cargo build --release --target x86_64-unknown-linux-musl
when:
branch:
- master
event:
- push
- pull_request
- name: image
image: plugins/docker
settings:
@@ -19,6 +30,15 @@ steps:
password:
from_secret: GIT_PASS
dockerfile: Containerfile
depends_on:
- build
when:
branch:
- master
- master
event:
- push
---
kind: signature
hmac: 52507904dfaada892c05a61422dc5e147c1438419ed841d0f1e3e3ec2b193540
...

964
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
workspace = { members = ["rbx_asset", "rox_compiler"] }
[package]
name = "asset-tool"
version = "0.4.12"
version = "0.5.1"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -10,17 +10,20 @@ edition = "2021"
anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.30"
git2 = "0.20.0"
lazy-regex = "3.1.0"
rbx_asset = { path = "rbx_asset" }
rbx_binary = "1.0.0"
rbx_dom_weak = "3.0.0"
rbx_reflection_database = "1.0.3"
rbx_xml = "1.0.0"
git2 = { version = "0.20.0", optional = true }
rbx_asset = { path = "rbx_asset", features = ["gzip", "rustls-tls"], default-features = false }
rbx_binary = "2.0.0"
rbx_dom_weak = "4.0.0"
rbx_reflection_database = "2.0.1"
rbx_xml = "2.0.0"
rox_compiler = { path = "rox_compiler" }
serde_json = "1.0.111"
tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread", "fs"] }
[features]
default = []
git = ["dep:git2"]
[profile.release]
#lto = true
strip = true

View File

@@ -1,23 +1,3 @@
# Using the `rust-musl-builder` as base image, instead of
# the official Rust toolchain
FROM docker.io/clux/muslrust:stable AS chef
USER root
RUN cargo install cargo-chef
WORKDIR /app
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
# Notice that we are specifying the --target flag!
RUN cargo chef cook --release --target x86_64-unknown-linux-musl --recipe-path recipe.json
COPY . .
RUN cargo build --release --target x86_64-unknown-linux-musl --bin asset-tool
FROM docker.io/alpine:latest AS runtime
RUN addgroup -S myuser && adduser -S myuser -G myuser
COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/asset-tool /usr/local/bin/
USER myuser
ENTRYPOINT ["/usr/local/bin/asset-tool"]
FROM alpine:3.22 AS runtime
COPY /target/x86_64-unknown-linux-musl/release/asset-tool /usr/local/bin/
ENTRYPOINT ["/usr/local/bin/asset-tool"]

View File

@@ -1,6 +1,6 @@
[package]
name = "rbx_asset"
version = "0.4.10"
version = "0.5.0"
edition = "2021"
publish = ["strafesnet"]
repository = "https://git.itzana.me/StrafesNET/asset-tool"

View File

@@ -3,14 +3,13 @@ use crate::util::{serialize_u64,deserialize_u64,response_ok};
use crate::types::{ResponseError,MaybeGzippedBytes};
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub enum AssetType{
Audio,
Decal,
Model,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct CreateAssetRequest{
pub assetType:AssetType,
pub creationContext:CreationContext,
@@ -56,7 +55,7 @@ impl std::fmt::Display for CreateError{
impl std::error::Error for CreateError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UpdateAssetRequest{
pub assetId:u64,
pub displayName:Option<String>,
@@ -65,42 +64,41 @@ pub struct UpdateAssetRequest{
//woo nested roblox stuff
#[derive(Clone,Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub enum Creator{
userId(#[serde(deserialize_with="deserialize_u64",serialize_with="serialize_u64")]u64),
groupId(#[serde(deserialize_with="deserialize_u64",serialize_with="serialize_u64")]u64),
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct CreationContext{
pub creator:Creator,
pub expectedPrice:Option<u64>,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub enum ModerationState{
Reviewing,
Rejected,
Approved,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct ModerationResult{
pub moderationState:ModerationState,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct Preview{
pub asset:String,
pub altText:String,
}
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UpdatePlaceRequest{
pub universeId:u64,
pub placeId:u64,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UpdatePlaceResponse{
pub versionNumber:u64,
}
@@ -146,7 +144,7 @@ pub struct GetAssetLatestRequest{
}
*/
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetResponse{
//u64 wrapped in quotes wohoo!!
#[serde(deserialize_with="deserialize_u64")]
@@ -166,7 +164,6 @@ pub struct AssetResponse{
#[serde(default)]
pub previews:Vec<Preview>,
}
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetVersionRequest{
pub asset_id:u64,
pub version:u64,
@@ -197,13 +194,13 @@ impl AssetLocation{
}
#[derive(Debug,serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetMetadata{
pub metadataType:u32,
pub value:String,
}
#[derive(Debug,serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetLocationInfo{
pub location:Option<AssetLocation>,
pub requestId:String,
@@ -219,7 +216,7 @@ pub struct AssetVersionsRequest{
pub cursor:Option<String>,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetVersion{
pub Id:u64,
pub assetId:u64,
@@ -231,7 +228,7 @@ pub struct AssetVersion{
pub isPublished:bool,
}
#[derive(Debug,serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetVersionsResponse{
pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>,
@@ -255,13 +252,12 @@ pub struct InventoryPageRequest{
pub cursor:Option<String>,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct InventoryItem{
pub id:u64,
pub name:String,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct InventoryPageResponse{
pub totalResults:u64,//up to 50
pub filteredKeyword:Option<String>,//""
@@ -299,7 +295,7 @@ impl std::fmt::Display for OperationError{
}
impl std::error::Error for OperationError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
struct RobloxOperation{
pub path:Option<String>,
pub metadata:Option<String>,

View File

@@ -15,7 +15,7 @@ impl std::fmt::Display for PostError{
impl std::error::Error for PostError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct CreateRequest{
pub name:String,
pub description:String,
@@ -43,7 +43,7 @@ impl std::fmt::Display for CreateError{
}
impl std::error::Error for CreateError{}
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UploadRequest{
pub assetid:u64,
pub name:Option<String>,
@@ -73,17 +73,15 @@ impl std::fmt::Display for UploadError{
}
impl std::error::Error for UploadError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UploadResponse{
pub AssetId:u64,
pub AssetVersion:u64,
}
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetDetailsRequest{
pub asset_id:u64,
}
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetRequest{
pub asset_id:u64,
pub version:Option<u64>,
@@ -118,13 +116,13 @@ impl std::fmt::Display for GetAssetV2Error{
impl std::error::Error for GetAssetV2Error{}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct GetAssetV2AssetMetadata{
pub metadataType:u32,
pub value:String,
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct GetAssetV2Location{
pub assetFormat:String,// "source"
location:String,// this value is private so users cannot mutate it
@@ -137,7 +135,7 @@ impl GetAssetV2Location{
}
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct GetAssetV2Info{
pub locations:Vec<GetAssetV2Location>,
pub requestId:String,
@@ -162,7 +160,7 @@ pub enum CreatorType{
#[derive(Debug)]
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct Creator{
pub Id:u64,
pub Name:String,
@@ -173,7 +171,7 @@ pub struct Creator{
#[derive(Debug)]
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetDetails{
pub TargetId:u64,
pub ProductType:Option<String>,
@@ -209,7 +207,7 @@ pub struct AssetVersionsPageRequest{
pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetVersion{
pub Id:u64,
pub assetId:u64,
@@ -221,7 +219,7 @@ pub struct AssetVersion{
pub isPublished:bool,
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct AssetVersionsPageResponse{
pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>,
@@ -257,13 +255,12 @@ pub struct CreationsPageRequest{
pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct CreationsItem{
pub id:u64,
pub name:String,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct CreationsPageResponse{
pub totalResults:u64,//up to 50
pub filteredKeyword:Option<String>,//""
@@ -282,14 +279,14 @@ pub struct UserInventoryPageRequest{
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UserInventoryItemOwner{
pub userId:u64,
pub username:String,
pub buildersClubMembershipType:String,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UserInventoryItem{
pub userAssetId:u64,
pub assetId:u64,
@@ -301,7 +298,7 @@ pub struct UserInventoryItem{
pub updated:chrono::DateTime<chrono::Utc>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
#[expect(nonstandard_style)]
pub struct UserInventoryPageResponse{
pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>,
@@ -324,13 +321,13 @@ impl std::fmt::Display for SetAssetsPermissionsError{
impl std::error::Error for SetAssetsPermissionsError{}
#[derive(serde::Serialize)]
#[allow(nonstandard_style)]
#[expect(nonstandard_style)]
struct AssetPermissions{
assetId:u64,
grantToDependencies:bool,//true
}
#[derive(serde::Serialize)]
#[allow(nonstandard_style)]
#[expect(nonstandard_style)]
struct SetAssetsPermissions<'a>{
subjectType:&'a str,// "Universe"
subjectId:&'a str,// "4422715291"

View File

@@ -10,8 +10,8 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
futures = "0.3.30"
lazy-regex = "3.1.0"
regex = { version = "1.11.3", default-features = false, features = ["unicode-perl"] }
rayon = "1.8.0"
rbx_dom_weak = "3.0.0"
rbx_xml = "1.0.0"
rbx_dom_weak = "4.0.0"
rbx_xml = "2.0.0"
tokio = { version = "1.35.1", features = ["fs"] }

View File

@@ -28,6 +28,16 @@ impl std::fmt::Display for PropertiesOverride{
}
}
pub(crate) fn sanitize(s:&str)->std::borrow::Cow<'_,str>{
lazy_regex::regex!(r"[^A-Za-z0-9.-]").replace_all(s,"_")
#[macro_export]
macro_rules! lazy_regex{
($r:literal)=>{{
use regex::Regex;
use std::sync::LazyLock;
static RE:LazyLock<Regex>=LazyLock::new(||Regex::new($r).unwrap());
&RE
}};
}
pub(crate) fn sanitize(s:&str)->std::borrow::Cow<'_,str>{
lazy_regex!(r"[^A-Za-z0-9.-]").replace_all(s,"_")
}

View File

@@ -2,6 +2,7 @@ use std::path::{Path,PathBuf};
use futures::{StreamExt, TryStreamExt};
use tokio::io::AsyncReadExt;
use crate::lazy_regex;
use crate::common::{sanitize,Style,PropertiesOverride};
//holy smokes what am I doing lmao
@@ -202,7 +203,7 @@ impl ScriptWithOverrides{
let mut count=0;
for line in source.lines(){
//only string type properties are supported atm
if let Some(captures)=lazy_regex::regex!(r#"^\-\-\s*Properties\.([A-Za-z]\w*)\s*\=\s*"(\w+)"$"#)
if let Some(captures)=lazy_regex!(r#"^\-\-\s*Properties\.([A-Za-z]\w*)\s*\=\s*"(\w+)"$"#)
.captures(line){
count+=line.len();
match &captures[1]{
@@ -339,7 +340,7 @@ impl CompileNode{
//reject goobers
let is_goober=matches!(style,Some(Style::Rojo));
let (ext_len,file_discernment)={
if let Some(captures)=lazy_regex::regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
if let Some(captures)=lazy_regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
.captures(file_name.as_str()){
let ext=&captures[1];
(ext.len(),match ext{
@@ -353,7 +354,7 @@ impl CompileNode{
".server.lua"=>FileDiscernment::Script(ScriptHint::Script),
_=>panic!("Regex failed"),
})
}else if let Some(captures)=lazy_regex::regex!(r"^.*(\.rbxmx|\.lua)$")
}else if let Some(captures)=lazy_regex!(r"^.*(\.rbxmx|\.lua)$")
.captures(file_name.as_str()){
let ext=&captures[1];
(ext.len(),match ext{

View File

@@ -2,14 +2,16 @@ use std::io::Read;
use std::path::{Path,PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::{anyhow,Result as AResult};
use futures::StreamExt;
use futures::{StreamExt,TryStreamExt};
use rbx_asset::cloud::{ApiKey,Context as CloudContext};
use rbx_asset::cookie::{Cookie,Context as CookieContext,AssetVersion,CreationsItem};
type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
#[cfg(feature="git")]
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;
const CONCURRENT_FS:usize=64;
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
@@ -30,6 +32,7 @@ enum Commands{
DownloadVersionV2(DownloadVersionSubcommand),
DownloadDecompile(DownloadDecompileSubcommand),
DownloadCreationsJson(DownloadCreationsJsonSubcommand),
DownloadCreationsHistory(DownloadCreationsHistorySubcommand),
DownloadUserInventoryJson(DownloadUserInventoryJsonSubcommand),
CreateAsset(CreateAssetSubcommand),
CreateAssetMedia(CreateAssetMediaSubcommand),
@@ -41,7 +44,9 @@ enum Commands{
CompileUploadAsset(CompileUploadAssetSubcommand),
CompileUploadPlace(CompileUploadPlaceSubcommand),
Decompile(DecompileSubcommand),
#[cfg(feature="git")]
DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
#[cfg(feature="git")]
DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
RunLuau(RunLuauSubcommand),
}
@@ -605,6 +610,7 @@ async fn main()->AResult<()>{
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
subcommand.continue_from_cursor.unwrap_or(false),
).await,
Commands::DownloadCreationsHistory(subcommand)=>subcommand.run().await,
Commands::DownloadUserInventoryJson(subcommand)=>download_user_inventory_json(
cookie_from_args(
subcommand.cookie_literal,
@@ -736,6 +742,7 @@ async fn main()->AResult<()>{
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
}).await,
#[cfg(feature="git")]
Commands::DecompileHistoryIntoGit(subcommand)=>decompile_history_into_git(DecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email,
@@ -746,6 +753,7 @@ async fn main()->AResult<()>{
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
}).await,
#[cfg(feature="git")]
Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email,
@@ -1166,10 +1174,10 @@ async fn get_creations_pages(
loop{
let mut page=context.get_creations_page(&config).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
config.cursor=page.nextPageCursor;
if config.cursor.is_none(){
break;
}
config.cursor=page.nextPageCursor;
}
Ok(())
}
@@ -1182,15 +1190,34 @@ async fn download_creations_pages_from_checkpoint(context:&CookieContext,owner:r
let (mut asset_list,mut config)=if continue_from_cursor{
// load state from files
let (versions,cursor)=tokio::try_join!(
let (versions,cursor)=tokio::join!(
tokio::fs::read(versions_path.as_path()),
tokio::fs::read_to_string(cursor_path.as_path()),
)?;
);
// allow versions to not exist
let (versions,cursor)=match (versions,cursor){
// continue downloading
(Ok(versions),Ok(cursor))=>(serde_json::from_slice(&versions)?,Some(cursor)),
// already downloaded
(Ok(versions),Err(e)) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>return Ok(serde_json::from_slice(&versions)?),
// not downloaded
(Err(e),result) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>{
match result{
Ok(_)=>{},
Err(e) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>{},
Err(e)=>Err(e)?,
}
(Vec::new(),None)
},
// other errors
(Ok(_),Err(e))=>Err(e)?,
(Err(e),_)=>Err(e)?,
};
(
serde_json::from_slice(&versions)?,
versions,
rbx_asset::cookie::CreationsPageRequest{
owner,
cursor:Some(cursor),
cursor,
}
)
}else{
@@ -1204,16 +1231,21 @@ async fn download_creations_pages_from_checkpoint(context:&CookieContext,owner:r
)
};
match get_creations_pages(&context,&mut asset_list,&mut config).await{
Ok(())=>println!("Pages polling complete"),
Err(e)=>println!("Error: {e}"),
}
get_creations_pages(&context,&mut asset_list,&mut config).await?;
let cursor_fut=async{
if let Some(cursor)=config.cursor{
println!("writing cursor state...");
// there was a problem, write out cursor
tokio::fs::write(cursor_path,cursor).await?;
}else{
// no cursor
if let Err(e)=tokio::fs::remove_file(cursor_path).await{
match e.kind(){
std::io::ErrorKind::NotFound=>println!("Cannot delete cursor: file not found"),
_=>Err(e)?,
}
}
}
Ok(())
};
@@ -1300,6 +1332,148 @@ async fn download_user_inventory_json(cookie:Cookie,user_id:u64,output_folder:Pa
Ok(())
}
/// Download all versions of all assets created by a group or user. The output is written to a folder structure in the output directory.
#[derive(Args)]
struct DownloadCreationsHistorySubcommand{
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long,group="owner",required=true)]
group_id:Option<u64>,
#[arg(long,group="owner",required=true)]
user_id:Option<u64>,
#[arg(long)]
r#continue:Option<bool>,
}
impl DownloadCreationsHistorySubcommand{
async fn run(self)->AResult<()>{
download_creations_history(
cookie_from_args(
self.cookie_literal,
self.cookie_envvar,
self.cookie_file,
).await?,
api_key_from_args(
self.api_key_literal,
self.api_key_envvar,
self.api_key_file,
).await?,
owner_from_args(
self.user_id,
self.group_id,
)?,
self.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
self.r#continue.unwrap_or(false),
).await
}
}
async fn download_creations_history(cookie:Cookie,api_key:ApiKey,owner:rbx_asset::cookie::Owner,output_folder:PathBuf,r#continue:bool)->AResult<()>{
let cookie_context=CookieContext::new(cookie);
let cloud_context=CloudContext::new(api_key);
// get list of all assets in inventory
let asset_list=download_creations_pages_from_checkpoint(&cookie_context,owner,output_folder.as_path(),r#continue).await?;
// create folder directories
let asset_folders:Vec<PathBuf> ={
futures::stream::iter(asset_list.iter().map(|asset|async{
// create asset folder
let mut asset_folder=output_folder.clone();
asset_folder.push(asset.id.to_string());
tokio::fs::create_dir_all(asset_folder.as_path()).await?;
Ok::<_,anyhow::Error>(asset_folder)
}))
.buffered(CONCURRENT_FS)
.try_collect().await?
};
#[expect(dead_code)]
#[derive(Debug)]
enum Error<'a>{
NoLocations(Job<'a>),
GetVersionLocationError(rbx_asset::cloud::GetError),
GetError(rbx_asset::cloud::GetError),
Io(std::io::Error),
}
#[derive(Clone,Copy,Debug)]
struct Job<'a>{
path:&'a PathBuf,
asset_id:u64,
asset_version:u64,
}
let mut job_list=Vec::new();
// create flattened futures stream to parallel download all asset versions
for (path,asset) in asset_folders.iter().zip(asset_list){
// save versions file
let mut versions_path=path.to_owned();
versions_path.push("versions.json");
let version_history=if r#continue{
let file=tokio::fs::read(versions_path.as_path()).await?;
serde_json::from_slice(&file)?
}else{
println!("Downloading history for {} - {}",asset.id,asset.name);
let version_history=get_version_history(&cookie_context,asset.id).await?;
println!("Found {} versions",version_history.len());
tokio::fs::write(versions_path,serde_json::to_string(&version_history)?).await?;
version_history
};
job_list.extend(version_history.into_iter().map(|asset_version|
Job{
path,
asset_id:asset.id,
asset_version:asset_version.assetVersionNumber,
}
));
}
println!("Completed jobs list. Number of jobs: {}",job_list.len());
futures::stream::iter(job_list).map(async|job|{
let mut dest=job.path.to_owned();
dest.push(format!("{}_v{}.rbxl",job.asset_id,job.asset_version));
//if the file already exists, don't try downloading it again
if tokio::fs::try_exists(dest.as_path()).await.map_err(Error::Io)?{
return Ok(());
}
let location=cloud_context.get_asset_version_location(rbx_asset::cloud::GetAssetVersionRequest{
asset_id:job.asset_id,
version:job.asset_version,
}).await.map_err(Error::GetVersionLocationError)?;
let location=location.location.ok_or(Error::NoLocations(job))?;
let downloaded=cloud_context.get_asset(&location).await.map_err(Error::GetError)?;
tokio::fs::write(dest,downloaded.to_vec().map_err(Error::Io)?).await.map_err(Error::Io)?;
Ok(())
})
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(async|result|{
match result{
Ok(())=>{},
Err(Error::NoLocations(job))=>println!("Job failed due to no locations: asset_id={} version={}",job.asset_id,job.asset_version),
Err(e)=>println!("Error: {e:?}"),
}
}).await;
println!("All jobs complete.");
Ok(())
}
async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
let mut page_request=rbx_asset::cookie::AssetVersionsPageRequest{
asset_id,
@@ -1543,6 +1717,7 @@ async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
Ok(())
}
#[cfg(feature="git")]
struct WriteCommitConfig{
git_committer_name:String,
git_committer_email:String,
@@ -1553,6 +1728,7 @@ struct WriteCommitConfig{
write_scripts:bool,
}
#[cfg(feature="git")]
async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,rox_compiler::DecompiledContext)>,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{
let (asset_version,context)=b??;
println!("writing files for version {}",asset_version.assetVersionNumber);
@@ -1634,6 +1810,7 @@ async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,ro
Ok(())
}
#[cfg(feature="git")]
struct DecompileHistoryConfig{
git_committer_name:String,
git_committer_email:String,
@@ -1645,6 +1822,7 @@ struct DecompileHistoryConfig{
write_scripts:bool,
}
#[cfg(feature="git")]
async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
//use prexisting versions list
let mut versions_path=config.input_folder.clone();
@@ -1683,6 +1861,7 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
Ok(())
}
#[cfg(feature="git")]
struct DownloadAndDecompileHistoryConfig{
cookie:Cookie,
asset_id:AssetID,
@@ -1695,6 +1874,7 @@ struct DownloadAndDecompileHistoryConfig{
write_scripts:bool,
}
#[cfg(feature="git")]
async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
let context=CookieContext::new(config.cookie);