103 Commits

Author SHA1 Message Date
7ba16464c4 handle file variations correctly 2025-08-25 19:47:44 -07:00
66230d031c do not redownload 2025-08-25 19:47:44 -07:00
f6aa44ffc5 return list verbatim if no cursor 2025-08-25 19:47:44 -07:00
ae166d8509 do not error on remove 2025-08-25 19:47:44 -07:00
a4ae552169 fix cursor bug 2025-08-25 19:47:44 -07:00
23d687e072 explicit error path 2025-08-25 19:47:44 -07:00
71bbfa0128 fix stack overflow 2025-08-25 19:47:44 -07:00
89da9108c2 allow the versions to not exist 2025-08-25 19:47:44 -07:00
04d5592aaf delete cursor file if completed 2025-08-25 19:47:44 -07:00
bd3605ab87 allow the cursor to not exist 2025-08-25 19:47:44 -07:00
13cff42bbc fix error path 2025-08-25 19:47:44 -07:00
60ba5511ad plumb api key through DownloadCreationsHistory 2025-08-25 17:54:52 -07:00
cf67ad510b allow resume from files 2025-08-25 17:42:04 -07:00
e6a548a1a1 get_asset_v2 2025-08-25 17:42:04 -07:00
d2bee93fbb DownloadCreationsHistory 2025-08-25 17:42:04 -07:00
55d5f97f0b rbx_asset: update GetAssetV2Info 2025-08-25 17:41:58 -07:00
7665ccc5d1 rbx_asset: accept reference in get_asset_versions_page 2025-08-25 16:04:56 -07:00
24f50de2ae use sort_by_key 2025-08-25 15:47:26 -07:00
afd8a74e34 add continue from cursor option to DownloadCreationsJson 2025-08-25 15:47:26 -07:00
ddf294c6f9 rename cursor.json to just cursor 2025-08-25 15:35:25 -07:00
a9a8c01cb1 do creations pages like user 2025-08-25 15:35:13 -07:00
287969b7d5 update UserInventoryItemOwner.buildersClubMembershipType type 2025-08-23 21:34:50 -07:00
b80868e722 v0.4.10 Luau Execution API 2025-08-09 00:06:13 -07:00
e1503ba898 v0.4.10-pre2 2025-08-08 20:35:33 -07:00
97086e351f rbx_asset: rename LuauSessionLatestRequest 2025-08-08 20:35:33 -07:00
89d96db03c rbx_asset: unversioned request 2025-08-08 20:32:52 -07:00
6b9ae59e7f v0.4.10-pre1 Luau Execution API 2025-08-08 20:27:46 -07:00
dd4344f514 Luau Execution API (#18) 2025-08-09 03:26:03 +00:00
    Tested to some extent
    Reviewed-on: #18
    Co-authored-by: Rhys Lloyd <krakow20@gmail.com>
    Co-committed-by: Rhys Lloyd <krakow20@gmail.com>
8a400faae2 rbx_asset: rename GetError variant 2025-08-08 17:33:26 -07:00
6dff5f5145 rbx_asset: relax operation allocation requirement 2025-08-08 16:59:24 -07:00
f3f048e293 v0.4.9 rustls passthru 2025-08-05 21:20:31 -07:00
10c9ddd696 goofy feature stuff 2025-08-05 21:17:01 -07:00
d73567050c update deps 2025-08-05 21:14:30 -07:00
c1e53e42bc remove attributes OBE 2025-08-05 21:09:47 -07:00
b3defd31fc replace allow with expect 2025-08-05 21:08:23 -07:00
bf3b429c66 rbx_asset v0.4.8 default field 2025-07-01 04:56:23 -07:00
20899a3fae rbx_asset: default field 2025-07-01 04:55:56 -07:00
d60cedf430 rbx_asset: v0.4.7 roblox api changed 2025-07-01 01:12:43 -07:00
ad435fb8c9 update deps 2025-07-01 01:12:43 -07:00
9f1bdd6a1f rbx_asset: roblox api changed 2025-07-01 01:08:51 -07:00
0bf0b92efb asset location commands 2025-06-13 20:04:18 -07:00
41cd60c459 untab 2025-06-13 20:04:07 -07:00
52a0bf221b rbx_asset: visibility mistake 2025-06-11 23:34:21 -07:00
f2bd298cd1 rbx_asset: try out ref for funsies 2025-06-11 23:34:21 -07:00
89bbe00e3d Set Universe Asset Permissions (#17) 2025-06-12 02:33:12 +00:00
    This implements an endpoint to set universe asset permissions.
    Reviewed-on: #17
    Co-authored-by: Quaternions <krakow20@gmail.com>
    Co-committed-by: Quaternions <krakow20@gmail.com>
369f19452c rbx_asset: omit Cursor 2025-05-13 23:44:11 -07:00
9e78be3d09 rbx_asset v0.4.5 fix error type 2025-05-13 23:28:00 -07:00
70414d94ae clippy fixes 2025-05-13 23:26:15 -07:00
819eea1b4a rbx_asset: error type is too damn big 2025-05-13 23:23:58 -07:00
9eabb0197c asset-tool v0.4.12 update rbx-dom 2025-04-30 17:23:36 -07:00
fa9d42fc1f update deps 2025-04-30 17:13:02 -07:00
8f754f0bca update rbx-dom 2025-04-30 17:12:49 -07:00
450b6a0829 update deps 2025-04-10 17:26:47 -07:00
091a2a92f1 rbx_asset: v0.4.4 parse string ints + save intermediate allocation 2025-04-10 17:24:49 -07:00
31aae80cc5 rbx_asset: change api to save intermediate allocation 2025-04-10 17:24:49 -07:00
041cc75015 rbx_asset: move code into util, types 2025-04-10 17:24:49 -07:00
d77312309f rbx_asset: helpers for integers within a string 2025-04-10 17:24:49 -07:00
50145460b9 rbx_asset: simplify gzip logic 2025-04-10 00:25:34 -07:00
df2a5bb9ce rbx_asset: v0.4.3 optional AssetLocation 2025-04-08 16:54:59 -07:00
e40041a894 rbx_asset: change api for asset location again 2025-04-08 16:54:14 -07:00
45c1e52c0f rbx_asset: v0.4.2 2025-04-05 14:53:47 -07:00
0196f47374 rbx_asset: description is optional 2025-04-05 14:53:32 -07:00
a8163014ad rbx_asset: v0.4.1 2025-04-05 14:23:26 -07:00
4d26e7ad19 rbx_asset: discover asset location 2025-04-05 14:21:57 -07:00
99077cf467 rbx_asset: v0.4.0 2025-04-05 13:51:16 -07:00
302603998e rbx_asset: cloud: implement new asset-delivery-api 2025-04-05 13:09:59 -07:00
71cae5c089 rbx_asset: cloud: tweak asset info requests, remove get_asset 2025-04-05 13:09:25 -07:00
fb9dd8660d rbx_asset: deduplicate common code 2025-04-05 12:59:44 -07:00
64e4887b83 rbx_asset: use #[serde(default)] instead of Option<Vec<_>> 2025-04-05 12:59:24 -07:00
d125829a00 rbx_asset: rename CloudContext & CookieContext to Context 2025-04-05 12:23:39 -07:00
5509fd2166 rbx_asset: context fields should be private 2025-04-05 12:23:13 -07:00
e17db96e86 asset-tool v0.4.11 continue feature for DownloadUserInventoryJson 2025-04-04 13:53:19 -07:00
875b059074 asset-tool: tweak get_user_inventory_pages 2025-04-04 13:53:19 -07:00
9b1c709e7c asset-tool: add continue to DownloadUserInventoryJson 2025-04-04 13:36:18 -07:00
ebd48269b8 rbx_asset: v0.3.4 UploadResponse.AssetVersion 2025-04-03 16:14:33 -07:00
5a4ac0e7f2 rbx_asset: include asset version in UploadResponse 2025-04-03 16:13:45 -07:00
68ebbad7a7 rbx_asset: v0.3.3 optional field 2025-04-03 13:56:10 -07:00
c99b752738 rbx_asset: optional field on AssetDetails 2025-04-03 13:56:02 -07:00
4f798e5f07 rbx_asset: v0.3.1 fix missing field 2025-04-03 13:55:56 -07:00
39fa74d44a rbx_asset: fix missing field on GetAssetV2Location 2025-04-03 13:55:56 -07:00
edb5ea7648 rbx_asset: v0.3.1 CreatorType traits 2025-04-03 12:57:17 -07:00
504ff40385 rbx_asset: derive additional traits for CreatorType 2025-04-03 12:56:42 -07:00
89c0ee2cc2 update deps 2025-04-03 12:53:09 -07:00
f7e565bd0b asset-tool: v0.4.10 2025-04-03 12:47:02 -07:00
fe8062d8e0 rbx_asset: v0.3.0 add CreatorType + get_asset_details + get_asset_v2 2025-04-03 12:47:02 -07:00
e6fe04aa73 asset-tool: add AssetDetails 2025-04-03 12:47:02 -07:00
656de62bdc asset-tool: add DownloadVersionV2 2025-04-03 12:47:02 -07:00
4d90a74a82 rbx_asset: get_asset_v2 2025-04-03 12:47:02 -07:00
fb6fb67954 rbx_asset: tweak get_asset 2025-04-03 12:45:18 -07:00
b0f1e964a6 rbx_asset: add get_asset_details 2025-04-03 12:45:18 -07:00
aea777ecd3 rbx_asset: add CreatorType 2025-04-03 12:43:16 -07:00
6eca84a08a DownloadVersion command 2025-02-12 14:55:19 -08:00
b7bab46e04 v0.4.9 roblox error message in body 2025-02-11 13:53:16 -08:00
0413767ef9 v0.2.6 roblox error message in body 2025-02-11 13:50:31 -08:00
b69698fd8e rbx_asset: special error for special roblox 2025-02-11 13:43:11 -08:00
309fc2494d update deps 2025-02-11 13:31:14 -08:00
b352707b99 rbx_asset v0.2.5 better error info 2024-12-13 19:55:45 -08:00
81f411272f rbx_asset keep typed UploadResponse 2024-12-13 19:55:19 -08:00
e45f3c2cf9 cloud: use response_ok helper function 2024-12-13 19:35:31 -08:00
3cd65158a0 update deps 2024-12-13 19:19:04 -08:00
38d78ff2c5 cookie: refactor http errors to include more useful information 2024-12-13 19:18:35 -08:00
c49f9e4dd3 cookie: prepend in library 2024-12-13 19:01:37 -08:00
a99b5a2666 update Roblox API 2024-10-23 10:43:59 -07:00
13 changed files with 2425 additions and 639 deletions

Cargo.lock (generated, 1472 changed lines): file diff suppressed because it is too large.

Cargo.toml
@@ -1,7 +1,7 @@
workspace = { members = ["rbx_asset", "rox_compiler"] }
[package]
name = "asset-tool"
version = "0.4.7"
version = "0.4.12"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -10,14 +10,13 @@ edition = "2021"
anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.30"
git2 = "0.18.1"
git2 = "0.20.0"
lazy-regex = "3.1.0"
pollster = "0.3.0"
rbx_asset = { path = "rbx_asset" }
rbx_binary = "0.7.4"
rbx_dom_weak = "2.7.0"
rbx_reflection_database = "0.2.10"
rbx_xml = "0.13.3"
rbx_binary = "1.0.0"
rbx_dom_weak = "3.0.0"
rbx_reflection_database = "1.0.3"
rbx_xml = "1.0.0"
rox_compiler = { path = "rox_compiler" }
serde_json = "1.0.111"
tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread", "fs"] }

rbx_asset/Cargo.toml

@@ -1,6 +1,6 @@
[package]
name = "rbx_asset"
version = "0.2.3"
version = "0.4.10"
edition = "2021"
publish = ["strafesnet"]
repository = "https://git.itzana.me/StrafesNET/asset-tool"
@@ -10,10 +10,22 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = ["gzip", "default-tls"]
gzip = ["dep:flate2"]
default-tls = ["reqwest/default-tls"]
rustls-tls = ["reqwest/rustls-tls"]
[dependencies]
bytes = "1.10.1"
chrono = { version = "0.4.38", features = ["serde"] }
flate2 = "1.0.29"
reqwest = { version = "0.12.4", features = ["json","multipart"] }
flate2 = { version = "1.0.29", optional = true }
reqwest = { version = "0.12.4", features = [
"json", "multipart",
# default features
"charset", "http2", "system-proxy"
], default-features = false }
serde = { version = "1.0.199", features = ["derive"] }
serde_json = "1.0.111"
url = "2.5.0"

rbx_asset/src/body.rs (new file, 44 lines)

@@ -0,0 +1,44 @@
use reqwest::Body;
pub trait ContentType:Into<Body>{
fn content_type(&self)->&'static str;
}
#[derive(Clone,Copy,Debug)]
pub struct Json<T>(pub(crate)T);
impl<T:Into<Body>> From<Json<T>> for Body{
fn from(Json(value):Json<T>)->Self{
value.into()
}
}
impl<T:Into<Body>> ContentType for Json<T>{
fn content_type(&self)->&'static str{
"application/json"
}
}
#[derive(Clone,Copy,Debug)]
pub struct Text<T>(pub(crate)T);
impl<T:Into<Body>> From<Text<T>> for Body{
fn from(Text(value):Text<T>)->Self{
value.into()
}
}
impl<T:Into<Body>> ContentType for Text<T>{
fn content_type(&self)->&'static str{
"text/plain"
}
}
#[derive(Clone,Copy,Debug)]
pub struct Binary<T>(pub(crate)T);
impl<T:Into<Body>> From<Binary<T>> for Body{
fn from(Binary(value):Binary<T>)->Self{
value.into()
}
}
impl<T:Into<Body>> ContentType for Binary<T>{
fn content_type(&self)->&'static str{
"application/octet-stream"
}
}
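A minimal usage sketch of these wrappers (crate-internal, since Json, Text and Binary are pub(crate)); post_json is a hypothetical helper that mirrors how the crate's own post() methods derive the Content-Type header from the wrapper type:

async fn post_json(client:&reqwest::Client,url:url::Url,body:String)->reqwest::Result<reqwest::Response>{
	// wrap the body so the matching Content-Type is selected by the wrapper type
	let body=Json(body);
	client.post(url)
		// read the header value before the wrapper is moved into the request
		.header("Content-Type",body.content_type())
		// Json<T> converts into reqwest::Body through the From impl above
		.body(body)
		.send().await
}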

rbx_asset/src/cloud.rs

@@ -1,3 +1,7 @@
use crate::body::{Binary,ContentType,Json};
use crate::util::{serialize_u64,deserialize_u64,response_ok};
use crate::types::{ResponseError,MaybeGzippedBytes};
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub enum AssetType{
@@ -29,7 +33,7 @@ pub struct AssetOperation{
operation:RobloxOperation,
}
impl AssetOperation{
pub async fn try_get_asset(&self,context:&CloudContext)->Result<AssetResponse,AssetOperationError>{
pub async fn try_get_asset(&self,context:&Context)->Result<AssetResponse,AssetOperationError>{
serde_json::from_value(
self.operation
.try_get_reponse(context).await
@@ -40,6 +44,7 @@ impl AssetOperation{
#[derive(Debug)]
pub enum CreateError{
Parse(url::ParseError),
Response(ResponseError),
Serialize(serde_json::Error),
Reqwest(reqwest::Error),
}
@@ -62,8 +67,8 @@ pub struct UpdateAssetRequest{
#[derive(Clone,Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub enum Creator{
userId(String),//u64 string
groupId(String),//u64 string
userId(#[serde(deserialize_with="deserialize_u64",serialize_with="serialize_u64")]u64),
groupId(#[serde(deserialize_with="deserialize_u64",serialize_with="serialize_u64")]u64),
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
@@ -102,6 +107,7 @@ pub struct UpdatePlaceResponse{
#[derive(Debug)]
pub enum UpdateError{
ParseError(url::ParseError),
Response(ResponseError),
SerializeError(serde_json::Error),
Reqwest(reqwest::Error),
}
@@ -112,10 +118,10 @@ impl std::fmt::Display for UpdateError{
}
impl std::error::Error for UpdateError{}
struct GetAssetOperationRequest{
operation_id:String,
struct GetAssetOperationRequest<'a>{
operation_id:&'a str,
}
pub struct GetAssetInfoRequest{
pub struct GetAssetLatestRequest{
pub asset_id:u64,
}
/*
@@ -142,33 +148,34 @@ pub struct GetAssetInfoRequest{
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetResponse{
pub assetId:String,//u64 wrapped in quotes wohoo!!
//u64 wrapped in quotes wohoo!!
#[serde(deserialize_with="deserialize_u64")]
#[serde(serialize_with="serialize_u64")]
pub assetId:u64,
pub assetType:AssetType,
pub creationContext:CreationContext,
pub description:String,
pub description:Option<String>,
pub displayName:String,
pub path:String,
pub revisionCreateTime:chrono::DateTime<chrono::Utc>,
pub revisionId:String,//u64
#[serde(deserialize_with="deserialize_u64")]
#[serde(serialize_with="serialize_u64")]
pub revisionId:u64,
pub moderationResult:ModerationResult,
pub icon:Option<String>,
pub previews:Option<Vec<Preview>>,
#[serde(default)]
pub previews:Vec<Preview>,
}
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetVersionRequest{
pub asset_id:u64,
pub version:u64,
}
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetRequest{
pub asset_id:u64,
pub version:Option<u64>,
}
#[derive(Debug)]
pub enum GetError{
ParseError(url::ParseError),
Parse(url::ParseError),
Response(ResponseError),
Reqwest(reqwest::Error),
IO(std::io::Error)
}
impl std::fmt::Display for GetError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -177,6 +184,36 @@ impl std::fmt::Display for GetError{
}
impl std::error::Error for GetError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
pub struct AssetLocation(
// the location is private so users cannot mutate it
String
);
impl AssetLocation{
pub fn location(&self)->&str{
let Self(location)=self;
location
}
}
#[derive(Debug,serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetMetadata{
pub metadataType:u32,
pub value:String,
}
#[derive(Debug,serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetLocationInfo{
pub location:Option<AssetLocation>,
pub requestId:String,
pub isArchived:bool,
pub assetTypeId:u32,
#[serde(default)]
pub assetMetadatas:Vec<AssetMetadata>,
pub isRecordable:bool,
}
pub struct AssetVersionsRequest{
pub asset_id:u64,
pub cursor:Option<String>,
@@ -203,6 +240,7 @@ pub struct AssetVersionsResponse{
#[derive(Debug)]
pub enum AssetVersionsError{
ParseError(url::ParseError),
Response(ResponseError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for AssetVersionsError{
@@ -238,6 +276,7 @@ pub struct InventoryPageResponse{
#[derive(Debug)]
pub enum InventoryPageError{
ParseError(url::ParseError),
Response(ResponseError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for InventoryPageError{
@@ -279,33 +318,120 @@ impl RobloxOperation{
None=>self.path.as_deref()?.get(11..),
}
}
pub async fn try_get_reponse(&self,context:&CloudContext)->Result<serde_json::Value,OperationError>{
pub async fn try_get_reponse(&self,context:&Context)->Result<serde_json::Value,OperationError>{
context.get_asset_operation(GetAssetOperationRequest{
operation_id:self.operation_id()
.ok_or(OperationError::NoOperationId)?
.to_owned(),
.ok_or(OperationError::NoOperationId)?,
}).await.map_err(OperationError::Get)?
.response.ok_or(OperationError::NotDone)
}
}
//idk how to do this better
enum ReaderType<R:std::io::Read>{
GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
Raw(std::io::BufReader<R>),
#[derive(Debug)]
pub enum LuauSessionError{
Get(GetError),
Unspecified,
NotDone,
NoOutput,
NoError,
}
fn maybe_gzip_decode<R:std::io::Read>(input:R)->std::io::Result<ReaderType<R>>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
match &peek[0..2]{
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
_=>Ok(ReaderType::Raw(buf)),
impl std::fmt::Display for LuauSessionError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
let mut contents=Vec::new();
readable.read_to_end(&mut contents)?;
Ok(contents)
impl std::error::Error for LuauSessionError{}
#[derive(Debug,serde::Serialize)]
#[expect(nonstandard_style)]
pub struct LuauSessionCreate<'a>{
pub script:&'a str,
#[serde(skip_serializing_if="Option::is_none")]
pub user:Option<&'a str>,
#[serde(skip_serializing_if="Option::is_none")]
pub timeout:Option<&'a str>,
#[serde(skip_serializing_if="Option::is_none")]
pub binaryInput:Option<&'a str>,
#[serde(skip_serializing_if="Option::is_none")]
pub enableBinaryOutput:Option<bool>,
#[serde(skip_serializing_if="Option::is_none")]
pub binaryOutputUri:Option<&'a str>,
}
#[derive(Debug,serde::Deserialize)]
#[expect(nonstandard_style)]
pub enum LuauSessionState{
STATE_UNSPECIFIED,
PROCESSING,
COMPLETE,
FAILED,
}
#[derive(Debug,serde::Deserialize)]
pub struct LuauError{
pub code:String,
pub message:String,
}
#[derive(Debug,serde::Deserialize)]
pub struct LuauResults{
pub results:Vec<serde_json::Value>,
}
#[derive(Debug,serde::Deserialize)]
#[expect(nonstandard_style)]
pub struct LuauSessionResponse{
path:String,
#[serde(deserialize_with="deserialize_u64")]
pub user:u64,
pub state:LuauSessionState,
pub script:String,
pub error:Option<LuauError>,
pub output:Option<LuauResults>,
pub binaryInput:String,
pub enableBinaryOutput:bool,
pub binaryOutputUri:String,
}
impl LuauSessionResponse{
pub fn path(&self)->&str{
&self.path
}
pub async fn try_get_result(&self,context:&Context)->Result<Result<LuauResults,LuauError>,LuauSessionError>{
let response=context.get_luau_session(self).await.map_err(LuauSessionError::Get)?;
match response.state{
LuauSessionState::STATE_UNSPECIFIED=>Err(LuauSessionError::Unspecified),
LuauSessionState::PROCESSING=>Err(LuauSessionError::NotDone),
LuauSessionState::COMPLETE=>Ok(Ok(response.output.ok_or(LuauSessionError::NoOutput)?)),
LuauSessionState::FAILED=>Ok(Err(response.error.ok_or(LuauSessionError::NoError)?)),
}
}
}
pub trait AsSessionPath{
fn into_session_path(&self)->impl AsRef<str>;
}
impl AsSessionPath for LuauSessionResponse{
fn into_session_path(&self)->impl AsRef<str>{
&self.path
}
}
pub struct LuauSessionLatestRequest{
pub universe_id:u64,
pub place_id:u64,
}
impl AsSessionPath for LuauSessionLatestRequest{
fn into_session_path(&self)->impl AsRef<str>{
let universe_id=self.universe_id;
let place_id=self.place_id;
format!("universes/{universe_id}/places/{place_id}/luau-execution-session-tasks")
}
}
pub struct LuauSessionVersionRequest{
pub universe_id:u64,
pub place_id:u64,
pub version_id:u64,
}
impl AsSessionPath for LuauSessionVersionRequest{
fn into_session_path(&self)->impl AsRef<str>{
let universe_id=self.universe_id;
let place_id=self.place_id;
let version_id=self.version_id;
format!("universes/{universe_id}/places/{place_id}/versions/{version_id}/luau-execution-session-tasks")
}
}
#[derive(Clone)]
@@ -320,12 +446,12 @@ impl ApiKey{
}
#[derive(Clone)]
pub struct CloudContext{
pub api_key:String,
pub client:reqwest::Client,
pub struct Context{
api_key:String,
client:reqwest::Client,
}
impl CloudContext{
impl Context{
pub fn new(api_key:ApiKey)->Self{
Self{
api_key:api_key.get(),
@@ -337,9 +463,10 @@ impl CloudContext{
.header("x-api-key",self.api_key.as_str())
.send().await
}
async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,reqwest::Error>{
async fn post(&self,url:url::Url,body:impl ContentType)->Result<reqwest::Response,reqwest::Error>{
self.client.post(url)
.header("x-api-key",self.api_key.as_str())
.header("Content-Type",body.content_type())
.body(body)
.send().await
}
@@ -368,9 +495,9 @@ impl CloudContext{
.text("request",request_config)
.part("fileContent",part);
let operation=self.post_form(url,form).await
.map_err(CreateError::Reqwest)?
.error_for_status().map_err(CreateError::Reqwest)?
let operation=response_ok(
self.post_form(url,form).await.map_err(CreateError::Reqwest)?
).await.map_err(CreateError::Response)?
.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?;
Ok(AssetOperation{
@@ -387,62 +514,97 @@ impl CloudContext{
.text("request",request_config)
.part("fileContent",reqwest::multipart::Part::bytes(body));
let operation=self.patch_form(url,form).await
.map_err(UpdateError::Reqwest)?
//roblox api documentation is very poor, just give the status code and drop the json
.error_for_status().map_err(UpdateError::Reqwest)?
let operation=response_ok(
self.patch_form(url,form).await.map_err(UpdateError::Reqwest)?
).await.map_err(UpdateError::Response)?
.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?;
Ok(AssetOperation{
operation,
})
}
async fn get_asset_operation(&self,config:GetAssetOperationRequest)->Result<RobloxOperation,GetError>{
async fn get_asset_operation(&self,config:GetAssetOperationRequest<'_>)->Result<RobloxOperation,GetError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/operations/{}",config.operation_id);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
self.get(url).await.map_err(GetError::Reqwest)?
.error_for_status().map_err(GetError::Reqwest)?
response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.json::<RobloxOperation>().await.map_err(GetError::Reqwest)
}
pub async fn get_asset_info(&self,config:GetAssetInfoRequest)->Result<AssetResponse,GetError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.asset_id);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
pub async fn create_luau_session(&self,config:&impl AsSessionPath,session:LuauSessionCreate<'_>)->Result<LuauSessionResponse,CreateError>{
let raw_url=format!("https://apis.roblox.com/cloud/v2/{}",config.into_session_path().as_ref());
let url=reqwest::Url::parse(raw_url.as_str()).map_err(CreateError::Parse)?;
self.get(url).await.map_err(GetError::Reqwest)?
.error_for_status().map_err(GetError::Reqwest)?
let body=serde_json::to_string(&session).map_err(CreateError::Serialize)?;
response_ok(
self.post(url,Json(body)).await.map_err(CreateError::Reqwest)?
).await.map_err(CreateError::Response)?
.json::<LuauSessionResponse>().await.map_err(CreateError::Reqwest)
}
pub async fn get_luau_session(&self,config:&impl AsSessionPath)->Result<LuauSessionResponse,GetError>{
let raw_url=format!("https://apis.roblox.com/cloud/v2/{}",config.into_session_path().as_ref());
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.json::<LuauSessionResponse>().await.map_err(GetError::Reqwest)
}
pub async fn get_asset_info(&self,config:GetAssetLatestRequest)->Result<AssetResponse,GetError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.asset_id);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.json::<AssetResponse>().await.map_err(GetError::Reqwest)
}
pub async fn get_asset_version(&self,config:GetAssetVersionRequest)->Result<Vec<u8>,GetError>{
pub async fn get_asset_version_info(&self,config:GetAssetVersionRequest)->Result<AssetResponse,GetError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions/{}",config.asset_id,config.version);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
let body=self.get(url).await.map_err(GetError::Reqwest)?
.error_for_status().map_err(GetError::Reqwest)?
response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.json::<AssetResponse>().await.map_err(GetError::Reqwest)
}
pub async fn get_asset_location(&self,config:GetAssetLatestRequest)->Result<AssetLocationInfo,GetError>{
let raw_url=format!("https://apis.roblox.com/asset-delivery-api/v1/assetId/{}",config.asset_id);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.json().await.map_err(GetError::Reqwest)
}
pub async fn get_asset_version_location(&self,config:GetAssetVersionRequest)->Result<AssetLocationInfo,GetError>{
let raw_url=format!("https://apis.roblox.com/asset-delivery-api/v1/assetId/{}/version/{}",config.asset_id,config.version);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.json().await.map_err(GetError::Reqwest)
}
pub async fn get_asset(&self,config:&AssetLocation)->Result<MaybeGzippedBytes,GetError>{
let url=reqwest::Url::parse(config.location()).map_err(GetError::Parse)?;
let bytes=response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.bytes().await.map_err(GetError::Reqwest)?;
match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
Ok(ReaderType::GZip(readable))=>read_readable(readable),
Ok(ReaderType::Raw(readable))=>read_readable(readable),
Err(e)=>Err(e),
}.map_err(GetError::IO)
}
pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
let version=match config.version{
Some(version)=>version,
None=>self.get_asset_info(GetAssetInfoRequest{asset_id:config.asset_id}).await?.revisionId.parse().unwrap(),
};
self.get_asset_version(GetAssetVersionRequest{
asset_id:config.asset_id,
version,
}).await
Ok(MaybeGzippedBytes::new(bytes))
}
pub async fn get_asset_versions(&self,config:AssetVersionsRequest)->Result<AssetVersionsResponse,AssetVersionsError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions",config.asset_id);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(AssetVersionsError::ParseError)?;
self.get(url).await.map_err(AssetVersionsError::Reqwest)?
.error_for_status().map_err(AssetVersionsError::Reqwest)?
response_ok(
self.get(url).await.map_err(AssetVersionsError::Reqwest)?
).await.map_err(AssetVersionsError::Response)?
.json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)
}
pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
@@ -455,8 +617,9 @@ impl CloudContext{
}
}
self.get(url).await.map_err(InventoryPageError::Reqwest)?
.error_for_status().map_err(InventoryPageError::Reqwest)?
response_ok(
self.get(url).await.map_err(InventoryPageError::Reqwest)?
).await.map_err(InventoryPageError::Response)?
.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)
}
pub async fn update_place(&self,config:UpdatePlaceRequest,body:impl Into<reqwest::Body>+Clone)->Result<UpdatePlaceResponse,UpdateError>{
@@ -468,8 +631,9 @@ impl CloudContext{
query.append_pair("versionType","Published");
}
self.post(url,body).await.map_err(UpdateError::Reqwest)?
.error_for_status().map_err(UpdateError::Reqwest)?
response_ok(
self.post(url,Binary(body)).await.map_err(UpdateError::Reqwest)?
).await.map_err(UpdateError::Response)?
.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)
}
}
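A hedged sketch of driving the new Luau Execution API using the Context, LuauSessionCreate, LuauSessionLatestRequest, LuauResults and LuauSessionError items defined above; the single-poll flow and error formatting are illustrative, not the tool's actual logic:

use rbx_asset::cloud::{Context,LuauSessionCreate,LuauSessionLatestRequest,LuauSessionError,LuauResults};

async fn run_luau_once(context:&Context,universe_id:u64,place_id:u64,script:&str)->Result<Option<LuauResults>,Box<dyn std::error::Error>>{
	// start a session against the latest published version of the place
	let session=context.create_luau_session(
		&LuauSessionLatestRequest{universe_id,place_id},
		LuauSessionCreate{
			script,
			user:None,
			timeout:None,
			binaryInput:None,
			enableBinaryOutput:None,
			binaryOutputUri:None,
		},
	).await.map_err(|e|format!("create_luau_session failed: {e:?}"))?;
	// one poll: COMPLETE yields Some(results), FAILED surfaces the Luau error,
	// and PROCESSING maps to None so the caller can retry after a delay
	match session.try_get_result(context).await{
		Ok(Ok(results))=>Ok(Some(results)),
		Ok(Err(e))=>Err(format!("luau error {}: {}",e.code,e.message).into()),
		Err(LuauSessionError::NotDone)=>Ok(None),
		Err(e)=>Err(e.into()),
	}
}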

rbx_asset/src/cookie.rs

@@ -1,3 +1,7 @@
use crate::body::{ContentType,Json};
use crate::util::response_ok;
use crate::types::{ResponseError,MaybeGzippedBytes};
#[derive(Debug)]
pub enum PostError{
Reqwest(reqwest::Error),
@@ -23,7 +27,14 @@ pub struct CreateRequest{
pub enum CreateError{
ParseError(url::ParseError),
PostError(PostError),
Response(ResponseError),
Reqwest(reqwest::Error),
ParseInt{
response:String,
err:std::num::ParseIntError,
},
VersionHeaderMissing,
ToStr(reqwest::header::ToStrError),
}
impl std::fmt::Display for CreateError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -46,7 +57,14 @@ pub enum UploadError{
ParseError(url::ParseError),
PostError(PostError),
Reqwest(reqwest::Error),
Response(ResponseError),
AssetIdIsZero,
ParseInt{
response:String,
err:std::num::ParseIntError,
},
VersionHeaderMissing,
ToStr(reqwest::header::ToStrError),
}
impl std::fmt::Display for UploadError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -58,9 +76,13 @@ impl std::error::Error for UploadError{}
#[allow(nonstandard_style,dead_code)]
pub struct UploadResponse{
pub AssetId:u64,
pub AssetVersionId:u64,
pub AssetVersion:u64,
}
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetDetailsRequest{
pub asset_id:u64,
}
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetRequest{
pub asset_id:u64,
@@ -69,8 +91,8 @@ pub struct GetAssetRequest{
#[derive(Debug)]
pub enum GetError{
ParseError(url::ParseError),
Response(ResponseError),
Reqwest(reqwest::Error),
IO(std::io::Error)
}
impl std::fmt::Display for GetError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -79,6 +101,109 @@ impl std::fmt::Display for GetError{
}
impl std::error::Error for GetError{}
#[derive(Debug)]
pub enum GetAssetV2Error{
ParseError(url::ParseError),
Response(ResponseError),
VersionHeaderMissing,
ToStr(reqwest::header::ToStrError),
ParseInt(std::num::ParseIntError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for GetAssetV2Error{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for GetAssetV2Error{}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetV2AssetMetadata{
pub metadataType:u32,
pub value:String,
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetV2Location{
pub assetFormat:String,// "source"
location:String,// this value is private so users cannot mutate it
#[serde(default)]
pub assetMetadatas:Vec<GetAssetV2AssetMetadata>,
}
impl GetAssetV2Location{
pub fn location(&self)->&str{
&self.location
}
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct GetAssetV2Info{
pub locations:Vec<GetAssetV2Location>,
pub requestId:String,
pub isArchived:bool,
pub assetTypeId:u32,
pub isRecordable:Option<bool>,
pub IsHashDynamic:Option<bool>,
pub IsCopyrightProtected:Option<bool>,
}
pub struct GetAssetV2{
pub version:u64,
pub info:GetAssetV2Info,
}
#[derive(Clone,Copy,Debug,Eq,PartialEq,Hash)]
#[derive(serde::Deserialize,serde::Serialize)]
pub enum CreatorType{
User,
Group,
}
#[derive(Debug)]
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct Creator{
pub Id:u64,
pub Name:String,
pub CreatorType:CreatorType,
pub CreatorTargetId:u64,
pub HasVerifiedBadge:bool,
}
#[derive(Debug)]
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetDetails{
pub TargetId:u64,
pub ProductType:Option<String>,
pub AssetId:u64,
pub ProductId:u64,
pub Name:String,
pub Description:String,
pub AssetTypeId:u32,
pub Creator:Creator,
pub IconImageAssetId:u64,
pub Created:chrono::DateTime<chrono::Utc>,
pub Updated:chrono::DateTime<chrono::Utc>,
pub PriceInRobux:Option<u32>,
pub PriceInTickets:Option<u32>,
pub Sales:u32,
pub IsNew:bool,
pub IsForSale:bool,
pub IsPublicDomain:bool,
pub IsLimited:bool,
pub IsLimitedUnique:bool,
pub Remaining:Option<u32>,
pub MinimumMembershipLevel:u32,
pub ContentRatingTypeId:u32,
pub SaleAvailabilityLocations:Option<String>,
pub SaleLocation:Option<String>,
pub CollectibleItemId:Option<u64>,
pub CollectibleProductId:Option<u64>,
pub CollectiblesItemDetails:Option<String>,
}
pub struct AssetVersionsPageRequest{
pub asset_id:u64,
pub cursor:Option<String>,
@@ -89,7 +214,7 @@ pub struct AssetVersion{
pub Id:u64,
pub assetId:u64,
pub assetVersionNumber:u64,
pub creatorType:String,
pub creatorType:CreatorType,
pub creatorTargetId:u64,
pub creatingUniverseId:Option<u64>,
pub created:chrono::DateTime<chrono::Utc>,
@@ -105,6 +230,7 @@ pub struct AssetVersionsPageResponse{
#[derive(Debug)]
pub enum PageError{
ParseError(url::ParseError),
Response(ResponseError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for PageError{
@@ -158,21 +284,21 @@ pub struct UserInventoryPageRequest{
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct UserInventoryItemOwner{
userId:u64,
username:String,
buildersClubMembershipType:u64,
pub userId:u64,
pub username:String,
pub buildersClubMembershipType:String,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct UserInventoryItem{
userAssetId:u64,
assetId:u64,
assetName:String,
collectibleItemId:Option<String>,
collectibleItemInstanceId:Option<String>,
owner:UserInventoryItemOwner,
created:chrono::DateTime<chrono::Utc>,
updated:chrono::DateTime<chrono::Utc>,
pub userAssetId:u64,
pub assetId:u64,
pub assetName:String,
pub collectibleItemId:Option<String>,
pub collectibleItemInstanceId:Option<String>,
pub owner:UserInventoryItemOwner,
pub created:chrono::DateTime<chrono::Utc>,
pub updated:chrono::DateTime<chrono::Utc>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
@@ -182,42 +308,76 @@ pub struct UserInventoryPageResponse{
pub data:Vec<UserInventoryItem>,
}
//idk how to do this better
enum ReaderType<R:std::io::Read>{
GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
Raw(std::io::BufReader<R>),
#[derive(Debug)]
pub enum SetAssetsPermissionsError{
Parse(url::ParseError),
JSONEncode(serde_json::Error),
Patch(PostError),
Response(ResponseError),
Reqwest(reqwest::Error),
}
fn maybe_gzip_decode<R:std::io::Read>(input:R)->std::io::Result<ReaderType<R>>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
match &peek[0..2]{
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
_=>Ok(ReaderType::Raw(buf)),
impl std::fmt::Display for SetAssetsPermissionsError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
let mut contents=Vec::new();
readable.read_to_end(&mut contents)?;
Ok(contents)
impl std::error::Error for SetAssetsPermissionsError{}
#[derive(serde::Serialize)]
#[allow(nonstandard_style)]
struct AssetPermissions{
assetId:u64,
grantToDependencies:bool,//true
}
#[derive(serde::Serialize)]
#[allow(nonstandard_style)]
struct SetAssetsPermissions<'a>{
subjectType:&'a str,// "Universe"
subjectId:&'a str,// "4422715291"
action:&'a str,// "Use",
enableDeepAccessCheck:bool,//true,
requests:&'a [AssetPermissions],
}
pub struct SetAssetsPermissionsRequest<'a>{
pub universe_id:u64,
pub asset_ids:&'a [u64],
}
impl SetAssetsPermissionsRequest<'_>{
fn serialize(&self)->Result<String,serde_json::Error>{
let ref requests:Vec<_>=self.asset_ids.iter().map(|&asset_id|AssetPermissions{
assetId:asset_id,
grantToDependencies:true,
}).collect();
let ref subject_id=self.universe_id.to_string();
let ref permissions=SetAssetsPermissions{
subjectType:"Universe",
subjectId:subject_id,
action:"Use",
enableDeepAccessCheck:true,
requests,
};
serde_json::to_string(permissions)
}
}
#[derive(Clone)]
pub struct Cookie(String);
impl Cookie{
/// cookie is prepended with ".ROBLOSECURITY=" by this function
pub fn new(cookie:String)->Self{
Self(cookie)
Self(format!(".ROBLOSECURITY={cookie}"))
}
pub fn get(self)->String{
self.0
}
}
#[derive(Clone)]
pub struct CookieContext{
pub cookie:String,
pub client:reqwest::Client,
pub struct Context{
cookie:String,
client:reqwest::Client,
}
impl CookieContext{
impl Context{
pub fn new(cookie:Cookie)->Self{
Self{
cookie:cookie.get(),
@@ -250,6 +410,29 @@ impl CookieContext{
Ok(resp)
}
async fn patch(&self,url:url::Url,body:impl ContentType+Clone)->Result<reqwest::Response,PostError>{
let mut resp=self.client.patch(url.clone())
.header("Cookie",self.cookie.as_str())
.header("Content-Type",body.content_type())
.body(body.clone())
.send().await.map_err(PostError::Reqwest)?;
//This is called a CSRF challenge apparently
if resp.status()==reqwest::StatusCode::FORBIDDEN{
if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
resp=self.client.patch(url)
.header("X-CSRF-Token",csrf_token)
.header("Cookie",self.cookie.as_str())
.header("Content-Type",body.content_type())
.body(body)
.send().await.map_err(PostError::Reqwest)?;
}else{
Err(PostError::CSRF)?;
}
}
Ok(resp)
}
pub async fn create(&self,config:CreateRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,CreateError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(CreateError::ParseError)?;
//url borrow scope
@@ -265,10 +448,31 @@ impl CookieContext{
query.append_pair("groupId",group_id.to_string().as_str());
}
}
let response=response_ok(
self.post(url,body).await.map_err(CreateError::PostError)?
).await.map_err(CreateError::Response)?;
self.post(url,body).await.map_err(CreateError::PostError)?
.error_for_status().map_err(CreateError::Reqwest)?
.json::<UploadResponse>().await.map_err(CreateError::Reqwest)
let version_str=response
.headers()
.get("roblox-assetversionnumber")
.ok_or(CreateError::VersionHeaderMissing)?
.to_str()
.map_err(CreateError::ToStr)?;
let version=version_str.parse()
.map_err(|err|CreateError::ParseInt{err,response:version_str.to_owned()})?;
let response=response.text().await.map_err(CreateError::Reqwest)?;
match response.parse(){
Ok(asset_id)=>Ok(UploadResponse{
AssetId:asset_id,
AssetVersion:version,
}),
Err(err)=>Err(CreateError::ParseInt{
response,
err,
})
}
}
pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
@@ -296,12 +500,33 @@ impl CookieContext{
query.append_pair("groupId",group_id.to_string().as_str());
}
}
let response=response_ok(
self.post(url,body).await.map_err(UploadError::PostError)?
).await.map_err(UploadError::Response)?;
self.post(url,body).await.map_err(UploadError::PostError)?
.error_for_status().map_err(UploadError::Reqwest)?
.json::<UploadResponse>().await.map_err(UploadError::Reqwest)
let version_str=response
.headers()
.get("roblox-assetversionnumber")
.ok_or(UploadError::VersionHeaderMissing)?
.to_str()
.map_err(UploadError::ToStr)?;
let version=version_str.parse()
.map_err(|err|UploadError::ParseInt{err,response:version_str.to_owned()})?;
let response=response.text().await.map_err(UploadError::Reqwest)?;
match response.parse(){
Ok(asset_id)=>Ok(UploadResponse{
AssetId:asset_id,
AssetVersion:version,
}),
Err(err)=>Err(UploadError::ParseInt{
response,
err,
})
}
}
pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
pub async fn get_asset(&self,config:GetAssetRequest)->Result<MaybeGzippedBytes,GetError>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
//url borrow scope
{
@@ -311,17 +536,62 @@ impl CookieContext{
query.append_pair("version",version.to_string().as_str());
}
}
let body=self.get(url).await.map_err(GetError::Reqwest)?
.error_for_status().map_err(GetError::Reqwest)?
let bytes=response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.bytes().await.map_err(GetError::Reqwest)?;
match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
Ok(ReaderType::GZip(readable))=>read_readable(readable),
Ok(ReaderType::Raw(readable))=>read_readable(readable),
Err(e)=>Err(e),
}.map_err(GetError::IO)
Ok(MaybeGzippedBytes::new(bytes))
}
pub async fn get_asset_versions_page(&self,config:AssetVersionsPageRequest)->Result<AssetVersionsPageResponse,PageError>{
pub async fn get_asset_v2(&self,config:GetAssetRequest)->Result<GetAssetV2,GetAssetV2Error>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v2/asset").map_err(GetAssetV2Error::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("ID",config.asset_id.to_string().as_str());
if let Some(version)=config.version{
query.append_pair("version",version.to_string().as_str());
}
}
let response=response_ok(
self.get(url).await.map_err(GetAssetV2Error::Reqwest)?
).await.map_err(GetAssetV2Error::Response)?;
let version=response
.headers()
.get("roblox-assetversionnumber")
.ok_or(GetAssetV2Error::VersionHeaderMissing)?
.to_str()
.map_err(GetAssetV2Error::ToStr)?
.parse()
.map_err(GetAssetV2Error::ParseInt)?;
let info=response.json().await.map_err(GetAssetV2Error::Reqwest)?;
Ok(GetAssetV2{
version,
info,
})
}
pub async fn get_asset_v2_download(&self,config:&GetAssetV2Location)->Result<MaybeGzippedBytes,GetError>{
let url=reqwest::Url::parse(config.location.as_str()).map_err(GetError::ParseError)?;
let bytes=response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.bytes().await.map_err(GetError::Reqwest)?;
Ok(MaybeGzippedBytes::new(bytes))
}
pub async fn get_asset_details(&self,config:GetAssetDetailsRequest)->Result<AssetDetails,GetError>{
let url=reqwest::Url::parse(format!("https://economy.roblox.com/v2/assets/{}/details",config.asset_id).as_str()).map_err(GetError::ParseError)?;
response_ok(
self.get(url).await.map_err(GetError::Reqwest)?
).await.map_err(GetError::Response)?
.json().await.map_err(GetError::Reqwest)
}
pub async fn get_asset_versions_page(&self,config:&AssetVersionsPageRequest)->Result<AssetVersionsPageResponse,PageError>{
let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(PageError::ParseError)?;
//url borrow scope
{
@@ -333,9 +603,9 @@ impl CookieContext{
query.append_pair("cursor",cursor);
}
}
self.get(url).await.map_err(PageError::Reqwest)?
.error_for_status().map_err(PageError::Reqwest)?
response_ok(
self.get(url).await.map_err(PageError::Reqwest)?
).await.map_err(PageError::Response)?
.json::<AssetVersionsPageResponse>().await.map_err(PageError::Reqwest)
}
pub async fn get_creations_page(&self,config:&CreationsPageRequest)->Result<CreationsPageResponse,PageError>{
@@ -348,9 +618,9 @@ impl CookieContext{
query.append_pair("cursor",cursor);
}
}
self.get(url).await.map_err(PageError::Reqwest)?
.error_for_status().map_err(PageError::Reqwest)?
response_ok(
self.get(url).await.map_err(PageError::Reqwest)?
).await.map_err(PageError::Response)?
.json::<CreationsPageResponse>().await.map_err(PageError::Reqwest)
}
pub async fn get_user_inventory_page(&self,config:&UserInventoryPageRequest)->Result<UserInventoryPageResponse,PageError>{
@@ -362,9 +632,21 @@ impl CookieContext{
query.append_pair("cursor",cursor);
}
}
self.get(url).await.map_err(PageError::Reqwest)?
.error_for_status().map_err(PageError::Reqwest)?
response_ok(
self.get(url).await.map_err(PageError::Reqwest)?
).await.map_err(PageError::Response)?
.json::<UserInventoryPageResponse>().await.map_err(PageError::Reqwest)
}
/// Used to enable an asset to be loaded onto a group game.
pub async fn set_assets_permissions(&self,config:SetAssetsPermissionsRequest<'_>)->Result<(),SetAssetsPermissionsError>{
let url=reqwest::Url::parse("https://apis.roblox.com/asset-permissions-api/v1/assets/permissions").map_err(SetAssetsPermissionsError::Parse)?;
let body=config.serialize().map_err(SetAssetsPermissionsError::JSONEncode)?;
response_ok(
self.patch(url,Json(body)).await.map_err(SetAssetsPermissionsError::Patch)?
).await.map_err(SetAssetsPermissionsError::Response)?;
Ok(())
}
}
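A hedged usage sketch of the permissions call added in #17, using the Context and SetAssetsPermissionsRequest items above; the universe id and asset ids are caller-supplied placeholders, and the PATCH body follows the SetAssetsPermissions shape shown earlier (subjectType "Universe", action "Use", enableDeepAccessCheck true, one entry per asset id):

use rbx_asset::cookie::{Context,SetAssetsPermissionsRequest,SetAssetsPermissionsError};

async fn allow_assets_in_universe(context:&Context,universe_id:u64,asset_ids:&[u64])->Result<(),SetAssetsPermissionsError>{
	// grant the universe "Use" access to each asset and its dependencies
	context.set_assets_permissions(SetAssetsPermissionsRequest{
		universe_id,
		asset_ids,
	}).await
}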

rbx_asset/src/lib.rs

@@ -1,2 +1,5 @@
pub mod cloud;
pub mod cookie;
pub mod types;
mod body;
mod util;

rbx_asset/src/types.rs (new file, 67 lines)

@@ -0,0 +1,67 @@
#[derive(Debug)]
pub struct UrlAndBody{
pub url:url::Url,
pub body:String,
}
#[derive(Debug)]
pub enum ResponseError{
Reqwest(reqwest::Error),
Details{
status_code:reqwest::StatusCode,
url_and_body:Box<UrlAndBody>,
},
}
impl std::fmt::Display for ResponseError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for ResponseError{}
#[cfg(feature="gzip")]
use flate2::read::GzDecoder;
/// Some bytes that might be gzipped. Use the read_with or to_vec methods to transparently decode gzip.
pub struct MaybeGzippedBytes{
bytes:bytes::Bytes,
}
impl MaybeGzippedBytes{
pub(crate) fn new(bytes:bytes::Bytes)->Self{
Self{bytes}
}
pub fn into_inner(self)->bytes::Bytes{
self.bytes
}
/// get a reference to the bytes, ignoring gzip decoding
pub fn as_raw_ref(&self)->&[u8]{
self.bytes.as_ref()
}
/// Transparently decode gzip data, if present (intermediate allocation)
#[cfg(feature="gzip")]
pub fn to_vec(&self)->std::io::Result<Vec<u8>>{
use std::io::Read;
match self.bytes.get(0..2){
Some(b"\x1f\x8b")=>{
let mut buf=Vec::new();
GzDecoder::new(self.bytes.as_ref()).read_to_end(&mut buf)?;
Ok(buf)
},
_=>Ok(self.bytes.to_vec())
}
}
/// Read the bytes with the provided decoders.
/// The idea is to make a function that is generic over std::io::Read
/// and pass the same function to both closures.
/// This two closure hack must be done because of the different concrete types.
#[cfg(feature="gzip")]
pub fn read_with<'a,ReadGzip,ReadRaw,T>(&'a self,read_gzip:ReadGzip,read_raw:ReadRaw)->T
where
ReadGzip:Fn(GzDecoder<&'a [u8]>)->T,
ReadRaw:Fn(&'a [u8])->T,
{
match self.bytes.get(0..2){
Some(b"\x1f\x8b")=>read_gzip(GzDecoder::new(self.bytes.as_ref())),
_=>read_raw(self.bytes.as_ref())
}
}
}
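A short sketch of the "two closure" pattern the comment above describes: one generic reader is handed to both arms of read_with, so gzipped and raw bytes go through the same code path (assumes the default "gzip" feature; read_all is a hypothetical helper):

use std::io::Read;
use rbx_asset::types::MaybeGzippedBytes;

fn read_all(mut readable:impl Read)->std::io::Result<Vec<u8>>{
	let mut contents=Vec::new();
	readable.read_to_end(&mut contents)?;
	Ok(contents)
}

fn decode(bytes:&MaybeGzippedBytes)->std::io::Result<Vec<u8>>{
	// the same function serves both closures; &[u8] also implements Read
	bytes.read_with(|gzip|read_all(gzip),|raw|read_all(raw))
}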

rbx_asset/src/util.rs (new file, 39 lines)

@@ -0,0 +1,39 @@
use crate::types::{ResponseError,UrlAndBody};
// lazy function to draw out meaningful info from http response on failure
pub(crate) async fn response_ok(response:reqwest::Response)->Result<reqwest::Response,ResponseError>{
let status_code=response.status();
if status_code.is_success(){
Ok(response)
}else{
let url=response.url().to_owned();
let bytes=response.bytes().await.map_err(ResponseError::Reqwest)?;
let body=String::from_utf8_lossy(&bytes).to_string();
Err(ResponseError::Details{
status_code,
url_and_body:Box::new(UrlAndBody{url,body})
})
}
}
use serde::de::{Error,Unexpected};
use serde::{Deserializer,Serializer};
struct U64StringVisitor;
impl serde::de::Visitor<'_> for U64StringVisitor{
type Value=u64;
fn expecting(&self,formatter:&mut std::fmt::Formatter)->std::fmt::Result{
write!(formatter,"string value with int")
}
fn visit_str<E:Error>(self,v:&str)->Result<Self::Value,E>{
v.parse().map_err(|_|E::invalid_value(Unexpected::Str(v),&"u64"))
}
}
pub(crate) fn deserialize_u64<'de,D:Deserializer<'de>>(deserializer:D)->Result<u64,D::Error>{
deserializer.deserialize_any(U64StringVisitor)
}
pub(crate) fn serialize_u64<S:Serializer>(v:&u64,serializer:S)->Result<S::Ok,S::Error>{
serializer.serialize_str(v.to_string().as_str())
}
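To show what the string-int helpers accept, a crate-internal sketch in the style of the fields above (RevisionStub is hypothetical; the real users are Creator and AssetResponse): a value such as {"revisionId":"42"} deserializes to 42u64 and serializes back to the quoted string form.

use crate::util::{serialize_u64,deserialize_u64};

#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style)]
struct RevisionStub{
	#[serde(deserialize_with="deserialize_u64",serialize_with="serialize_u64")]
	revisionId:u64,
}

// let stub:RevisionStub=serde_json::from_str(r#"{"revisionId":"42"}"#).unwrap();
// assert_eq!(stub.revisionId,42);
// assert_eq!(serde_json::to_string(&stub).unwrap(),r#"{"revisionId":"42"}"#);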

rox_compiler/Cargo.toml

@@ -12,6 +12,6 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
futures = "0.3.30"
lazy-regex = "3.1.0"
rayon = "1.8.0"
rbx_dom_weak = "2.7.0"
rbx_xml = "0.13.3"
rbx_dom_weak = "3.0.0"
rbx_xml = "1.0.0"
tokio = { version = "1.35.1", features = ["fs"] }

rox_compiler source file (name not shown in this view)

@@ -16,7 +16,6 @@ use crate::common::{sanitize,Style,PropertiesOverride};
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
pub enum QueryResolveError{
NotFound,//0 results
Ambiguous,//>1 results

rox_compiler source file (name not shown in this view)

@@ -1,4 +1,5 @@
use std::path::PathBuf;
use rbx_dom_weak::ustr;
use rbx_dom_weak::types::Ref;
use crate::common::{sanitize,Style,PropertiesOverride};
@@ -90,7 +91,7 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_na
if let Some(item)=dom.get_by_ref(node.referent){
//TODO: delete disabled scripts
if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){
if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get(&ustr("Source")){
if properties.is_some(){
//rox style
let source=properties.to_string()+source.as_str();

View File

@@ -1,14 +1,16 @@
use std::{io::Read,path::PathBuf};
use std::io::Read;
use std::path::{Path,PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::{anyhow,Result as AResult};
use futures::StreamExt;
use rbx_asset::cloud::{ApiKey,CloudContext};
use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion,CreationsItem};
use futures::{StreamExt,TryStreamExt};
use rbx_asset::cloud::{ApiKey,Context as CloudContext};
use rbx_asset::cookie::{Cookie,Context as CookieContext,AssetVersion,CreationsItem};
type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;
const CONCURRENT_FS:usize=64;
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
@@ -20,10 +22,16 @@ struct Cli{
#[derive(Subcommand)]
enum Commands{
AssetDetails(AssetDetailsSubcommand),
DownloadHistory(DownloadHistorySubcommand),
Download(DownloadSubcommand),
DownloadVersion(DownloadVersionSubcommand),
DownloadLocation(DownloadLocationSubcommand),
DownloadVersionLocation(DownloadVersionLocationSubcommand),
DownloadVersionV2(DownloadVersionSubcommand),
DownloadDecompile(DownloadDecompileSubcommand),
DownloadCreationsJson(DownloadCreationsJsonSubcommand),
DownloadCreationsHistory(DownloadCreationsHistorySubcommand),
DownloadUserInventoryJson(DownloadUserInventoryJsonSubcommand),
CreateAsset(CreateAssetSubcommand),
CreateAssetMedia(CreateAssetMediaSubcommand),
@@ -37,6 +45,7 @@ enum Commands{
Decompile(DecompileSubcommand),
DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
RunLuau(RunLuauSubcommand),
}
/// Download a range of assets from the asset version history. Download summary is saved to `output_folder/versions.json`, and can be optionally used to download only new versions the next time.
@@ -59,7 +68,19 @@ struct DownloadHistorySubcommand{
#[arg(long)]
end_version:Option<u64>,
}
/// Download a single asset by id.
/// Print the details for an asset
#[derive(Args)]
struct AssetDetailsSubcommand{
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
#[arg(required=true)]
asset_id:AssetID,
}
/// Download a list of assets by id.
#[derive(Args)]
struct DownloadSubcommand{
#[arg(long,group="cookie",required=true)]
@@ -73,6 +94,48 @@ struct DownloadSubcommand{
#[arg(required=true)]
asset_ids:Vec<AssetID>,
}
/// Download a single asset by id, optionally specifying the version to download.
#[derive(Args)]
struct DownloadVersionSubcommand{
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
asset_version:Option<u64>,
}
/// Get download urls for a list of assets by id.
#[derive(Args)]
struct DownloadLocationSubcommand{
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(required=true)]
asset_ids:Vec<AssetID>,
}
/// Get a download url for a single asset by id, optionally specifying the version to download.
#[derive(Args)]
struct DownloadVersionLocationSubcommand{
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
asset_version:Option<u64>,
}
/// Download the list of asset ids (not the assets themselves) created by a group or user. The output is written to `output_folder/versions.json`
#[derive(Args)]
struct DownloadCreationsJsonSubcommand{
@@ -88,6 +151,8 @@ struct DownloadCreationsJsonSubcommand{
group_id:Option<u64>,
#[arg(long,group="owner",required=true)]
user_id:Option<u64>,
#[arg(long)]
continue_from_cursor:Option<bool>,
}
/// Download the list of asset ids (not the assets themselves) in a user's inventory. The output is written to `output_folder/versions.json`
#[derive(Args)]
@@ -102,6 +167,8 @@ struct DownloadUserInventoryJsonSubcommand{
output_folder:Option<PathBuf>,
#[arg(long)]
user_id:u64,
#[arg(long)]
continue_from_cursor:Option<bool>,
}
/// Upload a (.rbxm, .rbxmx) model file, creating a new asset. Can be any type of model, including modulescripts.
#[derive(Args)]
@@ -137,7 +204,7 @@ struct CreateAssetMediaSubcommand{
#[arg(long)]
model_name:String,
#[arg(long)]
description:Option<String>,
description:String,
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
@@ -366,6 +433,24 @@ struct DownloadAndDecompileHistoryIntoGitSubcommand{
#[arg(long)]
write_scripts:Option<bool>,
}
/// Run a Luau script in a place via the Open Cloud Luau Execution API.
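// Illustrative invocation only (assumes clap's default kebab-case renaming and a binary named
// `asset-tool`; actual names may differ, and the env var name is a placeholder):
// asset-tool run-luau --api-key-envvar MY_KEY_VAR --script-file run.luau --universe-id 1234 --place-id 5678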
#[derive(Args)]
struct RunLuauSubcommand{
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long,group="script",required=true)]
script_literal:Option<String>,
#[arg(long,group="script",required=true)]
script_file:Option<PathBuf>,
#[arg(long)]
universe_id:u64,
#[arg(long)]
place_id:u64,
}
#[derive(Clone,Copy,Debug,clap::ValueEnum)]
enum Style{
@@ -402,6 +487,16 @@ impl AssetType{
async fn main()->AResult<()>{
let cli=Cli::parse();
match cli.command{
Commands::AssetDetails(subcommand)=>{
asset_details(
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
subcommand.asset_id
).await
},
Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
end_version:subcommand.end_version,
@@ -429,6 +524,61 @@ async fn main()->AResult<()>{
}).collect()
).await
},
Commands::DownloadVersion(subcommand)=>{
let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
download_version(
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
subcommand.asset_id,
subcommand.asset_version,
{
let mut path=output_folder.clone();
path.push(subcommand.asset_id.to_string());
path
},
).await
},
Commands::DownloadLocation(subcommand)=>{
download_list_locations(
api_key_from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
&subcommand.asset_ids
).await
},
Commands::DownloadVersionLocation(subcommand)=>{
download_location(
api_key_from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
subcommand.asset_id,
subcommand.asset_version,
).await
},
Commands::DownloadVersionV2(subcommand)=>{
let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
download_version_v2(
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
subcommand.asset_id,
subcommand.asset_version,
{
let mut path=output_folder.clone();
path.push(subcommand.asset_id.to_string());
path
},
).await
},
Commands::DownloadDecompile(subcommand)=>{
download_decompile(DownloadDecompileConfig{
cookie:cookie_from_args(
@@ -455,7 +605,9 @@ async fn main()->AResult<()>{
subcommand.group_id,
)?,
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
subcommand.continue_from_cursor.unwrap_or(false),
).await,
Commands::DownloadCreationsHistory(subcommand)=>subcommand.run().await,
Commands::DownloadUserInventoryJson(subcommand)=>download_user_inventory_json(
cookie_from_args(
subcommand.cookie_literal,
@@ -464,6 +616,7 @@ async fn main()->AResult<()>{
).await?,
subcommand.user_id,
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
subcommand.continue_from_cursor.unwrap_or(false),
).await,
Commands::CreateAsset(subcommand)=>create_asset(CreateAssetConfig{
cookie:cookie_from_args(
@@ -485,14 +638,14 @@ async fn main()->AResult<()>{
subcommand.api_key_file,
).await?,
creator:match (subcommand.creator_user_id,subcommand.creator_group_id){
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id.to_string()),
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id.to_string()),
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id),
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id),
other=>Err(anyhow!("Invalid creator {other:?}"))?,
},
input_file:subcommand.input_file,
asset_type:subcommand.asset_type.cloud(),
model_name:subcommand.model_name,
description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
description:subcommand.description,
expected_price:subcommand.expected_price,
}).await,
Commands::CreateAssetMedias(subcommand)=>create_asset_medias(CreateAssetMediasConfig{
@@ -507,8 +660,8 @@ async fn main()->AResult<()>{
subcommand.cookie_file,
).await?,
creator:match (subcommand.creator_user_id,subcommand.creator_group_id){
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id.to_string()),
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id.to_string()),
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id),
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id),
other=>Err(anyhow!("Invalid creator {other:?}"))?,
},
description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
@@ -611,6 +764,21 @@ async fn main()->AResult<()>{
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
}).await,
Commands::RunLuau(subcommand)=>run_luau(RunLuauConfig{
api_key:api_key_from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
script:match subcommand.script_literal{
Some(script)=>script,
None=>std::fs::read_to_string(subcommand.script_file.unwrap())?,
},
request:rbx_asset::cloud::LuauSessionLatestRequest{
place_id:subcommand.place_id,
universe_id:subcommand.universe_id,
},
}).await,
}
}
@@ -621,7 +789,7 @@ async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
};
Ok(Cookie::new(format!(".ROBLOSECURITY={cookie}")))
Ok(Cookie::new(cookie))
}
async fn api_key_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<ApiKey>{
let api_key=match (literal,environment,file){
@@ -678,10 +846,10 @@ async fn get_asset_exp_backoff(
context:&CloudContext,
asset_operation:&rbx_asset::cloud::AssetOperation
)->Result<rbx_asset::cloud::AssetResponse,rbx_asset::cloud::AssetOperationError>{
const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
const BACKOFF_MUL:f32=1.395_612_5;//exp(1/3)
let mut backoff=1000f32;
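// the delay starts at 1000ms and is multiplied by exp(1/3) after each incomplete poll,
// i.e. it grows by a factor of e (~2.72x) every three attempts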
loop{
match asset_operation.try_get_asset(&context).await{
match asset_operation.try_get_asset(context).await{
//try again when the operation is not done
Err(rbx_asset::cloud::AssetOperationError::Operation(rbx_asset::cloud::OperationError::NotDone))=>(),
//return all other results
@@ -724,7 +892,7 @@ struct CreateAssetMediasConfig{
}
#[derive(Debug)]
#[allow(dead_code)]
#[expect(dead_code)]
enum CreateAssetMediasError{
NoFileStem(PathBuf),
IO(std::io::Error),
@@ -739,7 +907,7 @@ impl std::fmt::Display for CreateAssetMediasError{
impl std::error::Error for CreateAssetMediasError{}
#[derive(Debug)]
#[allow(dead_code)]
#[expect(dead_code)]
enum PollOperationError{
CreateAssetMedias(CreateAssetMediasError),
AssetOperation(rbx_asset::cloud::AssetOperationError),
@@ -752,7 +920,7 @@ impl std::fmt::Display for PollOperationError{
impl std::error::Error for PollOperationError{}
#[derive(Debug)]
#[allow(dead_code)]
#[expect(dead_code)]
enum DownloadDecalError{
PollOperation(PollOperationError),
ParseInt(std::num::ParseIntError),
@@ -761,6 +929,7 @@ enum DownloadDecalError{
NoFirstInstance,
NoTextureProperty,
TexturePropertyInvalid,
TextureContentNotUri,
}
impl std::fmt::Display for DownloadDecalError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -824,20 +993,25 @@ async fn create_asset_medias(config:CreateAssetMediasConfig)->AResult<()>{
let cookie_context=&cookie_context;
async move{(path,
async move{
use rbx_dom_weak::ustr;
let asset_response=asset_response_result.map_err(DownloadDecalError::PollOperation)?;
let file=cookie_context.get_asset(rbx_asset::cookie::GetAssetRequest{
asset_id:asset_response.assetId.parse().map_err(DownloadDecalError::ParseInt)?,
let maybe_gzip=cookie_context.get_asset(rbx_asset::cookie::GetAssetRequest{
asset_id:asset_response.assetId,
version:None,
}).await.map_err(DownloadDecalError::Get)?;
let dom=load_dom(std::io::Cursor::new(file)).map_err(DownloadDecalError::LoadDom)?;
let dom=maybe_gzip.read_with(load_dom,load_dom).map_err(DownloadDecalError::LoadDom)?;
let instance=dom.get_by_ref(
*dom.root().children().first().ok_or(DownloadDecalError::NoFirstInstance)?
).ok_or(DownloadDecalError::NoFirstInstance)?;
let texture=instance.properties.get("Texture").ok_or(DownloadDecalError::NoTextureProperty)?;
let asset_url=match texture{
rbx_dom_weak::types::Variant::Content(url)=>url.clone().into_string(),
let texture=instance.properties.get(&ustr("TextureContent")).ok_or(DownloadDecalError::NoTextureProperty)?;
let content=match texture{
rbx_dom_weak::types::Variant::Content(content)=>content,
_=>Err(DownloadDecalError::TexturePropertyInvalid)?,
};
let asset_url=match content.value(){
rbx_dom_weak::types::ContentType::Uri(uri)=>uri.clone(),
_=>Err(DownloadDecalError::TextureContentNotUri)?,
};
Ok::<_,DownloadDecalError>((asset_response.displayName,asset_url))
}
.await)}
@@ -906,6 +1080,34 @@ async fn upload_place(config:UploadPlaceConfig)->AResult<()>{
Ok(())
}
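// Fetch and print the details of a single asset using the cookie-authenticated API.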
async fn asset_details(cookie:Cookie,asset_id:AssetID)->AResult<()>{
let context=CookieContext::new(cookie);
let details=context.get_asset_details(rbx_asset::cookie::GetAssetDetailsRequest{asset_id}).await?;
println!("details:{details:?}");
Ok(())
}
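// Download a single asset (optionally at a specific version) and write it to `dest`.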
async fn download_version(cookie:Cookie,asset_id:AssetID,version:Option<u64>,dest:PathBuf)->AResult<()>{
let context=CookieContext::new(cookie);
let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version}).await?;
tokio::fs::write(dest,maybe_gzip.to_vec()?).await?;
Ok(())
}
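// Download a single asset via the two-step v2 endpoint.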
async fn download_version_v2(cookie:Cookie,asset_id:AssetID,version:Option<u64>,dest:PathBuf)->AResult<()>{
let context=CookieContext::new(cookie);
// v2 is a two-step download: fetch the asset info (which lists download locations), then fetch from the first location
let info=context.get_asset_v2(rbx_asset::cookie::GetAssetRequest{asset_id,version}).await?;
println!("version:{}",info.version);
let location=info.info.locations.first().ok_or(anyhow::Error::msg("No locations"))?;
let maybe_gzip=context.get_asset_v2_download(location).await?;
tokio::fs::write(dest,maybe_gzip.to_vec()?).await?;
Ok(())
}
async fn download_list(cookie:Cookie,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=CookieContext::new(cookie);
futures::stream::iter(asset_id_file_map.into_iter()
@@ -917,84 +1119,371 @@ async fn download_list(cookie:Cookie,asset_id_file_map:AssetIDFileMap)->AResult<
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((dest,data))=>if let Err(e)=tokio::fs::write(dest,data).await{
eprintln!("fs error: {}",e);
},
Err(e)=>eprintln!("dl error: {}",e),
}
}).await;
match b{
Ok((dest,maybe_gzip))=>if let Err(e)=async{tokio::fs::write(dest,maybe_gzip.to_vec()?).await}.await{
eprintln!("fs error: {}",e);
},
Err(e)=>eprintln!("dl error: {}",e),
}
}).await;
Ok(())
}
async fn get_creations_pages(context:&CookieContext,owner:rbx_asset::cookie::Owner)->AResult<Vec<CreationsItem>>{
let mut config=rbx_asset::cookie::CreationsPageRequest{
owner,
cursor:None,
async fn download_list_locations(api_key:ApiKey,asset_id_file_map:&[u64])->AResult<()>{
let context=CloudContext::new(api_key);
futures::stream::iter(asset_id_file_map)
.map(|&asset_id|
context.get_asset_location(rbx_asset::cloud::GetAssetLatestRequest{asset_id})
)
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|result|async{
match result{
Ok(asset_location_info)=>match asset_location_info.location{
Some(location)=>println!("{}",location.location()),
None=>println!("This asset is private!"),
},
Err(e)=>eprintln!("dl error: {}",e),
}
}).await;
Ok(())
}
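// Print the Open Cloud download URL for one asset, optionally at a specific version.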
async fn download_location(api_key:ApiKey,asset_id:AssetID,version:Option<u64>)->AResult<()>{
let context=CloudContext::new(api_key);
let asset_location_info=match version{
Some(version)=>context.get_asset_version_location(rbx_asset::cloud::GetAssetVersionRequest{asset_id,version}).await?,
None=>context.get_asset_location(rbx_asset::cloud::GetAssetLatestRequest{asset_id}).await?,
};
let mut asset_list=Vec::new();
match asset_location_info.location{
Some(location)=>println!("{}",location.location()),
None=>println!("This asset is private!"),
}
Ok(())
}
async fn get_creations_pages(
context:&CookieContext,
asset_list:&mut Vec<rbx_asset::cookie::CreationsItem>,
config:&mut rbx_asset::cookie::CreationsPageRequest,
)->AResult<()>{
loop{
let mut page=context.get_creations_page(&config).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
config.cursor=page.nextPageCursor;
if config.cursor.is_none(){
break;
}
config.cursor=page.nextPageCursor;
}
Ok(())
}
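// Page through the owner's creations with checkpointing: `versions.json` holds the items
// collected so far and `cursor` holds the next page cursor, so an interrupted run can be
// resumed via the continue flag.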
async fn download_creations_pages_from_checkpoint(context:&CookieContext,owner:rbx_asset::cookie::Owner,output_folder:&Path,continue_from_cursor:bool)->AResult<Vec<CreationsItem>>{
let mut versions_path=output_folder.to_owned();
versions_path.set_file_name("versions.json");
let mut cursor_path=output_folder.to_owned();
cursor_path.set_file_name("cursor");
let (mut asset_list,mut config)=if continue_from_cursor{
// load state from files
let (versions,cursor)=tokio::join!(
tokio::fs::read(versions_path.as_path()),
tokio::fs::read_to_string(cursor_path.as_path()),
);
// allow versions to not exist
let (versions,cursor)=match (versions,cursor){
// continue downloading
(Ok(versions),Ok(cursor))=>(serde_json::from_slice(&versions)?,Some(cursor)),
// already downloaded
(Ok(versions),Err(e)) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>return Ok(serde_json::from_slice(&versions)?),
// not downloaded
(Err(e),result) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>{
match result{
Ok(_)=>{},
Err(e) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>{},
Err(e)=>Err(e)?,
}
(Vec::new(),None)
},
// other errors
(Ok(_),Err(e))=>Err(e)?,
(Err(e),_)=>Err(e)?,
};
(
versions,
rbx_asset::cookie::CreationsPageRequest{
owner,
cursor,
}
)
}else{
// create new state
(
Vec::new(),
rbx_asset::cookie::CreationsPageRequest{
owner,
cursor:None,
}
)
};
// keep the paging result so the checkpoint files are still written if paging fails partway
let pages_result=get_creations_pages(&context,&mut asset_list,&mut config).await;
let cursor_fut=async{
if let Some(cursor)=config.cursor{
println!("writing cursor state...");
// paging stopped before the final page; persist the cursor so a later run can resume
tokio::fs::write(cursor_path,cursor).await?;
}else{
// no cursor
if let Err(e)=tokio::fs::remove_file(cursor_path).await{
match e.kind(){
std::io::ErrorKind::NotFound=>println!("Cannot delete cursor: file not found"),
_=>Err(e)?,
}
}
}
Ok(())
};
let versions_fut=tokio::fs::write(versions_path,serde_json::to_string(&asset_list)?);
tokio::try_join!(versions_fut,cursor_fut)?;
// propagate any paging error only after the checkpoint has been written
pages_result?;
Ok(asset_list)
}
async fn download_creations_json(cookie:Cookie,owner:rbx_asset::cookie::Owner,output_folder:PathBuf)->AResult<()>{
async fn download_creations_json(cookie:Cookie,owner:rbx_asset::cookie::Owner,output_folder:PathBuf,continue_from_cursor:bool)->AResult<()>{
let context=CookieContext::new(cookie);
let item_list=get_creations_pages(&context,owner).await?;
let mut path=output_folder.clone();
path.set_file_name("versions.json");
tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;
download_creations_pages_from_checkpoint(&context,owner,output_folder.as_path(),continue_from_cursor).await?;
Ok(())
}
async fn get_user_inventory_pages(context:&CookieContext,user_id:u64)->AResult<Vec<rbx_asset::cookie::UserInventoryItem>>{
let mut config=rbx_asset::cookie::UserInventoryPageRequest{
user_id,
cursor:None,
};
let mut asset_list=Vec::new();
async fn get_user_inventory_pages(
context:&CookieContext,
asset_list:&mut Vec<rbx_asset::cookie::UserInventoryItem>,
config:&mut rbx_asset::cookie::UserInventoryPageRequest,
)->AResult<()>{
loop{
let mut page=context.get_user_inventory_page(&config).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
let page=context.get_user_inventory_page(config).await?;
asset_list.extend(page.data);
config.cursor=page.nextPageCursor;
if config.cursor.is_none(){
break;
}
config.cursor=page.nextPageCursor;
}
Ok(asset_list)
Ok(())
}
async fn download_user_inventory_json(cookie:Cookie,user_id:u64,output_folder:PathBuf)->AResult<()>{
let context=CookieContext::new(cookie);
let item_list=get_user_inventory_pages(&context,user_id).await?;
async fn download_user_inventory_json(cookie:Cookie,user_id:u64,output_folder:PathBuf,continue_from_cursor:bool)->AResult<()>{
let mut versions_path=output_folder.clone();
versions_path.set_file_name("versions.json");
let mut cursor_path=output_folder.clone();
cursor_path.set_file_name("cursor");
let mut path=output_folder.clone();
path.set_file_name("versions.json");
tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;
let context=CookieContext::new(cookie);
let (mut asset_list,mut config)=if continue_from_cursor{
// load state from files
let (versions,cursor)=tokio::try_join!(
tokio::fs::read(versions_path.as_path()),
tokio::fs::read_to_string(cursor_path.as_path()),
)?;
(
serde_json::from_slice(&versions)?,
rbx_asset::cookie::UserInventoryPageRequest{
user_id,
cursor:Some(cursor),
}
)
}else{
// create new state
(
Vec::new(),
rbx_asset::cookie::UserInventoryPageRequest{
user_id,
cursor:None,
}
)
};
match get_user_inventory_pages(&context,&mut asset_list,&mut config).await{
Ok(())=>println!("Pages polling complete"),
Err(e)=>println!("Error: {e}"),
}
let cursor_fut=async{
if let Some(cursor)=config.cursor{
println!("writing cursor state...");
// paging stopped before the final page; persist the cursor so a later run can resume
tokio::fs::write(cursor_path,cursor).await?;
}
Ok(())
};
let versions_fut=tokio::fs::write(versions_path,serde_json::to_string(&asset_list)?);
tokio::try_join!(versions_fut,cursor_fut)?;
Ok(())
}
/// Download all versions of all assets created by a group or user. The output is written to a folder structure in the output directory.
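// Illustrative invocation only (assumes clap's default kebab-case renaming and a binary named
// `asset-tool`; actual names may differ, and the env var names are placeholders):
// asset-tool download-creations-history --cookie-envvar MY_COOKIE_VAR --api-key-envvar MY_KEY_VAR --group-id 1234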
#[derive(Args)]
struct DownloadCreationsHistorySubcommand{
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long,group="owner",required=true)]
group_id:Option<u64>,
#[arg(long,group="owner",required=true)]
user_id:Option<u64>,
#[arg(long)]
r#continue:Option<bool>,
}
impl DownloadCreationsHistorySubcommand{
async fn run(self)->AResult<()>{
download_creations_history(
cookie_from_args(
self.cookie_literal,
self.cookie_envvar,
self.cookie_file,
).await?,
api_key_from_args(
self.api_key_literal,
self.api_key_envvar,
self.api_key_file,
).await?,
owner_from_args(
self.user_id,
self.group_id,
)?,
self.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
self.r#continue.unwrap_or(false),
).await
}
}
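// Download every version of every asset created by the owner: list the creations (with
// checkpointing), fetch each asset's version history, then download each version into
// `output_folder/<asset_id>/<asset_id>_v<version>.rbxl`.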
async fn download_creations_history(cookie:Cookie,api_key:ApiKey,owner:rbx_asset::cookie::Owner,output_folder:PathBuf,r#continue:bool)->AResult<()>{
let cookie_context=CookieContext::new(cookie);
let cloud_context=CloudContext::new(api_key);
// get the list of all assets created by the owner, resuming from the checkpoint if requested
let asset_list=download_creations_pages_from_checkpoint(&cookie_context,owner,output_folder.as_path(),r#continue).await?;
// create an output folder for each asset
let asset_folders:Vec<PathBuf>={
futures::stream::iter(asset_list.iter().map(|asset|async{
// create asset folder
let mut asset_folder=output_folder.clone();
asset_folder.push(asset.id.to_string());
tokio::fs::create_dir_all(asset_folder.as_path()).await?;
Ok::<_,anyhow::Error>(asset_folder)
}))
.buffered(CONCURRENT_FS)
.try_collect().await?
};
#[expect(dead_code)]
#[derive(Debug)]
enum Error<'a>{
NoLocations(Job<'a>),
GetVersionLocationError(rbx_asset::cloud::GetError),
GetError(rbx_asset::cloud::GetError),
Io(std::io::Error),
}
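// One download job: the asset folder to write into plus the asset id and version to fetch.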
#[derive(Clone,Copy,Debug)]
struct Job<'a>{
path:&'a PathBuf,
asset_id:u64,
asset_version:u64,
}
let mut job_list=Vec::new();
// build a flattened job list so every version of every asset can be downloaded in parallel
for (path,asset) in asset_folders.iter().zip(asset_list){
// save versions file
let mut versions_path=path.to_owned();
versions_path.push("versions.json");
let version_history=if r#continue{
let file=tokio::fs::read(versions_path.as_path()).await?;
serde_json::from_slice(&file)?
}else{
println!("Downloading history for {} - {}",asset.id,asset.name);
let version_history=get_version_history(&cookie_context,asset.id).await?;
println!("Found {} versions",version_history.len());
tokio::fs::write(versions_path,serde_json::to_string(&version_history)?).await?;
version_history
};
job_list.extend(version_history.into_iter().map(|asset_version|
Job{
path,
asset_id:asset.id,
asset_version:asset_version.assetVersionNumber,
}
));
}
println!("Completed jobs list. Number of jobs: {}",job_list.len());
futures::stream::iter(job_list).map(async|job|{
let mut dest=job.path.to_owned();
dest.push(format!("{}_v{}.rbxl",job.asset_id,job.asset_version));
//if the file already exists, don't try downloading it again
if tokio::fs::try_exists(dest.as_path()).await.map_err(Error::Io)?{
return Ok(());
}
let location=cloud_context.get_asset_version_location(rbx_asset::cloud::GetAssetVersionRequest{
asset_id:job.asset_id,
version:job.asset_version,
}).await.map_err(Error::GetVersionLocationError)?;
let location=location.location.ok_or(Error::NoLocations(job))?;
let downloaded=cloud_context.get_asset(&location).await.map_err(Error::GetError)?;
tokio::fs::write(dest,downloaded.to_vec().map_err(Error::Io)?).await.map_err(Error::Io)?;
Ok(())
})
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(async|result|{
match result{
Ok(())=>{},
Err(Error::NoLocations(job))=>println!("Job failed due to no locations: asset_id={} version={}",job.asset_id,job.asset_version),
Err(e)=>println!("Error: {e:?}"),
}
}).await;
println!("All jobs complete.");
Ok(())
}
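// Page through an asset's full version history and return it sorted by version number.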
async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
let mut cursor:Option<String>=None;
let mut page_request=rbx_asset::cookie::AssetVersionsPageRequest{
asset_id,
cursor:None,
};
let mut asset_list=Vec::new();
loop{
let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id,cursor}).await?;
let mut page=context.get_asset_versions_page(&page_request).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;
}
cursor=page.nextPageCursor;
page_request.cursor=page.nextPageCursor;
}
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
asset_list.sort_by_key(|a|a.assetVersionNumber);
Ok(asset_list)
}
@@ -1050,9 +1539,12 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
let mut join_set=tokio::task::JoinSet::new();
//poll paged list of all asset versions
let mut cursor:Option<String>=None;
let mut page_request=rbx_asset::cookie::AssetVersionsPageRequest{
asset_id:config.asset_id,
cursor:None,
};
loop{
let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id:config.asset_id,cursor}).await?;
let mut page=context.get_asset_versions_page(&page_request).await?;
let context=&context;
let output_folder=config.output_folder.clone();
let data=&page.data;
@@ -1082,9 +1574,9 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
let mut path=output_folder.clone();
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
join_set.spawn(async move{
let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
tokio::fs::write(path,file).await?;
tokio::fs::write(path,maybe_gzip.to_vec()?).await?;
Ok::<_,anyhow::Error>(())
});
@@ -1111,10 +1603,10 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
}else{
asset_list.append(&mut page.data);
}
cursor=page.nextPageCursor;
page_request.cursor=page.nextPageCursor;
}
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
asset_list.sort_by_key(|a|a.assetVersionNumber);
let mut path=config.output_folder.clone();
path.set_file_name("versions.json");
@@ -1128,7 +1620,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
}
#[derive(Debug)]
#[allow(dead_code)]
#[expect(dead_code)]
enum LoadDomError{
IO(std::io::Error),
RbxBinary(rbx_binary::DecodeError),
@@ -1204,9 +1696,9 @@ struct DownloadDecompileConfig{
async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
let context=CookieContext::new(config.cookie);
let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
let dom=load_dom(std::io::Cursor::new(file))?;
let dom=maybe_gzip.read_with(load_dom,load_dom)?;
let context=rox_compiler::DecompiledContext::from_dom(dom);
context.write_files(rox_compiler::WriteConfig{
@@ -1386,8 +1878,8 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
.map(|asset_version|{
let context=context.clone();
tokio::task::spawn(async move{
let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
let dom=load_dom(std::io::Cursor::new(file))?;
let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
let dom=maybe_gzip.read_with(load_dom,load_dom)?;
Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
})
}))
@@ -1511,3 +2003,43 @@ async fn compile_upload_place(config:CompileUploadPlaceConfig)->AResult<()>{
println!("UploadResponse={:?}",resp);
Ok(())
}
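// Poll the Luau session for results with exponential backoff until the task completes.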
async fn get_luau_result_exp_backoff(
context:&CloudContext,
luau_session:&rbx_asset::cloud::LuauSessionResponse
)->Result<Result<rbx_asset::cloud::LuauResults,rbx_asset::cloud::LuauError>,rbx_asset::cloud::LuauSessionError>{
const BACKOFF_MUL:f32=1.395_612_5;//exp(1/3)
let mut backoff=1000f32;
loop{
match luau_session.try_get_result(context).await{
//try again when the operation is not done
Err(rbx_asset::cloud::LuauSessionError::NotDone)=>(),
//return all other results
other_result=>return other_result,
}
println!("Operation not complete; waiting {:.0}ms...",backoff);
tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
backoff*=BACKOFF_MUL;
}
}
struct RunLuauConfig{
api_key:ApiKey,
script:String,
request:rbx_asset::cloud::LuauSessionLatestRequest,
}
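// Create a Luau execution session via the Open Cloud API, then poll until results are available.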
async fn run_luau(config:RunLuauConfig)->AResult<()>{
let context=CloudContext::new(config.api_key);
let session=rbx_asset::cloud::LuauSessionCreate{
script:&config.script,
user:None,
timeout:None,
binaryInput:None,
enableBinaryOutput:None,
binaryOutputUri:None,
};
let response=context.create_luau_session(&config.request,session).await?;
dbg!(&response);
let result=get_luau_result_exp_backoff(&context,&response).await?;
dbg!(&result);
Ok(())
}