Compare commits: 1 commit (38106f92d5) — creations-... / rbx-dom

Cargo.lock (generated, 821 lines): file diff suppressed because it is too large.
@@ -1,7 +1,7 @@
workspace = { members = ["rbx_asset", "rox_compiler"] }
[package]
name = "asset-tool"
version = "0.4.12"
version = "0.4.11"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -15,7 +15,7 @@ lazy-regex = "3.1.0"
rbx_asset = { path = "rbx_asset" }
rbx_binary = "1.0.0"
rbx_dom_weak = "3.0.0"
rbx_reflection_database = "1.0.3"
rbx_reflection_database = "1.0.1"
rbx_xml = "1.0.0"
rox_compiler = { path = "rox_compiler" }
serde_json = "1.0.111"

@@ -1,6 +1,6 @@
[package]
name = "rbx_asset"
version = "0.4.10"
version = "0.4.3"
edition = "2021"
publish = ["strafesnet"]
repository = "https://git.itzana.me/StrafesNET/asset-tool"

@@ -10,22 +10,10 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
default = ["gzip", "default-tls"]
gzip = ["dep:flate2"]

default-tls = ["reqwest/default-tls"]
rustls-tls = ["reqwest/rustls-tls"]

[dependencies]
bytes = "1.10.1"
chrono = { version = "0.4.38", features = ["serde"] }
flate2 = { version = "1.0.29", optional = true }
reqwest = { version = "0.12.4", features = [
	"json", "multipart",
	# default features
	"charset", "http2", "system-proxy"
], default-features = false }
flate2 = "1.0.29"
reqwest = { version = "0.12.4", features = ["json","multipart"] }
serde = { version = "1.0.199", features = ["derive"] }
serde_json = "1.0.111"
url = "2.5.0"

@@ -1,44 +0,0 @@
use reqwest::Body;

pub trait ContentType:Into<Body>{
	fn content_type(&self)->&'static str;
}

#[derive(Clone,Copy,Debug)]
pub struct Json<T>(pub(crate)T);
impl<T:Into<Body>> From<Json<T>> for Body{
	fn from(Json(value):Json<T>)->Self{
		value.into()
	}
}
impl<T:Into<Body>> ContentType for Json<T>{
	fn content_type(&self)->&'static str{
		"application/json"
	}
}

#[derive(Clone,Copy,Debug)]
pub struct Text<T>(pub(crate)T);
impl<T:Into<Body>> From<Text<T>> for Body{
	fn from(Text(value):Text<T>)->Self{
		value.into()
	}
}
impl<T:Into<Body>> ContentType for Text<T>{
	fn content_type(&self)->&'static str{
		"text/plain"
	}
}

#[derive(Clone,Copy,Debug)]
pub struct Binary<T>(pub(crate)T);
impl<T:Into<Body>> From<Binary<T>> for Body{
	fn from(Binary(value):Binary<T>)->Self{
		value.into()
	}
}
impl<T:Into<Body>> ContentType for Binary<T>{
	fn content_type(&self)->&'static str{
		"application/octet-stream"
	}
}
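The deleted `body.rs` tied each request body to a fixed `Content-Type` at the type level: a wrapper implements `Into<Body>` plus a `content_type()` string, so the header and the payload cannot drift apart. A minimal sketch of how such a wrapper is consumed when building a request — `post_typed` is an illustrative helper mirroring the `Context::post` signature change shown later in this diff, not an API of the crate:

```rust
use reqwest::Body;

// Trait as defined in the deleted body.rs: a body that knows its MIME type.
pub trait ContentType: Into<Body> {
    fn content_type(&self) -> &'static str;
}

// Hypothetical helper: the wrapper supplies both the payload and the matching
// Content-Type header, so callers cannot mismatch them.
async fn post_typed<B: ContentType>(
    client: &reqwest::Client,
    url: reqwest::Url,
    api_key: &str,
    body: B,
) -> Result<reqwest::Response, reqwest::Error> {
    client
        .post(url)
        .header("x-api-key", api_key)
        .header("Content-Type", body.content_type())
        .body(body)
        .send()
        .await
}
```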
@@ -1,6 +1,4 @@
use crate::body::{Binary,ContentType,Json};
use crate::util::{serialize_u64,deserialize_u64,response_ok};
use crate::types::{ResponseError,MaybeGzippedBytes};
use crate::{ResponseError,ReaderType,maybe_gzip_decode,read_readable};

#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]

@@ -67,8 +65,8 @@ pub struct UpdateAssetRequest{
#[derive(Clone,Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub enum Creator{
	userId(#[serde(deserialize_with="deserialize_u64",serialize_with="serialize_u64")]u64),
	groupId(#[serde(deserialize_with="deserialize_u64",serialize_with="serialize_u64")]u64),
	userId(String),//u64 string
	groupId(String),//u64 string
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]

@@ -118,8 +116,8 @@ impl std::fmt::Display for UpdateError{
}
impl std::error::Error for UpdateError{}

struct GetAssetOperationRequest<'a>{
	operation_id:&'a str,
struct GetAssetOperationRequest{
	operation_id:String,
}
pub struct GetAssetLatestRequest{
	pub asset_id:u64,

@@ -148,19 +146,14 @@ pub struct GetAssetLatestRequest{
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetResponse{
	//u64 wrapped in quotes wohoo!!
	#[serde(deserialize_with="deserialize_u64")]
	#[serde(serialize_with="serialize_u64")]
	pub assetId:u64,
	pub assetId:String,//u64 wrapped in quotes wohoo!!
	pub assetType:AssetType,
	pub creationContext:CreationContext,
	pub description:Option<String>,
	pub displayName:String,
	pub path:String,
	pub revisionCreateTime:chrono::DateTime<chrono::Utc>,
	#[serde(deserialize_with="deserialize_u64")]
	#[serde(serialize_with="serialize_u64")]
	pub revisionId:u64,
	pub revisionId:String,//u64
	pub moderationResult:ModerationResult,
	pub icon:Option<String>,
	#[serde(default)]
@@ -173,9 +166,10 @@ pub struct GetAssetVersionRequest{
}
#[derive(Debug)]
pub enum GetError{
	Parse(url::ParseError),
	ParseError(url::ParseError),
	Response(ResponseError),
	Reqwest(reqwest::Error),
	IO(std::io::Error)
}
impl std::fmt::Display for GetError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
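The `GetError` variant rename (`Parse` vs `ParseError`) touches one instance of an error idiom repeated throughout both API modules: an enum naming each failure source, a `Display` impl that reuses the `Debug` form, and a marker `std::error::Error` impl. A standalone copy of the shape, for reference:

```rust
#[derive(Debug)]
pub enum ExampleError {
    Parse(url::ParseError),  // URL construction failed
    Reqwest(reqwest::Error), // transport-level failure
    IO(std::io::Error),      // local read/write failure
}
impl std::fmt::Display for ExampleError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Debug formatting doubles as the human-readable message.
        write!(f, "{self:?}")
    }
}
impl std::error::Error for ExampleError {}
```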
@@ -196,22 +190,15 @@ impl AssetLocation{
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug,serde::Deserialize)]
|
||||
#[allow(nonstandard_style,dead_code)]
|
||||
pub struct AssetMetadata{
|
||||
pub metadataType:u32,
|
||||
pub value:String,
|
||||
}
|
||||
#[derive(Debug,serde::Deserialize)]
|
||||
#[allow(nonstandard_style,dead_code)]
|
||||
pub struct AssetLocationInfo{
|
||||
pub location:Option<AssetLocation>,
|
||||
pub requestId:String,
|
||||
pub IsHashDynamic:bool,
|
||||
pub IsCopyrightProtected:bool,
|
||||
pub isArchived:bool,
|
||||
pub assetTypeId:u32,
|
||||
#[serde(default)]
|
||||
pub assetMetadatas:Vec<AssetMetadata>,
|
||||
pub isRecordable:bool,
|
||||
}
|
||||
|
||||
pub struct AssetVersionsRequest{
|
||||
@@ -321,119 +308,13 @@ impl RobloxOperation{
|
||||
pub async fn try_get_reponse(&self,context:&Context)->Result<serde_json::Value,OperationError>{
|
||||
context.get_asset_operation(GetAssetOperationRequest{
|
||||
operation_id:self.operation_id()
|
||||
.ok_or(OperationError::NoOperationId)?,
|
||||
.ok_or(OperationError::NoOperationId)?
|
||||
.to_owned(),
|
||||
}).await.map_err(OperationError::Get)?
|
||||
.response.ok_or(OperationError::NotDone)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum LuauSessionError{
|
||||
Get(GetError),
|
||||
Unspecified,
|
||||
NotDone,
|
||||
NoOutput,
|
||||
NoError,
|
||||
}
|
||||
impl std::fmt::Display for LuauSessionError{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for LuauSessionError{}
|
||||
#[derive(Debug,serde::Serialize)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct LuauSessionCreate<'a>{
|
||||
pub script:&'a str,
|
||||
#[serde(skip_serializing_if="Option::is_none")]
|
||||
pub user:Option<&'a str>,
|
||||
#[serde(skip_serializing_if="Option::is_none")]
|
||||
pub timeout:Option<&'a str>,
|
||||
#[serde(skip_serializing_if="Option::is_none")]
|
||||
pub binaryInput:Option<&'a str>,
|
||||
#[serde(skip_serializing_if="Option::is_none")]
|
||||
pub enableBinaryOutput:Option<bool>,
|
||||
#[serde(skip_serializing_if="Option::is_none")]
|
||||
pub binaryOutputUri:Option<&'a str>,
|
||||
}
|
||||
#[derive(Debug,serde::Deserialize)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub enum LuauSessionState{
|
||||
STATE_UNSPECIFIED,
|
||||
PROCESSING,
|
||||
COMPLETE,
|
||||
FAILED,
|
||||
}
|
||||
#[derive(Debug,serde::Deserialize)]
|
||||
pub struct LuauError{
|
||||
pub code:String,
|
||||
pub message:String,
|
||||
}
|
||||
#[derive(Debug,serde::Deserialize)]
|
||||
pub struct LuauResults{
|
||||
pub results:Vec<serde_json::Value>,
|
||||
}
|
||||
#[derive(Debug,serde::Deserialize)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct LuauSessionResponse{
|
||||
path:String,
|
||||
#[serde(deserialize_with="deserialize_u64")]
|
||||
pub user:u64,
|
||||
pub state:LuauSessionState,
|
||||
pub script:String,
|
||||
pub error:Option<LuauError>,
|
||||
pub output:Option<LuauResults>,
|
||||
pub binaryInput:String,
|
||||
pub enableBinaryOutput:bool,
|
||||
pub binaryOutputUri:String,
|
||||
}
|
||||
impl LuauSessionResponse{
|
||||
pub fn path(&self)->&str{
|
||||
&self.path
|
||||
}
|
||||
pub async fn try_get_result(&self,context:&Context)->Result<Result<LuauResults,LuauError>,LuauSessionError>{
|
||||
let response=context.get_luau_session(self).await.map_err(LuauSessionError::Get)?;
|
||||
match response.state{
|
||||
LuauSessionState::STATE_UNSPECIFIED=>Err(LuauSessionError::Unspecified),
|
||||
LuauSessionState::PROCESSING=>Err(LuauSessionError::NotDone),
|
||||
LuauSessionState::COMPLETE=>Ok(Ok(response.output.ok_or(LuauSessionError::NoOutput)?)),
|
||||
LuauSessionState::FAILED=>Ok(Err(response.error.ok_or(LuauSessionError::NoError)?)),
|
||||
}
|
||||
}
|
||||
}
|
||||
pub trait AsSessionPath{
|
||||
fn into_session_path(&self)->impl AsRef<str>;
|
||||
}
|
||||
impl AsSessionPath for LuauSessionResponse{
|
||||
fn into_session_path(&self)->impl AsRef<str>{
|
||||
&self.path
|
||||
}
|
||||
}
|
||||
pub struct LuauSessionLatestRequest{
|
||||
pub universe_id:u64,
|
||||
pub place_id:u64,
|
||||
}
|
||||
impl AsSessionPath for LuauSessionLatestRequest{
|
||||
fn into_session_path(&self)->impl AsRef<str>{
|
||||
let universe_id=self.universe_id;
|
||||
let place_id=self.place_id;
|
||||
format!("universes/{universe_id}/places/{place_id}/luau-execution-session-tasks")
|
||||
}
|
||||
}
|
||||
pub struct LuauSessionVersionRequest{
|
||||
pub universe_id:u64,
|
||||
pub place_id:u64,
|
||||
pub version_id:u64,
|
||||
}
|
||||
impl AsSessionPath for LuauSessionVersionRequest{
|
||||
fn into_session_path(&self)->impl AsRef<str>{
|
||||
let universe_id=self.universe_id;
|
||||
let place_id=self.place_id;
|
||||
let version_id=self.version_id;
|
||||
format!("universes/{universe_id}/places/{place_id}/versions/{version_id}/luau-execution-session-tasks")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ApiKey(String);
|
||||
impl ApiKey{
|
||||
@@ -463,10 +344,9 @@ impl Context{
|
||||
.header("x-api-key",self.api_key.as_str())
|
||||
.send().await
|
||||
}
|
||||
async fn post(&self,url:url::Url,body:impl ContentType)->Result<reqwest::Response,reqwest::Error>{
|
||||
async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,reqwest::Error>{
|
||||
self.client.post(url)
|
||||
.header("x-api-key",self.api_key.as_str())
|
||||
.header("Content-Type",body.content_type())
|
||||
.body(body)
|
||||
.send().await
|
||||
}
|
||||
@@ -495,7 +375,7 @@ impl Context{
|
||||
.text("request",request_config)
|
||||
.part("fileContent",part);
|
||||
|
||||
let operation=response_ok(
|
||||
let operation=crate::response_ok(
|
||||
self.post_form(url,form).await.map_err(CreateError::Reqwest)?
|
||||
).await.map_err(CreateError::Response)?
|
||||
.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?;
|
||||
@@ -514,7 +394,7 @@ impl Context{
|
||||
.text("request",request_config)
|
||||
.part("fileContent",reqwest::multipart::Part::bytes(body));
|
||||
|
||||
let operation=response_ok(
|
||||
let operation=crate::response_ok(
|
||||
self.patch_form(url,form).await.map_err(UpdateError::Reqwest)?
|
||||
).await.map_err(UpdateError::Response)?
|
||||
.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?;
|
||||
@@ -523,86 +403,70 @@ impl Context{
|
||||
operation,
|
||||
})
|
||||
}
|
||||
async fn get_asset_operation(&self,config:GetAssetOperationRequest<'_>)->Result<RobloxOperation,GetError>{
|
||||
async fn get_asset_operation(&self,config:GetAssetOperationRequest)->Result<RobloxOperation,GetError>{
|
||||
let raw_url=format!("https://apis.roblox.com/assets/v1/operations/{}",config.operation_id);
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
|
||||
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.json::<RobloxOperation>().await.map_err(GetError::Reqwest)
|
||||
}
|
||||
pub async fn create_luau_session(&self,config:&impl AsSessionPath,session:LuauSessionCreate<'_>)->Result<LuauSessionResponse,CreateError>{
|
||||
let raw_url=format!("https://apis.roblox.com/cloud/v2/{}",config.into_session_path().as_ref());
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(CreateError::Parse)?;
|
||||
|
||||
let body=serde_json::to_string(&session).map_err(CreateError::Serialize)?;
|
||||
|
||||
response_ok(
|
||||
self.post(url,Json(body)).await.map_err(CreateError::Reqwest)?
|
||||
).await.map_err(CreateError::Response)?
|
||||
.json::<LuauSessionResponse>().await.map_err(CreateError::Reqwest)
|
||||
}
|
||||
pub async fn get_luau_session(&self,config:&impl AsSessionPath)->Result<LuauSessionResponse,GetError>{
|
||||
let raw_url=format!("https://apis.roblox.com/cloud/v2/{}",config.into_session_path().as_ref());
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
|
||||
|
||||
response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.json::<LuauSessionResponse>().await.map_err(GetError::Reqwest)
|
||||
}
|
||||
pub async fn get_asset_info(&self,config:GetAssetLatestRequest)->Result<AssetResponse,GetError>{
|
||||
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.asset_id);
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
|
||||
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.json::<AssetResponse>().await.map_err(GetError::Reqwest)
|
||||
}
|
||||
pub async fn get_asset_version_info(&self,config:GetAssetVersionRequest)->Result<AssetResponse,GetError>{
|
||||
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions/{}",config.asset_id,config.version);
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
|
||||
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.json::<AssetResponse>().await.map_err(GetError::Reqwest)
|
||||
}
|
||||
pub async fn get_asset_location(&self,config:GetAssetLatestRequest)->Result<AssetLocationInfo,GetError>{
|
||||
let raw_url=format!("https://apis.roblox.com/asset-delivery-api/v1/assetId/{}",config.asset_id);
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
|
||||
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.json().await.map_err(GetError::Reqwest)
|
||||
}
|
||||
pub async fn get_asset_version_location(&self,config:GetAssetVersionRequest)->Result<AssetLocationInfo,GetError>{
|
||||
let raw_url=format!("https://apis.roblox.com/asset-delivery-api/v1/assetId/{}/version/{}",config.asset_id,config.version);
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::Parse)?;
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
|
||||
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.json().await.map_err(GetError::Reqwest)
|
||||
}
pub async fn get_asset(&self,config:&AssetLocation)->Result<MaybeGzippedBytes,GetError>{
	let url=reqwest::Url::parse(config.location()).map_err(GetError::Parse)?;
pub async fn get_asset(&self,config:&AssetLocation)->Result<Vec<u8>,GetError>{
	let url=reqwest::Url::parse(config.location()).map_err(GetError::ParseError)?;

	let bytes=response_ok(
	let body=crate::response_ok(
		self.get(url).await.map_err(GetError::Reqwest)?
	).await.map_err(GetError::Response)?
	.bytes().await.map_err(GetError::Reqwest)?;

	Ok(MaybeGzippedBytes::new(bytes))
	match maybe_gzip_decode(std::io::Cursor::new(body)){
		Ok(ReaderType::GZip(readable))=>read_readable(readable),
		Ok(ReaderType::Raw(readable))=>read_readable(readable),
		Err(e)=>Err(e),
	}.map_err(GetError::IO)
}
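One side of this hunk hands back the raw bytes wrapped in `MaybeGzippedBytes` and defers decoding to the caller; the other decodes eagerly and returns `Vec<u8>`. Either way the underlying check is the same gzip magic-number sniff. A self-contained sketch of that check, illustrative only and mirroring the `to_vec` helper removed elsewhere in this diff:

```rust
use std::io::Read;
use flate2::read::GzDecoder;

// Decode a response body that may or may not be gzip-compressed, keyed on the
// gzip magic bytes 0x1f 0x8b at the start of the buffer.
fn decompress_if_gzipped(body: &[u8]) -> std::io::Result<Vec<u8>> {
    match body.get(0..2) {
        Some(b"\x1f\x8b") => {
            let mut out = Vec::new();
            GzDecoder::new(body).read_to_end(&mut out)?;
            Ok(out)
        }
        _ => Ok(body.to_vec()),
    }
}
```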
pub async fn get_asset_versions(&self,config:AssetVersionsRequest)->Result<AssetVersionsResponse,AssetVersionsError>{
|
||||
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions",config.asset_id);
|
||||
let url=reqwest::Url::parse(raw_url.as_str()).map_err(AssetVersionsError::ParseError)?;
|
||||
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(AssetVersionsError::Reqwest)?
|
||||
).await.map_err(AssetVersionsError::Response)?
|
||||
.json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)
|
||||
@@ -617,7 +481,7 @@ impl Context{
|
||||
}
|
||||
}
|
||||
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(InventoryPageError::Reqwest)?
|
||||
).await.map_err(InventoryPageError::Response)?
|
||||
.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)
|
||||
@@ -631,8 +495,8 @@ impl Context{
|
||||
query.append_pair("versionType","Published");
|
||||
}
|
||||
|
||||
response_ok(
|
||||
self.post(url,Binary(body)).await.map_err(UpdateError::Reqwest)?
|
||||
crate::response_ok(
|
||||
self.post(url,body).await.map_err(UpdateError::Reqwest)?
|
||||
).await.map_err(UpdateError::Response)?
|
||||
.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
use crate::body::{ContentType,Json};
|
||||
use crate::util::response_ok;
|
||||
use crate::types::{ResponseError,MaybeGzippedBytes};
|
||||
use crate::{ResponseError,ReaderType,maybe_gzip_decode,read_readable};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum PostError{
|
||||
@@ -93,6 +91,7 @@ pub enum GetError{
|
||||
ParseError(url::ParseError),
|
||||
Response(ResponseError),
|
||||
Reqwest(reqwest::Error),
|
||||
IO(std::io::Error)
|
||||
}
|
||||
impl std::fmt::Display for GetError{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
@@ -141,11 +140,10 @@ impl GetAssetV2Location{
|
||||
pub struct GetAssetV2Info{
|
||||
pub locations:Vec<GetAssetV2Location>,
|
||||
pub requestId:String,
|
||||
pub IsHashDynamic:bool,
|
||||
pub IsCopyrightProtected:bool,
|
||||
pub isArchived:bool,
|
||||
pub assetTypeId:u32,
|
||||
pub isRecordable:Option<bool>,
|
||||
pub IsHashDynamic:Option<bool>,
|
||||
pub IsCopyrightProtected:Option<bool>,
|
||||
}
|
||||
|
||||
pub struct GetAssetV2{
|
||||
@@ -284,21 +282,21 @@ pub struct UserInventoryPageRequest{
|
||||
#[derive(serde::Deserialize,serde::Serialize)]
|
||||
#[allow(nonstandard_style,dead_code)]
|
||||
pub struct UserInventoryItemOwner{
|
||||
pub userId:u64,
|
||||
pub username:String,
|
||||
pub buildersClubMembershipType:String,
|
||||
userId:u64,
|
||||
username:String,
|
||||
buildersClubMembershipType:u64,
|
||||
}
|
||||
#[derive(serde::Deserialize,serde::Serialize)]
|
||||
#[allow(nonstandard_style,dead_code)]
|
||||
pub struct UserInventoryItem{
|
||||
pub userAssetId:u64,
|
||||
pub assetId:u64,
|
||||
pub assetName:String,
|
||||
pub collectibleItemId:Option<String>,
|
||||
pub collectibleItemInstanceId:Option<String>,
|
||||
pub owner:UserInventoryItemOwner,
|
||||
pub created:chrono::DateTime<chrono::Utc>,
|
||||
pub updated:chrono::DateTime<chrono::Utc>,
|
||||
userAssetId:u64,
|
||||
assetId:u64,
|
||||
assetName:String,
|
||||
collectibleItemId:Option<String>,
|
||||
collectibleItemInstanceId:Option<String>,
|
||||
owner:UserInventoryItemOwner,
|
||||
created:chrono::DateTime<chrono::Utc>,
|
||||
updated:chrono::DateTime<chrono::Utc>,
|
||||
}
|
||||
#[derive(serde::Deserialize,serde::Serialize)]
|
||||
#[allow(nonstandard_style,dead_code)]
|
||||
@@ -308,58 +306,6 @@ pub struct UserInventoryPageResponse{
|
||||
pub data:Vec<UserInventoryItem>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetAssetsPermissionsError{
|
||||
Parse(url::ParseError),
|
||||
JSONEncode(serde_json::Error),
|
||||
Patch(PostError),
|
||||
Response(ResponseError),
|
||||
Reqwest(reqwest::Error),
|
||||
}
|
||||
impl std::fmt::Display for SetAssetsPermissionsError{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for SetAssetsPermissionsError{}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
#[allow(nonstandard_style)]
|
||||
struct AssetPermissions{
|
||||
assetId:u64,
|
||||
grantToDependencies:bool,//true
|
||||
}
|
||||
#[derive(serde::Serialize)]
|
||||
#[allow(nonstandard_style)]
|
||||
struct SetAssetsPermissions<'a>{
|
||||
subjectType:&'a str,// "Universe"
|
||||
subjectId:&'a str,// "4422715291"
|
||||
action:&'a str,// "Use",
|
||||
enableDeepAccessCheck:bool,//true,
|
||||
requests:&'a [AssetPermissions],
|
||||
}
|
||||
pub struct SetAssetsPermissionsRequest<'a>{
|
||||
pub universe_id:u64,
|
||||
pub asset_ids:&'a [u64],
|
||||
}
|
||||
impl SetAssetsPermissionsRequest<'_>{
|
||||
fn serialize(&self)->Result<String,serde_json::Error>{
|
||||
let ref requests:Vec<_>=self.asset_ids.iter().map(|&asset_id|AssetPermissions{
|
||||
assetId:asset_id,
|
||||
grantToDependencies:true,
|
||||
}).collect();
|
||||
let ref subject_id=self.universe_id.to_string();
|
||||
let ref permissions=SetAssetsPermissions{
|
||||
subjectType:"Universe",
|
||||
subjectId:subject_id,
|
||||
action:"Use",
|
||||
enableDeepAccessCheck:true,
|
||||
requests,
|
||||
};
|
||||
serde_json::to_string(permissions)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Cookie(String);
|
||||
impl Cookie{
@@ -410,29 +356,6 @@ impl Context{

	Ok(resp)
}
async fn patch(&self,url:url::Url,body:impl ContentType+Clone)->Result<reqwest::Response,PostError>{
	let mut resp=self.client.patch(url.clone())
		.header("Cookie",self.cookie.as_str())
		.header("Content-Type",body.content_type())
		.body(body.clone())
		.send().await.map_err(PostError::Reqwest)?;

	//This is called a CSRF challenge apparently
	if resp.status()==reqwest::StatusCode::FORBIDDEN{
		if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
			resp=self.client.patch(url)
				.header("X-CSRF-Token",csrf_token)
				.header("Cookie",self.cookie.as_str())
				.header("Content-Type",body.content_type())
				.body(body)
				.send().await.map_err(PostError::Reqwest)?;
		}else{
			Err(PostError::CSRF)?;
		}
	}

	Ok(resp)
}
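The removed `patch` helper above shows the cookie API's CSRF handshake: the first attempt may come back `403 Forbidden` carrying an `X-CSRF-Token` header, and the request must be replayed with that token attached. A hedged generalization of the same flow, with an illustrative helper name and a request-builder closure standing in for the concrete `patch`/`post` calls:

```rust
// Run a request once; if the API answers 403 with an X-CSRF-Token challenge,
// replay the same request with the token attached. Hypothetical helper, not
// part of the crate.
async fn with_csrf_retry(
    build: impl Fn() -> reqwest::RequestBuilder,
) -> Result<reqwest::Response, reqwest::Error> {
    let resp = build().send().await?;
    if resp.status() == reqwest::StatusCode::FORBIDDEN {
        if let Some(token) = resp.headers().get("X-CSRF-Token") {
            return build().header("X-CSRF-Token", token.clone()).send().await;
        }
    }
    Ok(resp)
}
```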
pub async fn create(&self,config:CreateRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,CreateError>{
|
||||
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(CreateError::ParseError)?;
|
||||
//url borrow scope
|
||||
@@ -448,7 +371,7 @@ impl Context{
|
||||
query.append_pair("groupId",group_id.to_string().as_str());
|
||||
}
|
||||
}
|
||||
let response=response_ok(
|
||||
let response=crate::response_ok(
|
||||
self.post(url,body).await.map_err(CreateError::PostError)?
|
||||
).await.map_err(CreateError::Response)?;
|
||||
|
||||
@@ -500,7 +423,7 @@ impl Context{
|
||||
query.append_pair("groupId",group_id.to_string().as_str());
|
||||
}
|
||||
}
|
||||
let response=response_ok(
|
||||
let response=crate::response_ok(
|
||||
self.post(url,body).await.map_err(UploadError::PostError)?
|
||||
).await.map_err(UploadError::Response)?;
|
||||
|
||||
@@ -526,7 +449,7 @@ impl Context{
|
||||
})
|
||||
}
|
||||
}
|
||||
pub async fn get_asset(&self,config:GetAssetRequest)->Result<MaybeGzippedBytes,GetError>{
|
||||
pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
|
||||
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
|
||||
//url borrow scope
|
||||
{
|
||||
@@ -536,13 +459,16 @@ impl Context{
|
||||
query.append_pair("version",version.to_string().as_str());
|
||||
}
|
||||
}
|
||||
|
||||
let bytes=response_ok(
|
||||
let body=crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.bytes().await.map_err(GetError::Reqwest)?;
|
||||
|
||||
Ok(MaybeGzippedBytes::new(bytes))
|
||||
match maybe_gzip_decode(std::io::Cursor::new(body)){
|
||||
Ok(ReaderType::GZip(readable))=>read_readable(readable),
|
||||
Ok(ReaderType::Raw(readable))=>read_readable(readable),
|
||||
Err(e)=>Err(e),
|
||||
}.map_err(GetError::IO)
|
||||
}
|
||||
pub async fn get_asset_v2(&self,config:GetAssetRequest)->Result<GetAssetV2,GetAssetV2Error>{
|
||||
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v2/asset").map_err(GetAssetV2Error::ParseError)?;
|
||||
@@ -554,7 +480,7 @@ impl Context{
|
||||
query.append_pair("version",version.to_string().as_str());
|
||||
}
|
||||
}
|
||||
let response=response_ok(
|
||||
let response=crate::response_ok(
|
||||
self.get(url).await.map_err(GetAssetV2Error::Reqwest)?
|
||||
).await.map_err(GetAssetV2Error::Response)?;
|
||||
|
||||
@@ -574,24 +500,28 @@ impl Context{
|
||||
info,
|
||||
})
|
||||
}
|
||||
pub async fn get_asset_v2_download(&self,config:&GetAssetV2Location)->Result<MaybeGzippedBytes,GetError>{
|
||||
pub async fn get_asset_v2_download(&self,config:&GetAssetV2Location)->Result<Vec<u8>,GetError>{
|
||||
let url=reqwest::Url::parse(config.location.as_str()).map_err(GetError::ParseError)?;
|
||||
|
||||
let bytes=response_ok(
|
||||
let body=crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.bytes().await.map_err(GetError::Reqwest)?;
|
||||
|
||||
Ok(MaybeGzippedBytes::new(bytes))
|
||||
match maybe_gzip_decode(std::io::Cursor::new(body)){
|
||||
Ok(ReaderType::GZip(readable))=>read_readable(readable),
|
||||
Ok(ReaderType::Raw(readable))=>read_readable(readable),
|
||||
Err(e)=>Err(e),
|
||||
}.map_err(GetError::IO)
|
||||
}
|
||||
pub async fn get_asset_details(&self,config:GetAssetDetailsRequest)->Result<AssetDetails,GetError>{
|
||||
let url=reqwest::Url::parse(format!("https://economy.roblox.com/v2/assets/{}/details",config.asset_id).as_str()).map_err(GetError::ParseError)?;
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(GetError::Reqwest)?
|
||||
).await.map_err(GetError::Response)?
|
||||
.json().await.map_err(GetError::Reqwest)
|
||||
}
|
||||
pub async fn get_asset_versions_page(&self,config:&AssetVersionsPageRequest)->Result<AssetVersionsPageResponse,PageError>{
|
||||
pub async fn get_asset_versions_page(&self,config:AssetVersionsPageRequest)->Result<AssetVersionsPageResponse,PageError>{
|
||||
let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(PageError::ParseError)?;
|
||||
//url borrow scope
|
||||
{
|
||||
@@ -603,7 +533,7 @@ impl Context{
|
||||
query.append_pair("cursor",cursor);
|
||||
}
|
||||
}
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(PageError::Reqwest)?
|
||||
).await.map_err(PageError::Response)?
|
||||
.json::<AssetVersionsPageResponse>().await.map_err(PageError::Reqwest)
|
||||
@@ -618,7 +548,7 @@ impl Context{
|
||||
query.append_pair("cursor",cursor);
|
||||
}
|
||||
}
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(PageError::Reqwest)?
|
||||
).await.map_err(PageError::Response)?
|
||||
.json::<CreationsPageResponse>().await.map_err(PageError::Reqwest)
|
||||
@@ -632,21 +562,9 @@ impl Context{
|
||||
query.append_pair("cursor",cursor);
|
||||
}
|
||||
}
|
||||
response_ok(
|
||||
crate::response_ok(
|
||||
self.get(url).await.map_err(PageError::Reqwest)?
|
||||
).await.map_err(PageError::Response)?
|
||||
.json::<UserInventoryPageResponse>().await.map_err(PageError::Reqwest)
|
||||
}
|
||||
/// Used to enable an asset to be loaded onto a group game.
|
||||
pub async fn set_assets_permissions(&self,config:SetAssetsPermissionsRequest<'_>)->Result<(),SetAssetsPermissionsError>{
|
||||
let url=reqwest::Url::parse("https://apis.roblox.com/asset-permissions-api/v1/assets/permissions").map_err(SetAssetsPermissionsError::Parse)?;
|
||||
|
||||
let body=config.serialize().map_err(SetAssetsPermissionsError::JSONEncode)?;
|
||||
|
||||
response_ok(
|
||||
self.patch(url,Json(body)).await.map_err(SetAssetsPermissionsError::Patch)?
|
||||
).await.map_err(SetAssetsPermissionsError::Response)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,56 @@
pub mod cloud;
pub mod cookie;
pub mod types;
mod body;
mod util;

#[allow(dead_code)]
#[derive(Debug)]
pub struct StatusCodeWithUrlAndBody{
	pub status_code:reqwest::StatusCode,
	pub url:url::Url,
	pub body:String,
}
#[derive(Debug)]
pub enum ResponseError{
	Reqwest(reqwest::Error),
	StatusCodeWithUrlAndBody(StatusCodeWithUrlAndBody),
}
impl std::fmt::Display for ResponseError{
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		write!(f,"{self:?}")
	}
}
impl std::error::Error for ResponseError{}
// lazy function to draw out meaningful info from http response on failure
pub(crate) async fn response_ok(response:reqwest::Response)->Result<reqwest::Response,ResponseError>{
	let status_code=response.status();
	if status_code.is_success(){
		Ok(response)
	}else{
		let url=response.url().to_owned();
		let bytes=response.bytes().await.map_err(ResponseError::Reqwest)?;
		let body=String::from_utf8_lossy(&bytes).to_string();
		Err(ResponseError::StatusCodeWithUrlAndBody(StatusCodeWithUrlAndBody{
			status_code,
			url,
			body,
		}))
	}
}

//idk how to do this better
pub(crate) enum ReaderType<R:std::io::Read>{
	GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
	Raw(std::io::BufReader<R>),
}
pub(crate) fn maybe_gzip_decode<R:std::io::Read>(input:R)->std::io::Result<ReaderType<R>>{
	let mut buf=std::io::BufReader::new(input);
	let peek=std::io::BufRead::fill_buf(&mut buf)?;
	match &peek[0..2]{
		b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
		_=>Ok(ReaderType::Raw(buf)),
	}
}
pub(crate) fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
	let mut contents=Vec::new();
	readable.read_to_end(&mut contents)?;
	Ok(contents)
}

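These crate-private helpers replace the gzip handling that used to live in `types.rs`. A quick round-trip sketch of how they fit together inside the crate, assuming the `flate2` dependency is available:

```rust
// Gzip some bytes, then confirm maybe_gzip_decode + read_readable recover them.
fn gzip_roundtrip_demo() -> std::io::Result<()> {
    use std::io::Write;
    let mut encoder =
        flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
    encoder.write_all(b"hello")?;
    let gzipped = encoder.finish()?;

    let decoded = match maybe_gzip_decode(std::io::Cursor::new(gzipped))? {
        ReaderType::GZip(reader) => read_readable(reader)?,
        ReaderType::Raw(reader) => read_readable(reader)?,
    };
    assert_eq!(decoded, b"hello");
    Ok(())
}
```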
@@ -1,67 +0,0 @@
#[derive(Debug)]
pub struct UrlAndBody{
	pub url:url::Url,
	pub body:String,
}
#[derive(Debug)]
pub enum ResponseError{
	Reqwest(reqwest::Error),
	Details{
		status_code:reqwest::StatusCode,
		url_and_body:Box<UrlAndBody>,
	},
}
impl std::fmt::Display for ResponseError{
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		write!(f,"{self:?}")
	}
}
impl std::error::Error for ResponseError{}

#[cfg(feature="gzip")]
use flate2::read::GzDecoder;

/// Some bytes that might be gzipped. Use the read_with or to_vec methods to transparently decode gzip.
pub struct MaybeGzippedBytes{
	bytes:bytes::Bytes,
}
impl MaybeGzippedBytes{
	pub(crate) fn new(bytes:bytes::Bytes)->Self{
		Self{bytes}
	}
	pub fn into_inner(self)->bytes::Bytes{
		self.bytes
	}
	/// get a reference to the bytes, ignoring gzip decoding
	pub fn as_raw_ref(&self)->&[u8]{
		self.bytes.as_ref()
	}
	/// Transparently decode gzip data, if present (intermediate allocation)
	#[cfg(feature="gzip")]
	pub fn to_vec(&self)->std::io::Result<Vec<u8>>{
		use std::io::Read;
		match self.bytes.get(0..2){
			Some(b"\x1f\x8b")=>{
				let mut buf=Vec::new();
				GzDecoder::new(self.bytes.as_ref()).read_to_end(&mut buf)?;
				Ok(buf)
			},
			_=>Ok(self.bytes.to_vec())
		}
	}
	/// Read the bytes with the provided decoders.
	/// The idea is to make a function that is generic over std::io::Read
	/// and pass the same function to both closures.
	/// This two closure hack must be done because of the different concrete types.
	#[cfg(feature="gzip")]
	pub fn read_with<'a,ReadGzip,ReadRaw,T>(&'a self,read_gzip:ReadGzip,read_raw:ReadRaw)->T
	where
		ReadGzip:Fn(GzDecoder<&'a [u8]>)->T,
		ReadRaw:Fn(&'a [u8])->T,
	{
		match self.bytes.get(0..2){
			Some(b"\x1f\x8b")=>read_gzip(GzDecoder::new(self.bytes.as_ref())),
			_=>read_raw(self.bytes.as_ref())
		}
	}
}
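The `read_with` doc comment describes a "two closure" pattern: write one function generic over `std::io::Read` and pass it as both arguments, so the gzipped and raw cases share code despite having different concrete reader types (this is how `main.rs` calls it with `load_dom` later in this diff). A small illustrative caller, assuming the `gzip` feature is enabled:

```rust
use std::io::Read;

// Read the possibly-gzipped payload into a String, passing the same generic
// reader function for both the GzDecoder and the raw-slice case.
fn read_to_string_demo(bytes: &MaybeGzippedBytes) -> std::io::Result<String> {
    fn read_all(mut reader: impl Read) -> std::io::Result<String> {
        let mut out = String::new();
        reader.read_to_string(&mut out)?;
        Ok(out)
    }
    bytes.read_with(read_all, read_all)
}
```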
@@ -1,39 +0,0 @@
use crate::types::{ResponseError,UrlAndBody};

// lazy function to draw out meaningful info from http response on failure
pub(crate) async fn response_ok(response:reqwest::Response)->Result<reqwest::Response,ResponseError>{
	let status_code=response.status();
	if status_code.is_success(){
		Ok(response)
	}else{
		let url=response.url().to_owned();
		let bytes=response.bytes().await.map_err(ResponseError::Reqwest)?;
		let body=String::from_utf8_lossy(&bytes).to_string();
		Err(ResponseError::Details{
			status_code,
			url_and_body:Box::new(UrlAndBody{url,body})
		})
	}
}

use serde::de::{Error,Unexpected};
use serde::{Deserializer,Serializer};

struct U64StringVisitor;
impl serde::de::Visitor<'_> for U64StringVisitor{
	type Value=u64;
	fn expecting(&self,formatter:&mut std::fmt::Formatter)->std::fmt::Result{
		write!(formatter,"string value with int")
	}
	fn visit_str<E:Error>(self,v:&str)->Result<Self::Value,E>{
		v.parse().map_err(|_|E::invalid_value(Unexpected::Str(v),&"u64"))
	}
}

pub(crate) fn deserialize_u64<'de,D:Deserializer<'de>>(deserializer:D)->Result<u64,D::Error>{
	deserializer.deserialize_any(U64StringVisitor)
}

pub(crate) fn serialize_u64<S:Serializer>(v:&u64,serializer:S)->Result<S::Ok,S::Error>{
	serializer.serialize_str(v.to_string().as_str())
}
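These serde helpers exist because the API returns 64-bit ids as quoted strings ("u64 wrapped in quotes", per the comments in `AssetResponse`). A hypothetical struct showing how they are wired up inside the crate and what JSON they accept and produce (the field name is illustrative):

```rust
#[derive(Debug, serde::Deserialize, serde::Serialize)]
struct Revision {
    // The API sends "12345" (a string); the field is still a plain u64 in Rust.
    #[serde(deserialize_with = "deserialize_u64", serialize_with = "serialize_u64")]
    revision_id: u64,
}

fn u64_string_demo() -> serde_json::Result<()> {
    let parsed: Revision = serde_json::from_str(r#"{"revision_id":"12345"}"#)?;
    assert_eq!(parsed.revision_id, 12345);
    // Serializing puts the quotes back on.
    assert_eq!(serde_json::to_string(&parsed)?, r#"{"revision_id":"12345"}"#);
    Ok(())
}
```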
@@ -16,6 +16,7 @@ use crate::common::{sanitize,Style,PropertiesOverride};
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
pub enum QueryResolveError{
	NotFound,//0 results
	Ambiguous,//>1 results

@@ -1,5 +1,4 @@
use std::path::PathBuf;
use rbx_dom_weak::ustr;
use rbx_dom_weak::types::Ref;
use crate::common::{sanitize,Style,PropertiesOverride};

@@ -91,7 +90,7 @@ fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:PathBuf,node:&TreeNode,node_name

	if let Some(item)=dom.get_by_ref(node.referent){
		//TODO: delete disabled scripts
		if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get(&ustr("Source")){
		if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get(&rbx_dom_weak::ustr("Source")){
			if properties.is_some(){
				//rox style
				let source=properties.to_string()+source.as_str();

src/main.rs (503 lines changed)

@@ -1,8 +1,7 @@
use std::io::Read;
use std::path::{Path,PathBuf};
use std::{io::Read,path::PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::{anyhow,Result as AResult};
use futures::{StreamExt,TryStreamExt};
use futures::StreamExt;
use rbx_asset::cloud::{ApiKey,Context as CloudContext};
use rbx_asset::cookie::{Cookie,Context as CookieContext,AssetVersion,CreationsItem};

@@ -10,7 +9,6 @@ type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;
const CONCURRENT_FS:usize=64;

#[derive(Parser)]
#[command(author,version,about,long_about=None)]

@@ -26,12 +24,9 @@ enum Commands{
	DownloadHistory(DownloadHistorySubcommand),
	Download(DownloadSubcommand),
	DownloadVersion(DownloadVersionSubcommand),
	DownloadLocation(DownloadLocationSubcommand),
	DownloadVersionLocation(DownloadVersionLocationSubcommand),
	DownloadVersionV2(DownloadVersionSubcommand),
	DownloadDecompile(DownloadDecompileSubcommand),
	DownloadCreationsJson(DownloadCreationsJsonSubcommand),
	DownloadCreationsHistory(DownloadCreationsHistorySubcommand),
	DownloadUserInventoryJson(DownloadUserInventoryJsonSubcommand),
	CreateAsset(CreateAssetSubcommand),
	CreateAssetMedia(CreateAssetMediaSubcommand),

@@ -45,7 +40,6 @@ enum Commands{
	Decompile(DecompileSubcommand),
	DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
	DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
	RunLuau(RunLuauSubcommand),
}

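Nearly every subcommand below takes its credentials through the same trio of flags (`--api-key-literal`, `--api-key-envvar`, `--api-key-file`, and the analogous `--cookie-*` set), grouped so that exactly one must be provided. A minimal sketch of that clap pattern in isolation, reusing the same attributes as the structs in this file:

```rust
use clap::{Args, Parser};

#[derive(Parser)]
struct Cli {
    #[command(flatten)]
    api_key: ApiKeyArgs,
}

// All three flags share the "api_key" group: the group makes them mutually
// exclusive, and required=true on each means at least one must be present.
#[derive(Args)]
struct ApiKeyArgs {
    #[arg(long, group = "api_key", required = true)]
    api_key_literal: Option<String>,
    #[arg(long, group = "api_key", required = true)]
    api_key_envvar: Option<String>,
    #[arg(long, group = "api_key", required = true)]
    api_key_file: Option<std::path::PathBuf>,
}
```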
/// Download a range of assets from the asset version history. Download summary is saved to `output_folder/versions.json`, and can be optionally used to download only new versions the next time.
|
||||
@@ -110,32 +104,6 @@ struct DownloadVersionSubcommand{
|
||||
#[arg(long)]
|
||||
asset_version:Option<u64>,
|
||||
}
|
||||
/// Get download urls for a list of assets by id.
|
||||
#[derive(Args)]
|
||||
struct DownloadLocationSubcommand{
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_literal:Option<String>,
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_envvar:Option<String>,
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_file:Option<PathBuf>,
|
||||
#[arg(required=true)]
|
||||
asset_ids:Vec<AssetID>,
|
||||
}
|
||||
/// Get a download url for a single asset by id, optionally specifying the version to download.
|
||||
#[derive(Args)]
|
||||
struct DownloadVersionLocationSubcommand{
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_literal:Option<String>,
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_envvar:Option<String>,
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_file:Option<PathBuf>,
|
||||
#[arg(long)]
|
||||
asset_id:AssetID,
|
||||
#[arg(long)]
|
||||
asset_version:Option<u64>,
|
||||
}
|
||||
/// Download the list of asset ids (not the assets themselves) created by a group or user. The output is written to `output_folder/versions.json`
|
||||
#[derive(Args)]
|
||||
struct DownloadCreationsJsonSubcommand{
|
||||
@@ -151,8 +119,6 @@ struct DownloadCreationsJsonSubcommand{
|
||||
group_id:Option<u64>,
|
||||
#[arg(long,group="owner",required=true)]
|
||||
user_id:Option<u64>,
|
||||
#[arg(long)]
|
||||
continue_from_cursor:Option<bool>,
|
||||
}
|
||||
/// Download the list of asset ids (not the assets themselves) in a user's inventory. The output is written to `output_folder/versions.json`
|
||||
#[derive(Args)]
|
||||
@@ -433,24 +399,6 @@ struct DownloadAndDecompileHistoryIntoGitSubcommand{
|
||||
#[arg(long)]
|
||||
write_scripts:Option<bool>,
|
||||
}
|
||||
/// Run a Luau script.
|
||||
#[derive(Args)]
|
||||
struct RunLuauSubcommand{
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_literal:Option<String>,
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_envvar:Option<String>,
|
||||
#[arg(long,group="api_key",required=true)]
|
||||
api_key_file:Option<PathBuf>,
|
||||
#[arg(long,group="script",required=true)]
|
||||
script_literal:Option<String>,
|
||||
#[arg(long,group="script",required=true)]
|
||||
script_file:Option<PathBuf>,
|
||||
#[arg(long)]
|
||||
universe_id:u64,
|
||||
#[arg(long)]
|
||||
place_id:u64,
|
||||
}
|
||||
|
||||
#[derive(Clone,Copy,Debug,clap::ValueEnum)]
|
||||
enum Style{
|
||||
@@ -541,27 +489,6 @@ async fn main()->AResult<()>{
|
||||
},
|
||||
).await
|
||||
},
|
||||
Commands::DownloadLocation(subcommand)=>{
|
||||
download_list_locations(
|
||||
api_key_from_args(
|
||||
subcommand.api_key_literal,
|
||||
subcommand.api_key_envvar,
|
||||
subcommand.api_key_file,
|
||||
).await?,
|
||||
&subcommand.asset_ids
|
||||
).await
|
||||
},
|
||||
Commands::DownloadVersionLocation(subcommand)=>{
|
||||
download_location(
|
||||
api_key_from_args(
|
||||
subcommand.api_key_literal,
|
||||
subcommand.api_key_envvar,
|
||||
subcommand.api_key_file,
|
||||
).await?,
|
||||
subcommand.asset_id,
|
||||
subcommand.asset_version,
|
||||
).await
|
||||
},
|
||||
Commands::DownloadVersionV2(subcommand)=>{
|
||||
let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
|
||||
download_version_v2(
|
||||
@@ -605,9 +532,7 @@ async fn main()->AResult<()>{
|
||||
subcommand.group_id,
|
||||
)?,
|
||||
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
|
||||
subcommand.continue_from_cursor.unwrap_or(false),
|
||||
).await,
|
||||
Commands::DownloadCreationsHistory(subcommand)=>subcommand.run().await,
|
||||
Commands::DownloadUserInventoryJson(subcommand)=>download_user_inventory_json(
|
||||
cookie_from_args(
|
||||
subcommand.cookie_literal,
|
||||
@@ -638,8 +563,8 @@ async fn main()->AResult<()>{
|
||||
subcommand.api_key_file,
|
||||
).await?,
|
||||
creator:match (subcommand.creator_user_id,subcommand.creator_group_id){
|
||||
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id),
|
||||
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id),
|
||||
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id.to_string()),
|
||||
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id.to_string()),
|
||||
other=>Err(anyhow!("Invalid creator {other:?}"))?,
|
||||
},
|
||||
input_file:subcommand.input_file,
|
||||
@@ -660,8 +585,8 @@ async fn main()->AResult<()>{
|
||||
subcommand.cookie_file,
|
||||
).await?,
|
||||
creator:match (subcommand.creator_user_id,subcommand.creator_group_id){
|
||||
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id),
|
||||
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id),
|
||||
(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id.to_string()),
|
||||
(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id.to_string()),
|
||||
other=>Err(anyhow!("Invalid creator {other:?}"))?,
|
||||
},
|
||||
description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
|
||||
@@ -764,21 +689,6 @@ async fn main()->AResult<()>{
|
||||
write_models:subcommand.write_models.unwrap_or(false),
|
||||
write_scripts:subcommand.write_scripts.unwrap_or(true),
|
||||
}).await,
|
||||
Commands::RunLuau(subcommand)=>run_luau(RunLuauConfig{
|
||||
api_key:api_key_from_args(
|
||||
subcommand.api_key_literal,
|
||||
subcommand.api_key_envvar,
|
||||
subcommand.api_key_file,
|
||||
).await?,
|
||||
script:match subcommand.script_literal{
|
||||
Some(script)=>script,
|
||||
None=>std::fs::read_to_string(subcommand.script_file.unwrap())?,
|
||||
},
|
||||
request:rbx_asset::cloud::LuauSessionLatestRequest{
|
||||
place_id:subcommand.place_id,
|
||||
universe_id:subcommand.universe_id,
|
||||
},
|
||||
}).await,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -846,10 +756,10 @@ async fn get_asset_exp_backoff(
	context:&CloudContext,
	asset_operation:&rbx_asset::cloud::AssetOperation
)->Result<rbx_asset::cloud::AssetResponse,rbx_asset::cloud::AssetOperationError>{
	const BACKOFF_MUL:f32=1.395_612_5;//exp(1/3)
	const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
	let mut backoff=1000f32;
	loop{
		match asset_operation.try_get_asset(context).await{
		match asset_operation.try_get_asset(&context).await{
			//try again when the operation is not done
			Err(rbx_asset::cloud::AssetOperationError::Operation(rbx_asset::cloud::OperationError::NotDone))=>(),
			//return all other results
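The hunk above only shows the polling match; the waiting half of `get_asset_exp_backoff` is not visible here. A hedged sketch of the overall shape — start at one second and multiply the delay by `exp(1/3)` each round until the operation reports completion (the sleep call and helper name are assumptions, not lines from this diff):

```rust
use std::time::Duration;

// Poll until `poll` yields Some(result), sleeping with exponential backoff.
async fn poll_with_backoff<T, E>(
    mut poll: impl FnMut() -> Result<Option<T>, E>,
) -> Result<T, E> {
    const BACKOFF_MUL: f32 = 1.395_612_5; // exp(1/3), as in the constant above
    let mut backoff_ms = 1000f32;
    loop {
        if let Some(done) = poll()? {
            return Ok(done);
        }
        tokio::time::sleep(Duration::from_millis(backoff_ms as u64)).await;
        backoff_ms *= BACKOFF_MUL;
    }
}
```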
@@ -892,7 +802,7 @@ struct CreateAssetMediasConfig{
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[expect(dead_code)]
|
||||
#[allow(dead_code)]
|
||||
enum CreateAssetMediasError{
|
||||
NoFileStem(PathBuf),
|
||||
IO(std::io::Error),
|
||||
@@ -907,7 +817,7 @@ impl std::fmt::Display for CreateAssetMediasError{
|
||||
impl std::error::Error for CreateAssetMediasError{}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[expect(dead_code)]
|
||||
#[allow(dead_code)]
|
||||
enum PollOperationError{
|
||||
CreateAssetMedias(CreateAssetMediasError),
|
||||
AssetOperation(rbx_asset::cloud::AssetOperationError),
|
||||
@@ -920,7 +830,7 @@ impl std::fmt::Display for PollOperationError{
|
||||
impl std::error::Error for PollOperationError{}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[expect(dead_code)]
|
||||
#[allow(dead_code)]
|
||||
enum DownloadDecalError{
|
||||
PollOperation(PollOperationError),
|
||||
ParseInt(std::num::ParseIntError),
|
||||
@@ -929,7 +839,6 @@ enum DownloadDecalError{
|
||||
NoFirstInstance,
|
||||
NoTextureProperty,
|
||||
TexturePropertyInvalid,
|
||||
TextureContentNotUri,
|
||||
}
|
||||
impl std::fmt::Display for DownloadDecalError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
@@ -993,25 +902,23 @@ async fn create_asset_medias(config:CreateAssetMediasConfig)->AResult<()>{
|
||||
let cookie_context=&cookie_context;
|
||||
async move{(path,
|
||||
async move{
|
||||
use rbx_dom_weak::ustr;
|
||||
let asset_response=asset_response_result.map_err(DownloadDecalError::PollOperation)?;
|
||||
let maybe_gzip=cookie_context.get_asset(rbx_asset::cookie::GetAssetRequest{
|
||||
asset_id:asset_response.assetId,
|
||||
let file=cookie_context.get_asset(rbx_asset::cookie::GetAssetRequest{
|
||||
asset_id:asset_response.assetId.parse().map_err(DownloadDecalError::ParseInt)?,
|
||||
version:None,
|
||||
}).await.map_err(DownloadDecalError::Get)?;
|
||||
let dom=maybe_gzip.read_with(load_dom,load_dom).map_err(DownloadDecalError::LoadDom)?;
|
||||
let dom=load_dom(std::io::Cursor::new(file)).map_err(DownloadDecalError::LoadDom)?;
|
||||
let instance=dom.get_by_ref(
|
||||
*dom.root().children().first().ok_or(DownloadDecalError::NoFirstInstance)?
|
||||
).ok_or(DownloadDecalError::NoFirstInstance)?;
|
||||
let texture=instance.properties.get(&ustr("TextureContent")).ok_or(DownloadDecalError::NoTextureProperty)?;
|
||||
let content=match texture{
|
||||
rbx_dom_weak::types::Variant::Content(content)=>content,
|
||||
let texture=instance.properties.get(&rbx_dom_weak::ustr("Texture")).ok_or(DownloadDecalError::NoTextureProperty)?;
|
||||
let asset_url=match texture{
|
||||
rbx_dom_weak::types::Variant::Content(content)=>match content.value(){
|
||||
rbx_dom_weak::types::ContentType::Uri(url)=>url.as_str().to_owned(),
|
||||
_=>Err(DownloadDecalError::TexturePropertyInvalid)?,
|
||||
}
|
||||
_=>Err(DownloadDecalError::TexturePropertyInvalid)?,
|
||||
};
|
||||
let asset_url=match content.value(){
|
||||
rbx_dom_weak::types::ContentType::Uri(uri)=>uri.clone(),
|
||||
_=>Err(DownloadDecalError::TextureContentNotUri)?,
|
||||
};
|
||||
Ok::<_,DownloadDecalError>((asset_response.displayName,asset_url))
|
||||
}
|
||||
.await)}
|
||||
@@ -1089,8 +996,8 @@ async fn asset_details(cookie:Cookie,asset_id:AssetID)->AResult<()>{
|
||||
|
||||
async fn download_version(cookie:Cookie,asset_id:AssetID,version:Option<u64>,dest:PathBuf)->AResult<()>{
|
||||
let context=CookieContext::new(cookie);
|
||||
let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version}).await?;
|
||||
tokio::fs::write(dest,maybe_gzip.to_vec()?).await?;
|
||||
let data=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version}).await?;
|
||||
tokio::fs::write(dest,data).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1102,9 +1009,9 @@ async fn download_version_v2(cookie:Cookie,asset_id:AssetID,version:Option<u64>,
|
||||
println!("version:{}",info.version);
|
||||
|
||||
let location=info.info.locations.first().ok_or(anyhow::Error::msg("No locations"))?;
|
||||
let maybe_gzip=context.get_asset_v2_download(location).await?;
|
||||
let data=context.get_asset_v2_download(location).await?;
|
||||
|
||||
tokio::fs::write(dest,maybe_gzip.to_vec()?).await?;
|
||||
tokio::fs::write(dest,data).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1119,142 +1026,40 @@ async fn download_list(cookie:Cookie,asset_id_file_map:AssetIDFileMap)->AResult<
	}))
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(|b:AResult<_>|async{
		match b{
			Ok((dest,maybe_gzip))=>if let Err(e)=async{tokio::fs::write(dest,maybe_gzip.to_vec()?).await}.await{
				eprintln!("fs error: {}",e);
			},
			Err(e)=>eprintln!("dl error: {}",e),
		}
	}).await;
		match b{
			Ok((dest,data))=>if let Err(e)=tokio::fs::write(dest,data).await{
				eprintln!("fs error: {}",e);
			},
			Err(e)=>eprintln!("dl error: {}",e),
		}
	}).await;
	Ok(())
}

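`download_list` drives all requests through one stream with `buffer_unordered`, so at most `CONCURRENT_REQUESTS` downloads are in flight and each result is written as it completes. A self-contained sketch of the same pattern with the download abstracted behind a closure (names here are illustrative, not the crate's API):

```rust
use futures::StreamExt;
use std::path::PathBuf;

const CONCURRENT_REQUESTS: usize = 32;

// Fetch every asset with bounded concurrency and write each one as it finishes.
async fn download_all(
    jobs: Vec<(PathBuf, u64)>,
    fetch: impl Fn(u64) -> futures::future::BoxFuture<'static, anyhow::Result<Vec<u8>>>,
) {
    futures::stream::iter(jobs.into_iter().map(|(dest, asset_id)| {
        let fut = fetch(asset_id);
        async move { (dest, fut.await) }
    }))
    .buffer_unordered(CONCURRENT_REQUESTS)
    .for_each(|(dest, result)| async move {
        match result {
            Ok(data) => {
                if let Err(e) = tokio::fs::write(dest, data).await {
                    eprintln!("fs error: {}", e);
                }
            }
            Err(e) => eprintln!("dl error: {}", e),
        }
    })
    .await;
}
```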
async fn download_list_locations(api_key:ApiKey,asset_id_file_map:&[u64])->AResult<()>{
|
||||
let context=CloudContext::new(api_key);
|
||||
futures::stream::iter(asset_id_file_map)
|
||||
.map(|&asset_id|
|
||||
context.get_asset_location(rbx_asset::cloud::GetAssetLatestRequest{asset_id})
|
||||
)
|
||||
.buffer_unordered(CONCURRENT_REQUESTS)
|
||||
.for_each(|result|async{
|
||||
match result{
|
||||
Ok(asset_location_info)=>match asset_location_info.location{
|
||||
Some(location)=>println!("{}",location.location()),
|
||||
None=>println!("This asset is private!"),
|
||||
},
|
||||
Err(e)=>eprintln!("dl error: {}",e),
|
||||
}
|
||||
}).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn download_location(api_key:ApiKey,asset_id:AssetID,version:Option<u64>)->AResult<()>{
|
||||
let context=CloudContext::new(api_key);
|
||||
let asset_location_info=match version{
|
||||
Some(version)=>context.get_asset_version_location(rbx_asset::cloud::GetAssetVersionRequest{asset_id,version}).await?,
|
||||
None=>context.get_asset_location(rbx_asset::cloud::GetAssetLatestRequest{asset_id}).await?,
async fn get_creations_pages(context:&CookieContext,owner:rbx_asset::cookie::Owner)->AResult<Vec<CreationsItem>>{
	let mut config=rbx_asset::cookie::CreationsPageRequest{
		owner,
		cursor:None,
	};
	match asset_location_info.location{
		Some(location)=>println!("{}",location.location()),
		None=>println!("This asset is private!"),
	}
	Ok(())
}

async fn get_creations_pages(
	context:&CookieContext,
	asset_list:&mut Vec<rbx_asset::cookie::CreationsItem>,
	config:&mut rbx_asset::cookie::CreationsPageRequest,
)->AResult<()>{
	let mut asset_list=Vec::new();
	loop{
		let mut page=context.get_creations_page(&config).await?;
		asset_list.append(&mut page.data);
		config.cursor=page.nextPageCursor;
		if config.cursor.is_none(){
		if page.nextPageCursor.is_none(){
			break;
		}
		config.cursor=page.nextPageCursor;
	}
	Ok(())
}

async fn download_creations_pages_from_checkpoint(context:&CookieContext,owner:rbx_asset::cookie::Owner,output_folder:&Path,continue_from_cursor:bool)->AResult<Vec<CreationsItem>>{
|
||||
let mut versions_path=output_folder.to_owned();
|
||||
versions_path.set_file_name("versions.json");
|
||||
let mut cursor_path=output_folder.to_owned();
|
||||
cursor_path.set_file_name("cursor");
|
||||
|
||||
let (mut asset_list,mut config)=if continue_from_cursor{
|
||||
// load state from files
|
||||
let (versions,cursor)=tokio::join!(
|
||||
tokio::fs::read(versions_path.as_path()),
|
||||
tokio::fs::read_to_string(cursor_path.as_path()),
|
||||
);
|
||||
// allow versions to not exist
|
||||
let (versions,cursor)=match (versions,cursor){
|
||||
// continue downloading
|
||||
(Ok(versions),Ok(cursor))=>(serde_json::from_slice(&versions)?,Some(cursor)),
|
||||
// already downloaded
|
||||
(Ok(versions),Err(e)) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>return Ok(serde_json::from_slice(&versions)?),
|
||||
// not downloaded
|
||||
(Err(e),result) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>{
|
||||
match result{
|
||||
Ok(_)=>{},
|
||||
Err(e) if matches!(e.kind(),std::io::ErrorKind::NotFound)=>{},
|
||||
Err(e)=>Err(e)?,
|
||||
}
|
||||
(Vec::new(),None)
|
||||
},
|
||||
// other errors
|
||||
(Ok(_),Err(e))=>Err(e)?,
|
||||
(Err(e),_)=>Err(e)?,
|
||||
};
|
||||
(
|
||||
versions,
|
||||
rbx_asset::cookie::CreationsPageRequest{
|
||||
owner,
|
||||
cursor,
|
||||
}
|
||||
)
|
||||
}else{
|
||||
// create new state
|
||||
(
|
||||
Vec::new(),
|
||||
rbx_asset::cookie::CreationsPageRequest{
|
||||
owner,
|
||||
cursor:None,
|
||||
}
|
||||
)
|
||||
};
|
||||
|
||||
get_creations_pages(&context,&mut asset_list,&mut config).await?;
|
||||
|
||||
let cursor_fut=async{
|
||||
if let Some(cursor)=config.cursor{
|
||||
println!("writing cursor state...");
|
||||
// there was a problem, write out cursor
|
||||
tokio::fs::write(cursor_path,cursor).await?;
|
||||
}else{
|
||||
// no cursor
|
||||
if let Err(e)=tokio::fs::remove_file(cursor_path).await{
|
||||
match e.kind(){
|
||||
std::io::ErrorKind::NotFound=>println!("Cannot delete cursor: file not found"),
|
||||
_=>Err(e)?,
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
};
|
||||
let versions_fut=tokio::fs::write(versions_path,serde_json::to_string(&asset_list)?);
|
||||
|
||||
tokio::try_join!(versions_fut,cursor_fut)?;
|
||||
|
||||
Ok(asset_list)
|
||||
}
|
||||
|
||||
async fn download_creations_json(cookie:Cookie,owner:rbx_asset::cookie::Owner,output_folder:PathBuf,continue_from_cursor:bool)->AResult<()>{
|
||||
async fn download_creations_json(cookie:Cookie,owner:rbx_asset::cookie::Owner,output_folder:PathBuf)->AResult<()>{
|
||||
let context=CookieContext::new(cookie);
|
||||
let item_list=get_creations_pages(&context,owner).await?;
|
||||
|
||||
download_creations_pages_from_checkpoint(&context,owner,output_folder.as_path(),continue_from_cursor).await?;
|
||||
let mut path=output_folder.clone();
|
||||
path.set_file_name("versions.json");
|
||||
tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1265,7 +1070,7 @@ async fn get_user_inventory_pages(
	config:&mut rbx_asset::cookie::UserInventoryPageRequest,
)->AResult<()>{
	loop{
		let page=context.get_user_inventory_page(config).await?;
		let page=context.get_user_inventory_page(&config).await?;
		asset_list.extend(page.data);
		config.cursor=page.nextPageCursor;
		if config.cursor.is_none(){
@@ -1279,7 +1084,7 @@ async fn download_user_inventory_json(cookie:Cookie,user_id:u64,output_folder:Pa
	let mut versions_path=output_folder.clone();
	versions_path.set_file_name("versions.json");
	let mut cursor_path=output_folder.clone();
	cursor_path.set_file_name("cursor");
	cursor_path.set_file_name("cursor.json");

	let context=CookieContext::new(cookie);

@@ -1327,163 +1132,18 @@ async fn download_user_inventory_json(cookie:Cookie,user_id:u64,output_folder:Pa
	Ok(())
}

/// Download all versions of all assets created by a group or user. The output is written to a folder structure in the output directory.
#[derive(Args)]
struct DownloadCreationsHistorySubcommand{
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	#[arg(long,group="api_key",required=true)]
	api_key_literal:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_envvar:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_file:Option<PathBuf>,
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long,group="owner",required=true)]
	group_id:Option<u64>,
	#[arg(long,group="owner",required=true)]
	user_id:Option<u64>,
	#[arg(long)]
	r#continue:Option<bool>,
}
impl DownloadCreationsHistorySubcommand{
	async fn run(self)->AResult<()>{
		download_creations_history(
			cookie_from_args(
				self.cookie_literal,
				self.cookie_envvar,
				self.cookie_file,
			).await?,
			api_key_from_args(
				self.api_key_literal,
				self.api_key_envvar,
				self.api_key_file,
			).await?,
			owner_from_args(
				self.user_id,
				self.group_id,
			)?,
			self.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			self.r#continue.unwrap_or(false),
		).await
	}
}
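// Example invocation (hypothetical flag and subcommand spelling, inferred from the
// clap attributes above; exactly one flag from each of the `cookie`, `api_key` and
// `owner` groups is required):
//   asset-tool download-creations-history \
//     --cookie-envvar RBXCOOKIE --api-key-envvar RBX_API_KEY \
//     --group-id 1234 --output-folder ./history --continue true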
async fn download_creations_history(cookie:Cookie,api_key:ApiKey,owner:rbx_asset::cookie::Owner,output_folder:PathBuf,r#continue:bool)->AResult<()>{

	let cookie_context=CookieContext::new(cookie);
	let cloud_context=CloudContext::new(api_key);

	// get list of all assets in inventory
	let asset_list=download_creations_pages_from_checkpoint(&cookie_context,owner,output_folder.as_path(),r#continue).await?;

	// create folder directories
	let asset_folders:Vec<PathBuf> ={
		futures::stream::iter(asset_list.iter().map(|asset|async{
			// create asset folder
			let mut asset_folder=output_folder.clone();
			asset_folder.push(asset.id.to_string());
			tokio::fs::create_dir_all(asset_folder.as_path()).await?;
			Ok::<_,anyhow::Error>(asset_folder)
		}))
		.buffered(CONCURRENT_FS)
		.try_collect().await?
	};

	#[expect(dead_code)]
	#[derive(Debug)]
	enum Error<'a>{
		NoLocations(Job<'a>),
		GetVersionLocationError(rbx_asset::cloud::GetError),
		GetError(rbx_asset::cloud::GetError),
		Io(std::io::Error),
	}
	#[derive(Clone,Copy,Debug)]
	struct Job<'a>{
		path:&'a PathBuf,
		asset_id:u64,
		asset_version:u64,
	}
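	// One Job per (asset, version) pair: the download stream below writes each
	// payload to `{path}/{asset_id}_v{asset_version}.rbxl` and skips the job when
	// that file already exists on disk.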
	let mut job_list=Vec::new();

	// create flattened futures stream to parallel download all asset versions
	for (path,asset) in asset_folders.iter().zip(asset_list){

		// save versions file
		let mut versions_path=path.to_owned();
		versions_path.push("versions.json");

		let version_history=if r#continue{
			let file=tokio::fs::read(versions_path.as_path()).await?;
			serde_json::from_slice(&file)?
		}else{
			println!("Downloading history for {} - {}",asset.id,asset.name);
			let version_history=get_version_history(&cookie_context,asset.id).await?;
			println!("Found {} versions",version_history.len());
			tokio::fs::write(versions_path,serde_json::to_string(&version_history)?).await?;
			version_history
		};

		job_list.extend(version_history.into_iter().map(|asset_version|
			Job{
				path,
				asset_id:asset.id,
				asset_version:asset_version.assetVersionNumber,
			}
		));
	}

	println!("Completed jobs list. Number of jobs: {}",job_list.len());

	futures::stream::iter(job_list).map(async|job|{
		let mut dest=job.path.to_owned();
		dest.push(format!("{}_v{}.rbxl",job.asset_id,job.asset_version));
		//if the file already exists, don't try downloading it again
		if tokio::fs::try_exists(dest.as_path()).await.map_err(Error::Io)?{
			return Ok(());
		}
		let location=cloud_context.get_asset_version_location(rbx_asset::cloud::GetAssetVersionRequest{
			asset_id:job.asset_id,
			version:job.asset_version,
		}).await.map_err(Error::GetVersionLocationError)?;
		let location=location.location.ok_or(Error::NoLocations(job))?;
		let downloaded=cloud_context.get_asset(&location).await.map_err(Error::GetError)?;
		tokio::fs::write(dest,downloaded.to_vec().map_err(Error::Io)?).await.map_err(Error::Io)?;
		Ok(())
	})
	.buffer_unordered(CONCURRENT_REQUESTS)
	.for_each(async|result|{
		match result{
			Ok(())=>{},
			Err(Error::NoLocations(job))=>println!("Job failed due to no locations: asset_id={} version={}",job.asset_id,job.asset_version),
			Err(e)=>println!("Error: {e:?}"),
		}
	}).await;

	println!("All jobs complete.");

	Ok(())
}
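// Overall flow above: checkpointed listing of creations, one folder per asset,
// one Job per asset version, then downloads with at most CONCURRENT_REQUESTS in
// flight; individual job failures are only printed so the remaining jobs keep going.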

async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
	let mut page_request=rbx_asset::cookie::AssetVersionsPageRequest{
		asset_id,
		cursor:None,
	};
	let mut cursor:Option<String>=None;
	let mut asset_list=Vec::new();
	loop{
		let mut page=context.get_asset_versions_page(&page_request).await?;
		let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id,cursor}).await?;
		asset_list.append(&mut page.data);
		if page.nextPageCursor.is_none(){
			break;
		}
		page_request.cursor=page.nextPageCursor;
		cursor=page.nextPageCursor;
	}
	asset_list.sort_by_key(|a|a.assetVersionNumber);
	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
	Ok(asset_list)
}
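// Versions are accumulated across cursor pages until nextPageCursor is None and
// then sorted ascending by assetVersionNumber, so callers receive them in
// version order regardless of how the endpoint pages them.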

@@ -1539,12 +1199,9 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
	let mut join_set=tokio::task::JoinSet::new();

	//poll paged list of all asset versions
	let mut page_request=rbx_asset::cookie::AssetVersionsPageRequest{
		asset_id:config.asset_id,
		cursor:None,
	};
	let mut cursor:Option<String>=None;
	loop{
		let mut page=context.get_asset_versions_page(&page_request).await?;
		let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id:config.asset_id,cursor}).await?;
		let context=&context;
		let output_folder=config.output_folder.clone();
		let data=&page.data;
@@ -1574,9 +1231,9 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
			let mut path=output_folder.clone();
			path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
			join_set.spawn(async move{
				let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
				let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;

				tokio::fs::write(path,maybe_gzip.to_vec()?).await?;
				tokio::fs::write(path,file).await?;

				Ok::<_,anyhow::Error>(())
			});
@@ -1603,10 +1260,10 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
		}else{
			asset_list.append(&mut page.data);
		}
		page_request.cursor=page.nextPageCursor;
		cursor=page.nextPageCursor;
	}

	asset_list.sort_by_key(|a|a.assetVersionNumber);
	asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));

	let mut path=config.output_folder.clone();
	path.set_file_name("versions.json");
@@ -1620,7 +1277,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
}

#[derive(Debug)]
#[expect(dead_code)]
#[allow(dead_code)]
enum LoadDomError{
	IO(std::io::Error),
	RbxBinary(rbx_binary::DecodeError),
@@ -1696,9 +1353,9 @@ struct DownloadDecompileConfig{

async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
	let context=CookieContext::new(config.cookie);
	let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
	let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;

	let dom=maybe_gzip.read_with(load_dom,load_dom)?;
	let dom=load_dom(std::io::Cursor::new(file))?;
	let context=rox_compiler::DecompiledContext::from_dom(dom);

	context.write_files(rox_compiler::WriteConfig{
@@ -1878,8 +1535,8 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
		.map(|asset_version|{
			let context=context.clone();
			tokio::task::spawn(async move{
				let maybe_gzip=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
				let dom=maybe_gzip.read_with(load_dom,load_dom)?;
				let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
				let dom=load_dom(std::io::Cursor::new(file))?;
				Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
			})
		}))
@@ -2003,43 +1660,3 @@ async fn compile_upload_place(config:CompileUploadPlaceConfig)->AResult<()>{
	println!("UploadResponse={:?}",resp);
	Ok(())
}

async fn get_luau_result_exp_backoff(
	context:&CloudContext,
	luau_session:&rbx_asset::cloud::LuauSessionResponse
)->Result<Result<rbx_asset::cloud::LuauResults,rbx_asset::cloud::LuauError>,rbx_asset::cloud::LuauSessionError>{
	const BACKOFF_MUL:f32=1.395_612_5;//exp(1/3)
	let mut backoff=1000f32;
	loop{
		match luau_session.try_get_result(context).await{
			//try again when the operation is not done
			Err(rbx_asset::cloud::LuauSessionError::NotDone)=>(),
			//return all other results
			other_result=>return other_result,
		}
		println!("Operation not complete; waiting {:.0}ms...",backoff);
		tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
		backoff*=BACKOFF_MUL;
	}
}
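// Backoff arithmetic: the wait starts at 1000ms and is multiplied by
// BACKOFF_MUL=exp(1/3)≈1.3956 after every poll, giving ~1396ms, ~1948ms, ~2718ms, ...
// i.e. the delay grows by a factor of e≈2.718 every three polls.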
struct RunLuauConfig{
	api_key:ApiKey,
	script:String,
	request:rbx_asset::cloud::LuauSessionLatestRequest,
}
async fn run_luau(config:RunLuauConfig)->AResult<()>{
	let context=CloudContext::new(config.api_key);
	let session=rbx_asset::cloud::LuauSessionCreate{
		script:&config.script,
		user:None,
		timeout:None,
		binaryInput:None,
		enableBinaryOutput:None,
		binaryOutputUri:None,
	};
	let response=context.create_luau_session(&config.request,session).await?;
	dbg!(&response);
	let result=get_luau_result_exp_backoff(&context,&response).await?;
	dbg!(&result);
	Ok(())
}