Compare commits

..

20 Commits

Author SHA1 Message Date
4cb8fd1c7d
support output-folder option for git-related features
Repository-based logic seems to automatically be made relative to the top-level path the repo was initialised with, so we can actually support an output folder with no issues here
2024-10-01 20:44:58 +01:00
b575274116
fix decompiling history into git
Why does this work and the version with the directory doesn't? yeah idk. But hey, this one successfully stages the files, so ill take it
2024-10-01 20:32:59 +01:00
607f964928 Merge pull request 'add documentation, update dependencies' (#6) from staging into master
Reviewed-on: StrafesNET/asset-tool#6
2024-09-17 04:44:30 +00:00
bc11997e88 v0.4.6 documentation + update deps 2024-09-16 21:44:10 -07:00
4b5ceef5d4 update deps 2024-09-16 21:40:13 -07:00
00f8cddde0 write some documentation 2024-08-24 12:22:21 -07:00
87993d0f52 improve directions 2024-08-17 11:31:14 -07:00
8dc7c96f2d Merge pull request 'Create multiple assets concurrently' (#5) from staging into master
Reviewed-on: StrafesNET/asset-tool#5
2024-08-17 18:00:27 +00:00
40c166fcca mistake from clippy changes! 2024-08-17 10:57:23 -07:00
9c52957a03 spaces 2024-08-17 10:46:10 -07:00
f5d50a0de2 optimize backoff math 2024-08-17 10:46:10 -07:00
61c2026bff v0.4.5 create-asset-medias 2024-08-17 09:44:06 -07:00
0259284940 update deps 2024-08-17 09:44:06 -07:00
7870723b31 RobloxOperation is private
hide this garbage inside the api module bruh
2024-08-17 09:44:06 -07:00
136dbb3054 plumb path to the end in an ugly way (patches welcome) 2024-08-16 23:48:17 -07:00
b9d2a1fbc7 redo it without filter_map - errors are propagated through 2024-08-16 23:47:46 -07:00
57a163dfb1 create-asset-medias 2024-08-16 23:29:03 -07:00
68d751f81f Merge pull request 'use old api for download, error on http status' (#4) from staging into master
Reviewed-on: StrafesNET/asset-tool#4
2024-07-16 18:21:06 +00:00
c2052be036 Merge pull request 'use old api for compile-upload-asset' (#3) from staging into master
Reviewed-on: StrafesNET/asset-tool#3
2024-07-10 17:18:05 +00:00
2e9485dea6 Merge pull request 'add old asset upload api' (#2) from staging into master
Reviewed-on: StrafesNET/asset-tool#2
2024-07-10 16:45:17 +00:00
6 changed files with 429 additions and 392 deletions

592
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
workspace = { members = ["rbx_asset", "rox_compiler"] } workspace = { members = ["rbx_asset", "rox_compiler"] }
[package] [package]
name = "asset-tool" name = "asset-tool"
version = "0.4.4" version = "0.4.6"
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -2,4 +2,20 @@
[![Build Status](https://ci.itzana.me/api/badges/StrafesNET/asset-tool/status.svg?ref=refs/heads/master)](https://ci.itzana.me/StrafesNET/asset-tool) [![Build Status](https://ci.itzana.me/api/badges/StrafesNET/asset-tool/status.svg?ref=refs/heads/master)](https://ci.itzana.me/StrafesNET/asset-tool)
For uploading and downloading roblox assets. For uploading and downloading roblox assets.
See [releases](https://git.itzana.me/StrafesNET/asset-tool/releases) for downloads.
To get started, you will need an api key and/or cookie depending on which command you use. Api keys can be created from the open cloud section on the creator hub, cookies must be extracted from a browser session. Do not share your cookie file with anyone or use it with tools that you do not trust, as it gives unrestricted permissions to do any account actions on your behalf. The api keys are significantly more safe because you can choose exactly what permissions the given key has, and which ip addresses can use it.
The help text lists available commands:
`asset-tool --help`
For help with a specific command:
`asset-tool download --help`
Example incantation to download a list of models:
`asset-tool download --cookie-file Documents\mycookie.txt 255299419 1213190363`

View File

@@ -14,26 +14,27 @@ pub struct CreateAssetRequest{
pub displayName:String, pub displayName:String,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum CreateAssetResponseGetAssetError{ pub enum AssetOperationError{
Operation(OperationError), Operation(OperationError),
Serialize(serde_json::Error), Serialize(serde_json::Error),
} }
impl std::fmt::Display for CreateAssetResponseGetAssetError{ impl std::fmt::Display for AssetOperationError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{ fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}") write!(f,"{self:?}")
} }
} }
impl std::error::Error for CreateAssetResponseGetAssetError{} impl std::error::Error for AssetOperationError{}
pub struct CreateAssetResponse{ #[derive(Debug)]
pub struct AssetOperation{
operation:RobloxOperation, operation:RobloxOperation,
} }
impl CreateAssetResponse{ impl AssetOperation{
pub async fn try_get_asset(&self,context:&CloudContext)->Result<AssetResponse,CreateAssetResponseGetAssetError>{ pub async fn try_get_asset(&self,context:&CloudContext)->Result<AssetResponse,AssetOperationError>{
serde_json::from_value( serde_json::from_value(
self.operation self.operation
.try_get_reponse(context).await .try_get_reponse(context).await
.map_err(CreateAssetResponseGetAssetError::Operation)? .map_err(AssetOperationError::Operation)?
).map_err(CreateAssetResponseGetAssetError::Serialize) ).map_err(AssetOperationError::Serialize)
} }
} }
#[derive(Debug)] #[derive(Debug)]
@@ -111,8 +112,8 @@ impl std::fmt::Display for UpdateError{
} }
impl std::error::Error for UpdateError{} impl std::error::Error for UpdateError{}
pub struct GetAssetOperationRequest{ struct GetAssetOperationRequest{
pub operation_id:String, operation_id:String,
} }
pub struct GetAssetInfoRequest{ pub struct GetAssetInfoRequest{
pub asset_id:u64, pub asset_id:u64,
@@ -260,7 +261,7 @@ impl std::fmt::Display for OperationError{
impl std::error::Error for OperationError{} impl std::error::Error for OperationError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)] #[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct RobloxOperation{ struct RobloxOperation{
pub path:Option<String>, pub path:Option<String>,
pub metadata:Option<String>, pub metadata:Option<String>,
pub done:Option<bool>, pub done:Option<bool>,
@@ -354,7 +355,7 @@ impl CloudContext{
.multipart(form) .multipart(form)
.send().await .send().await
} }
pub async fn create_asset(&self,config:CreateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<CreateAssetResponse,CreateError>{ pub async fn create_asset(&self,config:CreateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<AssetOperation,CreateError>{
let url=reqwest::Url::parse("https://apis.roblox.com/assets/v1/assets").map_err(CreateError::Parse)?; let url=reqwest::Url::parse("https://apis.roblox.com/assets/v1/assets").map_err(CreateError::Parse)?;
let request_config=serde_json::to_string(&config).map_err(CreateError::Serialize)?; let request_config=serde_json::to_string(&config).map_err(CreateError::Serialize)?;
@@ -367,15 +368,16 @@ impl CloudContext{
.text("request",request_config) .text("request",request_config)
.part("fileContent",part); .part("fileContent",part);
let operation=self.post_form(url,form).await.map_err(CreateError::Reqwest)? let operation=self.post_form(url,form).await
.map_err(CreateError::Reqwest)?
.error_for_status().map_err(CreateError::Reqwest)? .error_for_status().map_err(CreateError::Reqwest)?
.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?; .json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?;
Ok(CreateAssetResponse{ Ok(AssetOperation{
operation, operation,
}) })
} }
pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,UpdateError>{ pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<AssetOperation,UpdateError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId); let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?; let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
@@ -385,13 +387,17 @@ impl CloudContext{
.text("request",request_config) .text("request",request_config)
.part("fileContent",reqwest::multipart::Part::bytes(body)); .part("fileContent",reqwest::multipart::Part::bytes(body));
self.patch_form(url,form).await let operation=self.patch_form(url,form).await
.map_err(UpdateError::Reqwest)? .map_err(UpdateError::Reqwest)?
//roblox api documentation is very poor, just give the status code and drop the json //roblox api documentation is very poor, just give the status code and drop the json
.error_for_status().map_err(UpdateError::Reqwest)? .error_for_status().map_err(UpdateError::Reqwest)?
.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest) .json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?;
Ok(AssetOperation{
operation,
})
} }
pub async fn get_asset_operation(&self,config:GetAssetOperationRequest)->Result<RobloxOperation,GetError>{ async fn get_asset_operation(&self,config:GetAssetOperationRequest)->Result<RobloxOperation,GetError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/operations/{}",config.operation_id); let raw_url=format!("https://apis.roblox.com/assets/v1/operations/{}",config.operation_id);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?; let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;

View File

@@ -234,7 +234,7 @@ impl DecompiledContext{
WriteStackInstruction::Node(node,name_count)=>{ WriteStackInstruction::Node(node,name_count)=>{
//track properties that must be overriden to compile folder structure back into a place file //track properties that must be overriden to compile folder structure back into a place file
let mut properties=PropertiesOverride::default(); let mut properties=PropertiesOverride::default();
let has_children=node.children.is_empty(); let has_children=!node.children.is_empty();
match node.class{ match node.class{
Class::Folder=>(), Class::Folder=>(),
Class::ModuleScript=>(),//.lua files are ModuleScript by default Class::ModuleScript=>(),//.lua files are ModuleScript by default

View File

@@ -38,6 +38,7 @@ enum Commands{
DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand), DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
} }
/// Download a range of assets from the asset version history. Download summary is saved to `output_folder/versions.json`, and can be optionally used to download only new versions the next time.
#[derive(Args)] #[derive(Args)]
struct DownloadHistorySubcommand{ struct DownloadHistorySubcommand{
#[arg(long)] #[arg(long)]
@@ -57,6 +58,7 @@ struct DownloadHistorySubcommand{
#[arg(long)] #[arg(long)]
end_version:Option<u64>, end_version:Option<u64>,
} }
/// Download a single asset by id.
#[derive(Args)] #[derive(Args)]
struct DownloadSubcommand{ struct DownloadSubcommand{
#[arg(long,group="cookie",required=true)] #[arg(long,group="cookie",required=true)]
@@ -70,6 +72,7 @@ struct DownloadSubcommand{
#[arg(required=true)] #[arg(required=true)]
asset_ids:Vec<AssetID>, asset_ids:Vec<AssetID>,
} }
/// Download the list of asset ids (not the assets themselves) in a group inventory. The output is written to `output_folder/versions.json`
#[derive(Args)] #[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{ struct DownloadGroupInventoryJsonSubcommand{
#[arg(long,group="cookie",required=true)] #[arg(long,group="cookie",required=true)]
@@ -83,6 +86,7 @@ struct DownloadGroupInventoryJsonSubcommand{
#[arg(long)] #[arg(long)]
group:u64, group:u64,
} }
/// Upload a (.rbxm, .rbxmx) model file, creating a new asset. Can be any type of model, including modulescripts.
#[derive(Args)] #[derive(Args)]
struct CreateAssetSubcommand{ struct CreateAssetSubcommand{
#[arg(long,group="cookie",required=true)] #[arg(long,group="cookie",required=true)]
@@ -104,6 +108,7 @@ struct CreateAssetSubcommand{
#[arg(long)] #[arg(long)]
allow_comments:Option<bool>, allow_comments:Option<bool>,
} }
/// Upload a media file (.jpg, .png) to a new asset and print the asset id
#[derive(Args)] #[derive(Args)]
struct CreateAssetMediaSubcommand{ struct CreateAssetMediaSubcommand{
#[arg(long,group="api_key",required=true)] #[arg(long,group="api_key",required=true)]
@@ -128,8 +133,8 @@ struct CreateAssetMediaSubcommand{
#[arg(long)] #[arg(long)]
expected_price:Option<u64>, expected_price:Option<u64>,
} }
/// Upload multiple media files (.jpg, .png) Automatically detect the media type from file extension and generate asset name and description. If you want support for more file types (.fbx, .mp3, .ogg) it should be fairly straightforward, just ask.
#[derive(Args)] #[derive(Args)]
/// Automatically detect the media type from file extension and generate asset name and description
struct CreateAssetMediasSubcommand{ struct CreateAssetMediasSubcommand{
#[arg(long,group="api_key",required=true)] #[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>, api_key_literal:Option<String>,
@@ -154,6 +159,7 @@ struct CreateAssetMediasSubcommand{
expected_price:Option<u64>, expected_price:Option<u64>,
input_files:Vec<PathBuf>, input_files:Vec<PathBuf>,
} }
/// Upload a (.rbxm, .rbxmx) model file to an existing asset. Can be any type of model, including modulescripts.
#[derive(Args)] #[derive(Args)]
struct UpdateAssetSubcommand{ struct UpdateAssetSubcommand{
#[arg(long)] #[arg(long)]
@@ -177,6 +183,7 @@ struct UpdateAssetSubcommand{
#[arg(long)] #[arg(long)]
change_allow_comments:Option<bool>, change_allow_comments:Option<bool>,
} }
/// Upload a media file (.jpg, .png) to an existing asset.
#[derive(Args)] #[derive(Args)]
struct UpdateAssetMediaSubcommand{ struct UpdateAssetMediaSubcommand{
#[arg(long)] #[arg(long)]
@@ -190,6 +197,7 @@ struct UpdateAssetMediaSubcommand{
#[arg(long)] #[arg(long)]
input_file:PathBuf, input_file:PathBuf,
} }
/// Upload a place file (.rbxl, .rbxlx) to an existing place.
#[derive(Args)] #[derive(Args)]
struct UpdatePlaceSubcommand{ struct UpdatePlaceSubcommand{
#[arg(long)] #[arg(long)]
@@ -205,6 +213,7 @@ struct UpdatePlaceSubcommand{
#[arg(long)] #[arg(long)]
input_file:PathBuf, input_file:PathBuf,
} }
/// Take an input folder containing scripts and models and turn it into a roblox file. The two types of files (.rbxl: place, .rbxm: model) are actually the same file format, only the contents differ.
#[derive(Args)] #[derive(Args)]
struct CompileSubcommand{ struct CompileSubcommand{
#[arg(long)] #[arg(long)]
@@ -216,6 +225,7 @@ struct CompileSubcommand{
#[arg(long)] #[arg(long)]
template:Option<PathBuf>, template:Option<PathBuf>,
} }
/// Take an input folder containing scripts and models and turn it into a roblox file, then upload it to the specified asset id. Does not work for places.
#[derive(Args)] #[derive(Args)]
struct CompileUploadAssetSubcommand{ struct CompileUploadAssetSubcommand{
#[arg(long)] #[arg(long)]
@@ -235,6 +245,7 @@ struct CompileUploadAssetSubcommand{
#[arg(long)] #[arg(long)]
template:Option<PathBuf>, template:Option<PathBuf>,
} }
/// Take an input folder containing scripts and models and turn it into a roblox file, then upload it to the specified place id. Does not work for model asset ids.
#[derive(Args)] #[derive(Args)]
struct CompileUploadPlaceSubcommand{ struct CompileUploadPlaceSubcommand{
#[arg(long)] #[arg(long)]
@@ -254,6 +265,7 @@ struct CompileUploadPlaceSubcommand{
#[arg(long)] #[arg(long)]
template:Option<PathBuf>, template:Option<PathBuf>,
} }
/// Take a roblox file (.rbxm, .rbxl) and turn it into a folder containing scripts and models. Rox style means property overrides are written to the top of scripts, Rojo style means property overrides are written to the script file extension (Script.server.lua).
#[derive(Args)] #[derive(Args)]
struct DecompileSubcommand{ struct DecompileSubcommand{
#[arg(long)] #[arg(long)]
@@ -269,6 +281,7 @@ struct DecompileSubcommand{
#[arg(long)] #[arg(long)]
write_scripts:Option<bool>, write_scripts:Option<bool>,
} }
/// Download a model from the specified asset id, and decompile it into a folder in one swift motion. The model file is not saved to disk. This also works for places.
#[derive(Args)] #[derive(Args)]
struct DownloadDecompileSubcommand{ struct DownloadDecompileSubcommand{
#[arg(long,group="cookie",required=true)] #[arg(long,group="cookie",required=true)]
@@ -290,12 +303,13 @@ struct DownloadDecompileSubcommand{
#[arg(long)] #[arg(long)]
write_scripts:Option<bool>, write_scripts:Option<bool>,
} }
/// Take a folder of asset history (containing `versions.json`) and decompile each version into its own git commit. This must be run with the desired output folder as the current directory due to git2 limitations.
#[derive(Args)] #[derive(Args)]
struct DecompileHistoryIntoGitSubcommand{ struct DecompileHistoryIntoGitSubcommand{
#[arg(long)] #[arg(long)]
input_folder:PathBuf, input_folder:PathBuf,
//currently output folder must be the current folder due to git2 limitations #[arg(long)]
//output_folder:cli.output.unwrap(), output_folder:Option<PathBuf>,
#[arg(long)] #[arg(long)]
style:Style, style:Style,
#[arg(long)] #[arg(long)]
@@ -309,6 +323,7 @@ struct DecompileHistoryIntoGitSubcommand{
#[arg(long)] #[arg(long)]
write_scripts:Option<bool>, write_scripts:Option<bool>,
} }
/// Download asset history, download asset versions, decompile into folder, create a git commit for each version. This is a combination of two commands (download-history, decompile-history-into-git) except without intermediate files.
#[derive(Args)] #[derive(Args)]
struct DownloadAndDecompileHistoryIntoGitSubcommand{ struct DownloadAndDecompileHistoryIntoGitSubcommand{
#[arg(long)] #[arg(long)]
@@ -319,8 +334,8 @@ struct DownloadAndDecompileHistoryIntoGitSubcommand{
cookie_envvar:Option<String>, cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)] #[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>, cookie_file:Option<PathBuf>,
//currently output folder must be the current folder due to git2 limitations #[arg(long)]
//output_folder:cli.output.unwrap(), output_folder:Option<PathBuf>,
#[arg(long)] #[arg(long)]
style:Style, style:Style,
#[arg(long)] #[arg(long)]
@@ -546,7 +561,7 @@ async fn main()->AResult<()>{
git_committer_name:subcommand.git_committer_name, git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email, git_committer_email:subcommand.git_committer_email,
input_folder:subcommand.input_folder, input_folder:subcommand.input_folder,
output_folder:std::env::current_dir()?, output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
style:subcommand.style.rox(), style:subcommand.style.rox(),
write_template:subcommand.write_template.unwrap_or(false), write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false), write_models:subcommand.write_models.unwrap_or(false),
@@ -561,7 +576,7 @@ async fn main()->AResult<()>{
subcommand.cookie_file, subcommand.cookie_file,
).await?, ).await?,
asset_id:subcommand.asset_id, asset_id:subcommand.asset_id,
output_folder:std::env::current_dir()?, output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
style:subcommand.style.rox(), style:subcommand.style.rox(),
write_template:subcommand.write_template.unwrap_or(false), write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false), write_models:subcommand.write_models.unwrap_or(false),
@@ -624,20 +639,20 @@ struct CreateAssetMediaConfig{
async fn get_asset_exp_backoff( async fn get_asset_exp_backoff(
context:&CloudContext, context:&CloudContext,
create_asset_response:&rbx_asset::cloud::CreateAssetResponse asset_operation:&rbx_asset::cloud::AssetOperation
)->Result<rbx_asset::cloud::AssetResponse,rbx_asset::cloud::CreateAssetResponseGetAssetError>{ )->Result<rbx_asset::cloud::AssetResponse,rbx_asset::cloud::AssetOperationError>{
let mut backoff:u64=0; const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
let mut backoff=1000f32;
loop{ loop{
match create_asset_response.try_get_asset(&context).await{ match asset_operation.try_get_asset(&context).await{
//try again when the operation is not done //try again when the operation is not done
Err(rbx_asset::cloud::CreateAssetResponseGetAssetError::Operation(rbx_asset::cloud::OperationError::NotDone))=>(), Err(rbx_asset::cloud::AssetOperationError::Operation(rbx_asset::cloud::OperationError::NotDone))=>(),
//return all other results //return all other results
other_result=>return other_result, other_result=>return other_result,
} }
let wait=f32::exp(backoff as f32/3.0)*1000f32; println!("Operation not complete; waiting {:.0}ms...",backoff);
println!("Operation not complete; waiting {:.0}ms...",wait); tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
tokio::time::sleep(std::time::Duration::from_millis(wait as u64)).await; backoff*=BACKOFF_MUL;
backoff+=1;
} }
} }
@@ -672,9 +687,12 @@ struct CreateAssetMediasConfig{
} }
#[derive(Debug)] #[derive(Debug)]
#[allow(dead_code)]
enum CreateAssetMediasError{ enum CreateAssetMediasError{
NoFileStem(PathBuf), NoFileStem(PathBuf),
UnknownFourCC(Option<Vec<u8>>), IO(std::io::Error),
UnknownFourCC(Option<[u8;4]>),
Create(rbx_asset::cloud::CreateError),
} }
impl std::fmt::Display for CreateAssetMediasError{ impl std::fmt::Display for CreateAssetMediasError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>)->std::fmt::Result{ fn fmt(&self, f: &mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -684,7 +702,22 @@ impl std::fmt::Display for CreateAssetMediasError{
impl std::error::Error for CreateAssetMediasError{} impl std::error::Error for CreateAssetMediasError{}
#[derive(Debug)] #[derive(Debug)]
#[allow(dead_code)]
enum PollOperationError{
CreateAssetMedias(CreateAssetMediasError),
AssetOperation(rbx_asset::cloud::AssetOperationError),
}
impl std::fmt::Display for PollOperationError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for PollOperationError{}
#[derive(Debug)]
#[allow(dead_code)]
enum DownloadDecalError{ enum DownloadDecalError{
PollOperation(PollOperationError),
ParseInt(std::num::ParseIntError), ParseInt(std::num::ParseIntError),
Get(rbx_asset::cookie::GetError), Get(rbx_asset::cookie::GetError),
LoadDom(LoadDomError), LoadDom(LoadDomError),
@@ -693,7 +726,7 @@ enum DownloadDecalError{
TexturePropertyInvalid, TexturePropertyInvalid,
} }
impl std::fmt::Display for DownloadDecalError{ impl std::fmt::Display for DownloadDecalError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>)->std::fmt::Result{ fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}") write!(f,"{self:?}")
} }
} }
@@ -703,18 +736,19 @@ async fn create_asset_medias(config:CreateAssetMediasConfig)->AResult<()>{
let context=CloudContext::new(config.api_key); let context=CloudContext::new(config.api_key);
let cookie_context=CookieContext::new(config.cookie); let cookie_context=CookieContext::new(config.cookie);
let expected_price=Some(config.expected_price.unwrap_or(0)); let expected_price=Some(config.expected_price.unwrap_or(0));
let asset_id_list=futures::stream::iter(config.input_files.into_iter() futures::stream::iter(config.input_files.into_iter()
//step 1: read file, make create request //step 1: read file, make create request
.map(|path|{ .map(|path|{
let description=&config.description; let description=&config.description;
let creator=&config.creator; let creator=&config.creator;
let context=&context; let context=&context;
async move{(path.clone(),
async move{ async move{
let model_name=path.file_stem() let model_name=path.file_stem()
.and_then(std::ffi::OsStr::to_str) .and_then(std::ffi::OsStr::to_str)
.ok_or(CreateAssetMediasError::NoFileStem(path.clone()))? .ok_or_else(||CreateAssetMediasError::NoFileStem(path.clone()))?
.to_owned(); .to_owned();
let file=tokio::fs::read(path).await?; let file=tokio::fs::read(path).await.map_err(CreateAssetMediasError::IO)?;
let asset_type=match file.get(0..4){ let asset_type=match file.get(0..4){
//png //png
Some(b"\x89PNG")=>rbx_asset::cloud::AssetType::Decal, Some(b"\x89PNG")=>rbx_asset::cloud::AssetType::Decal,
@@ -722,9 +756,9 @@ async fn create_asset_medias(config:CreateAssetMediasConfig)->AResult<()>{
Some(b"\xFF\xD8\xFF\xE0")=>rbx_asset::cloud::AssetType::Decal, Some(b"\xFF\xD8\xFF\xE0")=>rbx_asset::cloud::AssetType::Decal,
//Some("fbx")=>rbx_asset::cloud::AssetType::Model, //Some("fbx")=>rbx_asset::cloud::AssetType::Model,
//Some("ogg")=>rbx_asset::cloud::AssetType::Audio, //Some("ogg")=>rbx_asset::cloud::AssetType::Audio,
fourcc=>Err(CreateAssetMediasError::UnknownFourCC(fourcc.map(<[u8]>::to_owned)))?, fourcc=>Err(CreateAssetMediasError::UnknownFourCC(fourcc.map(|s|s.try_into().unwrap())))?,
}; };
Ok(context.create_asset(rbx_asset::cloud::CreateAssetRequest{ context.create_asset(rbx_asset::cloud::CreateAssetRequest{
assetType:asset_type, assetType:asset_type,
displayName:model_name, displayName:model_name,
description:description.clone(), description:description.clone(),
@@ -732,59 +766,51 @@ async fn create_asset_medias(config:CreateAssetMediasConfig)->AResult<()>{
creator:creator.clone(), creator:creator.clone(),
expectedPrice:expected_price, expectedPrice:expected_price,
} }
},file).await?) },file).await.map_err(CreateAssetMediasError::Create)
} }
.await)}
})) }))
//parallel requests //parallel requests
.buffer_unordered(CONCURRENT_REQUESTS) .buffer_unordered(CONCURRENT_REQUESTS)
//step 2: poll operation until it completes (as fast as possible no exp backoff or anything just hammer roblox) //step 2: poll operation until it completes
.filter_map(|create_result:AResult<_>|{ .then(|(path,create_result)|{
let context=&context; let context=&context;
async{(path,
async{ async{
match create_result{ let asset_operation=create_result.map_err(PollOperationError::CreateAssetMedias)?;
Ok(create_asset_response)=>match get_asset_exp_backoff(context,&create_asset_response).await{ get_asset_exp_backoff(context,&asset_operation).await.map_err(PollOperationError::AssetOperation)
Ok(asset_response)=>Some(asset_response),
Err(e)=>{
eprintln!("operation error: {}",e);
None
},
},
Err(e)=>{
eprintln!("create_asset error: {}",e);
None
},
}
} }
.await)}
}) })
//step 3: read decal id from operation and download it //step 3: read decal id from operation and download it, decode it as a roblox file and extract the texture content url
.filter_map(|asset_response|{ .then(|(path,asset_response_result)|{
let parse_result=asset_response.assetId.parse(); let cookie_context=&cookie_context;
async{ async move{(path,
match async{ async move{
let file=cookie_context.get_asset(rbx_asset::cookie::GetAssetRequest{ let asset_response=asset_response_result.map_err(DownloadDecalError::PollOperation)?;
asset_id:parse_result.map_err(DownloadDecalError::ParseInt)?, let file=cookie_context.get_asset(rbx_asset::cookie::GetAssetRequest{
version:None, asset_id:asset_response.assetId.parse().map_err(DownloadDecalError::ParseInt)?,
}).await.map_err(DownloadDecalError::Get)?; version:None,
let dom=load_dom(std::io::Cursor::new(file)).map_err(DownloadDecalError::LoadDom)?; }).await.map_err(DownloadDecalError::Get)?;
let instance=dom.get_by_ref( let dom=load_dom(std::io::Cursor::new(file)).map_err(DownloadDecalError::LoadDom)?;
*dom.root().children().first().ok_or(DownloadDecalError::NoFirstInstance)? let instance=dom.get_by_ref(
).ok_or(DownloadDecalError::NoFirstInstance)?; *dom.root().children().first().ok_or(DownloadDecalError::NoFirstInstance)?
match instance.properties.get("Texture").ok_or(DownloadDecalError::NoTextureProperty)?{ ).ok_or(DownloadDecalError::NoFirstInstance)?;
rbx_dom_weak::types::Variant::Content(url)=>Ok(url.clone().into_string()), let texture=instance.properties.get("Texture").ok_or(DownloadDecalError::NoTextureProperty)?;
_=>Err(DownloadDecalError::TexturePropertyInvalid), let asset_url=match texture{
} rbx_dom_weak::types::Variant::Content(url)=>url.clone().into_string(),
}.await{ _=>Err(DownloadDecalError::TexturePropertyInvalid)?,
Ok(asset_url)=>Some((asset_response.displayName,asset_url)), };
Err(e)=>{ Ok::<_,DownloadDecalError>((asset_response.displayName,asset_url))
eprintln!("get_asset error: {}",e);
None
},
}
} }
}).collect::<Vec<(String,String)>>().await; .await)}
for (file_name,asset_url) in asset_id_list{ })
println!("{}={}",file_name,asset_url); .for_each(|(path,download_decal_result)|async move{
} match download_decal_result{
Ok((file_name,asset_url))=>println!("{}={}",file_name,asset_url),
Err(e)=>eprintln!("ERROR file={:?} error={e}",path),
}
}).await;
Ok(()) Ok(())
} }
@@ -1034,6 +1060,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
} }
#[derive(Debug)] #[derive(Debug)]
#[allow(dead_code)]
enum LoadDomError{ enum LoadDomError{
IO(std::io::Error), IO(std::io::Error),
RbxBinary(rbx_binary::DecodeError), RbxBinary(rbx_binary::DecodeError),
@@ -1171,11 +1198,11 @@ async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,ro
let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0)).unwrap(); let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0)).unwrap();
let tree_id={ let tree_id={
let mut tree_index = repo.index()?; let mut tree_index = repo.index()?;
match tree_index.add_all(std::iter::once(config.output_folder.as_path()),git2::IndexAddOption::DEFAULT,None){ match tree_index.add_all(std::iter::once("*"),git2::IndexAddOption::DEFAULT,None){
Ok(_)=>(), Ok(_)=>(),
Err(e)=>println!("tree_index.add_all error: {}",e), Err(e)=>println!("tree_index.add_all error: {}",e),
} }
match tree_index.update_all(std::iter::once(config.output_folder.as_path()),None){ match tree_index.update_all(std::iter::once("*"),None){
Ok(_)=>(), Ok(_)=>(),
Err(e)=>println!("tree_index.update_all error: {}",e), Err(e)=>println!("tree_index.update_all error: {}",e),
} }