refactor for new api

Quaternions 2024-07-02 14:26:14 -07:00
parent b89a787af2
commit 452c00d53e
4 changed files with 451 additions and 283 deletions
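The core of the refactor: the legacy cookie-authenticated endpoints (and their CSRF retry) are replaced by the Open Cloud assets API, which authenticates with an x-api-key header, and the CLI's cookie arguments become api-key arguments. A minimal sketch of the request-level change, mirroring the get/post helpers in the diff below (illustrative only, not code from the commit):

// Sketch only: old requests attached the .ROBLOSECURITY cookie and retried
// on a CSRF challenge; the Open Cloud endpoints just take an x-api-key header.
async fn get_with_api_key(
    client:&reqwest::Client,
    api_key:&str,
    url:impl reqwest::IntoUrl,
)->Result<reqwest::Response,reqwest::Error>{
    client.get(url)
        .header("x-api-key",api_key)
        .send().await
}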

Cargo.lock (generated)

@ -890,6 +890,16 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "mime_guess"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
dependencies = [
"mime",
"unicase",
]
[[package]] [[package]]
name = "miniz_oxide" name = "miniz_oxide"
version = "0.7.4" version = "0.7.4"
@ -1156,12 +1166,13 @@ dependencies = [
[[package]] [[package]]
name = "rbx_asset" name = "rbx_asset"
version = "0.1.0" version = "0.2.0"
dependencies = [ dependencies = [
"chrono", "chrono",
"flate2", "flate2",
"reqwest", "reqwest",
"serde", "serde",
"serde_json",
"url", "url",
] ]
@ -1294,6 +1305,7 @@ dependencies = [
"js-sys", "js-sys",
"log", "log",
"mime", "mime",
"mime_guess",
"native-tls", "native-tls",
"once_cell", "once_cell",
"percent-encoding", "percent-encoding",
@ -1745,6 +1757,15 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "unicase"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
dependencies = [
"version_check",
]
[[package]] [[package]]
name = "unicode-bidi" name = "unicode-bidi"
version = "0.3.15" version = "0.3.15"
@ -1795,6 +1816,12 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]] [[package]]
name = "want" name = "want"
version = "0.3.1" version = "0.3.1"

Cargo.toml (rbx_asset)

@ -1,6 +1,6 @@
[package] [package]
name = "rbx_asset" name = "rbx_asset"
version = "0.1.0" version = "0.2.0"
edition = "2021" edition = "2021"
publish = ["strafesnet"] publish = ["strafesnet"]
@ -9,6 +9,7 @@ publish = ["strafesnet"]
[dependencies] [dependencies]
chrono = { version = "0.4.38", features = ["serde"] } chrono = { version = "0.4.38", features = ["serde"] }
flate2 = "1.0.29" flate2 = "1.0.29"
reqwest = { version = "0.12.4", features = ["json"] } reqwest = { version = "0.12.4", features = ["json","multipart"] }
serde = { version = "1.0.199", features = ["derive"] } serde = { version = "1.0.199", features = ["derive"] }
serde_json = "1.0.111"
url = "2.5.0" url = "2.5.0"

rbx_asset::context module

@ -1,28 +1,22 @@
#[derive(Debug)]
pub enum PostError{
Reqwest(reqwest::Error),
CSRF,
}
impl std::fmt::Display for PostError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for PostError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)] #[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct CreateRequest{ pub enum AssetType{
pub name:String, Audio,
Decal,
Model,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct CreateAssetRequest{
pub assetType:AssetType,
pub creationContext:CreationContext,
pub description:String, pub description:String,
pub ispublic:bool, pub displayName:String,
pub allowComments:bool,
pub groupId:Option<u64>,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum CreateError{ pub enum CreateError{
ParseError(url::ParseError), ParseError(url::ParseError),
PostError(PostError), SerializeError(serde_json::Error),
Reqwest(reqwest::Error), Reqwest(reqwest::Error),
} }
impl std::fmt::Display for CreateError{ impl std::fmt::Display for CreateError{
@ -32,58 +26,86 @@ impl std::fmt::Display for CreateError{
} }
impl std::error::Error for CreateError{} impl std::error::Error for CreateError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct UploadRequest{ pub struct UpdateAssetRequest{
pub assetid:u64, pub assetId:u64,
pub name:Option<String>, pub displayName:Option<String>,
pub description:Option<String>, pub description:Option<String>,
pub ispublic:Option<bool>, }
pub allowComments:Option<bool>,
pub groupId:Option<u64>, //woo nested roblox stuff
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct Creator{
pub userId:u64,
pub groupId:u64,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct CreationContext{
pub creator:Creator,
pub expectedPrice:u64,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub enum ModerationResult{
MODERATION_STATE_REVIEWING,
MODERATION_STATE_REJECTED,
MODERATION_STATE_APPROVED,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct Preview{
pub asset:String,
pub altText:String,
}
#[allow(nonstandard_style,dead_code)]
pub struct UpdatePlaceRequest{
pub universeId:u64,
pub placeId:u64,
}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct UpdatePlaceResponse{
pub versionNumber:u64,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum UploadError{ pub enum UpdateError{
ParseError(url::ParseError), ParseError(url::ParseError),
PostError(PostError), SerializeError(serde_json::Error),
Reqwest(reqwest::Error), Reqwest(reqwest::Error),
AssetIdIsZero,
} }
impl std::fmt::Display for UploadError{ impl std::fmt::Display for UpdateError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}") write!(f,"{self:?}")
} }
} }
impl std::error::Error for UploadError{} impl std::error::Error for UpdateError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct UploadResponse{
pub AssetId:u64,
pub AssetVersionId:u64,
}
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct DownloadRequest{ pub struct GetAssetRequest{
pub asset_id:u64, pub asset_id:u64,
pub version:Option<u64>, pub version:Option<u64>,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum DownloadError{ pub enum GetError{
ParseError(url::ParseError), ParseError(url::ParseError),
Reqwest(reqwest::Error), Reqwest(reqwest::Error),
IO(std::io::Error) IO(std::io::Error)
} }
impl std::fmt::Display for DownloadError{ impl std::fmt::Display for GetError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}") write!(f,"{self:?}")
} }
} }
impl std::error::Error for DownloadError{} impl std::error::Error for GetError{}
pub struct HistoryPageRequest{ pub struct AssetVersionsRequest{
pub asset_id:u64, pub asset_id:u64,
pub cursor:Option<String>, pub cursor:Option<String>,
} }
#[derive(serde::Deserialize,serde::Serialize)] #[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct AssetVersion{ pub struct AssetVersion{
pub Id:u64, pub Id:u64,
@ -95,36 +117,36 @@ pub struct AssetVersion{
pub created:chrono::DateTime<chrono::Utc>, pub created:chrono::DateTime<chrono::Utc>,
pub isPublished:bool, pub isPublished:bool,
} }
#[derive(serde::Deserialize)] #[derive(Debug,serde::Deserialize)]
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct HistoryPageResponse{ pub struct AssetVersionsResponse{
pub previousPageCursor:Option<String>, pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>, pub nextPageCursor:Option<String>,
pub data:Vec<AssetVersion>, pub data:Vec<AssetVersion>,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum HistoryPageError{ pub enum AssetVersionsError{
ParseError(url::ParseError), ParseError(url::ParseError),
Reqwest(reqwest::Error), Reqwest(reqwest::Error),
} }
impl std::fmt::Display for HistoryPageError{ impl std::fmt::Display for AssetVersionsError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}") write!(f,"{self:?}")
} }
} }
impl std::error::Error for HistoryPageError{} impl std::error::Error for AssetVersionsError{}
pub struct InventoryPageRequest{ pub struct InventoryPageRequest{
pub group:u64, pub group:u64,
pub cursor:Option<String>, pub cursor:Option<String>,
} }
#[derive(serde::Deserialize,serde::Serialize)] #[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct InventoryItem{ pub struct InventoryItem{
pub id:u64, pub id:u64,
pub name:String, pub name:String,
} }
#[derive(serde::Deserialize,serde::Serialize)] #[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)] #[allow(nonstandard_style,dead_code)]
pub struct InventoryPageResponse{ pub struct InventoryPageResponse{
pub totalResults:u64,//up to 50 pub totalResults:u64,//up to 50
@ -149,6 +171,16 @@ impl std::fmt::Display for InventoryPageError{
} }
impl std::error::Error for InventoryPageError{} impl std::error::Error for InventoryPageError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct RobloxOperation{
pub path:Option<std::path::PathBuf>,
pub metadata:Option<String>,
pub done:Option<bool>,
pub error:Option<String>,
pub response:Option<String>,
}
//idk how to do this better //idk how to do this better
enum ReaderType<R:std::io::Read>{ enum ReaderType<R:std::io::Read>{
GZip(flate2::read::GzDecoder<std::io::BufReader<R>>), GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
@ -170,97 +202,73 @@ fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
#[derive(Clone)] #[derive(Clone)]
pub struct RobloxContext{ pub struct RobloxContext{
pub cookie:String, pub api_key:String,
pub client:reqwest::Client, pub client:reqwest::Client,
} }
impl RobloxContext{ impl RobloxContext{
pub fn new(cookie:String)->Self{ pub fn new(api_key:String)->Self{
Self{ Self{
cookie, api_key,
client:reqwest::Client::new(), client:reqwest::Client::new(),
} }
} }
async fn get(&self,url:impl reqwest::IntoUrl)->Result<reqwest::Response,reqwest::Error>{ async fn get(&self,url:impl reqwest::IntoUrl)->Result<reqwest::Response,reqwest::Error>{
self.client.get(url) self.client.get(url)
.header("Cookie",self.cookie.as_str()) .header("x-api-key",self.api_key.as_str())
.send().await .send().await
} }
async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,PostError>{ async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,reqwest::Error>{
let mut resp=self.client.post(url.clone()) self.client.post(url)
.header("Cookie",self.cookie.as_str()) .header("x-api-key",self.api_key.as_str())
.body(body.clone())
.send().await.map_err(PostError::Reqwest)?;
//This is called a CSRF challenge apparently
if resp.status()==reqwest::StatusCode::FORBIDDEN{
if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
resp=self.client.post(url)
.header("X-CSRF-Token",csrf_token)
.header("Cookie",self.cookie.as_str())
.body(body) .body(body)
.send().await.map_err(PostError::Reqwest)?; .send().await
}else{
Err(PostError::CSRF)?;
} }
async fn patch_form(&self,url:url::Url,form:reqwest::multipart::Form)->Result<reqwest::Response,reqwest::Error>{
self.client.patch(url)
.header("x-api-key",self.api_key.as_str())
.multipart(form)
.send().await
} }
async fn post_form(&self,url:url::Url,form:reqwest::multipart::Form)->Result<reqwest::Response,reqwest::Error>{
self.client.post(url)
.header("x-api-key",self.api_key.as_str())
.multipart(form)
.send().await
}
pub async fn create_asset(&self,config:CreateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,CreateError>{
let url=reqwest::Url::parse("https://apis.roblox.com/assets/v1/assets").map_err(CreateError::ParseError)?;
Ok(resp) let request_config=serde_json::to_string(&config).map_err(CreateError::SerializeError)?;
}
pub async fn create(&self,config:CreateRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,CreateError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(CreateError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//archaic roblox api uses 0 for new asset
query.append_pair("assetid","0");
query.append_pair("name",config.name.as_str());
query.append_pair("description",config.description.as_str());
query.append_pair("ispublic",if config.ispublic{"True"}else{"False"});
query.append_pair("allowComments",if config.allowComments{"True"}else{"False"});
match config.groupId{
Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
None=>(),
}
}
let resp=self.post(url,body).await.map_err(CreateError::PostError)?; let form=reqwest::multipart::Form::new()
.text("request",request_config)
.part("fileContent",reqwest::multipart::Part::bytes(body));
Ok(resp.json::<UploadResponse>().await.map_err(CreateError::Reqwest)?) let resp=self.post_form(url,form).await.map_err(CreateError::Reqwest)?
} .error_for_status().map_err(CreateError::Reqwest)?;
pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//archaic roblox api uses 0 for new asset
match config.assetid{
0=>return Err(UploadError::AssetIdIsZero),
assetid=>{query.append_pair("assetid",assetid.to_string().as_str());},
}
if let Some(name)=config.name.as_deref(){
query.append_pair("name",name);
}
if let Some(description)=config.description.as_deref(){
query.append_pair("description",description);
}
if let Some(ispublic)=config.ispublic{
query.append_pair("ispublic",if ispublic{"True"}else{"False"});
}
if let Some(allow_comments)=config.allowComments{
query.append_pair("allowComments",if allow_comments{"True"}else{"False"});
}
if let Some(group_id)=config.groupId{
query.append_pair("groupId",group_id.to_string().as_str());
}
}
let resp=self.post(url,body).await.map_err(UploadError::PostError)?; Ok(resp.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?)
Ok(resp.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
} }
pub async fn download(&self,config:DownloadRequest)->Result<Vec<u8>,DownloadError>{ pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,UpdateError>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(DownloadError::ParseError)?; let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
let request_config=serde_json::to_string(&config).map_err(UpdateError::SerializeError)?;
let form=reqwest::multipart::Form::new()
.text("request",request_config)
.part("fileContent",reqwest::multipart::Part::bytes(body));
let resp=self.patch_form(url,form).await
.map_err(UpdateError::Reqwest)?
//roblox api documentation is very poor, just give the status code and drop the json
.error_for_status().map_err(UpdateError::Reqwest)?;
Ok(resp.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?)
}
pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
//url borrow scope //url borrow scope
{ {
let mut query=url.query_pairs_mut();//borrow here let mut query=url.query_pairs_mut();//borrow here
@ -269,31 +277,22 @@ impl RobloxContext{
query.append_pair("version",version.to_string().as_str()); query.append_pair("version",version.to_string().as_str());
} }
} }
let resp=self.get(url).await.map_err(DownloadError::Reqwest)?; let resp=self.get(url).await.map_err(GetError::Reqwest)?;
let body=resp.bytes().await.map_err(DownloadError::Reqwest)?; let body=resp.bytes().await.map_err(GetError::Reqwest)?;
match maybe_gzip_decode(&mut std::io::Cursor::new(body)){ match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
Ok(ReaderType::GZip(readable))=>read_readable(readable), Ok(ReaderType::GZip(readable))=>read_readable(readable),
Ok(ReaderType::Raw(readable))=>read_readable(readable), Ok(ReaderType::Raw(readable))=>read_readable(readable),
Err(e)=>Err(e), Err(e)=>Err(e),
}.map_err(DownloadError::IO) }.map_err(GetError::IO)
}
pub async fn history_page(&self,config:HistoryPageRequest)->Result<HistoryPageResponse,HistoryPageError>{
let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(HistoryPageError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//query.append_pair("sortOrder","Asc");
//query.append_pair("limit","100");
//query.append_pair("count","100");
if let Some(cursor)=config.cursor.as_deref(){
query.append_pair("cursor",cursor);
}
} }
pub async fn get_asset_versions(&self,config:AssetVersionsRequest)->Result<AssetVersionsResponse,AssetVersionsError>{
let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions",config.asset_id);
let url=reqwest::Url::parse(raw_url.as_str()).map_err(AssetVersionsError::ParseError)?;
Ok(self.get(url).await.map_err(HistoryPageError::Reqwest)? Ok(self.get(url).await.map_err(AssetVersionsError::Reqwest)?
.json::<HistoryPageResponse>().await.map_err(HistoryPageError::Reqwest)?) .json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)?)
} }
pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{ pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?; let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
@ -308,4 +307,18 @@ impl RobloxContext{
Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)? Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?) .json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
} }
pub async fn update_place(&self,config:UpdatePlaceRequest,body:impl Into<reqwest::Body>+Clone)->Result<UpdatePlaceResponse,UpdateError>{
let raw_url=format!("https://apis.roblox.com/universes/v1/{}/places/{}/versions",config.universeId,config.placeId);
let mut url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("versionType","Published");
}
let resp=self.post(url,body).await.map_err(UpdateError::Reqwest)?
.error_for_status().map_err(UpdateError::Reqwest)?;
Ok(resp.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)?)
}
} }
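
A usage sketch of the refactored context, assuming the items above are exported as rbx_asset::context (the CLI file below imports them that way); the asset id and group id are hypothetical and error handling is minimal:

use rbx_asset::context::{AssetType,CreateAssetRequest,CreationContext,Creator,GetAssetRequest,RobloxContext};

async fn example(api_key:String,model_bytes:Vec<u8>)->Result<(),Box<dyn std::error::Error>>{
    let context=RobloxContext::new(api_key);
    // upload a new model; the Open Cloud API answers with a long-running RobloxOperation
    let operation=context.create_asset(CreateAssetRequest{
        assetType:AssetType::Model,
        displayName:"my model".to_string(),
        description:String::new(),
        creationContext:CreationContext{
            creator:Creator{userId:0,groupId:1234},//hypothetical group id
            expectedPrice:0,
        },
    },model_bytes).await?;
    println!("operation={:?}",operation);
    // download the binary contents of an existing asset (hypothetical id)
    let data=context.get_asset(GetAssetRequest{asset_id:1818,version:None}).await?;
    println!("downloaded {} bytes",data.len());
    Ok(())
}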

CLI main

@ -2,7 +2,7 @@ use std::{io::Read,path::PathBuf};
use clap::{Args,Parser,Subcommand}; use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult; use anyhow::Result as AResult;
use futures::StreamExt; use futures::StreamExt;
use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion}; use rbx_asset::context::{AssetVersion,InventoryItem,RobloxContext};
type AssetID=u64; type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>; type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
@ -23,10 +23,12 @@ enum Commands{
Download(DownloadSubcommand), Download(DownloadSubcommand),
DownloadDecompile(DownloadDecompileSubcommand), DownloadDecompile(DownloadDecompileSubcommand),
DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand), DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand),
Create(CreateSubcommand), CreateAsset(CreateAssetSubcommand),
Upload(UploadSubcommand), UploadAsset(UpdateAssetSubcommand),
UploadPlace(UpdatePlaceSubcommand),
Compile(CompileSubcommand), Compile(CompileSubcommand),
CompileUpload(CompileUploadSubcommand), CompileUploadAsset(CompileUploadAssetSubcommand),
CompileUploadPlace(CompileUploadPlaceSubcommand),
Decompile(DecompileSubcommand), Decompile(DecompileSubcommand),
DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand), DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand), DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
@ -36,10 +38,12 @@ enum Commands{
struct DownloadHistorySubcommand{ struct DownloadHistorySubcommand{
#[arg(long)] #[arg(long)]
asset_id:AssetID, asset_id:AssetID,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie_type:CookieType, api_key_literal:Option<String>,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie:String, api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
output_folder:Option<PathBuf>, output_folder:Option<PathBuf>,
#[arg(long)] #[arg(long)]
@ -51,10 +55,12 @@ struct DownloadHistorySubcommand{
} }
#[derive(Args)] #[derive(Args)]
struct DownloadSubcommand{ struct DownloadSubcommand{
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie_type:CookieType, api_key_literal:Option<String>,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie:String, api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
output_folder:Option<PathBuf>, output_folder:Option<PathBuf>,
#[arg(required=true)] #[arg(required=true)]
@ -62,21 +68,25 @@ struct DownloadSubcommand{
} }
#[derive(Args)] #[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{ struct DownloadGroupInventoryJsonSubcommand{
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie_type:CookieType, api_key_literal:Option<String>,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie:String, api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
output_folder:Option<PathBuf>, output_folder:Option<PathBuf>,
#[arg(long)] #[arg(long)]
group:u64, group:u64,
} }
#[derive(Args)] #[derive(Args)]
struct CreateSubcommand{ struct CreateAssetSubcommand{
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie_type:CookieType, api_key_literal:Option<String>,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie:String, api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
model_name:String, model_name:String,
#[arg(long)] #[arg(long)]
@ -85,23 +95,34 @@ struct CreateSubcommand{
input_file:PathBuf, input_file:PathBuf,
#[arg(long)] #[arg(long)]
group:Option<u64>, group:Option<u64>,
#[arg(long)]
free_model:Option<bool>,
#[arg(long)]
allow_comments:Option<bool>,
} }
#[derive(Args)] #[derive(Args)]
struct UploadSubcommand{ struct UpdateAssetSubcommand{
#[arg(long)] #[arg(long)]
asset_id:AssetID, asset_id:AssetID,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie_type:CookieType, api_key_literal:Option<String>,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie:String, api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
input_file:PathBuf, input_file:PathBuf,
}
#[derive(Args)]
struct UpdatePlaceSubcommand{
#[arg(long)] #[arg(long)]
group:Option<u64>, place_id:u64,
#[arg(long)]
universe_id:u64,
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)]
input_file:PathBuf,
} }
#[derive(Args)] #[derive(Args)]
struct CompileSubcommand{ struct CompileSubcommand{
@ -115,17 +136,34 @@ struct CompileSubcommand{
template:Option<PathBuf>, template:Option<PathBuf>,
} }
#[derive(Args)] #[derive(Args)]
struct CompileUploadSubcommand{ struct CompileUploadAssetSubcommand{
#[arg(long)] #[arg(long)]
asset_id:AssetID, asset_id:AssetID,
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
cookie_type:CookieType, input_folder:Option<PathBuf>,
#[arg(long)] #[arg(long)]
cookie:String, style:Option<Style>,
#[arg(long)] #[arg(long)]
input_file:PathBuf, template:Option<PathBuf>,
}
#[derive(Args)]
struct CompileUploadPlaceSubcommand{
#[arg(long)] #[arg(long)]
group:Option<u64>, place_id:u64,
#[arg(long)]
universe_id:u64,
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
input_folder:Option<PathBuf>, input_folder:Option<PathBuf>,
#[arg(long)] #[arg(long)]
@ -150,10 +188,12 @@ struct DecompileSubcommand{
} }
#[derive(Args)] #[derive(Args)]
struct DownloadDecompileSubcommand{ struct DownloadDecompileSubcommand{
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie_type:CookieType, api_key_literal:Option<String>,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie:String, api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)] #[arg(long)]
output_folder:Option<PathBuf>, output_folder:Option<PathBuf>,
#[arg(long)] #[arg(long)]
@ -190,10 +230,12 @@ struct DecompileHistoryIntoGitSubcommand{
struct DownloadAndDecompileHistoryIntoGitSubcommand{ struct DownloadAndDecompileHistoryIntoGitSubcommand{
#[arg(long)] #[arg(long)]
asset_id:AssetID, asset_id:AssetID,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie_type:CookieType, api_key_literal:Option<String>,
#[arg(long)] #[arg(long,group="api_key",required=true)]
cookie:String, api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
//currently output folder must be the current folder due to git2 limitations //currently output folder must be the current folder due to git2 limitations
//output_folder:cli.output.unwrap(), //output_folder:cli.output.unwrap(),
#[arg(long)] #[arg(long)]
@ -210,13 +252,6 @@ struct DownloadAndDecompileHistoryIntoGitSubcommand{
write_scripts:Option<bool>, write_scripts:Option<bool>,
} }
#[derive(Clone,clap::ValueEnum)]
enum CookieType{
Literal,
Environment,
File,
}
#[derive(Clone,Copy,Debug,clap::ValueEnum)] #[derive(Clone,Copy,Debug,clap::ValueEnum)]
enum Style{ enum Style{
Rox, Rox,
@ -242,13 +277,21 @@ async fn main()->AResult<()>{
end_version:subcommand.end_version, end_version:subcommand.end_version,
start_version:subcommand.start_version.unwrap_or(0), start_version:subcommand.start_version.unwrap_or(0),
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()), output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
asset_id:subcommand.asset_id, asset_id:subcommand.asset_id,
}).await, }).await,
Commands::Download(subcommand)=>{ Commands::Download(subcommand)=>{
let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()); let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
download_list( download_list(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
subcommand.asset_ids.into_iter().map(|asset_id|{ subcommand.asset_ids.into_iter().map(|asset_id|{
let mut path=output_folder.clone(); let mut path=output_folder.clone();
path.push(asset_id.to_string()); path.push(asset_id.to_string());
@ -258,7 +301,11 @@ async fn main()->AResult<()>{
}, },
Commands::DownloadDecompile(subcommand)=>{ Commands::DownloadDecompile(subcommand)=>{
download_decompile(DownloadDecompileConfig{ download_decompile(DownloadDecompileConfig{
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
asset_id:subcommand.asset_id, asset_id:subcommand.asset_id,
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()), output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
style:subcommand.style.rox(), style:subcommand.style.rox(),
@ -268,38 +315,73 @@ async fn main()->AResult<()>{
}).await }).await
}, },
Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json( Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
subcommand.group, subcommand.group,
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()), subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
).await, ).await,
Commands::Create(subcommand)=>create(CreateConfig{ Commands::CreateAsset(subcommand)=>create(CreateConfig{
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
group:subcommand.group, group:subcommand.group,
input_file:subcommand.input_file, input_file:subcommand.input_file,
model_name:subcommand.model_name, model_name:subcommand.model_name,
description:subcommand.description.unwrap_or_else(||String::with_capacity(0)), description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
free_model:subcommand.free_model.unwrap_or(false),
allow_comments:subcommand.allow_comments.unwrap_or(false),
}).await, }).await,
Commands::Upload(subcommand)=>upload_list( Commands::UploadAsset(subcommand)=>upload_asset(UploadAssetConfig{
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, api_key:ApiKey::from_args(
subcommand.group, subcommand.api_key_literal,
vec![(subcommand.asset_id,subcommand.input_file)] subcommand.api_key_envvar,
).await, subcommand.api_key_file,
).await?.get(),
asset_id:subcommand.asset_id,
input_file:subcommand.input_file,
}).await,
Commands::UploadPlace(subcommand)=>upload_place(UploadPlaceConfig{
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
place_id:subcommand.place_id,
universe_id:subcommand.universe_id,
input_file:subcommand.input_file,
}).await,
Commands::Compile(subcommand)=>compile(CompileConfig{ Commands::Compile(subcommand)=>compile(CompileConfig{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()), input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
output_file:subcommand.output_file, output_file:subcommand.output_file,
template:subcommand.template, template:subcommand.template,
style:subcommand.style.map(|s|s.rox()), style:subcommand.style.map(|s|s.rox()),
}).await, }).await,
Commands::CompileUpload(subcommand)=>compile_upload(CompileUploadConfig{ Commands::CompileUploadAsset(subcommand)=>compile_upload_asset(CompileUploadAssetConfig{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()), input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
template:subcommand.template, template:subcommand.template,
style:subcommand.style.map(|s|s.rox()), style:subcommand.style.map(|s|s.rox()),
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, api_key:ApiKey::from_args(
group:subcommand.group, subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
asset_id:subcommand.asset_id, asset_id:subcommand.asset_id,
}).await, }).await,
Commands::CompileUploadPlace(subcommand)=>compile_upload_place(CompileUploadPlaceConfig{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
template:subcommand.template,
style:subcommand.style.map(|s|s.rox()),
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
place_id:subcommand.place_id,
universe_id:subcommand.universe_id,
}).await,
Commands::Decompile(subcommand)=>decompile(DecompileConfig{ Commands::Decompile(subcommand)=>decompile(DecompileConfig{
style:subcommand.style.rox(), style:subcommand.style.rox(),
input_file:subcommand.input_file, input_file:subcommand.input_file,
@ -321,7 +403,11 @@ async fn main()->AResult<()>{
Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{ Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name, git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email, git_committer_email:subcommand.git_committer_email,
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0, api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
asset_id:subcommand.asset_id, asset_id:subcommand.asset_id,
output_folder:std::env::current_dir()?, output_folder:std::env::current_dir()?,
style:subcommand.style.rox(), style:subcommand.style.rox(),
@ -332,76 +418,87 @@ async fn main()->AResult<()>{
} }
} }
struct Cookie(String); struct ApiKey(String);
impl Cookie{ impl ApiKey{
async fn from_type(cookie_type:CookieType,cookie_string:String)->AResult<Self>{ fn get(self)->String{
Ok(Self(format!(".ROBLOSECURITY={}",match cookie_type{ self.0
CookieType::Literal=>cookie_string, }
CookieType::Environment=>std::env::var(cookie_string)?, async fn from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<Self>{
CookieType::File=>tokio::fs::read_to_string(cookie_string).await?, let api_key=match (literal,environment,file){
}))) (Some(api_key_literal),None,None)=>api_key_literal,
(None,Some(api_key_environment),None)=>std::env::var(api_key_environment)?,
(None,None,Some(api_key_file))=>tokio::fs::read_to_string(api_key_file).await?,
_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
};
Ok(Self(api_key))
} }
} }
struct CreateConfig{ struct CreateConfig{
cookie:String, api_key:String,
model_name:String, model_name:String,
description:String, description:String,
input_file:PathBuf, input_file:PathBuf,
group:Option<u64>, group:Option<u64>,
free_model:bool,
allow_comments:bool,
} }
///This is hardcoded to create models atm
async fn create(config:CreateConfig)->AResult<()>{ async fn create(config:CreateConfig)->AResult<()>{
let resp=RobloxContext::new(config.cookie) let resp=RobloxContext::new(config.api_key)
.create(rbx_asset::context::CreateRequest{ .create_asset(rbx_asset::context::CreateAssetRequest{
name:config.model_name, assetType:rbx_asset::context::AssetType::Model,
displayName:config.model_name,
description:config.description, description:config.description,
ispublic:config.free_model, creationContext:rbx_asset::context::CreationContext{
allowComments:config.allow_comments, creator:rbx_asset::context::Creator{
groupId:config.group, userId:0,//ever needed? roblox should implicitly know this
groupId:config.group.unwrap_or(0),
},
expectedPrice:0,
}
},tokio::fs::read(config.input_file).await?).await?;
println!("CreateResponse={:?}",resp);
Ok(())
}
struct UploadAssetConfig{
api_key:String,
asset_id:u64,
input_file:PathBuf,
}
async fn upload_asset(config:UploadAssetConfig)->AResult<()>{
let context=RobloxContext::new(config.api_key);
let resp=context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
assetId:config.asset_id,
displayName:None,
description:None,
},tokio::fs::read(config.input_file).await?).await?; },tokio::fs::read(config.input_file).await?).await?;
println!("UploadResponse={:?}",resp); println!("UploadResponse={:?}",resp);
Ok(()) Ok(())
} }
async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{ struct UploadPlaceConfig{
let context=RobloxContext::new(cookie); api_key:String,
//this is calling map on the vec because the closure produces an iterator of futures place_id:u64,
futures::stream::iter(asset_id_file_map.into_iter() universe_id:u64,
.map(|(asset_id,file)|{ input_file:PathBuf,
let context=&context; }
async move{ async fn upload_place(config:UploadPlaceConfig)->AResult<()>{
Ok((asset_id,context.upload(rbx_asset::context::UploadRequest{ let context=RobloxContext::new(config.api_key);
assetid:asset_id, context.update_place(rbx_asset::context::UpdatePlaceRequest{
name:None, placeId:config.place_id,
description:None, universeId:config.universe_id,
ispublic:None, },tokio::fs::read(config.input_file).await?).await?;
allowComments:None,
groupId:group,
},tokio::fs::read(file).await?).await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((asset_id,body))=>{
println!("asset_id={} UploadResponse={:?}",asset_id,body);
},
Err(e)=>eprintln!("ul error: {}",e),
}
}).await;
Ok(()) Ok(())
} }
async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{ async fn download_list(api_key:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=RobloxContext::new(cookie); let context=RobloxContext::new(api_key);
futures::stream::iter(asset_id_file_map.into_iter() futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{ .map(|(asset_id,file)|{
let context=&context; let context=&context;
async move{ async move{
Ok((file,context.download(rbx_asset::context::DownloadRequest{asset_id,version:None}).await?)) Ok((file,context.get_asset(rbx_asset::context::GetAssetRequest{asset_id,version:None}).await?))
} }
})) }))
.buffer_unordered(CONCURRENT_REQUESTS) .buffer_unordered(CONCURRENT_REQUESTS)
@ -433,8 +530,8 @@ async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<Inve
Ok(asset_list) Ok(asset_list)
} }
async fn download_group_inventory_json(cookie:String,group:u64,output_folder:PathBuf)->AResult<()>{ async fn download_group_inventory_json(api_key:String,group:u64,output_folder:PathBuf)->AResult<()>{
let context=RobloxContext::new(cookie); let context=RobloxContext::new(api_key);
let item_list=get_inventory_pages(&context,group).await?; let item_list=get_inventory_pages(&context,group).await?;
let mut path=output_folder.clone(); let mut path=output_folder.clone();
@ -448,7 +545,7 @@ async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<V
let mut cursor:Option<String>=None; let mut cursor:Option<String>=None;
let mut asset_list=Vec::new(); let mut asset_list=Vec::new();
loop{ loop{
let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id,cursor}).await?; let mut page=context.get_asset_versions(rbx_asset::context::AssetVersionsRequest{asset_id,cursor}).await?;
asset_list.append(&mut page.data); asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){ if page.nextPageCursor.is_none(){
break; break;
@ -464,7 +561,7 @@ struct DownloadHistoryConfig{
end_version:Option<u64>, end_version:Option<u64>,
start_version:u64, start_version:u64,
output_folder:PathBuf, output_folder:PathBuf,
cookie:String, api_key:String,
asset_id:AssetID, asset_id:AssetID,
} }
@ -505,7 +602,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?, None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
} }
} }
let context=RobloxContext::new(config.cookie); let context=RobloxContext::new(config.api_key);
//limit concurrent downloads //limit concurrent downloads
let mut join_set=tokio::task::JoinSet::new(); let mut join_set=tokio::task::JoinSet::new();
@ -513,7 +610,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
//poll paged list of all asset versions //poll paged list of all asset versions
let mut cursor:Option<String>=None; let mut cursor:Option<String>=None;
loop{ loop{
let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id:config.asset_id,cursor}).await?; let mut page=context.get_asset_versions(rbx_asset::context::AssetVersionsRequest{asset_id:config.asset_id,cursor}).await?;
let context=&context; let context=&context;
let output_folder=config.output_folder.clone(); let output_folder=config.output_folder.clone();
let data=&page.data; let data=&page.data;
@ -543,7 +640,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
let mut path=output_folder.clone(); let mut path=output_folder.clone();
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number)); path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
join_set.spawn(async move{ join_set.spawn(async move{
let file=context.download(rbx_asset::context::DownloadRequest{asset_id:config.asset_id,version:Some(version_number)}).await?; let file=context.get_asset(rbx_asset::context::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
tokio::fs::write(path,file).await?; tokio::fs::write(path,file).await?;
@ -638,7 +735,7 @@ async fn decompile(config:DecompileConfig)->AResult<()>{
} }
struct DownloadDecompileConfig{ struct DownloadDecompileConfig{
cookie:String, api_key:String,
asset_id:AssetID, asset_id:AssetID,
style:rox_compiler::Style, style:rox_compiler::Style,
output_folder:PathBuf, output_folder:PathBuf,
@ -648,8 +745,8 @@ struct DownloadDecompileConfig{
} }
async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{ async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
let context=RobloxContext::new(config.cookie); let context=RobloxContext::new(config.api_key);
let file=context.download(rbx_asset::context::DownloadRequest{asset_id:config.asset_id,version:None}).await?; let file=context.get_asset(rbx_asset::context::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
let dom=load_dom(std::io::Cursor::new(file))?; let dom=load_dom(std::io::Cursor::new(file))?;
let context=rox_compiler::DecompiledContext::from_dom(dom); let context=rox_compiler::DecompiledContext::from_dom(dom);
@ -806,7 +903,7 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
} }
struct DownloadAndDecompileHistoryConfig{ struct DownloadAndDecompileHistoryConfig{
cookie:String, api_key:String,
asset_id:AssetID, asset_id:AssetID,
git_committer_name:String, git_committer_name:String,
git_committer_email:String, git_committer_email:String,
@ -818,7 +915,7 @@ struct DownloadAndDecompileHistoryConfig{
} }
async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
let context=RobloxContext::new(config.cookie); let context=RobloxContext::new(config.api_key);
//poll paged list of all asset versions //poll paged list of all asset versions
let asset_list=get_version_history(&context,config.asset_id).await?; let asset_list=get_version_history(&context,config.asset_id).await?;
@ -831,7 +928,7 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
.map(|asset_version|{ .map(|asset_version|{
let context=context.clone(); let context=context.clone();
tokio::task::spawn(async move{ tokio::task::spawn(async move{
let file=context.download(rbx_asset::context::DownloadRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?; let file=context.get_asset(rbx_asset::context::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
let dom=load_dom(std::io::Cursor::new(file))?; let dom=load_dom(std::io::Cursor::new(file))?;
Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom))) Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
}) })
@ -885,15 +982,14 @@ async fn compile(config:CompileConfig)->AResult<()>{
Ok(()) Ok(())
} }
struct CompileUploadConfig{ struct CompileUploadAssetConfig{
input_folder:PathBuf, input_folder:PathBuf,
template:Option<PathBuf>, template:Option<PathBuf>,
style:Option<rox_compiler::Style>, style:Option<rox_compiler::Style>,
cookie:String, api_key:String,
group:Option<u64>,
asset_id:AssetID, asset_id:AssetID,
} }
async fn compile_upload(config:CompileUploadConfig)->AResult<()>{ async fn compile_upload_asset(config:CompileUploadAssetConfig)->AResult<()>{
let mut dom=match config.template{ let mut dom=match config.template{
//mr dom doesn't like tokio files //mr dom doesn't like tokio files
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?, Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
@ -910,14 +1006,45 @@ async fn compile_upload(config:CompileUploadConfig)->AResult<()>{
rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?; rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;
//upload it //upload it
let context=RobloxContext::new(config.cookie); let context=RobloxContext::new(config.api_key);
context.upload(rbx_asset::context::UploadRequest{ let resp=context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
assetid:config.asset_id, assetId:config.asset_id,
name:None, displayName:None,
description:None, description:None,
ispublic:None, },data).await?;
allowComments:None, println!("UploadResponse={:?}",resp);
groupId:config.group, Ok(())
}
struct CompileUploadPlaceConfig{
input_folder:PathBuf,
template:Option<PathBuf>,
style:Option<rox_compiler::Style>,
api_key:String,
place_id:u64,
universe_id:u64,
}
async fn compile_upload_place(config:CompileUploadPlaceConfig)->AResult<()>{
let mut dom=match config.template{
//mr dom doesn't like tokio files
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
};
rox_compiler::compile(rox_compiler::CompileConfig{
input_folder:config.input_folder,
style:config.style,
},&mut dom).await?;
//make a binary file in a buffer in memory
let mut data=Vec::new();
rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;
//upload it
let context=RobloxContext::new(config.api_key);
context.update_place(rbx_asset::context::UpdatePlaceRequest{
universeId:config.universe_id,
placeId:config.place_id,
},data).await?; },data).await?;
Ok(()) Ok(())
} }
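
With the cookie flags gone, every subcommand takes exactly one of three api-key sources, resolved by ApiKey::from_args above. A usage sketch (the key value, variable name, and file path are hypothetical; the flag spellings assume clap's default kebab-case renaming of the field names):

// Sketch of the three ways a key reaches ApiKey::from_args.
async fn api_key_examples()->anyhow::Result<()>{
    // --api-key-literal <KEY>
    let from_literal=ApiKey::from_args(Some("hypothetical-key".to_string()),None,None).await?.get();
    // --api-key-envvar <VAR>: reads the key out of the named environment variable
    let from_env=ApiKey::from_args(None,Some("RBX_API_KEY".to_string()),None).await?.get();
    // --api-key-file <PATH>: reads the key from a file on disk
    let from_file=ApiKey::from_args(None,None,Some(std::path::PathBuf::from("api_key.txt"))).await?.get();
    // anything other than exactly one source is rejected
    assert!(ApiKey::from_args(None,None,None).await.is_err());
    let _=(from_literal,from_env,from_file);
    Ok(())
}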