Compare commits

12 commits: revive-coo...new-api

- 980d3cb05b
- d53efd7441
- cc7e445498
- 7e4f96a19c
- 8a40ec3380
- 5ea1845555
- d5f3467ddd
- b988f59221
- 89302d46fa
- ee034a93ee
- 9808c2ac0c
- e1710ff8bf

Cargo.lock (generated): 2 changes
@@ -110,7 +110,7 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"

[[package]]
name = "asset-tool"
version = "0.4.0"
version = "0.3.4"
dependencies = [
"anyhow",
"clap",
Cargo.toml

@@ -1,7 +1,7 @@
workspace = { members = ["rbx_asset", "rox_compiler"] }
[package]
name = "asset-tool"
version = "0.4.0"
version = "0.3.4"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -205,26 +205,15 @@ fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
}

#[derive(Clone)]
pub struct ApiKey(String);
impl ApiKey{
pub fn new(api_key:String)->Self{
Self(api_key)
}
pub fn get(self)->String{
self.0
}
}

#[derive(Clone)]
pub struct CloudContext{
pub struct RobloxContext{
pub api_key:String,
pub client:reqwest::Client,
}

impl CloudContext{
pub fn new(api_key:ApiKey)->Self{
impl RobloxContext{
pub fn new(api_key:String)->Self{
Self{
api_key:api_key.get(),
api_key,
client:reqwest::Client::new(),
}
}
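The hunk above is where the two branches diverge on credential handling: one side wraps the key in an `ApiKey` newtype that `CloudContext::new` consumes, the other passes a bare `String` into `RobloxContext::new`. A minimal, self-contained sketch of the newtype side follows; it is illustrative only (not part of the diff), and the `reqwest::Client` field is omitted so it runs without any network dependency.

```rust
//Illustrative sketch: mirrors the ApiKey/CloudContext signatures shown above,
//with the reqwest client field left out.
#[derive(Clone)]
pub struct ApiKey(String);
impl ApiKey{
	pub fn new(api_key:String)->Self{Self(api_key)}
	pub fn get(self)->String{self.0}
}

pub struct CloudContext{
	pub api_key:String,
}
impl CloudContext{
	pub fn new(api_key:ApiKey)->Self{
		//the newtype is unwrapped exactly once, at construction
		Self{api_key:api_key.get()}
	}
}

fn main(){
	let context=CloudContext::new(ApiKey::new("example-key".to_string()));
	assert_eq!(context.api_key,"example-key");
}
```

The wrapper costs nothing at runtime, but it keeps a raw key string from being passed where a cookie or any other plain string is expected.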
@@ -1,321 +0,0 @@
#[derive(Debug)]
pub enum PostError{
Reqwest(reqwest::Error),
CSRF,
}
impl std::fmt::Display for PostError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for PostError{}

#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct CreateRequest{
pub name:String,
pub description:String,
pub ispublic:bool,
pub allowComments:bool,
pub groupId:Option<u64>,
}
#[derive(Debug)]
pub enum CreateError{
ParseError(url::ParseError),
PostError(PostError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for CreateError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for CreateError{}

#[allow(nonstandard_style,dead_code)]
pub struct UploadRequest{
pub assetid:u64,
pub name:Option<String>,
pub description:Option<String>,
pub ispublic:Option<bool>,
pub allowComments:Option<bool>,
pub groupId:Option<u64>,
}
#[derive(Debug)]
pub enum UploadError{
ParseError(url::ParseError),
PostError(PostError),
Reqwest(reqwest::Error),
AssetIdIsZero,
}
impl std::fmt::Display for UploadError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for UploadError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct UploadResponse{
pub AssetId:u64,
pub AssetVersionId:u64,
}

#[allow(nonstandard_style,dead_code)]
pub struct DownloadRequest{
pub asset_id:u64,
pub version:Option<u64>,
}
#[derive(Debug)]
pub enum DownloadError{
ParseError(url::ParseError),
Reqwest(reqwest::Error),
IO(std::io::Error)
}
impl std::fmt::Display for DownloadError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for DownloadError{}

pub struct AssetVersionsPageRequest{
pub asset_id:u64,
pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetVersion{
pub Id:u64,
pub assetId:u64,
pub assetVersionNumber:u64,
pub creatorType:String,
pub creatorTargetId:u64,
pub creatingUniverseId:Option<u64>,
pub created:chrono::DateTime<chrono::Utc>,
pub isPublished:bool,
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetVersionsPageResponse{
pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>,
pub data:Vec<AssetVersion>,
}
#[derive(Debug)]
pub enum AssetVersionsPageError{
ParseError(url::ParseError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for AssetVersionsPageError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for AssetVersionsPageError{}

pub struct InventoryPageRequest{
pub group:u64,
pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct InventoryItem{
pub id:u64,
pub name:String,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct InventoryPageResponse{
pub totalResults:u64,//up to 50
pub filteredKeyword:Option<String>,//""
pub searchDebugInfo:Option<String>,//null
pub spellCheckerResult:Option<String>,//null
pub queryFacets:Option<String>,//null
pub imageSearchStatus:Option<String>,//null
pub previousPageCursor:Option<String>,
pub nextPageCursor:Option<String>,
pub data:Vec<InventoryItem>,
}
#[derive(Debug)]
pub enum InventoryPageError{
ParseError(url::ParseError),
Reqwest(reqwest::Error),
}
impl std::fmt::Display for InventoryPageError{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f,"{self:?}")
}
}
impl std::error::Error for InventoryPageError{}

//idk how to do this better
enum ReaderType<R:std::io::Read>{
GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
Raw(std::io::BufReader<R>),
}
fn maybe_gzip_decode<R:std::io::Read>(input:R)->std::io::Result<ReaderType<R>>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
match &peek[0..2]{
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
_=>Ok(ReaderType::Raw(buf)),
}
}
fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
let mut contents=Vec::new();
readable.read_to_end(&mut contents)?;
Ok(contents)
}

#[derive(Clone)]
pub struct Cookie(String);
impl Cookie{
pub fn new(cookie:String)->Self{
Self(cookie)
}
pub fn get(self)->String{
self.0
}
}
#[derive(Clone)]
pub struct CookieContext{
pub cookie:String,
pub client:reqwest::Client,
}

impl CookieContext{
pub fn new(cookie:Cookie)->Self{
Self{
cookie:cookie.get(),
client:reqwest::Client::new(),
}
}
async fn get(&self,url:impl reqwest::IntoUrl)->Result<reqwest::Response,reqwest::Error>{
self.client.get(url)
.header("Cookie",self.cookie.as_str())
.send().await
}
async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,PostError>{
let mut resp=self.client.post(url.clone())
.header("Cookie",self.cookie.as_str())
.body(body.clone())
.send().await.map_err(PostError::Reqwest)?;

//This is called a CSRF challenge apparently
if resp.status()==reqwest::StatusCode::FORBIDDEN{
if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
resp=self.client.post(url)
.header("X-CSRF-Token",csrf_token)
.header("Cookie",self.cookie.as_str())
.body(body)
.send().await.map_err(PostError::Reqwest)?;
}else{
Err(PostError::CSRF)?;
}
}

Ok(resp)
}
pub async fn create(&self,config:CreateRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,CreateError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(CreateError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//archaic roblox api uses 0 for new asset
query.append_pair("assetid","0");
query.append_pair("name",config.name.as_str());
query.append_pair("description",config.description.as_str());
query.append_pair("ispublic",if config.ispublic{"True"}else{"False"});
query.append_pair("allowComments",if config.allowComments{"True"}else{"False"});
match config.groupId{
Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
None=>(),
}
}

let resp=self.post(url,body).await.map_err(CreateError::PostError)?;

Ok(resp.json::<UploadResponse>().await.map_err(CreateError::Reqwest)?)
}
pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//archaic roblox api uses 0 for new asset
match config.assetid{
0=>return Err(UploadError::AssetIdIsZero),
assetid=>{query.append_pair("assetid",assetid.to_string().as_str());},
}
if let Some(name)=config.name.as_deref(){
query.append_pair("name",name);
}
if let Some(description)=config.description.as_deref(){
query.append_pair("description",description);
}
if let Some(ispublic)=config.ispublic{
query.append_pair("ispublic",if ispublic{"True"}else{"False"});
}
if let Some(allow_comments)=config.allowComments{
query.append_pair("allowComments",if allow_comments{"True"}else{"False"});
}
if let Some(group_id)=config.groupId{
query.append_pair("groupId",group_id.to_string().as_str());
}
}

let resp=self.post(url,body).await.map_err(UploadError::PostError)?;

Ok(resp.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
}
pub async fn download(&self,config:DownloadRequest)->Result<Vec<u8>,DownloadError>{
let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(DownloadError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
query.append_pair("ID",config.asset_id.to_string().as_str());
if let Some(version)=config.version{
query.append_pair("version",version.to_string().as_str());
}
}
let resp=self.get(url).await.map_err(DownloadError::Reqwest)?;

let body=resp.bytes().await.map_err(DownloadError::Reqwest)?;

match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
Ok(ReaderType::GZip(readable))=>read_readable(readable),
Ok(ReaderType::Raw(readable))=>read_readable(readable),
Err(e)=>Err(e),
}.map_err(DownloadError::IO)
}
pub async fn get_asset_versions_page(&self,config:AssetVersionsPageRequest)->Result<AssetVersionsPageResponse,AssetVersionsPageError>{
let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(AssetVersionsPageError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
//query.append_pair("sortOrder","Asc");
//query.append_pair("limit","100");
//query.append_pair("count","100");
if let Some(cursor)=config.cursor.as_deref(){
query.append_pair("cursor",cursor);
}
}

Ok(self.get(url).await.map_err(AssetVersionsPageError::Reqwest)?
.json::<AssetVersionsPageResponse>().await.map_err(AssetVersionsPageError::Reqwest)?)
}
pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
//url borrow scope
{
let mut query=url.query_pairs_mut();//borrow here
if let Some(cursor)=config.cursor.as_deref(){
query.append_pair("cursor",cursor);
}
}

Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
}
}
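One detail worth calling out from the file deleted in this comparison (the `@@ -1,321 +0,0 @@` hunk above): its `download` path calls `maybe_gzip_decode`, which peeks at the first two bytes of the response and only routes through `flate2` when it sees the gzip magic number `1f 8b`. Below is a minimal standalone sketch of that detection; it is illustrative only, the function name and the `starts_with` check are mine, and the `flate2` and `BufRead::fill_buf` usage mirrors the code shown above.

```rust
use std::io::Read;

//Sketch of the gzip sniffing done by maybe_gzip_decode above; not part of the diff.
fn decode_maybe_gzip(input:impl Read)->std::io::Result<Vec<u8>>{
	let mut buf=std::io::BufReader::new(input);
	//fill_buf exposes buffered bytes without consuming them,
	//so the chosen decoder still sees the whole stream
	let peek=std::io::BufRead::fill_buf(&mut buf)?;
	let is_gzip=peek.starts_with(b"\x1f\x8b");
	let mut contents=Vec::new();
	if is_gzip{
		flate2::read::GzDecoder::new(buf).read_to_end(&mut contents)?;
	}else{
		buf.read_to_end(&mut contents)?;
	}
	Ok(contents)
}
```

Using `starts_with` also sidesteps the panic that indexing `&peek[0..2]` would hit on a response shorter than two bytes.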
@@ -1,2 +1 @@
pub mod cloud;
pub mod cookie;
pub mod context;
@@ -525,17 +525,20 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
//TODO: fix dom being &mut &mut inside the closure
.try_fold((&mut stack,&mut dom),|(stack,dom),bog|async{
//push child objects onto dom serially as they arrive
if let Some((blacklist,data))=bog{
let referent=match data{
PreparedData::Model(mut model_dom)=>{
let referent=model_dom.root().children()[0];
model_dom.transfer(referent,dom,item_ref);
referent
},
PreparedData::Builder(script)=>dom.insert(item_ref,script),
};
//new children need to be traversed
stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
match bog{
Some((blacklist,data))=>{
let referent=match data{
PreparedData::Model(mut model_dom)=>{
let referent=model_dom.root().children()[0];
model_dom.transfer(referent,dom,item_ref);
referent
},
PreparedData::Builder(script)=>dom.insert(item_ref,script),
};
//new children need to be traversed
stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
},
None=>(),
}
Ok((stack,dom))
}).await?;

@@ -544,5 +547,5 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
}
}
Ok(())
unreachable!();
}
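The two compile hunks above trade an `if let Some((blacklist,data))=bog{..}` body against a `match` with an explicit `None=>()` arm, and a trailing `Ok(())` against `unreachable!()`. The `if let` and `match` forms handle the `Option` identically; a tiny illustrative sketch (the `handle_*` functions are hypothetical, not from the diff):

```rust
//Equivalent ways to handle an Option, as swapped in the hunk above.
fn handle_if_let(bog:Option<u32>)->u32{
	if let Some(value)=bog{
		return value+1;
	}
	0
}
fn handle_match(bog:Option<u32>)->u32{
	match bog{
		Some(value)=>value+1,
		None=>0,
	}
}
fn main(){
	assert_eq!(handle_if_let(Some(1)),handle_match(Some(1)));
	assert_eq!(handle_if_let(None),handle_match(None));
}
```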
src/main.rs: 198 changes
@@ -2,8 +2,7 @@ use std::{io::Read,path::PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_asset::cloud::{ApiKey,CloudContext,InventoryItem};
use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion};
use rbx_asset::context::{AssetVersion,InventoryItem,RobloxContext};

type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;

@@ -39,12 +38,12 @@ enum Commands{
struct DownloadHistorySubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]

@@ -69,12 +68,12 @@ struct DownloadSubcommand{
}
#[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
#[arg(long,group="api_key",required=true)]
api_key_literal:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_envvar:Option<String>,
#[arg(long,group="api_key",required=true)]
api_key_file:Option<PathBuf>,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]

@@ -95,9 +94,7 @@ struct CreateAssetSubcommand{
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
creator_user_id:u64,
#[arg(long)]
creator_group_id:Option<u64>,
group:Option<u64>,
}
#[derive(Args)]
struct UpdateAssetSubcommand{
@@ -284,21 +281,21 @@ async fn main()->AResult<()>{
end_version:subcommand.end_version,
start_version:subcommand.start_version.unwrap_or(0),
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
cookie:cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
asset_id:subcommand.asset_id,
}).await,
Commands::Download(subcommand)=>{
let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
download_list(
api_key_from_args(
ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
).await?.get(),
subcommand.asset_ids.into_iter().map(|asset_id|{
let mut path=output_folder.clone();
path.push(asset_id.to_string());

@@ -308,11 +305,11 @@ async fn main()->AResult<()>{
},
Commands::DownloadDecompile(subcommand)=>{
download_decompile(DownloadDecompileConfig{
api_key:api_key_from_args(
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
).await?.get(),
asset_id:subcommand.asset_id,
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
style:subcommand.style.rox(),

@@ -322,41 +319,40 @@ async fn main()->AResult<()>{
}).await
},
Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?.get(),
subcommand.group,
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
).await,
Commands::CreateAsset(subcommand)=>create(CreateConfig{
api_key:api_key_from_args(
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
creator_user_id:subcommand.creator_user_id,
creator_group_id:subcommand.creator_group_id,
).await?.get(),
group:subcommand.group,
input_file:subcommand.input_file,
model_name:subcommand.model_name,
description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
}).await,
Commands::UploadAsset(subcommand)=>upload_asset(UploadAssetConfig{
api_key:api_key_from_args(
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
).await?.get(),
asset_id:subcommand.asset_id,
input_file:subcommand.input_file,
}).await,
Commands::UploadPlace(subcommand)=>upload_place(UploadPlaceConfig{
api_key:api_key_from_args(
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
).await?.get(),
place_id:subcommand.place_id,
universe_id:subcommand.universe_id,
input_file:subcommand.input_file,
@@ -371,22 +367,22 @@ async fn main()->AResult<()>{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
template:subcommand.template,
style:subcommand.style.map(|s|s.rox()),
api_key:api_key_from_args(
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
).await?.get(),
asset_id:subcommand.asset_id,
}).await,
Commands::CompileUploadPlace(subcommand)=>compile_upload_place(CompileUploadPlaceConfig{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
template:subcommand.template,
style:subcommand.style.map(|s|s.rox()),
api_key:api_key_from_args(
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
).await?.get(),
place_id:subcommand.place_id,
universe_id:subcommand.universe_id,
}).await,

@@ -411,11 +407,11 @@ async fn main()->AResult<()>{
Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email,
api_key:api_key_from_args(
api_key:ApiKey::from_args(
subcommand.api_key_literal,
subcommand.api_key_envvar,
subcommand.api_key_file,
).await?,
).await?.get(),
asset_id:subcommand.asset_id,
output_folder:std::env::current_dir()?,
style:subcommand.style.rox(),
@@ -426,45 +422,41 @@ async fn main()->AResult<()>{
}
}

async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<Cookie>{
let cookie=match (literal,environment,file){
(Some(cookie_literal),None,None)=>cookie_literal,
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
};
Ok(Cookie::new(cookie))
}
async fn api_key_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<ApiKey>{
let api_key=match (literal,environment,file){
(Some(api_key_literal),None,None)=>api_key_literal,
(None,Some(api_key_environment),None)=>std::env::var(api_key_environment)?,
(None,None,Some(api_key_file))=>tokio::fs::read_to_string(api_key_file).await?,
_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
};
Ok(ApiKey::new(api_key))
struct ApiKey(String);
impl ApiKey{
fn get(self)->String{
self.0
}
async fn from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<Self>{
let api_key=match (literal,environment,file){
(Some(api_key_literal),None,None)=>api_key_literal,
(None,Some(api_key_environment),None)=>std::env::var(api_key_environment)?,
(None,None,Some(api_key_file))=>tokio::fs::read_to_string(api_key_file).await?,
_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
};
Ok(Self(api_key))
}
}

struct CreateConfig{
api_key:ApiKey,
api_key:String,
model_name:String,
description:String,
input_file:PathBuf,
creator_user_id:u64,
creator_group_id:Option<u64>,
group:Option<u64>,
}

///This is hardcoded to create models atm
async fn create(config:CreateConfig)->AResult<()>{
let resp=CloudContext::new(config.api_key)
.create_asset(rbx_asset::cloud::CreateAssetRequest{
assetType:rbx_asset::cloud::AssetType::Model,
let resp=RobloxContext::new(config.api_key)
.create_asset(rbx_asset::context::CreateAssetRequest{
assetType:rbx_asset::context::AssetType::Model,
displayName:config.model_name,
description:config.description,
creationContext:rbx_asset::cloud::CreationContext{
creator:rbx_asset::cloud::Creator{
userId:config.creator_user_id,
groupId:config.creator_group_id.unwrap_or(0),
creationContext:rbx_asset::context::CreationContext{
creator:rbx_asset::context::Creator{
userId:0,//ever needed? roblox should implicitly know this
groupId:config.group.unwrap_or(0),
},
expectedPrice:0,
}

@@ -474,13 +466,13 @@ async fn create(config:CreateConfig)->AResult<()>{
}

struct UploadAssetConfig{
api_key:ApiKey,
api_key:String,
asset_id:u64,
input_file:PathBuf,
}
async fn upload_asset(config:UploadAssetConfig)->AResult<()>{
let context=CloudContext::new(config.api_key);
context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
let context=RobloxContext::new(config.api_key);
context.update_asset(rbx_asset::context::UpdateAssetRequest{
assetId:config.asset_id,
displayName:None,
description:None,
@@ -489,27 +481,27 @@ async fn upload_asset(config:UploadAssetConfig)->AResult<()>{
}

struct UploadPlaceConfig{
api_key:ApiKey,
api_key:String,
place_id:u64,
universe_id:u64,
input_file:PathBuf,
}
async fn upload_place(config:UploadPlaceConfig)->AResult<()>{
let context=CloudContext::new(config.api_key);
context.update_place(rbx_asset::cloud::UpdatePlaceRequest{
let context=RobloxContext::new(config.api_key);
context.update_place(rbx_asset::context::UpdatePlaceRequest{
placeId:config.place_id,
universeId:config.universe_id,
},tokio::fs::read(config.input_file).await?).await?;
Ok(())
}

async fn download_list(api_key:ApiKey,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=CloudContext::new(api_key);
async fn download_list(api_key:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=RobloxContext::new(api_key);
futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{
let context=&context;
async move{
Ok((file,context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id,version:None}).await?))
Ok((file,context.get_asset(rbx_asset::context::GetAssetRequest{asset_id,version:None}).await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)

@@ -527,11 +519,11 @@ async fn download_list(api_key:ApiKey,asset_id_file_map:AssetIDFileMap)->AResult
Ok(())
}

async fn get_inventory_pages(context:&CookieContext,group:u64)->AResult<Vec<InventoryItem>>{
async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<InventoryItem>>{
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=context.inventory_page(rbx_asset::cookie::InventoryPageRequest{group,cursor}).await?;
let mut page=context.inventory_page(rbx_asset::context::InventoryPageRequest{group,cursor}).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;

@@ -541,8 +533,8 @@ async fn get_inventory_pages(context:&CookieContext,group:u64)->AResult<Vec<Inve
Ok(asset_list)
}

async fn download_group_inventory_json(cookie:Cookie,group:u64,output_folder:PathBuf)->AResult<()>{
let context=CookieContext::new(cookie);
async fn download_group_inventory_json(api_key:String,group:u64,output_folder:PathBuf)->AResult<()>{
let context=RobloxContext::new(api_key);
let item_list=get_inventory_pages(&context,group).await?;

let mut path=output_folder.clone();

@@ -552,11 +544,11 @@ async fn download_group_inventory_json(cookie:Cookie,group:u64,output_folder:Pat
Ok(())
}

async fn get_version_history(context:&CloudContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=context.get_asset_versions(rbx_asset::cloud::AssetVersionsRequest{asset_id,cursor}).await?;
let mut page=context.get_asset_versions(rbx_asset::context::AssetVersionsRequest{asset_id,cursor}).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;
@@ -572,7 +564,7 @@ struct DownloadHistoryConfig{
end_version:Option<u64>,
start_version:u64,
output_folder:PathBuf,
cookie:Cookie,
api_key:String,
asset_id:AssetID,
}

@@ -613,7 +605,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
}
}
let context=CookieContext::new(config.cookie);
let context=RobloxContext::new(config.api_key);

//limit concurrent downloads
let mut join_set=tokio::task::JoinSet::new();

@@ -621,7 +613,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
//poll paged list of all asset versions
let mut cursor:Option<String>=None;
loop{
let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id:config.asset_id,cursor}).await?;
let mut page=context.get_asset_versions(rbx_asset::context::AssetVersionsRequest{asset_id:config.asset_id,cursor}).await?;
let context=&context;
let output_folder=config.output_folder.clone();
let data=&page.data;

@@ -651,7 +643,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
let mut path=output_folder.clone();
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
join_set.spawn(async move{
let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
let file=context.get_asset(rbx_asset::context::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;

tokio::fs::write(path,file).await?;
@@ -746,7 +738,7 @@ async fn decompile(config:DecompileConfig)->AResult<()>{
}

struct DownloadDecompileConfig{
api_key:ApiKey,
api_key:String,
asset_id:AssetID,
style:rox_compiler::Style,
output_folder:PathBuf,

@@ -756,8 +748,8 @@ struct DownloadDecompileConfig{
}

async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
let context=CloudContext::new(config.api_key);
let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
let context=RobloxContext::new(config.api_key);
let file=context.get_asset(rbx_asset::context::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;

let dom=load_dom(std::io::Cursor::new(file))?;
let context=rox_compiler::DecompiledContext::from_dom(dom);

@@ -914,7 +906,7 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
}

struct DownloadAndDecompileHistoryConfig{
api_key:ApiKey,
api_key:String,
asset_id:AssetID,
git_committer_name:String,
git_committer_email:String,

@@ -926,7 +918,7 @@ struct DownloadAndDecompileHistoryConfig{
}

async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
let context=CloudContext::new(config.api_key);
let context=RobloxContext::new(config.api_key);

//poll paged list of all asset versions
let asset_list=get_version_history(&context,config.asset_id).await?;

@@ -939,7 +931,7 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
.map(|asset_version|{
let context=context.clone();
tokio::task::spawn(async move{
let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
let file=context.get_asset(rbx_asset::context::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
let dom=load_dom(std::io::Cursor::new(file))?;
Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
})
@@ -997,7 +989,7 @@ struct CompileUploadAssetConfig{
input_folder:PathBuf,
template:Option<PathBuf>,
style:Option<rox_compiler::Style>,
api_key:ApiKey,
api_key:String,
asset_id:AssetID,
}
async fn compile_upload_asset(config:CompileUploadAssetConfig)->AResult<()>{

@@ -1017,8 +1009,8 @@ async fn compile_upload_asset(config:CompileUploadAssetConfig)->AResult<()>{
rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;

//upload it
let context=CloudContext::new(config.api_key);
context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
let context=RobloxContext::new(config.api_key);
context.update_asset(rbx_asset::context::UpdateAssetRequest{
assetId:config.asset_id,
displayName:None,
description:None,

@@ -1030,7 +1022,7 @@ struct CompileUploadPlaceConfig{
input_folder:PathBuf,
template:Option<PathBuf>,
style:Option<rox_compiler::Style>,
api_key:ApiKey,
api_key:String,
place_id:u64,
universe_id:u64,
}

@@ -1051,8 +1043,8 @@ async fn compile_upload_place(config:CompileUploadPlaceConfig)->AResult<()>{
rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;

//upload it
let context=CloudContext::new(config.api_key);
context.update_place(rbx_asset::cloud::UpdatePlaceRequest{
let context=RobloxContext::new(config.api_key);
context.update_place(rbx_asset::context::UpdatePlaceRequest{
universeId:config.universe_id,
placeId:config.place_id,
},data).await?;