use old api for some things

Quaternions 2024-07-03 17:24:27 -07:00
parent 9638672dde
commit 4805f3bc08

@@ -2,7 +2,8 @@ use std::{io::Read,path::PathBuf};
 use clap::{Args,Parser,Subcommand};
 use anyhow::Result as AResult;
 use futures::StreamExt;
-use rbx_asset::cloud::{ApiKey,AssetVersion,CloudContext,InventoryItem};
+use rbx_asset::cloud::{ApiKey,CloudContext};
+use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion,InventoryItem};
 type AssetID=u64;
 type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
@@ -38,12 +39,12 @@ enum Commands{
 struct DownloadHistorySubcommand{
 #[arg(long)]
 asset_id:AssetID,
-#[arg(long,group="api_key",required=true)]
-api_key_literal:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_envvar:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_file:Option<PathBuf>,
+#[arg(long,group="cookie",required=true)]
+cookie_literal:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_envvar:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_file:Option<PathBuf>,
 #[arg(long)]
 output_folder:Option<PathBuf>,
 #[arg(long)]
@@ -68,12 +69,12 @@ struct DownloadSubcommand{
 }
 #[derive(Args)]
 struct DownloadGroupInventoryJsonSubcommand{
-#[arg(long,group="api_key",required=true)]
-api_key_literal:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_envvar:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_file:Option<PathBuf>,
+#[arg(long,group="cookie",required=true)]
+cookie_literal:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_envvar:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_file:Option<PathBuf>,
 #[arg(long)]
 output_folder:Option<PathBuf>,
 #[arg(long)]
@@ -190,12 +191,12 @@ struct DecompileSubcommand{
 }
 #[derive(Args)]
 struct DownloadDecompileSubcommand{
-#[arg(long,group="api_key",required=true)]
-api_key_literal:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_envvar:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_file:Option<PathBuf>,
+#[arg(long,group="cookie",required=true)]
+cookie_literal:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_envvar:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_file:Option<PathBuf>,
 #[arg(long)]
 output_folder:Option<PathBuf>,
 #[arg(long)]
@@ -232,12 +233,12 @@ struct DecompileHistoryIntoGitSubcommand{
 struct DownloadAndDecompileHistoryIntoGitSubcommand{
 #[arg(long)]
 asset_id:AssetID,
-#[arg(long,group="api_key",required=true)]
-api_key_literal:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_envvar:Option<String>,
-#[arg(long,group="api_key",required=true)]
-api_key_file:Option<PathBuf>,
+#[arg(long,group="cookie",required=true)]
+cookie_literal:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_envvar:Option<String>,
+#[arg(long,group="cookie",required=true)]
+cookie_file:Option<PathBuf>,
 //currently output folder must be the current folder due to git2 limitations
 //output_folder:cli.output.unwrap(),
 #[arg(long)]
@@ -279,10 +280,10 @@ async fn main()->AResult<()>{
 end_version:subcommand.end_version,
 start_version:subcommand.start_version.unwrap_or(0),
 output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
-api_key:api_key_from_args(
-subcommand.api_key_literal,
-subcommand.api_key_envvar,
-subcommand.api_key_file,
+cookie:cookie_from_args(
+subcommand.cookie_literal,
+subcommand.cookie_envvar,
+subcommand.cookie_file,
 ).await?,
 asset_id:subcommand.asset_id,
 }).await,
@@ -303,10 +304,10 @@ async fn main()->AResult<()>{
 },
 Commands::DownloadDecompile(subcommand)=>{
 download_decompile(DownloadDecompileConfig{
-api_key:api_key_from_args(
-subcommand.api_key_literal,
-subcommand.api_key_envvar,
-subcommand.api_key_file,
+cookie:cookie_from_args(
+subcommand.cookie_literal,
+subcommand.cookie_envvar,
+subcommand.cookie_file,
 ).await?,
 asset_id:subcommand.asset_id,
 output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
@@ -317,10 +318,10 @@ async fn main()->AResult<()>{
 }).await
 },
 Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
-api_key_from_args(
-subcommand.api_key_literal,
-subcommand.api_key_envvar,
-subcommand.api_key_file,
+cookie_from_args(
+subcommand.cookie_literal,
+subcommand.cookie_envvar,
+subcommand.cookie_file,
 ).await?,
 subcommand.group,
 subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
@@ -406,10 +407,10 @@ async fn main()->AResult<()>{
 Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
 git_committer_name:subcommand.git_committer_name,
 git_committer_email:subcommand.git_committer_email,
-api_key:api_key_from_args(
-subcommand.api_key_literal,
-subcommand.api_key_envvar,
-subcommand.api_key_file,
+cookie:cookie_from_args(
+subcommand.cookie_literal,
+subcommand.cookie_envvar,
+subcommand.cookie_file,
 ).await?,
 asset_id:subcommand.asset_id,
 output_folder:std::env::current_dir()?,
@@ -421,6 +422,15 @@ async fn main()->AResult<()>{
 }
 }
+async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<Cookie>{
+let cookie=match (literal,environment,file){
+(Some(cookie_literal),None,None)=>cookie_literal,
+(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
+(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
+_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
+};
+Ok(Cookie::new(format!(".ROBLOSECURITY={cookie}")))
+}
 async fn api_key_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<ApiKey>{
 let api_key=match (literal,environment,file){
 (Some(api_key_literal),None,None)=>api_key_literal,
@@ -514,11 +524,11 @@ async fn download_list(api_key:ApiKey,asset_id_file_map:AssetIDFileMap)->AResult
 Ok(())
 }
-async fn get_inventory_pages(context:&CloudContext,group:u64)->AResult<Vec<InventoryItem>>{
+async fn get_inventory_pages(context:&CookieContext,group:u64)->AResult<Vec<InventoryItem>>{
 let mut cursor:Option<String>=None;
 let mut asset_list=Vec::new();
 loop{
-let mut page=context.inventory_page(rbx_asset::cloud::InventoryPageRequest{group,cursor}).await?;
+let mut page=context.get_inventory_page(rbx_asset::cookie::InventoryPageRequest{group,cursor}).await?;
 asset_list.append(&mut page.data);
 if page.nextPageCursor.is_none(){
 break;
@@ -528,8 +538,8 @@ async fn get_inventory_pages(context:&CloudContext,group:u64)->AResult<Vec<Inven
 Ok(asset_list)
 }
-async fn download_group_inventory_json(api_key:ApiKey,group:u64,output_folder:PathBuf)->AResult<()>{
-let context=CloudContext::new(api_key);
+async fn download_group_inventory_json(cookie:Cookie,group:u64,output_folder:PathBuf)->AResult<()>{
+let context=CookieContext::new(cookie);
 let item_list=get_inventory_pages(&context,group).await?;
 let mut path=output_folder.clone();
@@ -539,11 +549,11 @@ async fn download_group_inventory_json(api_key:ApiKey,group:u64,output_folder:Pa
 Ok(())
 }
-async fn get_version_history(context:&CloudContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
+async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
 let mut cursor:Option<String>=None;
 let mut asset_list=Vec::new();
 loop{
-let mut page=context.get_asset_versions(rbx_asset::cloud::AssetVersionsRequest{asset_id,cursor}).await?;
+let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id,cursor}).await?;
 asset_list.append(&mut page.data);
 if page.nextPageCursor.is_none(){
 break;
@@ -559,7 +569,7 @@ struct DownloadHistoryConfig{
 end_version:Option<u64>,
 start_version:u64,
 output_folder:PathBuf,
-api_key:ApiKey,
+cookie:Cookie,
 asset_id:AssetID,
 }
@@ -600,7 +610,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
 }
 }
-let context=CloudContext::new(config.api_key);
+let context=CookieContext::new(config.cookie);
 //limit concurrent downloads
 let mut join_set=tokio::task::JoinSet::new();
@@ -608,7 +618,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 //poll paged list of all asset versions
 let mut cursor:Option<String>=None;
 loop{
-let mut page=context.get_asset_versions(rbx_asset::cloud::AssetVersionsRequest{asset_id:config.asset_id,cursor}).await?;
+let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id:config.asset_id,cursor}).await?;
 let context=&context;
 let output_folder=config.output_folder.clone();
 let data=&page.data;
@@ -638,7 +648,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 let mut path=output_folder.clone();
 path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
 join_set.spawn(async move{
-let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
+let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
 tokio::fs::write(path,file).await?;
@@ -733,7 +743,7 @@ async fn decompile(config:DecompileConfig)->AResult<()>{
 }
 struct DownloadDecompileConfig{
-api_key:ApiKey,
+cookie:Cookie,
 asset_id:AssetID,
 style:rox_compiler::Style,
 output_folder:PathBuf,
@@ -743,8 +753,8 @@ struct DownloadDecompileConfig{
 }
 async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
-let context=CloudContext::new(config.api_key);
-let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
+let context=CookieContext::new(config.cookie);
+let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
 let dom=load_dom(std::io::Cursor::new(file))?;
 let context=rox_compiler::DecompiledContext::from_dom(dom);
@@ -901,7 +911,7 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
 }
 struct DownloadAndDecompileHistoryConfig{
-api_key:ApiKey,
+cookie:Cookie,
 asset_id:AssetID,
 git_committer_name:String,
 git_committer_email:String,
@@ -913,7 +923,7 @@ struct DownloadAndDecompileHistoryConfig{
 }
 async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
-let context=CloudContext::new(config.api_key);
+let context=CookieContext::new(config.cookie);
 //poll paged list of all asset versions
 let asset_list=get_version_history(&context,config.asset_id).await?;
@@ -926,7 +936,7 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
 .map(|asset_version|{
 let context=context.clone();
 tokio::task::spawn(async move{
-let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
+let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
 let dom=load_dom(std::io::Cursor::new(file))?;
 Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
 })
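
For reference, below is a minimal usage sketch (not part of the commit) of the cookie-based flow this change switches to. It assumes the rbx_asset::cookie items behave exactly as they appear in the diff above: Cookie::new taking the formatted ".ROBLOSECURITY=" string, CookieContext::new, and get_asset with a GetAssetRequest. The helper name fetch_latest_asset and the Vec<u8> return type are illustrative assumptions, not part of the crate's documented API.

use std::path::PathBuf;
use anyhow::Result as AResult;
use rbx_asset::cookie::{Cookie,CookieContext};

//hypothetical helper: download the latest version of an asset with a cookie read from disk
async fn fetch_latest_asset(cookie_file:PathBuf,asset_id:u64)->AResult<Vec<u8>>{
	//read the raw .ROBLOSECURITY value, mirroring the file branch of cookie_from_args
	let cookie_value=tokio::fs::read_to_string(cookie_file).await?;
	let context=CookieContext::new(Cookie::new(format!(".ROBLOSECURITY={cookie_value}")));
	//version:None requests the current version, as in download_decompile above
	let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:None}).await?;
	//the byte-buffer return type is assumed from how download_history writes `file` straight to disk
	Ok(file)
}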