From 4805f3bc08b9baa4e4a9cd5af4f13a5a1df075a8 Mon Sep 17 00:00:00 2001
From: Quaternions
Date: Wed, 3 Jul 2024 17:24:27 -0700
Subject: [PATCH] use old api for some things

---
 src/main.rs | 124 ++++++++++++++++++++++++++++------------------------
 1 file changed, 67 insertions(+), 57 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index 56e5266..e356e80 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,7 +2,8 @@ use std::{io::Read,path::PathBuf};
 use clap::{Args,Parser,Subcommand};
 use anyhow::Result as AResult;
 use futures::StreamExt;
-use rbx_asset::cloud::{ApiKey,AssetVersion,CloudContext,InventoryItem};
+use rbx_asset::cloud::{ApiKey,CloudContext};
+use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion,InventoryItem};
 
 type AssetID=u64;
 type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
@@ -38,12 +39,12 @@ enum Commands{
 struct DownloadHistorySubcommand{
 	#[arg(long)]
 	asset_id:AssetID,
-	#[arg(long,group="api_key",required=true)]
-	api_key_literal:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_envvar:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_file:Option<PathBuf>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_literal:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_envvar:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_file:Option<PathBuf>,
 	#[arg(long)]
 	output_folder:Option<PathBuf>,
 	#[arg(long)]
@@ -68,12 +69,12 @@ struct DownloadSubcommand{
 }
 #[derive(Args)]
 struct DownloadGroupInventoryJsonSubcommand{
-	#[arg(long,group="api_key",required=true)]
-	api_key_literal:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_envvar:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_file:Option<PathBuf>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_literal:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_envvar:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_file:Option<PathBuf>,
 	#[arg(long)]
 	output_folder:Option<PathBuf>,
 	#[arg(long)]
@@ -190,12 +191,12 @@ struct DecompileSubcommand{
 }
 #[derive(Args)]
 struct DownloadDecompileSubcommand{
-	#[arg(long,group="api_key",required=true)]
-	api_key_literal:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_envvar:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_file:Option<PathBuf>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_literal:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_envvar:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_file:Option<PathBuf>,
 	#[arg(long)]
 	output_folder:Option<PathBuf>,
 	#[arg(long)]
@@ -232,12 +233,12 @@ struct DecompileHistoryIntoGitSubcommand{
 struct DownloadAndDecompileHistoryIntoGitSubcommand{
 	#[arg(long)]
 	asset_id:AssetID,
-	#[arg(long,group="api_key",required=true)]
-	api_key_literal:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_envvar:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_file:Option<PathBuf>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_literal:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_envvar:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_file:Option<PathBuf>,
 	//currently output folder must be the current folder due to git2 limitations
 	//output_folder:cli.output.unwrap(),
 	#[arg(long)]
@@ -279,10 +280,10 @@ async fn main()->AResult<()>{
 			end_version:subcommand.end_version,
 			start_version:subcommand.start_version.unwrap_or(0),
 			output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
-			api_key:api_key_from_args(
-				subcommand.api_key_literal,
-				subcommand.api_key_envvar,
-				subcommand.api_key_file,
+			cookie:cookie_from_args(
+				subcommand.cookie_literal,
+				subcommand.cookie_envvar,
+				subcommand.cookie_file,
 			).await?,
 			asset_id:subcommand.asset_id,
 		}).await,
@@ -303,10 +304,10 @@ async fn main()->AResult<()>{
 		},
 		Commands::DownloadDecompile(subcommand)=>{
 			download_decompile(DownloadDecompileConfig{
-				api_key:api_key_from_args(
-					subcommand.api_key_literal,
-					subcommand.api_key_envvar,
-					subcommand.api_key_file,
+				cookie:cookie_from_args(
+					subcommand.cookie_literal,
+					subcommand.cookie_envvar,
+					subcommand.cookie_file,
 				).await?,
 				asset_id:subcommand.asset_id,
 				output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
@@ -317,10 +318,10 @@ async fn main()->AResult<()>{
 			}).await
 		},
 		Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
-			api_key_from_args(
-				subcommand.api_key_literal,
-				subcommand.api_key_envvar,
-				subcommand.api_key_file,
+			cookie_from_args(
+				subcommand.cookie_literal,
+				subcommand.cookie_envvar,
+				subcommand.cookie_file,
 			).await?,
 			subcommand.group,
 			subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
@@ -406,10 +407,10 @@ async fn main()->AResult<()>{
 		Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
 			git_committer_name:subcommand.git_committer_name,
 			git_committer_email:subcommand.git_committer_email,
-			api_key:api_key_from_args(
-				subcommand.api_key_literal,
-				subcommand.api_key_envvar,
-				subcommand.api_key_file,
+			cookie:cookie_from_args(
+				subcommand.cookie_literal,
+				subcommand.cookie_envvar,
+				subcommand.cookie_file,
 			).await?,
 			asset_id:subcommand.asset_id,
 			output_folder:std::env::current_dir()?,
@@ -421,6 +422,15 @@ async fn main()->AResult<()>{
 	}
 }
 
+async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<Cookie>{
+	let cookie=match (literal,environment,file){
+		(Some(cookie_literal),None,None)=>cookie_literal,
+		(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
+		(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
+		_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
+	};
+	Ok(Cookie::new(format!(".ROBLOSECURITY={cookie}")))
+}
 async fn api_key_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<ApiKey>{
 	let api_key=match (literal,environment,file){
 		(Some(api_key_literal),None,None)=>api_key_literal,
@@ -514,11 +524,11 @@ async fn download_list(api_key:ApiKey,asset_id_file_map:AssetIDFileMap)->AResult
 	Ok(())
 }
 
-async fn get_inventory_pages(context:&CloudContext,group:u64)->AResult<Vec<InventoryItem>>{
+async fn get_inventory_pages(context:&CookieContext,group:u64)->AResult<Vec<InventoryItem>>{
 	let mut cursor:Option<String>=None;
 	let mut asset_list=Vec::new();
 	loop{
-		let mut page=context.inventory_page(rbx_asset::cloud::InventoryPageRequest{group,cursor}).await?;
+		let mut page=context.get_inventory_page(rbx_asset::cookie::InventoryPageRequest{group,cursor}).await?;
 		asset_list.append(&mut page.data);
 		if page.nextPageCursor.is_none(){
 			break;
@@ -528,8 +538,8 @@ async fn get_inventory_pages(context:&CloudContext,group:u64)->AResult<Vec<Inven
 	Ok(asset_list)
 }
 
-async fn download_group_inventory_json(api_key:ApiKey,group:u64,output_folder:PathBuf)->AResult<()>{
-	let context=CloudContext::new(api_key);
+async fn download_group_inventory_json(cookie:Cookie,group:u64,output_folder:PathBuf)->AResult<()>{
+	let context=CookieContext::new(cookie);
 	let item_list=get_inventory_pages(&context,group).await?;
 
 	let mut path=output_folder.clone();
@@ -539,11 +549,11 @@ async fn download_group_inventory_json(api_key:ApiKey,group:u64,output_folder:Pa
 	Ok(())
 }
 
-async fn get_version_history(context:&CloudContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
+async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
 	let mut cursor:Option<String>=None;
 	let mut asset_list=Vec::new();
 	loop{
-		let mut page=context.get_asset_versions(rbx_asset::cloud::AssetVersionsRequest{asset_id,cursor}).await?;
+		let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id,cursor}).await?;
 		asset_list.append(&mut page.data);
 		if page.nextPageCursor.is_none(){
 			break;
@@ -559,7 +569,7 @@ struct DownloadHistoryConfig{
 	end_version:Option<u64>,
 	start_version:u64,
 	output_folder:PathBuf,
-	api_key:ApiKey,
+	cookie:Cookie,
 	asset_id:AssetID,
 }
 
@@ -600,7 +610,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 			None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
 		}
 	}
-	let context=CloudContext::new(config.api_key);
+	let context=CookieContext::new(config.cookie);
 
 	//limit concurrent downloads
 	let mut join_set=tokio::task::JoinSet::new();
@@ -608,7 +618,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 	//poll paged list of all asset versions
 	let mut cursor:Option<String>=None;
 	loop{
-		let mut page=context.get_asset_versions(rbx_asset::cloud::AssetVersionsRequest{asset_id:config.asset_id,cursor}).await?;
+		let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id:config.asset_id,cursor}).await?;
 		let context=&context;
 		let output_folder=config.output_folder.clone();
 		let data=&page.data;
@@ -638,7 +648,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 			let mut path=output_folder.clone();
 			path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
 			join_set.spawn(async move{
-				let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
+				let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
 
 				tokio::fs::write(path,file).await?;
 
@@ -733,7 +743,7 @@ async fn decompile(config:DecompileConfig)->AResult<()>{
 }
 
 struct DownloadDecompileConfig{
-	api_key:ApiKey,
+	cookie:Cookie,
 	asset_id:AssetID,
 	style:rox_compiler::Style,
 	output_folder:PathBuf,
@@ -743,8 +753,8 @@ struct DownloadDecompileConfig{
 }
 
 async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
-	let context=CloudContext::new(config.api_key);
-	let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
+	let context=CookieContext::new(config.cookie);
+	let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
 	let dom=load_dom(std::io::Cursor::new(file))?;
 
 	let context=rox_compiler::DecompiledContext::from_dom(dom);
@@ -901,7 +911,7 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
 }
 
 struct DownloadAndDecompileHistoryConfig{
-	api_key:ApiKey,
+	cookie:Cookie,
 	asset_id:AssetID,
 	git_committer_name:String,
 	git_committer_email:String,
@@ -913,7 +923,7 @@ struct DownloadAndDecompileHistoryConfig{
 }
 
 async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
-	let context=CloudContext::new(config.api_key);
+	let context=CookieContext::new(config.cookie);
 
 	//poll paged list of all asset versions
 	let asset_list=get_version_history(&context,config.asset_id).await?;
@@ -926,7 +936,7 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
 		.map(|asset_version|{
 			let context=context.clone();
 			tokio::task::spawn(async move{
-				let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
+				let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
				let dom=load_dom(std::io::Cursor::new(file))?;
				Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
			})