asset-tool/src/main.rs

use std::{io::Read,path::PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion};
type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
const CONCURRENT_DECODE:usize=8;
const CONCURRENT_REQUESTS:usize=32;
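//CLI definition: clap derives one subcommand per Commands variant, using kebab-case names
//(e.g. DownloadHistory becomes `download-history`), and one long flag per #[arg(long)] field.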
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version = true)]
struct Cli{
#[command(subcommand)]
command:Commands,
}
#[derive(Subcommand)]
enum Commands{
DownloadHistory(DownloadHistorySubcommand),
Download(DownloadSubcommand),
DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand),
Create(CreateSubcommand),
Upload(UploadSubcommand),
Compile(CompileSubcommand),
CompileUpload(CompileUploadSubcommand),
Decompile(DecompileSubcommand),
DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
}
#[derive(Args)]
struct DownloadHistorySubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]
continue_from_versions:Option<bool>,
#[arg(long)]
start_version:Option<u64>,
#[arg(long)]
end_version:Option<u64>,
}
#[derive(Args)]
struct DownloadSubcommand{
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(required=true)]
asset_ids:Vec<AssetID>,
}
#[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]
group:u64,
}
#[derive(Args)]
struct CreateSubcommand{
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
model_name:String,
#[arg(long)]
description:Option<String>,
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
group:Option<u64>,
#[arg(long)]
free_model:Option<bool>,
#[arg(long)]
allow_comments:Option<bool>,
}
#[derive(Args)]
struct UploadSubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
group:Option<u64>,
}
#[derive(Args)]
struct CompileSubcommand{
#[arg(long)]
input_folder:Option<PathBuf>,
#[arg(long)]
output_file:PathBuf,
#[arg(long)]
style:Option<Style>,
#[arg(long)]
template:Option<PathBuf>,
}
#[derive(Args)]
struct CompileUploadSubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
group:Option<u64>,
#[arg(long)]
input_folder:Option<PathBuf>,
#[arg(long)]
style:Option<Style>,
#[arg(long)]
template:Option<PathBuf>,
}
#[derive(Args)]
struct DecompileSubcommand{
#[arg(long)]
input_file:PathBuf,
#[arg(long)]
output_folder:Option<PathBuf>,
#[arg(long)]
style:Style,
#[arg(long)]
write_template:Option<bool>,
#[arg(long)]
write_models:Option<bool>,
#[arg(long)]
write_scripts:Option<bool>,
}
#[derive(Args)]
struct DecompileHistoryIntoGitSubcommand{
#[arg(long)]
input_folder:PathBuf,
//currently output folder must be the current folder due to git2 limitations
//output_folder:cli.output.unwrap(),
#[arg(long)]
style:Style,
#[arg(long)]
git_committer_name:String,
#[arg(long)]
git_committer_email:String,
#[arg(long)]
write_template:Option<bool>,
#[arg(long)]
write_models:Option<bool>,
#[arg(long)]
write_scripts:Option<bool>,
}
#[derive(Args)]
struct DownloadAndDecompileHistoryIntoGitSubcommand{
#[arg(long)]
asset_id:AssetID,
#[arg(long)]
cookie_type:CookieType,
#[arg(long)]
cookie:String,
//currently output folder must be the current folder due to git2 limitations
//output_folder:cli.output.unwrap(),
#[arg(long)]
style:Style,
#[arg(long)]
git_committer_name:String,
#[arg(long)]
git_committer_email:String,
#[arg(long)]
write_template:Option<bool>,
#[arg(long)]
write_models:Option<bool>,
#[arg(long)]
write_scripts:Option<bool>,
}
#[derive(Clone,clap::ValueEnum)]
enum CookieType{
Literal,
Environment,
File,
}
#[derive(Clone,Copy,Debug,clap::ValueEnum)]
enum Style{
Rox,
Rojo,
RoxRojo,
}
impl Style{
fn rox(&self)->rox_compiler::Style{
match self{
Style::Rox=>rox_compiler::Style::Rox,
Style::Rojo=>rox_compiler::Style::Rojo,
Style::RoxRojo=>rox_compiler::Style::RoxRojo,
}
}
}
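//a hypothetical invocation (binary name and values are examples; flag names follow clap's defaults):
//  asset-tool download-history --asset-id 1234 --cookie-type file --cookie cookie.txt --output-folder history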
#[tokio::main]
async fn main()->AResult<()>{
let cli=Cli::parse();
match cli.command{
Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
end_version:subcommand.end_version,
start_version:subcommand.start_version.unwrap_or(0),
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
asset_id:subcommand.asset_id,
}).await,
Commands::Download(subcommand)=>{
let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
download_list(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
subcommand.asset_ids.into_iter().map(|asset_id|{
let mut path=output_folder.clone();
path.push(asset_id.to_string());
(asset_id,path)
}).collect()
).await
},
Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
subcommand.group,
subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
).await,
Commands::Create(subcommand)=>create(CreateConfig{
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
group:subcommand.group,
input_file:subcommand.input_file,
model_name:subcommand.model_name,
description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
free_model:subcommand.free_model.unwrap_or(false),
allow_comments:subcommand.allow_comments.unwrap_or(false),
}).await,
Commands::Upload(subcommand)=>upload_list(
Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
subcommand.group,
vec![(subcommand.asset_id,subcommand.input_file)]
).await,
Commands::Compile(subcommand)=>compile(CompileConfig{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
output_file:subcommand.output_file,
template:subcommand.template,
style:subcommand.style,
}).await,
Commands::CompileUpload(subcommand)=>compile_upload(CompileUploadConfig{
input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
template:subcommand.template,
style:subcommand.style,
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
group:subcommand.group,
asset_id:subcommand.asset_id,
}).await,
Commands::Decompile(subcommand)=>decompile(DecompileConfig{
style:subcommand.style,
input_file:subcommand.input_file,
output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
}).await,
Commands::DecompileHistoryIntoGit(subcommand)=>decompile_history_into_git(DecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email,
input_folder:subcommand.input_folder,
output_folder:std::env::current_dir()?,
style:subcommand.style,
write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
}).await,
Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
git_committer_name:subcommand.git_committer_name,
git_committer_email:subcommand.git_committer_email,
cookie:Cookie::from_type(subcommand.cookie_type,subcommand.cookie).await?.0,
asset_id:subcommand.asset_id,
output_folder:std::env::current_dir()?,
style:subcommand.style,
write_template:subcommand.write_template.unwrap_or(false),
write_models:subcommand.write_models.unwrap_or(false),
write_scripts:subcommand.write_scripts.unwrap_or(true),
}).await,
}
}
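//holds a ready-to-send .ROBLOSECURITY cookie header value; from_type resolves it from a literal, an environment variable, or a file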
struct Cookie(String);
impl Cookie{
async fn from_type(cookie_type:CookieType,cookie_string:String)->AResult<Self>{
Ok(Self(format!(".ROBLOSECURITY={}",match cookie_type{
CookieType::Literal=>cookie_string,
CookieType::Environment=>std::env::var(cookie_string)?,
CookieType::File=>tokio::fs::read_to_string(cookie_string).await?,
})))
}
}
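//create a brand new asset on the website from input_file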
struct CreateConfig{
cookie:String,
model_name:String,
description:String,
input_file:PathBuf,
group:Option<u64>,
free_model:bool,
allow_comments:bool,
}
async fn create(config:CreateConfig)->AResult<()>{
let resp=RobloxContext::new(config.cookie)
.create(rbx_asset::context::CreateRequest{
name:config.model_name,
description:config.description,
ispublic:config.free_model,
allowComments:config.allow_comments,
groupId:config.group,
},tokio::fs::read(config.input_file).await?).await?;
println!("UploadResponse={:?}",resp);
Ok(())
}
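//upload each (asset_id,file) pair as a new version of an existing asset, up to CONCURRENT_REQUESTS at a time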
async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=RobloxContext::new(cookie);
//map the items into upload futures; buffer_unordered below polls up to CONCURRENT_REQUESTS of them at once
futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{
let context=&context;
async move{
Ok((asset_id,context.upload(rbx_asset::context::UploadRequest{
assetid:asset_id,
name:None,
description:None,
ispublic:None,
allowComments:None,
groupId:group,
},tokio::fs::read(file).await?).await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((asset_id,body))=>{
println!("asset_id={} UploadResponse={:?}",asset_id,body);
},
Err(e)=>eprintln!("ul error: {}",e),
}
}).await;
Ok(())
}
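//download each asset to its target path, up to CONCURRENT_REQUESTS at a time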
async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
let context=RobloxContext::new(cookie);
futures::stream::iter(asset_id_file_map.into_iter()
.map(|(asset_id,file)|{
let context=&context;
async move{
Ok((file,context.download(rbx_asset::context::DownloadRequest{asset_id,version:None}).await?))
}
}))
.buffer_unordered(CONCURRENT_REQUESTS)
.for_each(|b:AResult<_>|async{
match b{
Ok((dest,data))=>{
match tokio::fs::write(dest,data).await{
Err(e)=>eprintln!("fs error: {}",e),
_=>(),
}
},
Err(e)=>eprintln!("dl error: {}",e),
}
}).await;
Ok(())
}
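//walk the paged group inventory endpoint until nextPageCursor runs out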
async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<InventoryItem>>{
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=context.inventory_page(rbx_asset::context::InventoryPageRequest{group,cursor}).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;
}
cursor=page.nextPageCursor;
}
Ok(asset_list)
}
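//dump the group's full inventory listing to a versions.json file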
async fn download_group_inventory_json(cookie:String,group:u64,output_folder:PathBuf)->AResult<()>{
let context=RobloxContext::new(cookie);
let item_list=get_inventory_pages(&context,group).await?;
2024-04-26 00:38:06 +00:00
let mut path=output_folder.clone();
path.set_file_name("versions.json");
tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;
Ok(())
}
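//walk the paged version history for an asset and return it sorted by version number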
async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
let mut cursor:Option<String>=None;
let mut asset_list=Vec::new();
loop{
let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id,cursor}).await?;
asset_list.append(&mut page.data);
if page.nextPageCursor.is_none(){
break;
}
cursor=page.nextPageCursor;
}
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
Ok(asset_list)
}
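//download every version of an asset as {asset_id}_v{version}.rbxl plus a versions.json index;
//with continue_from_versions it reloads versions.json and only fetches versions that are still missing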
struct DownloadHistoryConfig{
continue_from_versions:bool,
end_version:Option<u64>,
start_version:u64,
output_folder:PathBuf,
cookie:String,
asset_id:AssetID,
}
async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
let mut asset_list_contents=std::collections::HashSet::new();
let mut asset_list:Vec<AssetVersion>=Vec::new();
if config.end_version.is_none()&&config.continue_from_versions{
//load preexisting versions list
let mut versions_path=config.output_folder.clone();
versions_path.push("versions.json");
match std::fs::File::open(versions_path){
Ok(versions_file)=>asset_list.append(&mut serde_json::from_reader(versions_file)?),
Err(e)=>match e.kind(){
std::io::ErrorKind::NotFound=>Err(anyhow::Error::msg("Cannot continue from versions.json - file does not exist"))?,
_=>Err(e)?,
}
}
//write down which versions are contained
for asset_version in &asset_list{
asset_list_contents.insert(asset_version.assetVersionNumber);
}
//find the highest number
match asset_list.iter().map(|asset_version|asset_version.assetVersionNumber).max(){
Some(max)=>{
//count down contiguously until a number is missing
for i in (1..=max).rev(){
if !asset_list_contents.contains(&i){
//that is end_version
config.end_version=Some(i);
break;
}
}
//if all versions are contained, set start_version to the max + 1
if config.end_version.is_none(){
config.start_version=max+1;
}
},
None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
}
}
let context=RobloxContext::new(config.cookie);
//limit concurrent downloads
let mut join_set=tokio::task::JoinSet::new();
//poll paged list of all asset versions
let mut cursor:Option<String>=None;
loop{
let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id:config.asset_id,cursor}).await?;
let context=&context;
let output_folder=config.output_folder.clone();
let data=&page.data;
let asset_list_contents=&asset_list_contents;
let join_set=&mut join_set;
let error_catcher=||async move{
let mut cancel_paging=false;
for asset_version in data{
let version_number=asset_version.assetVersionNumber;
//skip assets beyond specified end_version
if config.end_version.is_some_and(|v|v<version_number){
continue;
}
//skip assets lower than start_version and cancel paging asset versions
if version_number<config.start_version{
cancel_paging=true;
continue;//don't trust roblox returned order
}
//skip previously downloaded assets
if asset_list_contents.contains(&version_number){
continue;
}
while CONCURRENT_REQUESTS<=join_set.len(){
join_set.join_next().await.unwrap()??;
}
let context=context.clone();
let mut path=output_folder.clone();
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
join_set.spawn(async move{
let file=context.download(rbx_asset::context::DownloadRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
tokio::fs::write(path,file).await?;
Ok::<_,anyhow::Error>(())
});
}
Ok::<_,anyhow::Error>(cancel_paging)
};
let cancel_paging=match error_catcher().await{
Ok(cancel)=>cancel,
Err(e)=>{
println!("download error: {}",e);
//cancel download and write versions
true
},
};
if page.nextPageCursor.is_none()||cancel_paging{
for asset_version in page.data.into_iter(){
if !(asset_list_contents.contains(&asset_version.assetVersionNumber)
||config.end_version.is_some_and(|v|v<asset_version.assetVersionNumber)
||asset_version.assetVersionNumber<config.start_version){
asset_list.push(asset_version);
}
}
break;
}else{
asset_list.append(&mut page.data);
}
cursor=page.nextPageCursor;
}
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
let mut path=config.output_folder.clone();
//push so this matches the versions.json path read back by continue_from_versions above
path.push("versions.json");
tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;
while let Some(result)=join_set.join_next().await{
result??;
}
Ok(())
}
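//sniff the first 8 bytes to pick a parser: "<roblox!" is the binary format, "<roblox " is the XML format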
fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
let mut buf=std::io::BufReader::new(input);
let peek=std::io::BufRead::fill_buf(&mut buf)?;
//avoid panicking on files shorter than the 8-byte magic
if peek.len()<8{
return Err(anyhow::Error::msg("unsupported file type"));
}
match &peek[0..4]{
b"<rob"=>{
match &peek[4..8]{
b"lox!"=>rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
b"lox "=>rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
}
},
_=>Err(anyhow::Error::msg("unsupported file type")),
}
}
struct DecompileConfig{
style:Style,
input_file:PathBuf,
output_folder:PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn decompile(config:DecompileConfig)->AResult<()>{
//rules:
//Class Script|LocalScript|ModuleScript->$Name.lua
//Class Model->$Name.rbxmx
//overrides.json per-folder [Override{name,class}]
//Everything else goes into template.rbxlx
//read file
let dom=load_dom(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?;
let context=rox_compiler::DecompiledContext::from_dom(dom);
//generate folders, models, and scripts
//delete models and scripts from dom
context.write_files(rox_compiler::WriteConfig{
style:config.style.rox(),
output_folder:config.output_folder,
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
}).await?;
Ok(())
}
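//write one decompiled version into the working tree and commit it, dated with the asset version's creation time; skips committing when the tree is unchanged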
struct WriteCommitConfig{
git_committer_name:String,
git_committer_email:String,
output_folder:PathBuf,
style:Style,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,rox_compiler::DecompiledContext)>,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{
let (asset_version,context)=b??;
println!("writing files for version {}",asset_version.assetVersionNumber);
//clean output dir
if config.write_models||config.write_scripts{
let mut src=config.output_folder.clone();
src.push("src");
match std::fs::remove_dir_all(src){
Ok(())=>(),
Err(e)=>println!("remove_dir_all src failed {}",e),
}
}
if config.write_template{
let mut template=config.output_folder.clone();
template.push("template.rbxlx");
match std::fs::remove_file(template){
Ok(())=>(),
Err(e)=>println!("remove_file template.rbxlx failed {}",e),
}
}
//write files
context.write_files(rox_compiler::WriteConfig{
style:config.style.rox(),
output_folder:config.output_folder.clone(),
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
}).await?;
let date=asset_version.created;
//let sig=repo.signature()?; //this pulls default name and email
let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0)).unwrap();
let tree_id={
let mut tree_index = repo.index()?;
match tree_index.add_all(std::iter::once(config.output_folder.as_path()),git2::IndexAddOption::DEFAULT,None){
Ok(_)=>(),
Err(e)=>println!("tree_index.add_all error: {}",e),
}
match tree_index.update_all(std::iter::once(config.output_folder.as_path()),None){
Ok(_)=>(),
Err(e)=>println!("tree_index.update_all error: {}",e),
}
tree_index.write()?;
tree_index.write_tree()?
};
let tree=repo.find_tree(tree_id)?;
let mut parents=Vec::new();
match repo.head(){
Ok(reference)=>{
let commit=reference.peel_to_commit()?;
//test tree against commit tree to see if there is any changes
let commit_tree=commit.tree()?;
let diff=repo.diff_tree_to_tree(Some(&commit_tree),Some(&tree),None)?;
if diff.get_delta(0).is_none(){
println!("no changes");
return Ok(());
}
parents.push(commit);
},
Err(e)=>println!("repo head error {:?}",e),
};
repo.commit(
Some("HEAD"),//update_ref
&sig,//author
&sig,//committer
&format!("v{}", asset_version.assetVersionNumber),//message
&tree,//tree (basically files)
parents.iter().collect::<Vec<&git2::Commit<'_>>>().as_slice(),//parents
)?;
//commit
Ok(())
}
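//decompile every downloaded .rbxl listed in input_folder/versions.json and commit each version into the git repo at output_folder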
struct DecompileHistoryConfig{
git_committer_name:String,
git_committer_email:String,
input_folder:PathBuf,
style:Style,
output_folder:PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
//use preexisting versions list
let mut versions_path=config.input_folder.clone();
versions_path.push("versions.json");
let asset_list:Vec<AssetVersion>=serde_json::from_reader(std::fs::File::open(versions_path)?)?;
let repo=git2::Repository::init(config.output_folder.as_path())?;
//decompile all versions
futures::stream::iter(asset_list.into_iter()
.map(|asset_version|{
let mut file_path=config.input_folder.clone();
tokio::task::spawn_blocking(move||{
file_path.push(format!("{}_v{}.rbxl",asset_version.assetId,asset_version.assetVersionNumber));
let file=std::fs::File::open(file_path)?;
let dom=load_dom(file)?;
let contents=rox_compiler::DecompiledContext::from_dom(dom);
Ok::<_,anyhow::Error>((asset_version,contents))
})
}))
.buffered(CONCURRENT_DECODE)
.for_each(|join_handle_result|async{
match write_commit(WriteCommitConfig{
git_committer_name:config.git_committer_name.clone(),
git_committer_email:config.git_committer_email.clone(),
style:config.style,
output_folder:config.output_folder.clone(),
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
},join_handle_result,&repo).await{
Ok(())=>(),
Err(e)=>println!("decompile/write/commit error: {}",e),
}
}).await;
Ok(())
}
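//the two steps above combined: download each version straight into memory, decompile it, and commit it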
struct DownloadAndDecompileHistoryConfig{
cookie:String,
asset_id:AssetID,
git_committer_name:String,
git_committer_email:String,
style:Style,
output_folder:PathBuf,
write_template:bool,
write_models:bool,
write_scripts:bool,
}
async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
let context=RobloxContext::new(config.cookie);
//poll paged list of all asset versions
let asset_list=get_version_history(&context,config.asset_id).await?;
let repo=git2::Repository::init(config.output_folder.clone())?;
//download all versions
let asset_id=config.asset_id;
futures::stream::iter(asset_list.into_iter()
.map(|asset_version|{
let context=context.clone();
tokio::task::spawn(async move{
let file=context.download(rbx_asset::context::DownloadRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
let dom=load_dom(std::io::Cursor::new(file))?;
Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
})
}))
.buffered(CONCURRENT_DECODE)
.for_each(|join_handle_result|async{
match write_commit(WriteCommitConfig{
style:config.style,
git_committer_name:config.git_committer_name.clone(),
git_committer_email:config.git_committer_email.clone(),
output_folder:config.output_folder.clone(),
write_template:config.write_template,
write_models:config.write_models,
write_scripts:config.write_scripts,
},join_handle_result,&repo).await{
Ok(())=>(),
Err(e)=>println!("download/unzip/decompile/write/commit error: {}",e),
}
}).await;
Ok(())
}
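//compile is decompile in reverse: load an optional template place, merge the rox source tree into its DataModel, and write a binary place file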
struct CompileConfig{
input_folder:PathBuf,
output_file:PathBuf,
template:Option<PathBuf>,
style:Option<Style>,
}
async fn compile(config:CompileConfig)->AResult<()>{
//basically decompile in reverse order
//load template dom
let mut dom=match config.template{
//mr dom doesn't like tokio files
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
};
rox_compiler::compile(rox_compiler::CompileConfig{
input_folder:config.input_folder,
style:config.style.map(|s|s.rox()),
},&mut dom).await?;
let mut output_place=config.output_file.clone();
if output_place.extension().is_none()&&tokio::fs::try_exists(output_place.as_path()).await?{
output_place.push("place.rbxl");
}
let output=std::io::BufWriter::new(std::fs::File::create(output_place)?);
//write inner objects
rbx_binary::to_writer(output,&dom,dom.root().children())?;
Ok(())
}
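//compile the source tree into an in-memory binary place and upload it as a new version of asset_id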
struct CompileUploadConfig{
input_folder:PathBuf,
template:Option<PathBuf>,
style:Option<Style>,
cookie:String,
group:Option<u64>,
asset_id:AssetID,
}
async fn compile_upload(config:CompileUploadConfig)->AResult<()>{
let mut dom=match config.template{
//mr dom doesn't like tokio files
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
};
rox_compiler::compile(rox_compiler::CompileConfig{
input_folder:config.input_folder,
style:config.style.map(|s|s.rox()),
},&mut dom).await?;
//make a binary file in a buffer in memory
let mut data=Vec::new();
rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;
//upload it
let context=RobloxContext::new(config.cookie);
context.upload(rbx_asset::context::UploadRequest{
assetid:config.asset_id,
name:None,
description:None,
ispublic:None,
allowComments:None,
groupId:config.group,
},data).await?;
Ok(())
}