forked from StrafesNET/asset-tool

Compare commits

12 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 01c61c71d9 | |
| | 8e933111e4 | |
| | 13786f941b | |
| | dde85fbf8a | |
| | 59deac0155 | |
| | 66104be2d9 | |
| | ca81d06998 | |
| | 4bea61558e | |
| | bd382ef885 | |
| | 809d6df3f0 | |
| | e28a58d747 | |
| | c29396f44b | |
648 Cargo.lock (generated)
File diff suppressed because it is too large
Cargo.toml

@@ -1,26 +1,24 @@
workspace = { members = ["rbx_asset"] }
[package]
name = "asset-tool"
version = "0.2.0"
version = "0.3.1"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1.0.75"
chrono = { version = "0.4.31", features = ["serde"] }
clap = { version = "4.4.2", features = ["derive"] }
flate2 = "1.0.28"
futures = "0.3.30"
git2 = "0.18.1"
lazy-regex = "3.1.0"
pollster = "0.3.0"
rayon = "1.8.0"
rbx_asset = { path = "rbx_asset" }
rbx_binary = "0.7.4"
rbx_dom_weak = "2.7.0"
rbx_reflection_database = "0.2.10"
rbx_xml = "0.13.3"
reqwest = { version = "0.11.23", features = ["cookies", "json"] }
serde = { version = "1.0.195", features = ["derive"] }
serde_json = "1.0.111"
tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread", "fs"] }
14 rbx_asset/Cargo.toml (new file)
@@ -0,0 +1,14 @@
[package]
name = "rbx_asset"
version = "0.1.0"
edition = "2021"
publish = ["strafesnet"]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
chrono = { version = "0.4.38", features = ["serde"] }
flate2 = "1.0.29"
reqwest = { version = "0.12.4", features = ["json"] }
serde = { version = "1.0.199", features = ["derive"] }
url = "2.5.0"
311 rbx_asset/src/context.rs (new file)
@@ -0,0 +1,311 @@
#[derive(Debug)]
pub enum PostError{
    Reqwest(reqwest::Error),
    CSRF,
}
impl std::fmt::Display for PostError{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f,"{self:?}")
    }
}
impl std::error::Error for PostError{}

#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct CreateRequest{
    pub name:String,
    pub description:String,
    pub ispublic:bool,
    pub allowComments:bool,
    pub groupId:Option<u64>,
}
#[derive(Debug)]
pub enum CreateError{
    ParseError(url::ParseError),
    PostError(PostError),
    Reqwest(reqwest::Error),
}
impl std::fmt::Display for CreateError{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f,"{self:?}")
    }
}
impl std::error::Error for CreateError{}

#[allow(nonstandard_style,dead_code)]
pub struct UploadRequest{
    pub assetid:u64,
    pub name:Option<String>,
    pub description:Option<String>,
    pub ispublic:Option<bool>,
    pub allowComments:Option<bool>,
    pub groupId:Option<u64>,
}
#[derive(Debug)]
pub enum UploadError{
    ParseError(url::ParseError),
    PostError(PostError),
    Reqwest(reqwest::Error),
    AssetIdIsZero,
}
impl std::fmt::Display for UploadError{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f,"{self:?}")
    }
}
impl std::error::Error for UploadError{}
#[derive(Debug,serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct UploadResponse{
    pub AssetId:u64,
    pub AssetVersionId:u64,
}

#[allow(nonstandard_style,dead_code)]
pub struct DownloadRequest{
    pub asset_id:u64,
    pub version:Option<u64>,
}
#[derive(Debug)]
pub enum DownloadError{
    ParseError(url::ParseError),
    Reqwest(reqwest::Error),
    IO(std::io::Error)
}
impl std::fmt::Display for DownloadError{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f,"{self:?}")
    }
}
impl std::error::Error for DownloadError{}

pub struct HistoryPageRequest{
    pub asset_id:u64,
    pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct AssetVersion{
    pub Id:u64,
    pub assetId:u64,
    pub assetVersionNumber:u64,
    pub creatorType:String,
    pub creatorTargetId:u64,
    pub creatingUniverseId:Option<u64>,
    pub created:chrono::DateTime<chrono::Utc>,
    pub isPublished:bool,
}
#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
pub struct HistoryPageResponse{
    pub previousPageCursor:Option<String>,
    pub nextPageCursor:Option<String>,
    pub data:Vec<AssetVersion>,
}
#[derive(Debug)]
pub enum HistoryPageError{
    ParseError(url::ParseError),
    Reqwest(reqwest::Error),
}
impl std::fmt::Display for HistoryPageError{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f,"{self:?}")
    }
}
impl std::error::Error for HistoryPageError{}

pub struct InventoryPageRequest{
    pub group:u64,
    pub cursor:Option<String>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct InventoryItem{
    pub id:u64,
    pub name:String,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
pub struct InventoryPageResponse{
    pub totalResults:u64,//up to 50
    pub filteredKeyword:Option<String>,//""
    pub searchDebugInfo:Option<String>,//null
    pub spellCheckerResult:Option<String>,//null
    pub queryFacets:Option<String>,//null
    pub imageSearchStatus:Option<String>,//null
    pub previousPageCursor:Option<String>,
    pub nextPageCursor:Option<String>,
    pub data:Vec<InventoryItem>,
}
#[derive(Debug)]
pub enum InventoryPageError{
    ParseError(url::ParseError),
    Reqwest(reqwest::Error),
}
impl std::fmt::Display for InventoryPageError{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f,"{self:?}")
    }
}
impl std::error::Error for InventoryPageError{}

//idk how to do this better
enum ReaderType<R:std::io::Read>{
    GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
    Raw(std::io::BufReader<R>),
}
fn maybe_gzip_decode<R:std::io::Read>(input:R)->std::io::Result<ReaderType<R>>{
    let mut buf=std::io::BufReader::new(input);
    let peek=std::io::BufRead::fill_buf(&mut buf)?;
    match &peek[0..2]{
        b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
        _=>Ok(ReaderType::Raw(buf)),
    }
}
fn read_readable(mut readable:impl std::io::Read)->std::io::Result<Vec<u8>>{
    let mut contents=Vec::new();
    readable.read_to_end(&mut contents)?;
    Ok(contents)
}

#[derive(Clone)]
pub struct RobloxContext{
    pub cookie:String,
    pub client:reqwest::Client,
}

impl RobloxContext{
    pub fn new(cookie:String)->Self{
        Self{
            cookie,
            client:reqwest::Client::new(),
        }
    }
    async fn get(&self,url:impl reqwest::IntoUrl)->Result<reqwest::Response,reqwest::Error>{
        self.client.get(url)
            .header("Cookie",self.cookie.as_str())
            .send().await
    }
    async fn post(&self,url:url::Url,body:impl Into<reqwest::Body>+Clone)->Result<reqwest::Response,PostError>{
        let mut resp=self.client.post(url.clone())
            .header("Cookie",self.cookie.as_str())
            .body(body.clone())
            .send().await.map_err(PostError::Reqwest)?;

        //This is called a CSRF challenge apparently
        if resp.status()==reqwest::StatusCode::FORBIDDEN{
            if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
                resp=self.client.post(url)
                    .header("X-CSRF-Token",csrf_token)
                    .header("Cookie",self.cookie.as_str())
                    .body(body)
                    .send().await.map_err(PostError::Reqwest)?;
            }else{
                Err(PostError::CSRF)?;
            }
        }

        Ok(resp)
    }
    pub async fn create(&self,config:CreateRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,CreateError>{
        let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(CreateError::ParseError)?;
        //url borrow scope
        {
            let mut query=url.query_pairs_mut();//borrow here
            //archaic roblox api uses 0 for new asset
            query.append_pair("assetid","0");
            query.append_pair("name",config.name.as_str());
            query.append_pair("description",config.description.as_str());
            query.append_pair("ispublic",if config.ispublic{"True"}else{"False"});
            query.append_pair("allowComments",if config.allowComments{"True"}else{"False"});
            match config.groupId{
                Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
                None=>(),
            }
        }

        let resp=self.post(url,body).await.map_err(CreateError::PostError)?;

        Ok(resp.json::<UploadResponse>().await.map_err(CreateError::Reqwest)?)
    }
    pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
        let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
        //url borrow scope
        {
            let mut query=url.query_pairs_mut();//borrow here
            //archaic roblox api uses 0 for new asset
            match config.assetid{
                0=>return Err(UploadError::AssetIdIsZero),
                assetid=>{query.append_pair("assetid",assetid.to_string().as_str());},
            }
            if let Some(name)=config.name.as_deref(){
                query.append_pair("name",name);
            }
            if let Some(description)=config.description.as_deref(){
                query.append_pair("description",description);
            }
            if let Some(ispublic)=config.ispublic{
                query.append_pair("ispublic",if ispublic{"True"}else{"False"});
            }
            if let Some(allow_comments)=config.allowComments{
                query.append_pair("allowComments",if allow_comments{"True"}else{"False"});
            }
            if let Some(group_id)=config.groupId{
                query.append_pair("groupId",group_id.to_string().as_str());
            }
        }

        let resp=self.post(url,body).await.map_err(UploadError::PostError)?;

        Ok(resp.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
    }
    pub async fn download(&self,config:DownloadRequest)->Result<Vec<u8>,DownloadError>{
        let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(DownloadError::ParseError)?;
        //url borrow scope
        {
            let mut query=url.query_pairs_mut();//borrow here
            query.append_pair("ID",config.asset_id.to_string().as_str());
            if let Some(version)=config.version{
                query.append_pair("version",version.to_string().as_str());
            }
        }
        let resp=self.get(url).await.map_err(DownloadError::Reqwest)?;

        let body=resp.bytes().await.map_err(DownloadError::Reqwest)?;

        match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
            Ok(ReaderType::GZip(readable))=>read_readable(readable),
            Ok(ReaderType::Raw(readable))=>read_readable(readable),
            Err(e)=>Err(e),
        }.map_err(DownloadError::IO)
    }
    pub async fn history_page(&self,config:HistoryPageRequest)->Result<HistoryPageResponse,HistoryPageError>{
        let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(HistoryPageError::ParseError)?;
        //url borrow scope
        {
            let mut query=url.query_pairs_mut();//borrow here
            //query.append_pair("sortOrder","Asc");
            //query.append_pair("limit","100");
            //query.append_pair("count","100");
            if let Some(cursor)=config.cursor.as_deref(){
                query.append_pair("cursor",cursor);
            }
        }

        Ok(self.get(url).await.map_err(HistoryPageError::Reqwest)?
            .json::<HistoryPageResponse>().await.map_err(HistoryPageError::Reqwest)?)
    }
    pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
        let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
        //url borrow scope
        {
            let mut query=url.query_pairs_mut();//borrow here
            if let Some(cursor)=config.cursor.as_deref(){
                query.append_pair("cursor",cursor);
            }
        }

        Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
            .json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
    }
}
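For orientation (not part of the commit), here is a minimal sketch of how a consumer could drive the new RobloxContext, assuming a tokio runtime with the macros, rt, and fs features enabled; the RBXCOOKIE variable name, asset id, and output filename are placeholders, not values from the diff:

```rust
use rbx_asset::context::{DownloadRequest, RobloxContext};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder: however the caller obtains its Roblox cookie string.
    let cookie = std::env::var("RBXCOOKIE")?;
    let context = RobloxContext::new(cookie);
    // download() already handles the optional gzip wrapping via maybe_gzip_decode,
    // so this is the raw asset payload.
    let data = context
        .download(DownloadRequest { asset_id: 1234, version: None })
        .await?;
    tokio::fs::write("asset.rbxm", data).await?;
    Ok(())
}
```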
1 rbx_asset/src/lib.rs (new file)
@@ -0,0 +1 @@
pub mod context;
331 src/main.rs
@@ -4,6 +4,7 @@ use anyhow::Result as AResult;
use futures::StreamExt;
use rbx_dom_weak::types::Ref;
use tokio::io::AsyncReadExt;
use rbx_asset::context::{RobloxContext,InventoryItem,AssetVersion};

type AssetID=u64;
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
@@ -105,7 +106,7 @@ struct UploadSubcommand{
#[derive(Args)]
struct CompileSubcommand{
    #[arg(long)]
    input_folder:PathBuf,
    input_folder:Option<PathBuf>,
    #[arg(long)]
    output_file:PathBuf,
    #[arg(long)]
@@ -118,7 +119,7 @@ struct DecompileSubcommand{
    #[arg(long)]
    input_file:PathBuf,
    #[arg(long)]
    output_folder:PathBuf,
    output_folder:Option<PathBuf>,
    #[arg(long)]
    style:DecompileStyle,
    #[arg(long)]
@@ -185,46 +186,6 @@ enum DecompileStyle{
    RoxRojo,
}

#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
struct VersionPage{
    previousPageCursor:Option<String>,
    nextPageCursor:Option<String>,
    data:Vec<AssetVersion>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
struct AssetVersion{
    Id:u64,
    assetId:AssetID,
    assetVersionNumber:u64,
    creatorType:String,
    creatorTargetId:u64,
    creatingUniverseId:Option<u64>,
    created:chrono::DateTime<chrono::Utc>,
    isPublished:bool,
}

#[derive(serde::Deserialize)]
#[allow(nonstandard_style,dead_code)]
struct InventoryPage{
    totalResults:u64,//up to 50
    filteredKeyword:Option<String>,//""
    searchDebugInfo:Option<String>,//null
    spellCheckerResult:Option<String>,//null
    queryFacets:Option<String>,//null
    imageSearchStatus:Option<String>,//null
    previousPageCursor:Option<String>,
    nextPageCursor:Option<String>,
    data:Vec<InventoryItem>,
}
#[derive(serde::Deserialize,serde::Serialize)]
#[allow(nonstandard_style,dead_code)]
struct InventoryItem{
    id:u64,
    name:String,
}

#[tokio::main]
async fn main()->AResult<()>{
    let cli=Cli::parse();
@@ -268,7 +229,7 @@ async fn main()->AResult<()>{
            vec![(subcommand.asset_id,subcommand.input_file)]
        ).await,
        Commands::Compile(subcommand)=>compile(CompileConfig{
            input_folder:subcommand.input_folder,
            input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
            output_file:subcommand.output_file,
            template:subcommand.template,
            style:subcommand.style,
@@ -276,7 +237,7 @@ async fn main()->AResult<()>{
        Commands::Decompile(subcommand)=>decompile(DecompileConfig{
            style:subcommand.style,
            input_file:subcommand.input_file,
            output_folder:subcommand.output_folder,
            output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
            write_template:subcommand.write_template.unwrap_or(false),
            write_models:subcommand.write_models.unwrap_or(false),
            write_scripts:subcommand.write_scripts.unwrap_or(true),
@@ -316,20 +277,6 @@ impl Cookie{
    }
}

enum ReaderType<R:Read>{
    GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
    Raw(std::io::BufReader<R>),
}

fn maybe_gzip_decode<R:Read>(input:R)->AResult<ReaderType<R>>{
    let mut buf=std::io::BufReader::new(input);
    let peek=std::io::BufRead::fill_buf(&mut buf)?;
    match &peek[0..2]{
        b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))),
        _=>Ok(ReaderType::Raw(buf)),
    }
}

struct CreateConfig{
    cookie:String,
    model_name:String,
@@ -341,102 +288,40 @@ struct CreateConfig{
}

async fn create(config:CreateConfig)->AResult<()>{
    let client=reqwest::Client::new();
    let client=&client;
    let cookie=config.cookie.as_str();
    let group=&config.group;
    let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
    //url borrow scope
    {
        let mut query=url.query_pairs_mut();//borrow here
        //archaic roblox api uses 0 for new asset
        query.append_pair("assetid","0");
        query.append_pair("name",config.model_name.as_str());
        query.append_pair("description",config.description.as_str());
        query.append_pair("ispublic",if config.free_model{"True"}else{"False"});
        query.append_pair("allowComments",if config.allow_comments{"True"}else{"False"});
        match group{
            Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
            None=>(),
        }
    }

    let body=tokio::fs::read(config.input_file).await?;
    let mut resp=client.post(url.clone())
        .header("Cookie",cookie)
        .body(body.clone())
        .send().await?;

    //This is called a CSRF challenge apparently
    if resp.status()==reqwest::StatusCode::FORBIDDEN{
        if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
            resp=client.post(url)
                .header("X-CSRF-Token",csrf_token)
                .header("Cookie",cookie)
                .body(body)
                .send().await?;
        }else{
            Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"))?;
        }
    }
    let body=match resp.status(){
        reqwest::StatusCode::OK=>Ok(resp.bytes().await?),
        other=>Err(anyhow::Error::msg(other)),
    };

    println!("response.body={:?}",body?);
    let resp=RobloxContext::new(config.cookie)
        .create(rbx_asset::context::CreateRequest{
            name:config.model_name,
            description:config.description,
            ispublic:config.free_model,
            allowComments:config.allow_comments,
            groupId:config.group,
        },tokio::fs::read(config.input_file).await?).await?;
    println!("UploadResponse={:?}",resp);
    Ok(())
}

async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFileMap)->AResult<()>{
    let client=reqwest::Client::new();
    let context=RobloxContext::new(cookie);
    //this is calling map on the vec because the closure produces an iterator of futures
    futures::stream::iter(asset_id_file_map.into_iter()
    .map(|(asset_id,file)|{
        let client=&client;
        let cookie=cookie.as_str();
        let group=&group;
        let context=&context;
        async move{
            let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?;
            //url borrow scope
            {
                let mut query=url.query_pairs_mut();//borrow here
                query.append_pair("assetid",asset_id.to_string().as_str());
                match group{
                    Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
                    None=>(),
                }
            }

            let body=tokio::fs::read(file).await?;
            let mut resp=client.post(url.clone())
                .header("Cookie",cookie)
                .body(body.clone())
                .send().await?;

            //This is called a CSRF challenge apparently
            if resp.status()==reqwest::StatusCode::FORBIDDEN{
                if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){
                    resp=client.post(url)
                        .header("X-CSRF-Token",csrf_token)
                        .header("Cookie",cookie)
                        .body(body)
                        .send().await?;
                }else{
                    Err(anyhow::Error::msg("Roblox returned 403 with no CSRF"))?;
                }
            }
            match resp.status(){
                reqwest::StatusCode::OK=>Ok((asset_id,resp.bytes().await?)),
                other=>Err(anyhow::Error::msg(other)),
            }
            Ok((asset_id,context.upload(rbx_asset::context::UploadRequest{
                assetid:asset_id,
                name:None,
                description:None,
                ispublic:None,
                allowComments:None,
                groupId:group,
            },tokio::fs::read(file).await?).await?))
        }
    }))
    .buffer_unordered(CONCURRENT_REQUESTS)
    .for_each(|b:AResult<_>|async{
        match b{
            Ok((asset_id,body))=>{
                println!("asset_id={} response.body={:?}",asset_id,body);
                println!("asset_id={} UploadResponse={:?}",asset_id,body);
            },
            Err(e)=>eprintln!("ul error: {}",e),
        }
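For contrast with the hand-rolled reqwest/CSRF code removed in the hunk above, here is a hedged sketch (not from the commit) of re-uploading one file through the new crate; the function name, placeholder values, and boxed error handling are illustrative only:

```rust
use rbx_asset::context::{RobloxContext, UploadRequest};

// Sketch only: re-upload an existing asset id from a local file.
async fn upload_one(
    context: &RobloxContext,
    asset_id: u64,
    path: &std::path::Path,
) -> Result<(), Box<dyn std::error::Error>> {
    let body = tokio::fs::read(path).await?;
    let resp = context.upload(UploadRequest {
        assetid: asset_id, // 0 is rejected with UploadError::AssetIdIsZero
        name: None,
        description: None,
        ispublic: None,
        allowComments: None,
        groupId: None,
    }, body).await?;
    println!("AssetVersionId={}", resp.AssetVersionId);
    Ok(())
}
```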
@@ -444,69 +329,41 @@ async fn upload_list(cookie:String,group:Option<u64>,asset_id_file_map:AssetIDFi
    Ok(())
}

fn read_readable(mut readable:impl Read)->AResult<Vec<u8>>{
    let mut contents=Vec::new();
    readable.read_to_end(&mut contents)?;
    Ok(contents)
}

async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{
    let client=reqwest::Client::new();
    let context=RobloxContext::new(cookie);
    futures::stream::iter(asset_id_file_map.into_iter()
    .map(|(asset_id,file)|{
        let client=&client;
        let cookie=cookie.as_str();
        let context=&context;
        async move{
            let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
                .header("Cookie",cookie)
                .send().await?;
            Ok((file,resp.bytes().await?))
            Ok((file,context.download(rbx_asset::context::DownloadRequest{asset_id,version:None}).await?))
        }
    }))
    .buffer_unordered(CONCURRENT_REQUESTS)
    .for_each(|b:AResult<_>|async{
        match b{
            Ok((dest,body))=>{
                let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
                    Ok(ReaderType::GZip(readable))=>read_readable(readable),
                    Ok(ReaderType::Raw(readable))=>read_readable(readable),
                    Err(e)=>Err(e),
                };
                match contents{
                    Ok(data)=>match tokio::fs::write(dest,data).await{
                        Err(e)=>eprintln!("fs error: {}",e),
                        _=>(),
                    },
                    Err(e)=>eprintln!("gzip error: {}",e),
            Ok((mut dest,data))=>{
                //known file types
                match &data[0..4]{
                    b"<rob"=>dest.set_extension("rbxm"),
                    b"\x89PNG"=>dest.set_extension("png"),
                    _=>false,
                };
                match tokio::fs::write(dest,data).await{
                    Err(e)=>eprintln!("fs error: {}",e),
                    _=>(),
                }
            },
            Err(e)=>eprintln!("dl error: {}",e),
        }
    }).await;
    Ok(())
}
async fn download_inventory_page(client:&reqwest::Client,cookie:&str,group:u64,cursor:Option<String>)->AResult<InventoryPage>{
    let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",group).as_str())?;
    //url borrow scope
    {
        let mut query=url.query_pairs_mut();//borrow here
        match cursor.as_deref(){
            Some(next_page)=>{query.append_pair("cursor",next_page);}
            None=>(),
        }
    }
    println!("page url={}",url);
    let resp=client.get(url)
        .header("Cookie",cookie)
        .send().await?;
    Ok(resp.json::<InventoryPage>().await?)
}

async fn get_inventory_pages(client:&reqwest::Client,cookie:&str,group:u64)->AResult<Vec<InventoryItem>>{
async fn get_inventory_pages(context:&RobloxContext,group:u64)->AResult<Vec<InventoryItem>>{
    let mut cursor:Option<String>=None;
    let mut asset_list=Vec::new();
    loop{
        let mut page=download_inventory_page(client,cookie,group,cursor).await?;
        let mut page=context.inventory_page(rbx_asset::context::InventoryPageRequest{group,cursor}).await?;
        asset_list.append(&mut page.data);
        if page.nextPageCursor.is_none(){
            break;
@@ -517,8 +374,8 @@ async fn get_inventory_pages(client:&reqwest::Client,cookie:&str,group:u64)->ARe
}

async fn download_group_inventory_json(cookie:String,group:u64,output_folder:PathBuf)->AResult<()>{
    let client=reqwest::Client::new();
    let item_list=get_inventory_pages(&client,cookie.as_str(),group).await?;
    let context=RobloxContext::new(cookie);
    let item_list=get_inventory_pages(&context,group).await?;

    let mut path=output_folder.clone();
    path.set_file_name("versions.json");
@@ -527,31 +384,11 @@ async fn download_group_inventory_json(cookie:String,group:u64,output_folder:Pat
    Ok(())
}

async fn download_page(client:&reqwest::Client,cookie:&str,asset_id:AssetID,cursor:Option<String>)->AResult<VersionPage>{
    let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",asset_id).as_str())?;
    //url borrow scope
    {
        let mut query=url.query_pairs_mut();//borrow here
        //query.append_pair("sortOrder","Asc");
        //query.append_pair("limit","100");
        //query.append_pair("count","100");
        match cursor.as_deref(){
            Some(next_page)=>{query.append_pair("cursor",next_page);}
            None=>(),
        }
    }
    println!("page url={}",url);
    let resp=client.get(url)
        .header("Cookie",cookie)
        .send().await?;
    Ok(resp.json::<VersionPage>().await?)
}

async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
async fn get_version_history(context:&RobloxContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
    let mut cursor:Option<String>=None;
    let mut asset_list=Vec::new();
    loop{
        let mut page=download_page(client,cookie,asset_id,cursor).await?;
        let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id,cursor}).await?;
        asset_list.append(&mut page.data);
        if page.nextPageCursor.is_none(){
            break;
@@ -562,30 +399,6 @@ async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetI
    Ok(asset_list)
}

async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str:&str,asset_version_str:&str)->AResult<reqwest::Response>{
    let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?;
    //url borrow scope
    {
        let mut query=url.query_pairs_mut();//borrow here
        query.append_pair("ID",asset_id_str);
        query.append_pair("version",asset_version_str);
    }
    println!("download url={}",url);
    for i in 0..8{
        let resp=client.get(url.clone())
            .header("Cookie",cookie)
            .send().await?;

        if !resp.status().is_success(){
            println!("request {} failed",i);
            continue;
        }

        return Ok(resp);
    }
    Err(anyhow::Error::msg("all requests failed"))
}

struct DownloadHistoryConfig{
    continue_from_versions:bool,
    end_version:Option<u64>,
@@ -632,9 +445,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
            None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
        }
    }
    let client=reqwest::Client::new();

    let asset_id_string=config.asset_id.to_string();
    let context=RobloxContext::new(config.cookie);

    //limit concurrent downloads
    let mut join_set=tokio::task::JoinSet::new();
@@ -642,10 +453,8 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
    //poll paged list of all asset versions
    let mut cursor:Option<String>=None;
    loop{
        let mut page=download_page(&client,config.cookie.as_str(),config.asset_id,cursor).await?;
        let client=&client;
        let cookie=config.cookie.clone();
        let asset_id_str=asset_id_string.clone();
        let mut page=context.history_page(rbx_asset::context::HistoryPageRequest{asset_id:config.asset_id,cursor}).await?;
        let context=&context;
        let output_folder=config.output_folder.clone();
        let data=&page.data;
        let asset_list_contents=&asset_list_contents;
@@ -670,19 +479,13 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
            while CONCURRENT_REQUESTS<=join_set.len(){
                join_set.join_next().await.unwrap()??;
            }
            let client=client.clone();
            let cookie=cookie.clone();
            let asset_id_str=asset_id_str.clone();
            let context=context.clone();
            let mut path=output_folder.clone();
            path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
            join_set.spawn(async move{
                let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),version_number.to_string().as_str()).await?;
                let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
                    ReaderType::GZip(readable)=>read_readable(readable)?,
                    ReaderType::Raw(readable)=>read_readable(readable)?,
                };
                let file=context.download(rbx_asset::context::DownloadRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;

                tokio::fs::write(path,contents).await?;
                tokio::fs::write(path,file).await?;

                Ok::<_,anyhow::Error>(())
            });
@@ -731,8 +534,8 @@ fn load_dom<R:Read>(input:R)->AResult<rbx_dom_weak::WeakDom>{
    match &peek[0..4]{
        b"<rob"=>{
            match &peek[4..8]{
                b"lox!"=>return rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
                b"lox "=>return rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
                b"lox!"=>rbx_binary::from_reader(buf).map_err(anyhow::Error::msg),
                b"lox "=>rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg),
                other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
            }
        },
@@ -1258,27 +1061,21 @@ struct DownloadAndDecompileHistoryConfig{
}

async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
    let client=reqwest::Client::new();
    let context=RobloxContext::new(config.cookie);

    //poll paged list of all asset versions
    let asset_list=get_version_history(&client,&config.cookie.as_str(),config.asset_id).await?;
    let asset_list=get_version_history(&context,config.asset_id).await?;

    let repo=git2::Repository::init(config.output_folder.clone())?;

    //download all versions
    let asset_id_string=config.asset_id.to_string();
    let asset_id=config.asset_id;
    futures::stream::iter(asset_list.into_iter()
    .map(|asset_version|{
        let client=client.clone();
        let cookie=config.cookie.clone();
        let asset_id_str=asset_id_string.clone();
        let context=context.clone();
        tokio::task::spawn(async move{
            let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),asset_version.assetVersionNumber.to_string().as_str()).await?;
            let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{
                ReaderType::GZip(readable)=>generate_decompiled_context(readable)?,
                ReaderType::Raw(readable)=>generate_decompiled_context(readable)?,
            };
            Ok::<_,anyhow::Error>((asset_version,contents))
            let file=context.download(rbx_asset::context::DownloadRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
            Ok::<_,anyhow::Error>((asset_version,generate_decompiled_context(std::io::Cursor::new(file))?))
        })
    }))
    .buffered(CONCURRENT_DECODE)
@@ -1311,6 +1108,7 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
//I could use a function!
//eventually:
#[derive(Debug)]
#[allow(dead_code)]//idk why this thinks it's dead code, the errors are printed out in various places
enum QueryResolveError{
    NotFound,//0 results
    Ambiguous,//>1 results
@@ -1684,16 +1482,11 @@ enum TooComplicated<T>{
async fn compile(config:CompileConfig)->AResult<()>{
    //basically decompile in reverse order
    //load template dom
    let input={
        let template_path=config.template.unwrap_or_else(||{
            let mut template_path=config.input_folder.clone();
            template_path.push("template.rbxlx");
            template_path
        });
    let mut dom=match config.template{
        //mr dom doesn't like tokio files
        std::io::BufReader::new(std::fs::File::open(template_path)?)
        Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
        None=>rbx_dom_weak::WeakDom::default(),
    };
    let mut dom=load_dom(input)?;
    //hack to traverse root folder as the root object
    dom.root_mut().name="src".to_owned();
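Taken together, the refactor lets a caller mirror an asset's saved versions entirely through the rbx_asset surface. A simplified sketch under the same assumptions as above (tokio runtime; the concurrency limits, git import, and decompilation from main.rs are omitted, and names here are illustrative):

```rust
use rbx_asset::context::{AssetVersion, DownloadRequest, HistoryPageRequest, RobloxContext};

// Sketch of paging through saved versions and fetching each one via rbx_asset.
async fn mirror_history(context: &RobloxContext, asset_id: u64) -> Result<(), Box<dyn std::error::Error>> {
    // Collect every AssetVersion by following nextPageCursor until it runs out.
    let mut versions: Vec<AssetVersion> = Vec::new();
    let mut cursor: Option<String> = None;
    loop {
        let mut page = context.history_page(HistoryPageRequest { asset_id, cursor }).await?;
        versions.append(&mut page.data);
        if page.nextPageCursor.is_none() {
            break;
        }
        cursor = page.nextPageCursor;
    }
    // Download each version; download() transparently un-gzips the asset payload.
    for version in &versions {
        let file = context
            .download(DownloadRequest { asset_id, version: Some(version.assetVersionNumber) })
            .await?;
        tokio::fs::write(format!("{}_v{}.rbxl", asset_id, version.assetVersionNumber), file).await?;
    }
    Ok(())
}
```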