forked from StrafesNET/asset-tool
clippy W
parent 9a5afb9953
commit fc4bca9802
@@ -306,10 +306,9 @@ impl CloudContext{
 .text("request",request_config)
 .part("fileContent",reqwest::multipart::Part::bytes(body));
 
-let resp=self.post_form(url,form).await.map_err(CreateError::Reqwest)?
-.error_for_status().map_err(CreateError::Reqwest)?;
-
-Ok(resp.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?)
+self.post_form(url,form).await.map_err(CreateError::Reqwest)?
+.error_for_status().map_err(CreateError::Reqwest)?
+.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)
 }
 pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,UpdateError>{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId);
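
The hunks in this file all apply the same clippy suggestion, most likely needless_question_mark: when the tail expression is already a Result with the matching error type, binding it and then wrapping a `?` in Ok(...) is redundant, and the chained Result can be returned as-is. A minimal standalone sketch of the pattern; the names and the ParseIntError-based error type are illustrative, not from this crate:

#[derive(Debug,PartialEq)]
struct GetError(std::num::ParseIntError);

// Before: the tail expression wraps a `?` result back into Ok(...), and the
// intermediate binding adds nothing.
fn parse_old(s:&str)->Result<i32,GetError>{
    Ok(s.trim().parse::<i32>().map_err(GetError)?)
}

// After: return the chained Result directly, as the post_form/.json chains above do.
fn parse_new(s:&str)->Result<i32,GetError>{
    s.trim().parse::<i32>().map_err(GetError)
}

fn main(){
    assert_eq!(parse_old(" 42 "),parse_new(" 42 "));
    assert!(parse_new("nope").is_err());
}
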
@@ -321,20 +320,19 @@ impl CloudContext{
 .text("request",request_config)
 .part("fileContent",reqwest::multipart::Part::bytes(body));
 
-let resp=self.patch_form(url,form).await
+self.patch_form(url,form).await
 .map_err(UpdateError::Reqwest)?
 //roblox api documentation is very poor, just give the status code and drop the json
-.error_for_status().map_err(UpdateError::Reqwest)?;
-
-Ok(resp.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?)
+.error_for_status().map_err(UpdateError::Reqwest)?
+.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)
 }
 pub async fn get_asset_info(&self,config:GetAssetInfoRequest)->Result<AssetResponse,GetError>{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.asset_id);
 let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
 
-Ok(self.get(url).await.map_err(GetError::Reqwest)?
+self.get(url).await.map_err(GetError::Reqwest)?
 .error_for_status().map_err(GetError::Reqwest)?
-.json::<AssetResponse>().await.map_err(GetError::Reqwest)?)
+.json::<AssetResponse>().await.map_err(GetError::Reqwest)
 }
 pub async fn get_asset_version(&self,config:GetAssetVersionRequest)->Result<Vec<u8>,GetError>{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions/{}",config.asset_id,config.version);
@@ -364,9 +362,9 @@ impl CloudContext{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions",config.asset_id);
 let url=reqwest::Url::parse(raw_url.as_str()).map_err(AssetVersionsError::ParseError)?;
 
-Ok(self.get(url).await.map_err(AssetVersionsError::Reqwest)?
+self.get(url).await.map_err(AssetVersionsError::Reqwest)?
 .error_for_status().map_err(AssetVersionsError::Reqwest)?
-.json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)?)
+.json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)
 }
 pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
 let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
@@ -378,9 +376,9 @@ impl CloudContext{
 }
 }
 
-Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
+self.get(url).await.map_err(InventoryPageError::Reqwest)?
 .error_for_status().map_err(InventoryPageError::Reqwest)?
-.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
+.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)
 }
 pub async fn update_place(&self,config:UpdatePlaceRequest,body:impl Into<reqwest::Body>+Clone)->Result<UpdatePlaceResponse,UpdateError>{
 let raw_url=format!("https://apis.roblox.com/universes/v1/{}/places/{}/versions",config.universeId,config.placeId);
@@ -391,8 +389,8 @@ impl CloudContext{
 query.append_pair("versionType","Published");
 }
 
-Ok(self.post(url,body).await.map_err(UpdateError::Reqwest)?
+self.post(url,body).await.map_err(UpdateError::Reqwest)?
 .error_for_status().map_err(UpdateError::Reqwest)?
-.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)?)
+.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)
 }
 }
@@ -228,15 +228,14 @@ impl CookieContext{
 query.append_pair("description",config.description.as_str());
 query.append_pair("ispublic",if config.ispublic{"True"}else{"False"});
 query.append_pair("allowComments",if config.allowComments{"True"}else{"False"});
-match config.groupId{
-Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
-None=>(),
+if let Some(group_id)=config.groupId{
+query.append_pair("groupId",group_id.to_string().as_str());
 }
 }
 
-Ok(self.post(url,body).await.map_err(CreateError::PostError)?
+self.post(url,body).await.map_err(CreateError::PostError)?
 .error_for_status().map_err(CreateError::Reqwest)?
-.json::<UploadResponse>().await.map_err(CreateError::Reqwest)?)
+.json::<UploadResponse>().await.map_err(CreateError::Reqwest)
 }
 pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
 let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
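
The groupId hunk above is the classic match on an Option whose None arm does nothing, which clippy (single_match) rewrites as if let. A small sketch using a plain Vec of pairs instead of reqwest's query serializer:

fn append_group(query:&mut Vec<(String,String)>,group_id:Option<u64>){
    // Before:
    // match group_id{
    //     Some(id)=>{query.push(("groupId".to_owned(),id.to_string()));},
    //     None=>(),
    // }
    // After: if let states "only act in the Some case" without the empty arm.
    if let Some(id)=group_id{
        query.push(("groupId".to_owned(),id.to_string()));
    }
}

fn main(){
    let mut q=Vec::new();
    append_group(&mut q,Some(7));
    append_group(&mut q,None);
    assert_eq!(q,vec![("groupId".to_owned(),"7".to_owned())]);
}
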
@@ -265,9 +264,9 @@ impl CookieContext{
 }
 }
 
-Ok(self.post(url,body).await.map_err(UploadError::PostError)?
+self.post(url,body).await.map_err(UploadError::PostError)?
 .error_for_status().map_err(UploadError::Reqwest)?
-.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
+.json::<UploadResponse>().await.map_err(UploadError::Reqwest)
 }
 pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
 let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
@@ -302,9 +301,9 @@ impl CookieContext{
 }
 }
 
-Ok(self.get(url).await.map_err(AssetVersionsPageError::Reqwest)?
+self.get(url).await.map_err(AssetVersionsPageError::Reqwest)?
 .error_for_status().map_err(AssetVersionsPageError::Reqwest)?
-.json::<AssetVersionsPageResponse>().await.map_err(AssetVersionsPageError::Reqwest)?)
+.json::<AssetVersionsPageResponse>().await.map_err(AssetVersionsPageError::Reqwest)
 }
 pub async fn get_inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
 let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
@@ -316,8 +315,8 @@ impl CookieContext{
 }
 }
 
-Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
+self.get(url).await.map_err(InventoryPageError::Reqwest)?
 .error_for_status().map_err(InventoryPageError::Reqwest)?
-.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
+.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)
 }
 }
@@ -28,6 +28,6 @@ impl std::fmt::Display for PropertiesOverride{
 }
 }
 
-pub(crate) fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
+pub(crate) fn sanitize(s:&str)->std::borrow::Cow<'_,str>{
 lazy_regex::regex!(r"[^A-Za-z0-9.-]").replace_all(s,"_")
 }
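
The sanitize change drops a lifetime the compiler can infer: with a single &str input, the elided '_ in the returned Cow is tied to it automatically (clippy needless_lifetimes), so the two signatures below are identical to the compiler. The body here uses a plain replace instead of the crate's lazy_regex, purely to keep the sketch dependency-free:

use std::borrow::Cow;

// Explicit form: names the lifetime linking input and output.
fn tidy_explicit<'a>(s:&'a str)->Cow<'a,str>{
    if s.contains(' '){Cow::Owned(s.replace(' ',"_"))}else{Cow::Borrowed(s)}
}

// Elided form: same signature, less noise.
fn tidy(s:&str)->Cow<'_,str>{
    if s.contains(' '){Cow::Owned(s.replace(' ',"_"))}else{Cow::Borrowed(s)}
}

fn main(){
    assert_eq!(tidy_explicit("a b"),tidy("a b"));
    assert!(matches!(tidy("ab"),Cow::Borrowed("ab")));
}
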
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::{Path,PathBuf};
 use futures::{StreamExt, TryStreamExt};
 use tokio::io::AsyncReadExt;
 
@@ -55,9 +55,9 @@ struct QuerySingle{
 script:QueryHandle,
 }
 impl QuerySingle{
-fn rox(search_path:&PathBuf,search_name:&str)->Self{
+fn rox(search_path:&Path,search_name:&str)->Self{
 Self{
-script:tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name)))
+script:tokio::spawn(get_file_async(search_path.to_owned(),format!("{}.lua",search_name)))
 }
 }
 }
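
Taking &PathBuf as a parameter is clippy's ptr_arg lint: &Path accepts strictly more callers (a &PathBuf coerces to it), and to_owned() on a Path yields the PathBuf the spawned task needs, replacing the old clone(). The rox_rojo and rojo hunks below are the same change. A standalone sketch:

use std::path::{Path,PathBuf};

// Before (flagged): fn script_path(dir:&PathBuf,name:&str)->PathBuf{...}
// After: borrow the unsized Path view instead.
fn script_path(dir:&Path,name:&str)->PathBuf{
    dir.join(format!("{}.lua",name))
}

fn main(){
    let dir=PathBuf::from("scripts");
    // &PathBuf derefs to &Path, so both call styles work.
    assert_eq!(script_path(&dir,"init"),script_path(Path::new("scripts"),"init"));
    // With &Path, to_owned() is how an owned PathBuf is recovered, standing in for the old clone().
    let owned:PathBuf=Path::new("scripts").to_owned();
    assert_eq!(owned,dir);
}
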
@@ -76,7 +76,7 @@ struct QueryTriple{
 client:QueryHandle,
 }
 impl QueryTriple{
-fn rox_rojo(search_path:&PathBuf,search_name:&str,search_module:bool)->Self{
+fn rox_rojo(search_path:&Path,search_name:&str,search_module:bool)->Self{
 //this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
 let module_name=if search_module{
 format!("{}.module.lua",search_name)
@@ -84,12 +84,12 @@ impl QueryTriple{
 format!("{}.lua",search_name)
 };
 Self{
-module:tokio::spawn(get_file_async(search_path.clone(),module_name)),
-server:tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name))),
-client:tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name))),
+module:tokio::spawn(get_file_async(search_path.to_owned(),module_name)),
+server:tokio::spawn(get_file_async(search_path.to_owned(),format!("{}.server.lua",search_name))),
+client:tokio::spawn(get_file_async(search_path.to_owned(),format!("{}.client.lua",search_name))),
 }
 }
-fn rojo(search_path:&PathBuf)->Self{
+fn rojo(search_path:&Path)->Self{
 QueryTriple::rox_rojo(search_path,"init",false)
 }
 }
@@ -146,9 +146,9 @@ impl Query for QueryTriple{
 async fn resolve(self)->QueryHintResult{
 let (module,server,client)=tokio::join!(self.module,self.server,self.client);
 mega_triple_join((
-module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
-server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
-client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
+module.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
+server.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::Script}),
+client.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
 ))
 }
 }
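
A closure like |e|QueryResolveError::JoinError(e) only forwards its argument to a one-argument tuple-variant constructor, so the constructor itself can be handed to map_err (clippy redundant_closure). Sketch with a made-up error enum:

#[derive(Debug,PartialEq)]
enum ResolveError{Join(String)}

fn resolve(raw:Result<u32,String>)->Result<u32,ResolveError>{
    // Before: raw.map_err(|e|ResolveError::Join(e))
    // After: tuple-variant constructors are plain functions and can be passed by name.
    raw.map_err(ResolveError::Join)
}

fn main(){
    assert_eq!(resolve(Ok(1)),Ok(1));
    assert_eq!(resolve(Err("boom".to_owned())),Err(ResolveError::Join("boom".to_owned())));
}
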
@@ -159,7 +159,7 @@ struct QueryQuad{
 client:QueryHandle,
 }
 impl QueryQuad{
-fn rox_rojo(search_path:&PathBuf,search_name:&str)->Self{
+fn rox_rojo(search_path:&Path,search_name:&str)->Self{
 let fill=QueryTriple::rox_rojo(search_path,search_name,true);
 Self{
 module_implicit:QuerySingle::rox(search_path,search_name).script,//Script.lua
@@ -173,10 +173,10 @@ impl Query for QueryQuad{
 async fn resolve(self)->QueryHintResult{
 let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit,self.module_explicit,self.server,self.client);
 mega_quadruple_join((
-module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
-module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
-server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
-client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
+module_implicit.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
+module_explicit.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
+server.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::Script}),
+client.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
 ))
 }
 }
@@ -338,10 +338,7 @@ impl CompileNode{
 .into_string()
 .map_err(CompileNodeError::FileName)?;
 //reject goobers
-let is_goober=match style{
-Some(Style::Rojo)=>true,
-_=>false,
-};
+let is_goober=matches!(style,Some(Style::Rojo));
 let (ext_len,file_discernment)={
 if let Some(captures)=lazy_regex::regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
 .captures(file_name.as_str()){
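
A match whose arms only produce true or false collapses into the matches! macro (clippy match_like_matches_macro), as in the is_goober hunk above. Sketch with a stand-in Style enum; only the Rojo variant is taken from this crate, the Other variant is a placeholder:

enum Style{Rojo,Other}

fn is_goober(style:Option<Style>)->bool{
    // Before:
    // match style{
    //     Some(Style::Rojo)=>true,
    //     _=>false,
    // }
    matches!(style,Some(Style::Rojo))
}

fn main(){
    assert!(is_goober(Some(Style::Rojo)));
    assert!(!is_goober(Some(Style::Other)));
    assert!(!is_goober(None));
}
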
@@ -439,7 +436,7 @@ impl std::error::Error for CompileError{}
 
 pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
 //hack to traverse root folder as the root object
-dom.root_mut().name="src".to_owned();
+"src".clone_into(&mut dom.root_mut().name);
 //add in scripts and models
 let mut folder=config.input_folder.clone();
 let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
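
Assigning "src".to_owned() over an existing String always allocates a fresh buffer and drops the old one; clone_into (from ToOwned, in the prelude) may reuse the destination's allocation, which is likely what clippy's assigning_clones lint suggested here. A small sketch:

fn main(){
    let mut name=String::with_capacity(16);
    name.push_str("placeholder");
    // Before: name="src".to_owned();  // new String, old buffer dropped
    // After: copy "src" into the existing String, keeping its allocation when possible.
    "src".clone_into(&mut name);
    assert_eq!(name,"src");
}
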
@@ -459,9 +456,9 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
 let mut exist_names:std::collections::HashSet<String>={
 let item=dom.get_by_ref(item_ref).ok_or(CompileError::NullChildRef)?;
 //push existing dom children objects onto stack (unrelated to exist_names)
-stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
+stack.extend(item.children().iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
 //get names of existing objects
-item.children().into_iter().map(|&child_ref|{
+item.children().iter().map(|&child_ref|{
 let child=dom.get_by_ref(child_ref).ok_or(CompileError::NullChildRef)?;
 Ok::<_,CompileError>(sanitize(child.name.as_str()).to_string())
 }).collect::<Result<_,CompileError>>()?
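
item.children() hands back a borrowed slice, and calling into_iter() on &[T] is just iter() under a more confusing name, so clippy (into_iter_on_ref) asks for the explicit spelling; the item type stays a reference either way. Sketch over a plain slice:

fn sanitized_names(children:&[String])->Vec<String>{
    // Before: children.into_iter().map(...) -- IntoIterator for &[T] simply calls iter().
    // After: spell out iter(); each item is still a &String.
    children.iter().map(|child|child.to_lowercase()).collect()
}

fn main(){
    let kids=vec!["Workspace".to_owned(),"Lighting".to_owned()];
    assert_eq!(sanitized_names(&kids),vec!["workspace","lighting"]);
}
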
@@ -478,7 +475,7 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
 let ret1={
 //capture a scoped mutable reference so we can forward dir to the next call even on an error
 let dir2=&mut dir1;
-(||async move{//error catcher so I can use ?
+async move{//error catcher so I can use ?
 let ret2=if let Some(entry)=dir2.next_entry().await?{
 //cull early even if supporting things with identical names is possible
 if exist_names.contains(entry.file_name().to_str().unwrap()){
@@ -490,7 +487,7 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
 TooComplicated::Stop
 };
 Ok(ret2)
-})().await
+}.await
 };
 match ret1{
 Ok(TooComplicated::Stop)=>None,
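
(||async move{...})().await builds a closure only to call it once; the async move block on its own already provides the "error catcher so I can use ?" scope, which is what clippy's redundant_closure_call points out. A sketch; block_on comes from the futures crate this repository already imports, assuming its default executor feature:

use std::num::ParseIntError;

async fn parse_doubled(s:&str)->Result<i32,ParseIntError>{
    // Before: (||async move{ ... })().await  -- closure built only to be called immediately.
    // After: the async block alone is a `?` boundary; no wrapping closure needed.
    let parsed:Result<i32,ParseIntError>=async move{
        let n:i32=s.trim().parse()?;
        Ok(n*2)
    }.await;
    parsed
}

fn main(){
    assert_eq!(futures::executor::block_on(parse_doubled(" 5 ")),Ok(10));
}
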
@@ -147,10 +147,7 @@ impl DecompiledContext{
 "Model"=>Class::Model,
 _=>Class::Folder,
 };
-let skip=match class{
-Class::Model=>true,
-_=>false,
-};
+let skip=class==Class::Model;
 if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
 let referent=item.referent();
 let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
@@ -182,14 +179,14 @@ impl DecompiledContext{
 if node.class==Class::Folder&&script_count!=0{
 node.class=Class::Model
 }
-if node.class==Class::Folder&&node.children.len()==0{
+if node.class==Class::Folder&&node.children.is_empty(){
 delete=Some(node.parent);
 }else{
 //how the hell do I do this better without recursion
-let is_script=match node.class{
-Class::ModuleScript|Class::LocalScript|Class::Script=>true,
-_=>false,
-};
+let is_script=matches!(
+node.class,
+Class::ModuleScript|Class::LocalScript|Class::Script
+);
 //stack is popped from back
 if is_script{
 stack.push(TrimStackInstruction::DecrementScript);
@@ -237,7 +234,7 @@ impl DecompiledContext{
 WriteStackInstruction::Node(node,name_count)=>{
 //track properties that must be overriden to compile folder structure back into a place file
 let mut properties=PropertiesOverride::default();
-let has_children=node.children.len()!=0;
+let has_children=node.children.is_empty();
 match node.class{
 Class::Folder=>(),
 Class::ModuleScript=>(),//.lua files are ModuleScript by default
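
Both is_empty hunks come from clippy's len_zero lint, which prefers is_empty() over comparing len() with zero. Worth noting when reading the hunk directly above: the original tested len()!=0, so the faithful rewrite is the negated form !is_empty(). A sketch of the mapping:

fn kind(children:&[u32])->&'static str{
    // len()==0  <=>  is_empty()
    // len()!=0  <=>  !is_empty()  (the negation has to survive the rewrite)
    let has_children=!children.is_empty();
    if has_children{"has children"}else{"leaf"}
}

fn main(){
    assert_eq!(kind(&[]),"leaf");
    assert_eq!(kind(&[1]),"has children");
}
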
@@ -297,7 +294,7 @@ impl DecompiledContext{
 let write_models=config.write_models;
 let write_scripts=config.write_scripts;
 let results:Vec<Result<(),WriteError>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
-write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
+write_item(dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
 }));
 for result in results{
 result?;
@@ -657,11 +657,8 @@ async fn download_list(cookie:Cookie,asset_id_file_map:AssetIDFileMap)->AResult<
 .buffer_unordered(CONCURRENT_REQUESTS)
 .for_each(|b:AResult<_>|async{
 match b{
-Ok((dest,data))=>{
-match tokio::fs::write(dest,data).await{
-Err(e)=>eprintln!("fs error: {}",e),
-_=>(),
-}
-}
+Ok((dest,data))=>if let Err(e)=tokio::fs::write(dest,data).await{
+eprintln!("fs error: {}",e);
+},
 Err(e)=>eprintln!("dl error: {}",e),
 }
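
The download_list hunk folds a nested match on the tokio::fs::write result into an if let Err(e) bound directly inside the Ok arm, the rewrite clippy's single_match family suggests for a match with one meaningful arm. A self-contained sketch without the async/tokio plumbing, using std::fs::write in its place:

fn report(outcome:Result<(String,Vec<u8>),String>){
    match outcome{
        // Before:
        // Ok((dest,data))=>{
        //     match std::fs::write(&dest,&data){
        //         Err(e)=>eprintln!("fs error: {}",e),
        //         _=>(),
        //     }
        // }
        // After: only the Err case does anything, so bind it with if let inside the arm.
        Ok((dest,data))=>if let Err(e)=std::fs::write(&dest,&data){
            eprintln!("fs error: {}",e);
        },
        Err(e)=>eprintln!("dl error: {}",e),
    }
}

fn main(){
    // Writing to a directory path fails, exercising the fs-error branch.
    report(Ok((".".to_owned(),b"data".to_vec())));
    report(Err("simulated download failure".to_owned()));
}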