Quaternions 2024-07-19 11:34:51 -07:00
parent 9a5afb9953
commit fc4bca9802
6 changed files with 57 additions and 69 deletions

View File

@@ -306,10 +306,9 @@ impl CloudContext{
 .text("request",request_config)
 .part("fileContent",reqwest::multipart::Part::bytes(body));
-let resp=self.post_form(url,form).await.map_err(CreateError::Reqwest)?
-.error_for_status().map_err(CreateError::Reqwest)?;
-Ok(resp.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?)
+self.post_form(url,form).await.map_err(CreateError::Reqwest)?
+.error_for_status().map_err(CreateError::Reqwest)?
+.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)
 }
 pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,UpdateError>{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId);
@@ -321,20 +320,19 @@ impl CloudContext{
 .text("request",request_config)
 .part("fileContent",reqwest::multipart::Part::bytes(body));
-let resp=self.patch_form(url,form).await
+self.patch_form(url,form).await
 .map_err(UpdateError::Reqwest)?
 //roblox api documentation is very poor, just give the status code and drop the json
-.error_for_status().map_err(UpdateError::Reqwest)?;
-Ok(resp.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?)
+.error_for_status().map_err(UpdateError::Reqwest)?
+.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)
 }
 pub async fn get_asset_info(&self,config:GetAssetInfoRequest)->Result<AssetResponse,GetError>{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.asset_id);
 let url=reqwest::Url::parse(raw_url.as_str()).map_err(GetError::ParseError)?;
-Ok(self.get(url).await.map_err(GetError::Reqwest)?
+self.get(url).await.map_err(GetError::Reqwest)?
 .error_for_status().map_err(GetError::Reqwest)?
-.json::<AssetResponse>().await.map_err(GetError::Reqwest)?)
+.json::<AssetResponse>().await.map_err(GetError::Reqwest)
 }
 pub async fn get_asset_version(&self,config:GetAssetVersionRequest)->Result<Vec<u8>,GetError>{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions/{}",config.asset_id,config.version);
@@ -364,9 +362,9 @@ impl CloudContext{
 let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}/versions",config.asset_id);
 let url=reqwest::Url::parse(raw_url.as_str()).map_err(AssetVersionsError::ParseError)?;
-Ok(self.get(url).await.map_err(AssetVersionsError::Reqwest)?
+self.get(url).await.map_err(AssetVersionsError::Reqwest)?
 .error_for_status().map_err(AssetVersionsError::Reqwest)?
-.json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)?)
+.json::<AssetVersionsResponse>().await.map_err(AssetVersionsError::Reqwest)
 }
 pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
 let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
@@ -378,9 +376,9 @@ impl CloudContext{
 }
 }
-Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
+self.get(url).await.map_err(InventoryPageError::Reqwest)?
 .error_for_status().map_err(InventoryPageError::Reqwest)?
-.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
+.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)
 }
 pub async fn update_place(&self,config:UpdatePlaceRequest,body:impl Into<reqwest::Body>+Clone)->Result<UpdatePlaceResponse,UpdateError>{
 let raw_url=format!("https://apis.roblox.com/universes/v1/{}/places/{}/versions",config.universeId,config.placeId);
@@ -391,8 +389,8 @@ impl CloudContext{
 query.append_pair("versionType","Published");
 }
-Ok(self.post(url,body).await.map_err(UpdateError::Reqwest)?
+self.post(url,body).await.map_err(UpdateError::Reqwest)?
 .error_for_status().map_err(UpdateError::Reqwest)?
-.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)?)
+.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)
 }
 }
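
The hunks above drop the intermediate resp binding and the Ok(expr?) wrapper, returning the reqwest call chain's Result directly. A minimal standalone sketch of that pattern (hypothetical function, not from this crate):

// Hypothetical example of returning the Result chain directly
// instead of binding it and re-wrapping with Ok(...?).
fn parse_len(s:&str)->Result<usize,std::num::ParseIntError>{
	// before: let n=s.trim().parse::<usize>()?; Ok(n)
	// after: forward the Result as-is
	s.trim().parse::<usize>()
}

fn main(){
	assert_eq!(parse_len(" 42 "),Ok(42));
}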

View File

@@ -228,15 +228,14 @@ impl CookieContext{
 query.append_pair("description",config.description.as_str());
 query.append_pair("ispublic",if config.ispublic{"True"}else{"False"});
 query.append_pair("allowComments",if config.allowComments{"True"}else{"False"});
-match config.groupId{
-Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());},
-None=>(),
+if let Some(group_id)=config.groupId{
+query.append_pair("groupId",group_id.to_string().as_str());
 }
 }
-Ok(self.post(url,body).await.map_err(CreateError::PostError)?
+self.post(url,body).await.map_err(CreateError::PostError)?
 .error_for_status().map_err(CreateError::Reqwest)?
-.json::<UploadResponse>().await.map_err(CreateError::Reqwest)?)
+.json::<UploadResponse>().await.map_err(CreateError::Reqwest)
 }
 pub async fn upload(&self,config:UploadRequest,body:impl Into<reqwest::Body>+Clone)->Result<UploadResponse,UploadError>{
 let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1").map_err(UploadError::ParseError)?;
@@ -265,9 +264,9 @@ impl CookieContext{
 }
 }
-Ok(self.post(url,body).await.map_err(UploadError::PostError)?
+self.post(url,body).await.map_err(UploadError::PostError)?
 .error_for_status().map_err(UploadError::Reqwest)?
-.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
+.json::<UploadResponse>().await.map_err(UploadError::Reqwest)
 }
 pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
 let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
@@ -302,9 +301,9 @@ impl CookieContext{
 }
 }
-Ok(self.get(url).await.map_err(AssetVersionsPageError::Reqwest)?
+self.get(url).await.map_err(AssetVersionsPageError::Reqwest)?
 .error_for_status().map_err(AssetVersionsPageError::Reqwest)?
-.json::<AssetVersionsPageResponse>().await.map_err(AssetVersionsPageError::Reqwest)?)
+.json::<AssetVersionsPageResponse>().await.map_err(AssetVersionsPageError::Reqwest)
 }
 pub async fn get_inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
 let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
@@ -316,8 +315,8 @@ impl CookieContext{
 }
 }
-Ok(self.get(url).await.map_err(InventoryPageError::Reqwest)?
+self.get(url).await.map_err(InventoryPageError::Reqwest)?
 .error_for_status().map_err(InventoryPageError::Reqwest)?
-.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)?)
+.json::<InventoryPageResponse>().await.map_err(InventoryPageError::Reqwest)
 }
 }
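
Besides the same Ok(expr?) cleanup as the cloud API file, the first hunk here swaps a match over Option with an empty None arm for if let. A runnable sketch of that form, with hypothetical names:

// Hypothetical example of the match -> if-let simplification shown above.
fn append_group(query:&mut Vec<(String,String)>,group_id:Option<u64>){
	// before:
	// match group_id{
	// 	Some(id)=>{query.push(("groupId".to_owned(),id.to_string()));},
	// 	None=>(),
	// }
	// after: if-let drops the empty None arm
	if let Some(id)=group_id{
		query.push(("groupId".to_owned(),id.to_string()));
	}
}

fn main(){
	let mut q=Vec::new();
	append_group(&mut q,Some(7));
	append_group(&mut q,None);
	assert_eq!(q,vec![("groupId".to_owned(),"7".to_owned())]);
}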

View File

@@ -28,6 +28,6 @@ impl std::fmt::Display for PropertiesOverride{
 }
 }
-pub(crate) fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
+pub(crate) fn sanitize(s:&str)->std::borrow::Cow<'_,str>{
 lazy_regex::regex!(r"[^A-Za-z0-9.-]").replace_all(s,"_")
 }
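
The signature change relies on lifetime elision: with a single &str input, the returned Cow<'_,str> is tied to that borrow without naming a lifetime. A small sketch of the same elision, using a made-up shout helper rather than the crate's sanitize:

use std::borrow::Cow;

// Hypothetical example: the elided '_ ties the Cow to the one &str input,
// so an explicit <'a> on the function is unnecessary.
fn shout(s:&str)->Cow<'_,str>{
	if s.chars().all(|c|c.is_ascii_uppercase()){
		Cow::Borrowed(s)
	}else{
		Cow::Owned(s.to_ascii_uppercase())
	}
}

fn main(){
	assert!(matches!(shout("OK"),Cow::Borrowed(_)));
	assert_eq!(shout("hi"),"HI");
}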

View File

@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::{Path,PathBuf};
 use futures::{StreamExt, TryStreamExt};
 use tokio::io::AsyncReadExt;
@@ -55,9 +55,9 @@ struct QuerySingle{
 script:QueryHandle,
 }
 impl QuerySingle{
-fn rox(search_path:&PathBuf,search_name:&str)->Self{
+fn rox(search_path:&Path,search_name:&str)->Self{
 Self{
-script:tokio::spawn(get_file_async(search_path.clone(),format!("{}.lua",search_name)))
+script:tokio::spawn(get_file_async(search_path.to_owned(),format!("{}.lua",search_name)))
 }
 }
 }
@@ -76,7 +76,7 @@ struct QueryTriple{
 client:QueryHandle,
 }
 impl QueryTriple{
-fn rox_rojo(search_path:&PathBuf,search_name:&str,search_module:bool)->Self{
+fn rox_rojo(search_path:&Path,search_name:&str,search_module:bool)->Self{
 //this should be implemented as constructors of Triplet and Quadruplet to fully support Trey's suggestion
 let module_name=if search_module{
 format!("{}.module.lua",search_name)
@@ -84,12 +84,12 @@ impl QueryTriple{
 format!("{}.lua",search_name)
 };
 Self{
-module:tokio::spawn(get_file_async(search_path.clone(),module_name)),
-server:tokio::spawn(get_file_async(search_path.clone(),format!("{}.server.lua",search_name))),
-client:tokio::spawn(get_file_async(search_path.clone(),format!("{}.client.lua",search_name))),
+module:tokio::spawn(get_file_async(search_path.to_owned(),module_name)),
+server:tokio::spawn(get_file_async(search_path.to_owned(),format!("{}.server.lua",search_name))),
+client:tokio::spawn(get_file_async(search_path.to_owned(),format!("{}.client.lua",search_name))),
 }
 }
-fn rojo(search_path:&PathBuf)->Self{
+fn rojo(search_path:&Path)->Self{
 QueryTriple::rox_rojo(search_path,"init",false)
 }
 }
@@ -146,9 +146,9 @@ impl Query for QueryTriple{
 async fn resolve(self)->QueryHintResult{
 let (module,server,client)=tokio::join!(self.module,self.server,self.client);
 mega_triple_join((
-module.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
-server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
-client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
+module.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
+server.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::Script}),
+client.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
 ))
 }
 }
@@ -159,7 +159,7 @@ struct QueryQuad{
 client:QueryHandle,
 }
 impl QueryQuad{
-fn rox_rojo(search_path:&PathBuf,search_name:&str)->Self{
+fn rox_rojo(search_path:&Path,search_name:&str)->Self{
 let fill=QueryTriple::rox_rojo(search_path,search_name,true);
 Self{
 module_implicit:QuerySingle::rox(search_path,search_name).script,//Script.lua
@@ -173,10 +173,10 @@ impl Query for QueryQuad{
 async fn resolve(self)->QueryHintResult{
 let (module_implicit,module_explicit,server,client)=tokio::join!(self.module_implicit,self.module_explicit,self.server,self.client);
 mega_quadruple_join((
-module_implicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
-module_explicit.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
-server.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::Script}),
-client.map_err(|e|QueryResolveError::JoinError(e))?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
+module_implicit.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
+module_explicit.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::ModuleScript}),
+server.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::Script}),
+client.map_err(QueryResolveError::JoinError)?.map(|file|FileHint{file,hint:ScriptHint::LocalScript}),
 ))
 }
 }
@@ -338,10 +338,7 @@ impl CompileNode{
 .into_string()
 .map_err(CompileNodeError::FileName)?;
 //reject goobers
-let is_goober=match style{
-Some(Style::Rojo)=>true,
-_=>false,
-};
+let is_goober=matches!(style,Some(Style::Rojo));
 let (ext_len,file_discernment)={
 if let Some(captures)=lazy_regex::regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
 .captures(file_name.as_str()){
@@ -439,7 +436,7 @@ impl std::error::Error for CompileError{}
 pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->Result<(),CompileError>{
 //hack to traverse root folder as the root object
-dom.root_mut().name="src".to_owned();
+"src".clone_into(&mut dom.root_mut().name);
 //add in scripts and models
 let mut folder=config.input_folder.clone();
 let mut stack:Vec<CompileStackInstruction>=vec![CompileStackInstruction::TraverseReferent(dom.root_ref(),None)];
@@ -459,9 +456,9 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
 let mut exist_names:std::collections::HashSet<String>={
 let item=dom.get_by_ref(item_ref).ok_or(CompileError::NullChildRef)?;
 //push existing dom children objects onto stack (unrelated to exist_names)
-stack.extend(item.children().into_iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
+stack.extend(item.children().iter().map(|&referent|CompileStackInstruction::TraverseReferent(referent,None)));
 //get names of existing objects
-item.children().into_iter().map(|&child_ref|{
+item.children().iter().map(|&child_ref|{
 let child=dom.get_by_ref(child_ref).ok_or(CompileError::NullChildRef)?;
 Ok::<_,CompileError>(sanitize(child.name.as_str()).to_string())
 }).collect::<Result<_,CompileError>>()?
@@ -478,7 +475,7 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
 let ret1={
 //capture a scoped mutable reference so we can forward dir to the next call even on an error
 let dir2=&mut dir1;
-(||async move{//error catcher so I can use ?
+async move{//error catcher so I can use ?
 let ret2=if let Some(entry)=dir2.next_entry().await?{
 //cull early even if supporting things with identical names is possible
 if exist_names.contains(entry.file_name().to_str().unwrap()){
@@ -490,7 +487,7 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
 TooComplicated::Stop
 };
 Ok(ret2)
-})().await
+}.await
 };
 match ret1{
 Ok(TooComplicated::Stop)=>None,
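
Most of this file's changes take &Path instead of &PathBuf and pass error constructors like QueryResolveError::JoinError to map_err directly instead of wrapping them in a closure. A sketch of the &Path parameter style, using a hypothetical lua_path helper rather than the crate's get_file_async:

use std::path::{Path,PathBuf};

// Hypothetical helper: &Path accepts both &PathBuf and Path::new(..),
// and .to_owned() yields a PathBuf where ownership is needed, mirroring
// the search_path.clone() -> search_path.to_owned() change above.
fn lua_path(search_path:&Path,search_name:&str)->PathBuf{
	search_path.join(format!("{}.lua",search_name))
}

fn main(){
	let owned=PathBuf::from("scripts");
	assert_eq!(lua_path(&owned,"init"),PathBuf::from("scripts").join("init.lua"));
	assert_eq!(lua_path(Path::new("scripts"),"init"),PathBuf::from("scripts").join("init.lua"));
}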

View File

@@ -147,10 +147,7 @@ impl DecompiledContext{
 "Model"=>Class::Model,
 _=>Class::Folder,
 };
-let skip=match class{
-Class::Model=>true,
-_=>false,
-};
+let skip=class==Class::Model;
 if let Some(parent_node)=tree_refs.get_mut(&item.parent()){
 let referent=item.referent();
 let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class);
@@ -182,14 +179,14 @@ impl DecompiledContext{
 if node.class==Class::Folder&&script_count!=0{
 node.class=Class::Model
 }
-if node.class==Class::Folder&&node.children.len()==0{
+if node.class==Class::Folder&&node.children.is_empty(){
 delete=Some(node.parent);
 }else{
 //how the hell do I do this better without recursion
-let is_script=match node.class{
-Class::ModuleScript|Class::LocalScript|Class::Script=>true,
-_=>false,
-};
+let is_script=matches!(
+node.class,
+Class::ModuleScript|Class::LocalScript|Class::Script
+);
 //stack is popped from back
 if is_script{
 stack.push(TrimStackInstruction::DecrementScript);
@@ -237,7 +234,7 @@ impl DecompiledContext{
 WriteStackInstruction::Node(node,name_count)=>{
 //track properties that must be overriden to compile folder structure back into a place file
 let mut properties=PropertiesOverride::default();
-let has_children=node.children.len()!=0;
+let has_children=node.children.is_empty();
 match node.class{
 Class::Folder=>(),
 Class::ModuleScript=>(),//.lua files are ModuleScript by default
@@ -297,7 +294,7 @@ impl DecompiledContext{
 let write_models=config.write_models;
 let write_scripts=config.write_scripts;
 let results:Vec<Result<(),WriteError>>=rayon::iter::ParallelIterator::collect(rayon::iter::ParallelIterator::map(rayon::iter::IntoParallelIterator::into_par_iter(write_queue),|(write_path,node,node_name_override,properties,style)|{
-write_item(&dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
+write_item(dom,write_path,node,node_name_override,properties,style,write_models,write_scripts)
 })); 
 for result in results{
 result?;
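
The decompiler changes replace boolean match expressions with matches! and length comparisons with is_empty(). A self-contained sketch of both, with a stand-in Class enum rather than the crate's own:

// Hypothetical example of the match -> matches! and len()==0 -> is_empty() cleanups.
enum Class{Folder,Model,ModuleScript,LocalScript,Script}

fn is_script(class:&Class)->bool{
	// before: match class{Class::ModuleScript|Class::LocalScript|Class::Script=>true,_=>false}
	matches!(class,Class::ModuleScript|Class::LocalScript|Class::Script)
}

fn main(){
	assert!(is_script(&Class::ModuleScript));
	assert!(is_script(&Class::LocalScript));
	assert!(is_script(&Class::Script));
	assert!(!is_script(&Class::Folder));
	assert!(!is_script(&Class::Model));
	// the len()==0 checks read better as is_empty()
	let children:Vec<u32>=Vec::new();
	assert!(children.is_empty());
}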

View File

@@ -657,11 +657,8 @@ async fn download_list(cookie:Cookie,asset_id_file_map:AssetIDFileMap)->AResult<
 .buffer_unordered(CONCURRENT_REQUESTS)
 .for_each(|b:AResult<_>|async{
 match b{
-Ok((dest,data))=>{
-match tokio::fs::write(dest,data).await{
-Err(e)=>eprintln!("fs error: {}",e),
-_=>(),
-}
+Ok((dest,data))=>if let Err(e)=tokio::fs::write(dest,data).await{
+eprintln!("fs error: {}",e);
 },
 Err(e)=>eprintln!("dl error: {}",e),
 }
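
The download loop collapses a nested match whose only interesting arm is Err into if let Err(e)=... . A synchronous sketch of the same shape, using std::fs in place of tokio::fs so it runs without an async runtime:

// Hypothetical example of the nested-match -> if-let-Err collapse shown above.
fn save(dest:&str,data:&[u8]){
	// before:
	// match std::fs::write(dest,data){
	// 	Err(e)=>eprintln!("fs error: {}",e),
	// 	_=>(),
	// }
	// after: only the error arm does anything, so if-let is enough
	if let Err(e)=std::fs::write(dest,data){
		eprintln!("fs error: {}",e);
	}
}

fn main(){
	save("/this/path/should/not/exist/out.bin",b"data");//expected to print an fs error
}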