use std::io::Read; use clap::{Args,Parser,Subcommand}; use anyhow::Result as AResult; use futures::StreamExt; use rbx_dom_weak::types::Ref; type AssetID=u64; type AssetIDFileMap=Vec<(AssetID,std::path::PathBuf)>; const CONCURRENT_DECODE:usize=8; const CONCURRENT_REQUESTS:usize=32; #[derive(Parser)] #[command(author,version,about,long_about=None)] #[command(propagate_version = true)] struct Cli{ #[arg(short,long)] group:Option, //idk how to do this better #[arg(long)] cookie_literal:Option, #[arg(long)] cookie_env:Option, #[arg(long)] cookie_file:Option, #[arg(long)] no_models:Option, #[arg(long)] no_scripts:Option, #[arg(long)] no_template:Option, #[arg(long)] git_committer_name:Option, #[arg(long)] git_committer_email:Option, #[arg(long)] asset_id:Option, #[arg(short,long)] input:Option, #[arg(short,long)] output:Option, #[command(subcommand)] command:Commands, } #[derive(Subcommand)] enum Commands{ DownloadHistory, Download, Upload, Compile, Decompile, DecompileHistoryIntoGit, DownloadAndDecompileHistoryIntoGit, } #[derive(Args)] struct PathBufList{ paths:Vec } #[derive(serde::Deserialize)] struct VersionPage{ previousPageCursor:Option, nextPageCursor:Option, data:Vec, } #[derive(serde::Deserialize,serde::Serialize)] struct AssetVersion{ Id:u64, assetId:AssetID, assetVersionNumber:u64, creatorType:String, creatorTargetId:u64, creatingUniverseId:Option, created:chrono::DateTime, isPublished:bool, } #[tokio::main] async fn main()->AResult<()>{ let cli=Cli::parse(); let cookie_enum={ match (cli.cookie_literal,cli.cookie_env,cli.cookie_file){ (Some(literal),None,None)=>Some(Cookie::Literal(literal)), (None,Some(env_var),None)=>Some(Cookie::Environment(env_var)), (None,None,Some(path))=>Some(Cookie::File(path)), (None,None,None)=>None, _=>return Err(anyhow::Error::msg("Cookie was specified multiple times.")), } }; let cookie=match cookie_enum{ Some(c)=>Some(format!(".ROBLOSECURITY={}",match c{ Cookie::Literal(s)=>s, Cookie::Environment(var)=>std::env::var(var)?, Cookie::File(path)=>tokio::fs::read_to_string(path).await?, })), None=>None, }; match cli.command{ Commands::DownloadHistory=>download_history(DownloadHistoryConfig{ output_folder:cli.output.unwrap(), cookie:cookie.unwrap(), asset_id:cli.asset_id.unwrap(), }).await, Commands::Download=>download_list(cookie.unwrap(),vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await, Commands::Upload=>upload_list(cookie.unwrap(),cli.group,vec![(cli.asset_id.unwrap(),cli.output.unwrap())]).await, Commands::Compile=>compile(cli.input.unwrap(),cli.output.unwrap()), Commands::Decompile=>decompile(DecompileConfig{ input_file:cli.input.unwrap(), output_folder:cli.output.unwrap(), write_template:!cli.no_template.unwrap_or(false), write_models:!cli.no_models.unwrap_or(false), write_scripts:!cli.no_scripts.unwrap_or(false), }).await, Commands::DecompileHistoryIntoGit=>decompile_history_into_git(DecompileHistoryConfig{ git_committer_name:cli.git_committer_name.unwrap(), git_committer_email:cli.git_committer_email.unwrap(), input_folder:cli.input.unwrap(), output_folder:cli.output.unwrap(), write_template:!cli.no_template.unwrap_or(false), write_models:!cli.no_models.unwrap_or(false), write_scripts:!cli.no_scripts.unwrap_or(false), }).await, Commands::DownloadAndDecompileHistoryIntoGit=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{ git_committer_name:cli.git_committer_name.unwrap(), git_committer_email:cli.git_committer_email.unwrap(), cookie:cookie.unwrap(), asset_id:cli.asset_id.unwrap(), output_folder:cli.output.unwrap(), write_template:!cli.no_template.unwrap_or(false), write_models:!cli.no_models.unwrap_or(false), write_scripts:!cli.no_scripts.unwrap_or(false), }).await, } } enum Cookie{ Literal(String), Environment(String), File(std::path::PathBuf), } enum ReaderType{ GZip(flate2::read::GzDecoder>), Raw(std::io::BufReader), } fn maybe_gzip_decode(input:R)->AResult>{ let mut buf=std::io::BufReader::new(input); let peek=std::io::BufRead::fill_buf(&mut buf)?; match &peek[0..2]{ b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(buf))), _=>Ok(ReaderType::Raw(buf)), } } async fn upload_list(cookie:String,group:Option,asset_id_file_map:AssetIDFileMap)->AResult<()>{ let client=reqwest::Client::new(); futures::stream::iter(asset_id_file_map) .map(|(asset_id,file)|{ let client=&client; let cookie=cookie.as_str(); let group=&group; async move{ let mut url=reqwest::Url::parse("https://data.roblox.com/Data/Upload.ashx?json=1&type=Model&genreTypeId=1")?; //url borrow scope { let mut query=url.query_pairs_mut();//borrow here query.append_pair("assetid",asset_id.to_string().as_str()); match group{ Some(group_id)=>{query.append_pair("groupId",group_id.to_string().as_str());}, None=>(), } } let body=tokio::fs::read_to_string(file).await?; let mut resp=client.post(url.clone()) .header("Cookie",cookie) .body(body.clone()) .send().await?; //This is called a CSRF challenge apparently if resp.status()==reqwest::StatusCode::FORBIDDEN{ if let Some(csrf_token)=resp.headers().get("X-CSRF-Token"){ resp=client.post(url) .header("X-CSRF-Token",csrf_token) .header("Cookie",cookie) .body(body) .send().await?; }else{ return Err(anyhow::Error::msg("Roblox returned 403 with no CSRF")); } } Ok((asset_id,resp.bytes().await?)) } }) .buffer_unordered(CONCURRENT_REQUESTS) .for_each(|b:AResult<_>|async{ match b{ Ok((asset_id,body))=>{ println!("asset_id={} response.body={:?}",asset_id,body); }, Err(e)=>eprintln!("ul error: {}",e), } }).await; Ok(()) } fn read_readable(mut readable:impl Read)->AResult>{ let mut contents=Vec::new(); readable.read_to_end(&mut contents)?; Ok(contents) } async fn download_list(cookie:String,asset_id_file_map:AssetIDFileMap)->AResult<()>{ let client=reqwest::Client::new(); futures::stream::iter(asset_id_file_map) .map(|(asset_id,file)|{ let client=&client; let cookie=cookie.as_str(); async move{ let resp=client.get(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id)) .header("Cookie",cookie) .send().await?; Ok((file,resp.bytes().await?)) } }) .buffer_unordered(CONCURRENT_REQUESTS) .for_each(|b:AResult<_>|async{ match b{ Ok((dest,body))=>{ let contents=match maybe_gzip_decode(&mut std::io::Cursor::new(body)){ Ok(ReaderType::GZip(readable))=>read_readable(readable), Ok(ReaderType::Raw(readable))=>read_readable(readable), Err(e)=>Err(e), }; match contents{ Ok(data)=>match tokio::fs::write(dest,data).await{ Err(e)=>eprintln!("fs error: {}",e), _=>(), }, Err(e)=>eprintln!("gzip error: {}",e), }; }, Err(e)=>eprintln!("dl error: {}",e), } }).await; Ok(()) } async fn get_version_history(client:&reqwest::Client,cookie:&str,asset_id:AssetID)->AResult>{ let mut cursor:Option=None; let mut asset_list=Vec::new(); loop{ let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",asset_id).as_str())?; //url borrow scope { let mut query=url.query_pairs_mut();//borrow here //query.append_pair("sortOrder","Asc"); //query.append_pair("limit","100"); //query.append_pair("count","100"); match &cursor{ Some(next_page)=>{query.append_pair("cursor",next_page);} None=>(), } } println!("page url={}",url); let resp=client.get(url) .header("Cookie",cookie) .send().await?; match resp.json::().await{ Ok(mut page)=>{ asset_list.append(&mut page.data); if page.nextPageCursor.is_none(){ break; } cursor=page.nextPageCursor; }, Err(e)=>panic!("error: {}",e), } } asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber)); Ok(asset_list) } async fn download_asset_version(client:&reqwest::Client,cookie:&str,asset_id_str:&str,asset_version_str:&str)->AResult{ let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/")?; //url borrow scope { let mut query=url.query_pairs_mut();//borrow here query.append_pair("ID",asset_id_str); query.append_pair("version",asset_version_str); } println!("download url={}",url); for i in 0..8{ let resp=client.get(url.clone()) .header("Cookie",cookie) .send().await?; if !resp.status().is_success(){ println!("request {} failed",i); continue; } return Ok(resp); } Err(anyhow::Error::msg("all requests failed")) } struct DownloadHistoryConfig{ output_folder:std::path::PathBuf, cookie:String, asset_id:AssetID, } async fn download_history(config:DownloadHistoryConfig)->AResult<()>{ let client=reqwest::Client::new(); //poll paged list of all asset versions let asset_list=get_version_history(&client,&config.cookie.as_str(),config.asset_id).await?; let mut path=config.output_folder.clone(); path.set_file_name("versions.json"); tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?; //download all versions let asset_id_string=config.asset_id.to_string(); futures::stream::iter(asset_list) .map(|asset_version|{ let client=&client; let cookie=config.cookie.as_str(); let asset_id_str=asset_id_string.as_str(); let output_folder=config.output_folder.clone(); async move{ let resp=download_asset_version(client,cookie,asset_id_str,asset_version.assetVersionNumber.to_string().as_str()).await?; let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{ ReaderType::GZip(readable)=>read_readable(readable)?, ReaderType::Raw(readable)=>read_readable(readable)?, }; let mut path=output_folder; path.set_file_name(format!("{}_v{}.rbxl",config.asset_id,asset_version.assetVersionNumber)); Ok((path,contents)) } }) .buffer_unordered(CONCURRENT_REQUESTS) .for_each(|b:AResult<_>|async{ match b{ Ok((dest,data))=>{ match tokio::fs::write(dest,data).await{ Err(e)=>eprintln!("fs error: {}",e), _=>(), } }, Err(e)=>eprintln!("dl error: {}",e), } }).await; Ok(()) } fn load_dom(input:R)->AResult{ let mut buf=std::io::BufReader::new(input); let peek=std::io::BufRead::fill_buf(&mut buf)?; match &peek[0..4]{ b"{ match &peek[4..8]{ b"lox!"=>return rbx_binary::from_reader(buf).map_err(anyhow::Error::msg), b"lox "=>return rbx_xml::from_reader_default(buf).map_err(anyhow::Error::msg), other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))), } }, _=>Err(anyhow::Error::msg("unsupported file type")), } } #[derive(PartialEq)] enum Class{ Folder, ModuleScript, LocalScript, Script, Model, } struct TreeNode{ name:String, referent:Ref, parent:Ref, class:Class, children:Vec, } impl TreeNode{ fn new(name:String,referent:Ref,parent:Ref,class:Class)->Self{ Self{ name, referent, parent, class, children:Vec::new(), } } } enum TrimStackInstruction{ Referent(Ref), IncrementScript, DecrementScript, } enum WriteStackInstruction<'a>{ Node(&'a TreeNode,u32),//(Node,NameTally) PushFolder(String), PopFolder, Destroy(Ref), } #[derive(Default,serde::Deserialize,serde::Serialize)] struct PropertiesOverride{ //Name:Option, ClassName:Option, } impl PropertiesOverride{ fn is_some(&self)->bool{ self.ClassName.is_some() } } fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{ lazy_regex::regex!(r"[^a-zA-Z0-9._-]").replace_all(s,"_") } fn write_item(dom:&rbx_dom_weak::WeakDom,mut file:std::path::PathBuf,node:&TreeNode,node_name_override:String,write_models:bool,write_scripts:bool)->AResult<()>{ file.push(sanitize(node_name_override.as_str()).as_ref()); match node.class{ Class::Folder=>(), Class::ModuleScript|Class::LocalScript|Class::Script=>{ if !write_scripts{ return Ok(()) } assert!(file.set_extension("lua"),"could not set extension"); assert!(dom.get_by_ref(node.referent).is_some_and(|item|{ //TODO: delete disabled scripts if let Some(rbx_dom_weak::types::Variant::String(source))=item.properties.get("Source"){ std::fs::write(file,source).is_ok() }else{false} }),"no string property or file failed to write"); }, Class::Model=>{ if !write_models{ return Ok(()) } assert!(file.set_extension("rbxmx")); let output=std::io::BufWriter::new(std::fs::File::create(file)?); rbx_xml::to_writer_default(output,dom,&[node.referent])?; }, } Ok(()) } struct DecompiledContext{ dom:rbx_dom_weak::WeakDom, tree_refs:std::collections::HashMap, } fn generate_decompiled_context(input:R)->AResult{ let dom=load_dom(input)?; let mut tree_refs=std::collections::HashMap::new(); tree_refs.insert(dom.root_ref(),TreeNode::new( "src".to_string(), dom.root_ref(), Ref::none(), Class::Folder )); //run rules let mut stack=vec![dom.root()]; while let Some(item)=stack.pop(){ let class=match item.class.as_str(){ "ModuleScript"=>Class::ModuleScript, "LocalScript"=>Class::LocalScript, "Script"=>Class::Script, "Model"=>Class::Model, _=>Class::Folder, }; let skip=match class{ Class::Model=>true, _=>false, }; if let Some(parent_node)=tree_refs.get_mut(&item.parent()){ let referent=item.referent(); let node=TreeNode::new(item.name.clone(),referent,parent_node.referent,class); parent_node.children.push(referent); tree_refs.insert(referent,node); } //look no further, turn this node and all its children into a model if skip{ continue; } for &referent in item.children(){ if let Some(c)=dom.get_by_ref(referent){ stack.push(c); } } } //trim empty folders let mut script_count=0; let mut stack:Vec=tree_refs.get(&dom.root_ref()).unwrap().children .iter().map(|&c|TrimStackInstruction::Referent(c)).collect(); while let Some(instruction)=stack.pop(){ match instruction{ TrimStackInstruction::IncrementScript=>script_count+=1, TrimStackInstruction::DecrementScript=>script_count-=1, TrimStackInstruction::Referent(referent)=>{ let mut delete=None; if let Some(node)=tree_refs.get_mut(&referent){ if node.class==Class::Folder&&script_count!=0{ node.class=Class::Model } if node.class==Class::Folder&&node.children.len()==0{ delete=Some(node.parent); }else{ //how the hell do I do this better without recursion let is_script=match node.class{ Class::ModuleScript|Class::LocalScript|Class::Script=>true, _=>false, }; //stack is popped from back if is_script{ stack.push(TrimStackInstruction::DecrementScript); } for &child_referent in &node.children{ stack.push(TrimStackInstruction::Referent(child_referent)); } if is_script{ stack.push(TrimStackInstruction::IncrementScript); } } } //trim referent if let Some(parent_ref)=delete{ let parent_node=tree_refs.get_mut(&parent_ref) .expect("parent_ref does not exist in tree_refs"); parent_node.children.remove( parent_node.children.iter() .position(|&r|r==referent) .expect("parent.children does not contain referent") ); tree_refs.remove(&referent); } }, } } Ok(DecompiledContext{ dom, tree_refs, }) } struct WriteConfig{ output_folder:std::path::PathBuf, write_template:bool, write_models:bool, write_scripts:bool, } async fn write_files(config:WriteConfig,mut context:DecompiledContext)->AResult<()>{ let mut write_queue=Vec::new(); let mut destroy_queue=Vec::new(); let mut name_tally=std::collections::HashMap::::new(); let mut folder=config.output_folder.clone(); let mut stack=vec![WriteStackInstruction::Node(context.tree_refs.get(&context.dom.root_ref()).unwrap(),0)]; while let Some(instruction)=stack.pop(){ match instruction{ WriteStackInstruction::PushFolder(component)=>folder.push(component), WriteStackInstruction::PopFolder=>assert!(folder.pop(),"weirdness"), WriteStackInstruction::Destroy(referent)=>destroy_queue.push(referent), WriteStackInstruction::Node(node,name_count)=>{ //properties.json to override class or other simple properties let mut properties=PropertiesOverride::default(); let has_children=node.children.len()!=0; match node.class{ Class::Folder=>(), Class::ModuleScript=>{ //.lua files are ModuleScript by default if has_children{ properties.ClassName=Some("ModuleScript".to_string()) } }, Class::LocalScript=>properties.ClassName=Some("LocalScript".to_string()), Class::Script=>properties.ClassName=Some("Script".to_string()), Class::Model=>(), } let name_override=if 0(), _=>stack.push(WriteStackInstruction::Destroy(node.referent)), } if has_children{ stack.push(WriteStackInstruction::PopFolder); name_tally.clear(); for referent in &node.children{ if let Some(c)=context.tree_refs.get(referent){ let v=name_tally.entry(c.name.clone()).and_modify(|v|*v+=1).or_default(); stack.push(WriteStackInstruction::Node(c,*v)); } } stack.push(WriteStackInstruction::PushFolder(sanitize(name_override.as_str()).to_string())); } }, } } //run the async std::thread::scope(|s|{ let threads:Vec>>=write_queue.into_iter().map(|(write_path,node,node_name_override)| s.spawn(||write_item(&context.dom,write_path,node,node_name_override,config.write_models,config.write_scripts)) ).collect(); for thread in threads{ match thread.join(){ Ok(_)=>(), Err(e)=>println!("thread error: {:?}",e), } } }); //run the destroy for destroy_ref in destroy_queue{ context.dom.destroy(destroy_ref); } //write what remains in template.rbxlx if config.write_template{ let mut file=config.output_folder.clone(); file.push("template"); assert!(file.set_extension("rbxlx")); let output=std::io::BufWriter::new(std::fs::File::create(file)?); rbx_xml::to_writer_default(output,&context.dom,&[context.dom.root_ref()])?; } Ok(()) } struct DecompileConfig{ input_file:std::path::PathBuf, output_folder:std::path::PathBuf, write_template:bool, write_models:bool, write_scripts:bool, } async fn decompile(config:DecompileConfig)->AResult<()>{ //rules: //Class Script|LocalScript|ModuleScript->$Name.lua //Class Model->$Name.rbxmx //overrides.json per-folder [Override{name,class}] //Everything else goes into template.rbxlx //read file let context=generate_decompiled_context(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?; //generate folders, models, and scripts //delete models and scripts from dom write_files(WriteConfig{ output_folder:config.output_folder, write_template:config.write_template, write_models:config.write_models, write_scripts:config.write_scripts, },context).await?; Ok(()) } struct WriteCommitConfig{ git_committer_name:String, git_committer_email:String, output_folder:std::path::PathBuf, write_template:bool, write_models:bool, write_scripts:bool, } async fn write_commit(config:WriteCommitConfig,b:Result,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{ let (asset_version,context)=b??; println!("writing files for version {}",asset_version.assetVersionNumber); //clean output dir if config.write_models||config.write_scripts{ let mut src=config.output_folder.clone(); src.push("src"); match std::fs::remove_dir_all(src){ Ok(())=>(), Err(e)=>println!("remove_dir_all src failed {}",e), } } if config.write_template{ let mut template=config.output_folder.clone(); template.push("template.rbxlx"); match std::fs::remove_file(template){ Ok(())=>(), Err(e)=>println!("remove_file template.rbxlx failed {}",e), } } //write files write_files(WriteConfig{ output_folder:config.output_folder.clone(), write_template:config.write_template, write_models:config.write_models, write_scripts:config.write_scripts, },context).await?; let date=asset_version.created; let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0)).unwrap(); let tree_id={ let mut tree_index = repo.index()?; tree_index.add_all([config.output_folder].iter(),git2::IndexAddOption::DEFAULT,None)?; tree_index.write_tree()? }; let tree=repo.find_tree(tree_id)?; match repo.head(){ Ok(reference)=>repo.commit( Some("HEAD"),//update_ref &sig,//author &sig,//commiter &format!("v{}", asset_version.assetVersionNumber),//message &tree,//tree (basically files) &[&reference.peel_to_commit()?],//parents )?, Err(_)=>repo.commit( Some("HEAD"),//update_ref &sig,//author &sig,//commiter &format!("v{}", asset_version.assetVersionNumber),//message &tree,//tree (basically files) &[],//parents )?, }; //commit Ok(()) } struct DecompileHistoryConfig{ git_committer_name:String, git_committer_email:String, input_folder:std::path::PathBuf, output_folder:std::path::PathBuf, write_template:bool, write_models:bool, write_scripts:bool, } async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{ //poll paged list of all asset versions let mut versions_path=config.input_folder.clone(); versions_path.push("versions.json"); let asset_list:Vec=serde_json::from_reader(std::fs::File::open(versions_path)?)?; let repo=git2::Repository::init(config.output_folder.clone())?; //decompile all versions futures::stream::iter(asset_list) .map(|asset_version|{ let mut file_path=config.input_folder.clone(); tokio::task::spawn(async move{ file_path.push(format!("{}_v{}.rbxl",asset_version.assetId,asset_version.assetVersionNumber)); let file=std::fs::File::open(file_path)?; let contents=generate_decompiled_context(file)?; Ok::<_,anyhow::Error>((asset_version,contents)) }) }) .buffered(CONCURRENT_DECODE) .for_each(|join_handle_result|async{ match write_commit(WriteCommitConfig{ git_committer_name:config.git_committer_name.clone(), git_committer_email:config.git_committer_email.clone(), output_folder:config.output_folder.clone(), write_template:config.write_template, write_models:config.write_models, write_scripts:config.write_scripts, },join_handle_result,&repo).await{ Ok(())=>(), Err(e)=>println!("download/unzip/decompile/write/commit error: {}",e), } }).await; Ok(()) } struct DownloadAndDecompileHistoryConfig{ cookie:String, asset_id:AssetID, git_committer_name:String, git_committer_email:String, output_folder:std::path::PathBuf, write_template:bool, write_models:bool, write_scripts:bool, } async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{ let client=reqwest::Client::new(); //poll paged list of all asset versions let asset_list=get_version_history(&client,&config.cookie.as_str(),config.asset_id).await?; let repo=git2::Repository::init(config.output_folder.clone())?; //download all versions let asset_id_string=config.asset_id.to_string(); futures::stream::iter(asset_list) .map(|asset_version|{ let client=client.clone(); let cookie=config.cookie.clone(); let asset_id_str=asset_id_string.clone(); tokio::task::spawn(async move{ let resp=download_asset_version(&client,cookie.as_str(),asset_id_str.as_str(),asset_version.assetVersionNumber.to_string().as_str()).await?; let contents=match maybe_gzip_decode(std::io::Cursor::new(resp.bytes().await?))?{ ReaderType::GZip(readable)=>generate_decompiled_context(readable)?, ReaderType::Raw(readable)=>generate_decompiled_context(readable)?, }; Ok::<_,anyhow::Error>((asset_version,contents)) }) }) .buffered(CONCURRENT_DECODE) .for_each(|join_handle_result|async{ match write_commit(WriteCommitConfig{ git_committer_name:config.git_committer_name.clone(), git_committer_email:config.git_committer_email.clone(), output_folder:config.output_folder.clone(), write_template:config.write_template, write_models:config.write_models, write_scripts:config.write_scripts, },join_handle_result,&repo).await{ Ok(())=>(), Err(e)=>println!("download/unzip/decompile/write/commit error: {}",e), } }).await; Ok(()) } fn compile(_folder:std::path::PathBuf,_file:std::path::PathBuf)->AResult<()>{ Ok(()) }