perform a perfect backflip

This replaces the JoinSet with a futures stream, but does not fulfill the goal of scanning ahead with a cap.
Quaternions 2024-01-25 00:14:04 -08:00
parent dfe899a7d8
commit 44725f65df
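
For context, the shape of the change: instead of spawning one task per directory entry onto a tokio::task::JoinSet and draining it only after the directory iterator is exhausted, the ReadDir handle is threaded through futures::stream::unfold, so each entry is yielded and handled as it is read. The following is a minimal standalone sketch of that pattern, with an assumed process worker and scan entry point rather than names from this repository; two further sketches after the diff cover the error-catcher closure and the still-missing read-ahead cap.

use futures::StreamExt;

// assumed per-entry worker, standing in for the metadata lookup and file
// classification done in the real compile() loop
async fn process(entry: tokio::fs::DirEntry) -> anyhow::Result<String> {
    Ok(entry.file_name().to_string_lossy().into_owned())
}

async fn scan(dir: std::path::PathBuf) -> anyhow::Result<()> {
    let read_dir = tokio::fs::read_dir(dir).await?;
    // unfold threads the ReadDir handle through each step and yields one entry
    // per call, ending the stream when next_entry() returns Ok(None)
    let entries = futures::stream::unfold(read_dir, |mut rd| async move {
        match rd.next_entry().await {
            Ok(Some(entry)) => Some((Ok(entry), rd)),
            Ok(None) => None,
            Err(e) => Some((Err(anyhow::Error::from(e)), rd)),
        }
    });
    // consume the stream serially; this accumulator plays the role of the
    // (&mut stack,&mut dom) state threaded through the fold in the diff
    let names = entries
        .fold(Vec::new(), |mut names, entry| async move {
            match entry {
                Ok(entry) => match process(entry).await {
                    Ok(name) => names.push(name),
                    Err(e) => eprintln!("process error: {e:?}"),
                },
                Err(e) => eprintln!("read_dir error: {e:?}"),
            }
            names
        })
        .await;
    println!("processed {} entries", names.len());
    Ok(())
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    scan(std::path::PathBuf::from(".")).await
}

The futures, anyhow, and tokio::fs usage mirrors what the diff already relies on; the example main additionally assumes tokio's macros and rt-multi-thread features.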


@@ -1462,6 +1462,12 @@ fn script_builder(class:&str,name:&str,source:String)->rbx_dom_weak::InstanceBuilder
builder
}
enum TooComplicated<T>{
Stop,
Value(T),
Skip,
}
async fn compile(config:CompileConfig)->AResult<()>{
//basically decompile in reverse order
//load template dom
@@ -1490,7 +1496,7 @@ async fn compile(config:CompileConfig)->AResult<()>{
folder.push(sans.as_str());
stack.push(CompileStackInstruction::PopFolder);
//check if a folder exists with item.name
if let Ok(mut dir)=tokio::fs::read_dir(folder.as_path()).await{
if let Ok(dir)=tokio::fs::read_dir(folder.as_path()).await{
let mut exist_names:std::collections::HashSet<String>={
let item=dom.get_by_ref(item_ref).ok_or(anyhow::Error::msg("null child ref"))?;
//push existing dom children objects onto stack (unrelated to exist_names)
@@ -1505,49 +1511,73 @@ async fn compile(config:CompileConfig)->AResult<()>{
exist_names.insert(dont);
}
//generate children from folder contents UNLESS! item already has a child of the same name
let mut join_set=tokio::task::JoinSet::new();
//I wish I could make the join_next() loop begin processing immediately,
//but I don't know an ergonomic way to do that.
//this will probably be fine considering there won't be millions of files in the directories
while let Some(entry)=dir.next_entry().await?{
//cull early even if supporting things with identical names is possible
if !exist_names.contains(entry.file_name().to_str().unwrap()){
let style=config.style;
join_set.spawn(async move{
let met=entry.metadata().await?;
let compile_class=match met.is_dir(){
true=>locate_override_file(&entry,style).await?,
false=>discern_file(&entry,style).await?,
let style=config.style;
let exist_names=&exist_names;
//thread the needle! follow the path that dir takes!
let u=futures::stream::unfold(dir,|mut dir1|async{
let ret1={
//capture a scoped mutable reference so we can forward dir to the next call even on an error
let dir2=&mut dir1;
(||async move{//error catcher so I can use ?
let ret2=if let Some(entry)=dir2.next_entry().await?{
//cull early even if supporting things with identical names is possible
if !exist_names.contains(entry.file_name().to_str().unwrap()){
let met=entry.metadata().await?;
//discern that bad boy
let compile_class=match met.is_dir(){
true=>locate_override_file(&entry,style).await?,
false=>discern_file(&entry,style).await?,
};
//prepare data structure
TooComplicated::Value((compile_class.blacklist,match compile_class.class{
CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf))?),
}))
}else{
TooComplicated::Skip
}
}else{
TooComplicated::Stop
};
//discern that bad boy
Ok::<_,anyhow::Error>(
//prepare data structure
(compile_class.blacklist,match compile_class.class{
CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
CompileClass::Script(source)=>PreparedData::Builder(script_builder("Script",compile_class.name.as_str(),source)),
CompileClass::LocalScript(source)=>PreparedData::Builder(script_builder("LocalScript",compile_class.name.as_str(),source)),
CompileClass::ModuleScript(source)=>PreparedData::Builder(script_builder("ModuleScript",compile_class.name.as_str(),source)),
CompileClass::Model(buf)=>PreparedData::Model(rbx_xml::from_reader_default(std::io::Cursor::new(buf))?),
})
)
});
}
}
//push child objects onto dom
//this is only able to begin after dir iterator is exhausted
while let Some(goober)=join_set.join_next().await{
let (blacklist,data)=goober??;
let referent=match data{
PreparedData::Model(mut model_dom)=>{
let referent=model_dom.root().children()[0];
model_dom.transfer(referent,&mut dom,item_ref);
referent
},
PreparedData::Builder(script)=>dom.insert(item_ref,script),
Ok::<_,anyhow::Error>(ret2)
})().await
};
//new children need to be traversed
stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
}
match ret1{
Ok(TooComplicated::Stop)=>None,
Ok(TooComplicated::Skip)=>Some((Ok(None),dir1)),
Ok(TooComplicated::Value(v))=>Some((Ok(Some(v)),dir1)),
Err(e)=>Some((Err(e),dir1)),
}
});
//why can't I do this? is this even what I want?
//let u=u.buffer_unordered(32);
//begin processing immediately
u.fold((&mut stack,&mut dom),|(stack,dom),bog:Result<_,anyhow::Error>|async{
//push child objects onto dom serially as they arrive
match bog{
Ok(Some((blacklist,data)))=>{
let referent=match data{
PreparedData::Model(mut model_dom)=>{
let referent=model_dom.root().children()[0];
model_dom.transfer(referent,dom,item_ref);
referent
},
PreparedData::Builder(script)=>dom.insert(item_ref,script),
};
//new children need to be traversed
stack.push(CompileStackInstruction::TraverseReferent(referent,blacklist));
},
Ok(None)=>println!("entry skipped"),
Err(e)=>println!("error lole {e:?}"),
}
(stack,dom)
}).await;
}
},
CompileStackInstruction::PopFolder=>assert!(folder.pop(),"pop folder bad"),
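
One detail worth calling out from the hunk above: the (||async move{...})().await construction is an immediately invoked async closure used as an error catcher, so ? can be used inside it while the enclosing async block still returns the Option shape that unfold expects; the TooComplicated enum then distinguishes the three outcomes (directory exhausted, name already present, prepared data). A reduced sketch of just that trick, with an assumed parse_len helper purely for illustration:

// assumed helper, for illustration only: read a file and return its length
async fn parse_len(path: &std::path::Path) -> anyhow::Result<usize> {
    // the immediately invoked async closure gives ? a Result-returning scope,
    // independent of what the surrounding async block has to return
    (|| async move {
        let text = tokio::fs::read_to_string(path).await?;
        Ok::<_, anyhow::Error>(text.len())
    })()
    .await
}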
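
The commented-out u.buffer_unordered(32) line points at the goal the commit message says is still unmet. buffer_unordered needs a stream whose items are futures, while u already yields finished values, which is one likely reason the call does not fit as written. A plausible later shape is to have the stream yield unstarted per-entry futures, bound them with buffer_unordered, and apply the results serially afterwards; the sketch below illustrates that idea only, with process, CAP, and drain_capped as assumed names rather than code from this commit.

use futures::{Stream, StreamExt};

const CAP: usize = 32; // assumed limit: at most CAP entries in flight at once

// assumed per-entry worker, standing in for metadata lookup and file classification
async fn process(entry: tokio::fs::DirEntry) -> anyhow::Result<String> {
    Ok(entry.file_name().to_string_lossy().into_owned())
}

// `entries` would be the unfold stream over ReadDir, yielding one Result per entry
async fn drain_capped(entries: impl Stream<Item = anyhow::Result<tokio::fs::DirEntry>>) {
    entries
        // wrap each entry in an unstarted future doing the per-entry work
        .map(|res| async move {
            match res {
                Ok(entry) => process(entry).await,
                Err(e) => Err(e),
            }
        })
        // poll at most CAP of those futures at once; completions may arrive
        // out of order, which is fine when the serial step below is order-independent
        .buffer_unordered(CAP)
        // apply results serially, e.g. inserting builders into the dom
        .for_each(|res| async move {
            match res {
                Ok(name) => println!("prepared {name}"),
                Err(e) => eprintln!("error: {e:?}"),
            }
        })
        .await;
}

Whether out-of-order completion is acceptable depends on whether inserting the prepared children into the dom is order-sensitive; if it is not, this gives capped read-ahead without blocking on the slowest entry.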