4 Commits

Author SHA1 Message Date
1f8a66638f rewrite get_full_name function 2025-03-27 12:20:29 -07:00
0995ced783 skip non-files (directories) 2025-02-12 13:47:59 -08:00
49d071fd56 move writeattributes to map fixer 2025-01-27 07:06:24 -08:00
d9a39cc046 print output on failure 2024-12-23 19:53:08 -08:00
3 changed files with 95 additions and 1513 deletions

Cargo.lock (generated): 1346 changed lines; file diff suppressed because it is too large.

Cargo.toml

@@ -8,15 +8,11 @@ edition = "2021"
 [dependencies]
 anyhow = "1.0.75"
 clap = { version = "4.4.2", features = ["derive"] }
-futures = "0.3.31"
 lazy-regex = "3.1.0"
 rbx_binary = { version = "0.7.4", registry = "strafesnet"}
 rbx_dom_weak = { version = "2.7.0", registry = "strafesnet"}
 rbx_reflection_database = { version = "0.2.10", registry = "strafesnet"}
 rbx_xml = { version = "0.13.3", registry = "strafesnet"}
-siphasher = "1.0.1"
-submissions-api = { path = "../maps-service/validation/api", features = ["external"], default-features = false}
-tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "fs"] }
 
 [profile.release]
 lto = true

@@ -1,12 +1,13 @@
 use std::{io::{Read, Seek}, path::PathBuf};
 use clap::{Args, Parser, Subcommand};
 use anyhow::Result as AResult;
-use futures::{StreamExt,TryStreamExt};
 
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
 #[command(propagate_version = true)]
 struct Cli {
+	#[arg(long)]
+	path:Option<PathBuf>,
 	#[command(subcommand)]
 	command: Commands,
 }

@@ -18,7 +19,7 @@ enum Commands{
 	Replace,
 	Scan,
 	Upload,
-	UploadScripts(UploadScriptsCommand)
+	WriteAttributes,
 }
 
 #[derive(Args)]

@@ -31,14 +32,7 @@ struct MapList {
 	maps: Vec<u64>,
 }
 
-#[derive(Args)]
-struct UploadScriptsCommand{
-	#[arg(long)]
-	session_id:PathBuf,
-}
-
-#[tokio::main]
-async fn main() -> AResult<()> {
+fn main() -> AResult<()> {
 	let cli = Cli::parse();
 	match cli.command {
 		Commands::ExtractScripts(pathlist)=>extract_scripts(pathlist.paths),

@@ -46,7 +40,7 @@ async fn main() -> AResult<()> {
 		Commands::Replace=>replace(),
 		Commands::Scan=>scan(),
 		Commands::Upload=>upload(),
-		Commands::UploadScripts(command)=>upload_scripts(command.session_id).await,
+		Commands::WriteAttributes=>write_attributes(),
 	}
 }
@@ -72,15 +66,16 @@ fn recursive_collect_superclass(objects: &mut std::vec::Vec<rbx_dom_weak::types:
 		}
 	}
 }
 fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->String{
-	let mut full_name=instance.name.clone();
-	let mut pref=instance.parent();
-	while let Some(parent)=dom.get_by_ref(pref){
-		full_name.insert(0, '.');
-		full_name.insert_str(0, &parent.name);
-		pref=parent.parent();
-	}
-	full_name
+	let mut names:Vec<_>=core::iter::successors(
+		Some(instance),
+		|i|dom.get_by_ref(i.parent())
+	).map(
+		|i|i.name.as_str()
+	).collect();
+	names.reverse();
+	names.join(".")
 }
 
 //scan (scripts)
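
Note on the get_full_name rewrite above: core::iter::successors walks instance, then its parent, then its grandparent, and so on until get_by_ref returns None, so the names are collected leaf-first and then reversed before joining. A minimal standalone sketch of the same pattern, using a plain index-based tree in place of the rbx_dom_weak DOM (all names here are illustrative, not from the repository):

// Sketch only: a Vec-backed tree stands in for rbx_dom_weak::WeakDom.
struct Node { name: String, parent: Option<usize> }

fn full_name(nodes: &[Node], start: usize) -> String {
	// successors() yields start, its parent, its grandparent, ... until None.
	let mut names: Vec<&str> = core::iter::successors(
		Some(start),
		|&i| nodes[i].parent,
	).map(|i| nodes[i].name.as_str()).collect();
	names.reverse();   // root first
	names.join(".")    // e.g. "Workspace.Model.Button1"
}

fn main() {
	let nodes = vec![
		Node { name: "Workspace".into(), parent: None },
		Node { name: "Model".into(), parent: Some(0) },
		Node { name: "Button1".into(), parent: Some(1) },
	];
	assert_eq!(full_name(&nodes, 2), "Workspace.Model.Button1");
}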
@@ -382,6 +377,10 @@ fn upload() -> AResult<()>{
 	//interactive prompt per upload:
 	for entry in std::fs::read_dir("maps/passed")? {
 		let file_thing=entry?;
+		if !file_thing.file_type()?.is_file(){
+			println!("skipping non-file: {:?}",file_thing.file_name());
+			continue;
+		}
 		println!("map file: {:?}",file_thing.file_name());
 		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
@@ -405,7 +404,7 @@ fn upload() -> AResult<()>{
 		}
 		match upload_action {
 			UploadAction::Upload(asset_id) => {
-				let status=std::process::Command::new("asset-tool")
+				let output=std::process::Command::new("asset-tool")
 				.args([
 					"upload-asset",
 					"--cookie-envvar","RBXCOOKIE",

@@ -413,16 +412,17 @@ fn upload() -> AResult<()>{
 				])
 				.arg("--asset-id").arg(asset_id.to_string())
 				.arg("--input-file").arg(file_thing.path().into_os_string().into_string().unwrap())
-				.status()?;
-				match status.code() {
+				.output()?;
+				match output.status.code() {
 					Some(0)=>{
 						//move file
 						let mut dest=PathBuf::from("maps/uploaded");
 						dest.push(file_thing.file_name());
 						std::fs::rename(file_thing.path(), dest)?;
 					}
-					Some(code)=>println!("upload failed! code={}",code),
-					None => println!("no status code!"),
+					other=>{
+						println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
+					},
 				}
 			}
 			UploadAction::Skip => continue,

@@ -444,9 +444,10 @@ fn upload() -> AResult<()>{
 						let mut dest=PathBuf::from("maps/uploaded");
 						dest.push(file_thing.file_name());
 						std::fs::rename(file_thing.path(), dest)?;
-					}
-					Some(code)=>println!("upload failed! code={}",code),
-					None => println!("no status code!"),
+					},
+					other=>{
+						println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
+					},
 				}
 			}
 			UploadAction::Delete => std::fs::remove_file(file_thing.path())?,
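
The switch from .status() to .output() above is what enables the "print output on failure" commit: Command::output() runs the child to completion and buffers its stdout and stderr instead of letting them inherit the parent's streams, so the failure branch can dump both. A minimal sketch of the pattern with an arbitrary command (git is only an example here, not the repository's asset-tool invocation):

use std::process::Command;

fn main() -> std::io::Result<()> {
	// .output() captures stdout/stderr; .status() only reports the exit status.
	let output = Command::new("git").arg("--version").output()?;
	match output.status.code() {
		Some(0) => println!("ok: {}", String::from_utf8_lossy(&output.stdout).trim()),
		// `other` covers nonzero exit codes and None (terminated by signal).
		other => println!(
			"failed! code={:?}\nstdout={}\nstderr={}",
			other,
			String::from_utf8_lossy(&output.stdout),
			String::from_utf8_lossy(&output.stderr),
		),
	}
	Ok(())
}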
@@ -762,143 +763,64 @@ fn interactive() -> AResult<()>{
 	Ok(())
 }
-fn hash_source(source:&str)->u64{
-	let mut hasher=siphasher::sip::SipHasher::new();
-	std::hash::Hasher::write(&mut hasher,source.as_bytes());
-	std::hash::Hasher::finish(&hasher)
-}
-fn hash_format(hash:u64)->String{
-	format!("{:016x}",hash)
-}
-
-type GOCError=submissions_api::types::SingleItemError;
-type GOCResult=Result<submissions_api::types::ScriptID,GOCError>;
-
-async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCResult{
-	let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
-		hash:hash_format(hash_source(source)).as_str(),
-	}).await?;
-	Ok(match script_response{
-		Some(script_response)=>script_response.ID,
-		None=>api.create_script(submissions_api::types::CreateScriptRequest{
-			Name:"Script",
-			Source:source,
-			SubmissionID:None,
-		}).await.map_err(GOCError::Other)?.ID
-	})
-}
-
-async fn check_or_create_script_poicy(
-	api:&submissions_api::external::Context,
-	hash:&str,
-	script_policy:submissions_api::types::CreateScriptPolicyRequest,
-)->Result<(),GOCError>{
-	let script_policy_result=api.get_script_policy_from_hash(submissions_api::types::HashRequest{
-		hash,
-	}).await?;
-	match script_policy_result{
-		Some(script_policy_reponse)=>{
-			// check that everything matches the expectation
-			assert!(hash==script_policy_reponse.FromScriptHash);
-			assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID);
-			assert!(script_policy.Policy==script_policy_reponse.Policy);
-		},
-		None=>{
-			// create a new policy
-			api.create_script_policy(script_policy).await.map_err(GOCError::Other)?;
-		}
-	}
-	Ok(())
-}
-
-async fn do_policy(
-	api:&submissions_api::external::Context,
-	script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
-	source:&str,
-	to_script_id:submissions_api::types::ScriptID,
-	policy:submissions_api::types::Policy,
-)->Result<(),GOCError>{
-	let hash=hash_format(hash_source(source));
-	check_or_create_script_poicy(api,hash.as_str(),submissions_api::types::CreateScriptPolicyRequest{
-		FromScriptID:script_ids[source],
-		ToScriptID:to_script_id,
-		Policy:policy,
-	}).await
-}
-
-async fn upload_scripts(session_id:PathBuf)->AResult<()>{
-	let cookie={
-		let mut cookie=String::new();
-		std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
-		submissions_api::Cookie::new(&cookie)?
-	};
-	let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;
-
-	let allowed_set=get_allowed_set()?;
-	let allowed_map=get_allowed_map()?;
-	let replace_map=get_replace_map()?;
-	let blocked=get_blocked()?;
-
-	// create a unified deduplicated set of all scripts
-	let script_set:std::collections::HashSet<&str>=allowed_set.iter()
-		.map(|s|s.as_str())
-		.chain(
-			replace_map.keys().map(|s|s.as_str())
-		).chain(
-			blocked.iter().map(|s|s.as_str())
-		).collect();
-
-	// get or create every unique script
-	let script_ids:std::collections::HashMap<&str,submissions_api::types::ScriptID>=
-		futures::stream::iter(script_set)
-		.map(|source|async move{
-			let script_id=get_or_create_script(api,source).await?;
-			Ok::<_,GOCError>((source,script_id))
-		})
-		.buffer_unordered(16)
-		.try_collect().await?;
-
-	// get or create policy for each script in each category
-	//
-	// replace
-	futures::stream::iter(replace_map.iter().map(Ok))
-		.try_for_each_concurrent(Some(16),|(source,id)|async{
-			do_policy(
-				api,
-				&script_ids,
-				source,
-				script_ids[allowed_map[id].as_str()],
-				submissions_api::types::Policy::Replace
-			).await
-		}).await?;
-
-	// allowed
-	futures::stream::iter(allowed_set.iter().map(Ok))
-		.try_for_each_concurrent(Some(16),|source|async{
-			do_policy(
-				api,
-				&script_ids,
-				source,
-				script_ids[source.as_str()],
-				submissions_api::types::Policy::Allowed
-			).await
-		}).await?;
-
-	// blocked
-	futures::stream::iter(blocked.iter().map(Ok))
-		.try_for_each_concurrent(Some(16),|source|async{
-			do_policy(
-				api,
-				&script_ids,
-				source,
-				script_ids[source.as_str()],
-				submissions_api::types::Policy::Blocked
-			).await
-		}).await?;
-
+fn recursive_collect_regex(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, regex: &lazy_regex::Lazy<lazy_regex::Regex>){
+	for &referent in instance.children() {
+		if let Some(c) = dom.get_by_ref(referent) {
+			if regex.captures(c.name.as_str()).is_some(){
+				objects.push(c.referent());//copy ref
+			}
+			recursive_collect_regex(objects,dom,c,regex);
+		}
+	}
+}
+fn get_button_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
+	let mut buttons = std::vec::Vec::new();
+	recursive_collect_regex(&mut buttons, dom, dom.root(),lazy_regex::regex!(r"Button(\d+)$"));
+	buttons
+}
+fn write_attributes() -> AResult<()>{
+	for entry in std::fs::read_dir("maps/unprocessed")? {
+		let file_thing=entry?;
+		println!("processing map={:?}",file_thing.file_name());
+		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
+		let mut dom = load_dom(&mut input)?;
+
+		let button_refs = get_button_refs(&dom);
+		for &button_ref in &button_refs {
+			if let Some(button)=dom.get_by_ref_mut(button_ref){
+				match button.properties.get_mut("Attributes"){
+					Some(rbx_dom_weak::types::Variant::Attributes(attributes))=>{
+						println!("Appending Ref={} to existing attributes for {}",button_ref,button.name);
+						attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
+					},
+					None=>{
+						println!("Creating new attributes with Ref={} for {}",button_ref,button.name);
+						let mut attributes=rbx_dom_weak::types::Attributes::new();
+						attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
+						button.properties.insert("Attributes".to_string(),rbx_dom_weak::types::Variant::Attributes(attributes));
+					}
+					_=>unreachable!("Fetching attributes did not return attributes."),
+				}
+			}
+		}
+
+		let mut dest={
+			let mut dest=PathBuf::from("maps/attributes");
+			dest.push(file_thing.file_name());
+			let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
+			//write workspace:GetChildren()[1]
+			let workspace_children=dom.root().children();
+			if workspace_children.len()!=1{
+				return Err(anyhow::Error::msg("there can only be one model"));
+			}
+			rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
+			//move original to processed folder
+			PathBuf::from("maps/unaltered")
+		};
+		dest.push(file_thing.file_name());
+		std::fs::rename(file_thing.path(), dest)?;
+	}
 	Ok(())
 }
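
For reference, the button lookup added above keys off the name pattern alone: lazy_regex::regex!(r"Button(\d+)$") matches instances whose names end in "Button" followed by digits, and capture group 1 holds the button number. A small standalone sketch of that filter (the instance names below are made up; only the lazy-regex dependency already in Cargo.toml is assumed):

fn main() {
	let button_regex = lazy_regex::regex!(r"Button(\d+)$");
	for name in ["Button1", "SpawnButton12", "Button", "Button2Extra"] {
		match button_regex.captures(name) {
			// group 1 holds the digits, e.g. "1" or "12"
			Some(caps) => println!("{name}: button number {}", &caps[1]),
			// "Button" (no digits) and "Button2Extra" (digits not at the end) fall here
			None => println!("{name}: not a button"),
		}
	}
}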