Compare commits

..

1 Commits

Author SHA1 Message Date
d9a39cc046 print output on failure 2024-12-23 19:53:08 -08:00
3 changed files with 23 additions and 1510 deletions

1346
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -8,15 +8,11 @@ edition = "2021"
[dependencies] [dependencies]
anyhow = "1.0.75" anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"] } clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.31"
lazy-regex = "3.1.0" lazy-regex = "3.1.0"
rbx_binary = { version = "0.7.4", registry = "strafesnet"} rbx_binary = { version = "0.7.4", registry = "strafesnet"}
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet"} rbx_dom_weak = { version = "2.7.0", registry = "strafesnet"}
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet"} rbx_reflection_database = { version = "0.2.10", registry = "strafesnet"}
rbx_xml = { version = "0.13.3", registry = "strafesnet"} rbx_xml = { version = "0.13.3", registry = "strafesnet"}
siphasher = "1.0.1"
submissions-api = { path = "../maps-service/validation/api", features = ["external"], default-features = false}
tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "fs"] }
[profile.release] [profile.release]
lto = true lto = true

View File

@ -1,12 +1,13 @@
use std::{io::{Read, Seek}, path::PathBuf}; use std::{io::{Read, Seek}, path::PathBuf};
use clap::{Args, Parser, Subcommand}; use clap::{Args, Parser, Subcommand};
use anyhow::Result as AResult; use anyhow::Result as AResult;
use futures::{StreamExt,TryStreamExt};
#[derive(Parser)] #[derive(Parser)]
#[command(author, version, about, long_about = None)] #[command(author, version, about, long_about = None)]
#[command(propagate_version = true)] #[command(propagate_version = true)]
struct Cli { struct Cli {
#[arg(long)]
path:Option<PathBuf>,
#[command(subcommand)] #[command(subcommand)]
command: Commands, command: Commands,
} }
@ -18,7 +19,6 @@ enum Commands{
Replace, Replace,
Scan, Scan,
Upload, Upload,
UploadScripts(UploadScriptsCommand)
} }
#[derive(Args)] #[derive(Args)]
@ -31,14 +31,7 @@ struct MapList {
maps: Vec<u64>, maps: Vec<u64>,
} }
#[derive(Args)] fn main() -> AResult<()> {
struct UploadScriptsCommand{
#[arg(long)]
session_id:PathBuf,
}
#[tokio::main]
async fn main() -> AResult<()> {
let cli = Cli::parse(); let cli = Cli::parse();
match cli.command { match cli.command {
Commands::ExtractScripts(pathlist)=>extract_scripts(pathlist.paths), Commands::ExtractScripts(pathlist)=>extract_scripts(pathlist.paths),
@ -46,7 +39,6 @@ async fn main() -> AResult<()> {
Commands::Replace=>replace(), Commands::Replace=>replace(),
Commands::Scan=>scan(), Commands::Scan=>scan(),
Commands::Upload=>upload(), Commands::Upload=>upload(),
Commands::UploadScripts(command)=>upload_scripts(command.session_id).await,
} }
} }
@ -405,7 +397,7 @@ fn upload() -> AResult<()>{
} }
match upload_action { match upload_action {
UploadAction::Upload(asset_id) => { UploadAction::Upload(asset_id) => {
let status=std::process::Command::new("asset-tool") let output=std::process::Command::new("asset-tool")
.args([ .args([
"upload-asset", "upload-asset",
"--cookie-envvar","RBXCOOKIE", "--cookie-envvar","RBXCOOKIE",
@ -413,16 +405,17 @@ fn upload() -> AResult<()>{
]) ])
.arg("--asset-id").arg(asset_id.to_string()) .arg("--asset-id").arg(asset_id.to_string())
.arg("--input-file").arg(file_thing.path().into_os_string().into_string().unwrap()) .arg("--input-file").arg(file_thing.path().into_os_string().into_string().unwrap())
.status()?; .output()?;
match status.code() { match output.status.code() {
Some(0)=>{ Some(0)=>{
//move file //move file
let mut dest=PathBuf::from("maps/uploaded"); let mut dest=PathBuf::from("maps/uploaded");
dest.push(file_thing.file_name()); dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?; std::fs::rename(file_thing.path(), dest)?;
} }
Some(code)=>println!("upload failed! code={}",code), other=>{
None => println!("no status code!"), println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
},
} }
} }
UploadAction::Skip => continue, UploadAction::Skip => continue,
@ -444,9 +437,10 @@ fn upload() -> AResult<()>{
let mut dest=PathBuf::from("maps/uploaded"); let mut dest=PathBuf::from("maps/uploaded");
dest.push(file_thing.file_name()); dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?; std::fs::rename(file_thing.path(), dest)?;
} },
Some(code)=>println!("upload failed! code={}",code), other=>{
None => println!("no status code!"), println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
},
} }
} }
UploadAction::Delete => std::fs::remove_file(file_thing.path())?, UploadAction::Delete => std::fs::remove_file(file_thing.path())?,
@ -761,144 +755,3 @@ fn interactive() -> AResult<()>{
std::fs::write("id",id.to_string())?; std::fs::write("id",id.to_string())?;
Ok(()) Ok(())
} }
/// Hash script source bytes with SipHash, producing the stable u64 key
/// used to deduplicate and look up scripts server-side.
fn hash_source(source:&str)->u64{
    use std::hash::Hasher;
    let mut hasher=siphasher::sip::SipHasher::new();
    hasher.write(source.as_bytes());
    hasher.finish()
}
/// Render a script hash as a fixed-width (16 char), zero-padded,
/// lowercase hex string — the wire format the submissions API expects.
fn hash_format(hash:u64)->String{
    format!("{hash:016x}")
}
// Shorthand aliases for the submissions-api error/result types shared by the
// get-or-create helpers below ("GOC" = get-or-create).
type GOCError=submissions_api::types::SingleItemError;
type GOCResult=Result<submissions_api::types::ScriptID,GOCError>;
/// Look up a script by its source hash; if the server does not know it yet,
/// create it and return the newly assigned ID.
async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCResult{
    let hash=hash_format(hash_source(source));
    // Probe by content hash first so identical sources map to one server-side ID.
    let existing=api.get_script_from_hash(submissions_api::types::HashRequest{
        hash:hash.as_str(),
    }).await?;
    if let Some(script)=existing{
        return Ok(script.ID);
    }
    // Unknown hash: register the script. Name is a placeholder; no submission link.
    let created=api.create_script(submissions_api::types::CreateScriptRequest{
        Name:"Script",
        Source:source,
        SubmissionID:None,
    }).await.map_err(GOCError::Other)?;
    Ok(created.ID)
}
/// Ensure a script policy exists for `hash`: create it when absent, and when
/// one is already on record assert that it matches the requested policy.
///
/// NOTE(review): the name keeps the historical "poicy" spelling because the
/// sibling `do_policy` calls it by this exact name.
async fn check_or_create_script_poicy(
    api:&submissions_api::external::Context,
    hash:&str,
    script_policy:submissions_api::types::CreateScriptPolicyRequest,
)->Result<(),GOCError>{
    let lookup=api.get_script_policy_from_hash(submissions_api::types::HashRequest{
        hash,
    }).await?;
    match lookup{
        None=>{
            // Nothing on record yet — register the requested policy.
            api.create_script_policy(script_policy).await.map_err(GOCError::Other)?;
        },
        Some(existing)=>{
            // A policy already exists; it must agree with what we were about
            // to create, otherwise local data and server state have diverged.
            assert!(hash==existing.FromScriptHash);
            assert!(script_policy.ToScriptID==existing.ToScriptID);
            assert!(script_policy.Policy==existing.Policy);
        },
    }
    Ok(())
}
/// Build and submit a policy request mapping `source` (looked up in
/// `script_ids`) to `to_script_id` with the given `policy` verdict.
///
/// Panics if `source` is missing from `script_ids` — callers populate the map
/// from the same script set beforehand.
async fn do_policy(
    api:&submissions_api::external::Context,
    script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
    source:&str,
    to_script_id:submissions_api::types::ScriptID,
    policy:submissions_api::types::Policy,
)->Result<(),GOCError>{
    let from_script_id=script_ids[source];
    let hash=hash_format(hash_source(source));
    let request=submissions_api::types::CreateScriptPolicyRequest{
        FromScriptID:from_script_id,
        ToScriptID:to_script_id,
        Policy:policy,
    };
    check_or_create_script_poicy(api,hash.as_str(),request).await
}
/// Sync the local allowed/replace/blocked script lists to the submissions API:
/// deduplicate all sources, get-or-create a server-side script for each, then
/// get-or-create a policy (Replace/Allowed/Blocked) per source, 16 requests at
/// a time.
///
/// `session_id` is a path to a file whose contents are the session cookie.
async fn upload_scripts(session_id:PathBuf)->AResult<()>{
// Read the raw cookie string from the file at `session_id`.
let cookie={
let mut cookie=String::new();
std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
submissions_api::Cookie::new(&cookie)?
};
// NOTE(review): endpoint is hard-coded to a local dev server — confirm before
// running against anything else.
let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;
// Load the locally curated script categories (helpers defined elsewhere in this file).
let allowed_set=get_allowed_set()?;
let allowed_map=get_allowed_map()?;
let replace_map=get_replace_map()?;
let blocked=get_blocked()?;
// create a unified deduplicated set of all scripts
let script_set:std::collections::HashSet<&str>=allowed_set.iter()
.map(|s|s.as_str())
.chain(
replace_map.keys().map(|s|s.as_str())
).chain(
blocked.iter().map(|s|s.as_str())
).collect();
// get or create every unique script
// Up to 16 get-or-create requests in flight at once; first error aborts the
// collect. Result maps each source string to its server-side ScriptID.
let script_ids:std::collections::HashMap<&str,submissions_api::types::ScriptID>=
futures::stream::iter(script_set)
.map(|source|async move{
let script_id=get_or_create_script(api,source).await?;
Ok::<_,GOCError>((source,script_id))
})
.buffer_unordered(16)
.try_collect().await?;
// get or create policy for each script in each category
//
// replace
// Each replace-map entry points a bad source at its allowed replacement:
// allowed_map[id] yields the replacement source, which is then resolved to a
// ScriptID via script_ids.
futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(16),|(source,id)|async{
do_policy(
api,
&script_ids,
source,
script_ids[allowed_map[id].as_str()],
submissions_api::types::Policy::Replace
).await
}).await?;
// allowed
// Allowed scripts map to themselves.
futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Allowed
).await
}).await?;
// blocked
// Blocked scripts also map to themselves, only the Policy verdict differs.
futures::stream::iter(blocked.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Blocked
).await
}).await?;
Ok(())
}