Compare commits

..

4 Commits

SHA1        Message                   Date
f1576076b5  use auth bypass           2024-12-19 17:51:16 -08:00
6f3d3b170d  upload scripts            2024-12-19 16:19:40 -08:00
79ba77e7d9  add script upload deps    2024-12-19 16:19:36 -08:00
24f8b88ac5  include submissions api   2024-12-19 16:19:36 -08:00
3 changed files with 1510 additions and 23 deletions

Cargo.lock (generated): 1346 changed lines. File diff suppressed because it is too large.

Cargo.toml

@@ -8,11 +8,15 @@ edition = "2021"
[dependencies]
anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.31"
lazy-regex = "3.1.0"
rbx_binary = { version = "0.7.4", registry = "strafesnet"}
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet"}
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet"}
rbx_xml = { version = "0.13.3", registry = "strafesnet"}
siphasher = "1.0.1"
submissions-api = { path = "../maps-service/validation/api", features = ["external"], default-features = false}
tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "fs"] }
[profile.release]
lto = true


@@ -1,24 +1,24 @@
use std::{io::{Read, Seek}, path::PathBuf};
use clap::{Args, Parser, Subcommand};
use anyhow::Result as AResult;
use futures::{StreamExt,TryStreamExt};
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Cli {
#[arg(long)]
path:Option<PathBuf>,
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
struct Cli{
#[command(subcommand)]
command: Commands,
command:Commands,
}
#[derive(Subcommand)]
enum Commands {
enum Commands{
ExtractScripts(PathBufList),
Interactive,
Replace,
Scan,
Upload,
UploadScripts(UploadScriptsCommand)
}
#[derive(Args)]
@@ -31,7 +31,14 @@ struct MapList {
maps: Vec<u64>,
}
fn main() -> AResult<()> {
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id:PathBuf,
}
#[tokio::main]
async fn main() -> AResult<()> {
let cli = Cli::parse();
match cli.command {
Commands::ExtractScripts(pathlist)=>extract_scripts(pathlist.paths),
@@ -39,6 +46,7 @@ fn main() -> AResult<()> {
Commands::Replace=>replace(),
Commands::Scan=>scan(),
Commands::Upload=>upload(),
Commands::UploadScripts(command)=>upload_scripts(command.session_id).await,
}
}
@@ -397,7 +405,7 @@ fn upload() -> AResult<()>{
}
match upload_action {
UploadAction::Upload(asset_id) => {
let output=std::process::Command::new("asset-tool")
let status=std::process::Command::new("asset-tool")
.args([
"upload-asset",
"--cookie-envvar","RBXCOOKIE",
@@ -405,17 +413,16 @@
])
.arg("--asset-id").arg(asset_id.to_string())
.arg("--input-file").arg(file_thing.path().into_os_string().into_string().unwrap())
.output()?;
match output.status.code() {
.status()?;
match status.code() {
Some(0)=>{
//move file
let mut dest=PathBuf::from("maps/uploaded");
dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?;
}
other=>{
println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
},
Some(code)=>println!("upload failed! code={}",code),
None => println!("no status code!"),
}
}
UploadAction::Skip => continue,
@@ -437,10 +444,9 @@ fn upload() -> AResult<()>{
let mut dest=PathBuf::from("maps/uploaded");
dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?;
},
other=>{
println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
},
}
Some(code)=>println!("upload failed! code={}",code),
None => println!("no status code!"),
}
}
UploadAction::Delete => std::fs::remove_file(file_thing.path())?,
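
The two hunks above swap std::process::Command::output() for status() when shelling out to asset-tool: the child's stdout and stderr are now inherited by the terminal instead of being captured, so the failure branches can only report the exit code. A minimal sketch of that difference, using echo as a placeholder command (assumes a Unix-like environment, not the actual asset-tool CLI):

use std::process::Command;

fn main() -> std::io::Result<()> {
    // .output() buffers the child's stdout/stderr; the caller must print them itself.
    let out = Command::new("echo").arg("captured").output()?;
    println!("captured: {}", String::from_utf8_lossy(&out.stdout).trim_end());

    // .status() lets the child inherit the parent's stdout/stderr and only
    // returns the exit status, which is what the updated upload() relies on.
    let status = Command::new("echo").arg("inherited").status()?;
    println!("exit code: {:?}", status.code());
    Ok(())
}
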
@@ -755,3 +761,144 @@ fn interactive() -> AResult<()>{
std::fs::write("id",id.to_string())?;
Ok(())
}
fn hash_source(source:&str)->u64{
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
std::hash::Hasher::finish(&hasher)
}
fn hash_format(hash:u64)->String{
format!("{:016x}",hash)
}
type GOCError=submissions_api::types::SingleItemError;
type GOCResult=Result<submissions_api::types::ScriptID,GOCError>;
async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCResult{
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:hash_format(hash_source(source)).as_str(),
}).await?;
Ok(match script_response{
Some(script_response)=>script_response.ID,
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:"Script",
Source:source,
SubmissionID:None,
}).await.map_err(GOCError::Other)?.ID
})
}
async fn check_or_create_script_poicy(
api:&submissions_api::external::Context,
hash:&str,
script_policy:submissions_api::types::CreateScriptPolicyRequest,
)->Result<(),GOCError>{
let script_policy_result=api.get_script_policy_from_hash(submissions_api::types::HashRequest{
hash,
}).await?;
match script_policy_result{
Some(script_policy_reponse)=>{
// check that everything matches the expectation
assert!(hash==script_policy_reponse.FromScriptHash);
assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID);
assert!(script_policy.Policy==script_policy_reponse.Policy);
},
None=>{
// create a new policy
api.create_script_policy(script_policy).await.map_err(GOCError::Other)?;
}
}
Ok(())
}
async fn do_policy(
api:&submissions_api::external::Context,
script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
source:&str,
to_script_id:submissions_api::types::ScriptID,
policy:submissions_api::types::Policy,
)->Result<(),GOCError>{
let hash=hash_format(hash_source(source));
check_or_create_script_poicy(api,hash.as_str(),submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source],
ToScriptID:to_script_id,
Policy:policy,
}).await
}
async fn upload_scripts(session_id:PathBuf)->AResult<()>{
let cookie={
let mut cookie=String::new();
std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
submissions_api::Cookie::new(&cookie)?
};
let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;
let allowed_set=get_allowed_set()?;
let allowed_map=get_allowed_map()?;
let replace_map=get_replace_map()?;
let blocked=get_blocked()?;
// create a unified deduplicated set of all scripts
let script_set:std::collections::HashSet<&str>=allowed_set.iter()
.map(|s|s.as_str())
.chain(
replace_map.keys().map(|s|s.as_str())
).chain(
blocked.iter().map(|s|s.as_str())
).collect();
// get or create every unique script
let script_ids:std::collections::HashMap<&str,submissions_api::types::ScriptID>=
futures::stream::iter(script_set)
.map(|source|async move{
let script_id=get_or_create_script(api,source).await?;
Ok::<_,GOCError>((source,script_id))
})
.buffer_unordered(16)
.try_collect().await?;
// get or create policy for each script in each category
//
// replace
futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(16),|(source,id)|async{
do_policy(
api,
&script_ids,
source,
script_ids[allowed_map[id].as_str()],
submissions_api::types::Policy::Replace
).await
}).await?;
// allowed
futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Allowed
).await
}).await?;
// blocked
futures::stream::iter(blocked.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Blocked
).await
}).await?;
Ok(())
}
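
For reference, the script identity used throughout upload_scripts is a SipHash of the raw source bytes rendered as 16 lowercase hex digits; that string is what get_script_from_hash and get_script_policy_from_hash receive in HashRequest. A standalone sketch of the same scheme, assuming the siphasher 1.x crate added in Cargo.toml; the sample source string is made up:

use std::hash::Hasher;

// Same scheme as hash_source/hash_format in the diff: the crate's default
// SipHasher (SipHash-2-4 with an all-zero key) over the script source bytes,
// formatted as 16 lowercase hex digits.
fn script_hash(source: &str) -> String {
    let mut hasher = siphasher::sip::SipHasher::new();
    hasher.write(source.as_bytes());
    format!("{:016x}", hasher.finish())
}

fn main() {
    let source = "print(\"hello\")"; // hypothetical script body
    let hash = script_hash(source);
    assert_eq!(hash.len(), 16);
    println!("script hash = {hash}");
}

Assuming clap's default kebab-case naming, the new subcommand would be invoked as something like "<binary> upload-scripts --session-id <cookie-file>", where the file holds the session cookie that upload_scripts reads; the binary name itself is not shown in this compare view.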