forked from StrafesNET/remote-script-review
Compare commits
28 Commits
2ed3a6b11c
0398fb8d9d
0f4b57b349
726212cb64
1b1c20a9e4
dcc31e46b3
5230b8a371
bfe55c2af6
a9e838d445
b2e61016ad
01a1d8f0c5
e1e781ed25
c766b8b0cc
cee52e31be
2f25efe07e
834d67f8f6
e97ad7f102
8786ff0c79
d23cd57850
80e133c27a
f9fb1fb23c
4116eaf829
c4508480c1
a6b8b326f1
3eb39f2c6c
af2cf4b7a8
bc11f918aa
a16e8faf8b

Cargo.lock (generated): 783 changes; file diff suppressed because it is too large.

Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "rreview"
version = "1.0.0"
version = "1.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -8,8 +8,9 @@ edition = "2021"
[dependencies]
clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.31"
rand = "0.9.1"
siphasher = "1.0.1"
submissions-api = { version = "0.3.0", features = ["external"], default-features = false, registry = "strafesnet" }
submissions-api = { version = "0.8.1", registry = "strafesnet" }
tokio = { version = "1.42.0", features = ["fs", "macros", "rt-multi-thread"] }

[profile.release]

src/cmd.rs (deleted)
@@ -1,26 +0,0 @@
use clap::{Args,Parser,Subcommand};

#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
pub struct Cli{
#[command(subcommand)]
command:Commands,
}

#[derive(Subcommand)]
pub enum Commands{
Review(ReviewCommand),
UploadScripts(UploadScriptsCommand),
}

#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
cookie:String,
}
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id:PathBuf,
}

src/main.rs: 633 changes
@@ -1,16 +1,89 @@
mod cmd;

use cmd::{Cli,Commands};
use clap::{Args,Parser,Subcommand};
use futures::{StreamExt,TryStreamExt};
use rand::seq::SliceRandom;
use std::io::Write;
use std::path::PathBuf;

const READ_CONCURRENCY:usize=16;
const REMOTE_CONCURRENCY:usize=16;

#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
struct Cli{
#[command(subcommand)]
command:Commands,
}

#[derive(Subcommand)]
enum Commands{
Release(ReleaseCommand),
RepairDuplicates(RepairDuplicatesCommand),
RepairPolicies(RepairPoliciesCommand),
Review(ReviewCommand),
UploadScripts(UploadScriptsCommand),
}

#[derive(Args)]
struct ReleaseCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct RepairDuplicatesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct RepairPoliciesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}

#[tokio::main]
async fn main(){
let cli=Cli::parse();
match cli.command{
Commands::Review(command)=>review(ReviewConfig{
cookie:command.cookie,
Commands::Release(command)=>release(ReleaseConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairDuplicates(command)=>repair_duplicates(RepairDuplicatesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairPolicies(command)=>repair_policies(RepairPoliciesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::Review(command)=>review(ReviewConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::UploadScripts(command)=>upload_scripts(UploadConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::UploadScripts(command)=>upload_scripts(command.session_id).await.unwrap(),
}
}
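
Note: a minimal sketch of how the reworked CLI is presumably driven after this change, assuming clap 4's default kebab-case naming for subcommands and long flags; the binary name comes from Cargo.toml above, and the session file and URL are placeholders. Only the Review subcommand is reproduced here.

use clap::{Args,Parser,Subcommand};
use std::path::PathBuf;

#[derive(Parser)]
struct Cli{
    #[command(subcommand)]
    command:Commands,
}

#[derive(Subcommand)]
enum Commands{
    // the real enum also has Release, RepairDuplicates, RepairPolicies and UploadScripts
    Review(ReviewCommand),
}

#[derive(Args)]
struct ReviewCommand{
    #[arg(long)]
    session_id_file:PathBuf,
    #[arg(long)]
    api_url:String,
}

fn main(){
    // field names become kebab-case flags: --session-id-file and --api-url
    let cli=Cli::parse_from([
        "rreview","review",
        "--session-id-file","session.txt",
        "--api-url","https://submissions.example",
    ]);
    let Commands::Review(cmd)=cli.command;
    assert_eq!(cmd.api_url,"https://submissions.example");
}
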
@@ -19,6 +92,8 @@ enum ScriptActionParseResult{
Block,
Exit,
Delete,
Purge,
Skip,
}
struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult{
@@ -29,6 +104,8 @@ impl std::str::FromStr for ScriptActionParseResult{
"block\n"=>Ok(Self::Block),
"exit\n"=>Ok(Self::Exit),
"delete\n"=>Ok(Self::Delete),
"purge\n"=>Ok(Self::Purge),
"skip\n"=>Ok(Self::Skip),
_=>Err(ParseScriptActionErr),
}
}
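
Note: every pattern in this FromStr impl keeps its trailing "\n". The prompt/read code is not shown in this hunk, so this is an assumption, but the patterns are consistent with feeding the raw read_line buffer (newline included) straight into parse() without a trim(). A self-contained sketch of that idea:

use std::io::BufRead;

fn main()->std::io::Result<()>{
    // hypothetical prompt: the user types an action such as "skip" and presses Enter
    let mut line=String::new();
    std::io::stdin().lock().read_line(&mut line)?;
    // read_line keeps the '\n', so patterns like "skip\n" and "exit\n" match directly
    match line.as_str(){
        "skip\n"=>println!("skipping"),
        "exit\n"=>println!("exiting"),
        other=>println!("unrecognized action: {other:?}"),
    }
    Ok(())
}
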
@@ -40,25 +117,27 @@ enum ReviewError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetPolicies(submissions_api::Error),
GetScriptFromHash(submissions_api::types::SingleItemError),
GetScriptFromHash(submissions_api::types::ScriptSingleItemError),
NoScript,
WriteCurrent(std::io::Error),
ActionIO(std::io::Error),
PurgeScript(submissions_api::Error),
ReadCurrent(std::io::Error),
DeduplicateModified(submissions_api::types::SingleItemError),
DeduplicateModified(submissions_api::types::ScriptSingleItemError),
UploadModified(submissions_api::Error),
UpdateScriptPolicy(submissions_api::Error),
}

struct ReviewConfig{
cookie:String,
session_id:String,
api_url:String,
}

async fn review(config:ReviewConfig)->Result<(),ReviewError>{
// download unreviewed policies
// review them
let cookie=submissions_api::Cookie::new(&config.cookie).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie).map_err(ReviewError::Reqwest)?;
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new(config.api_url,cookie).map_err(ReviewError::Reqwest)?;

let unreviewed_policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:1,
@@ -68,7 +147,8 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(ReviewError::GetPolicies)?;

for unreviewed_policy in unreviewed_policies{
let unreviewed_policy_count=unreviewed_policies.len();
for (i,unreviewed_policy) in unreviewed_policies.into_iter().enumerate(){
// download source code
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:unreviewed_policy.FromScriptHash.as_str(),
@@ -80,6 +160,10 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
//load source into current.lua
tokio::fs::write("current.lua",source.as_str()).await.map_err(ReviewError::WriteCurrent)?;

// print script name
println!("policy {}/{unreviewed_policy_count}",i+1);
println!("script name: {}",script_response.Name);

//prompt action in terminal
//wait for input
let script_action;
@@ -97,6 +181,17 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
let mut to_script_id=None;
// interpret action
let reviewed_policy=match script_action{
ScriptActionParseResult::Purge=>{
// remove script and policy from the database.
let remove_script_fut=api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script_response.ID,
});
let remove_script_policy_fut=api.delete_script_policy(submissions_api::types::GetScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
});
tokio::try_join!(remove_script_fut,remove_script_policy_fut).map_err(ReviewError::PurgeScript)?;
continue;
},
ScriptActionParseResult::Pass=>{
//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
let modified_source=tokio::fs::read_to_string("current.lua").await.map_err(ReviewError::ReadCurrent)?;
@@ -104,9 +199,7 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
submissions_api::types::Policy::Allowed
}else{
// compute hash
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
let hash=std::hash::Hasher::finish(&hasher);
let hash=hash_source(modified_source.as_str());

// check if modified script already exists
let maybe_script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
@@ -119,8 +212,9 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:script_response.Name.as_str(),
Source:modified_source.as_str(),
SubmissionID:Some(script_response.SubmissionID),
}).await.map_err(ReviewError::UploadModified)?.ID
ResourceType:script_response.ResourceType,
ResourceID:Some(script_response.ResourceID),
}).await.map_err(ReviewError::UploadModified)?.ScriptID
});

// use replace policy
@@ -130,20 +224,104 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
ScriptActionParseResult::Block=>submissions_api::types::Policy::Blocked,
ScriptActionParseResult::Exit=>break,
ScriptActionParseResult::Delete=>submissions_api::types::Policy::Delete,
ScriptActionParseResult::Skip=>continue,
};

// update policy
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
ID:unreviewed_policy.ID,
FromScriptID:None,
ToScriptID:to_script_id,
Policy:Some(reviewed_policy),
}).await.map_err(ReviewError::UpdateScriptPolicy)?;

println!("updated {:?} From: {:?} To: {:?} with policy {reviewed_policy:?}",unreviewed_policy.ID,script_response.ID,to_script_id);
}

Ok(())
}

#[allow(dead_code)]
#[derive(Debug)]
enum ScriptUploadError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
AllowedSet(std::io::Error),
AllowedMap(GetMapError),
ReplaceMap(GetMapError),
BlockedSet(std::io::Error),
GetOrCreate(GOCScriptError),
GetOrCreatePolicyReplace(GOCScriptPolicyError),
GetOrCreatePolicyAllowed(GOCScriptPolicyError),
GetOrCreatePolicyBlocked(GOCScriptPolicyError),
}

fn read_dir_stream(dir:tokio::fs::ReadDir)->impl futures::stream::Stream<Item=std::io::Result<tokio::fs::DirEntry>>{
futures::stream::unfold(dir,|mut dir|async{
match dir.next_entry().await{
Ok(Some(entry))=>Some((Ok(entry),dir)),
Ok(None)=>None, // End of directory
Err(e)=>Some((Err(e),dir)), // Error encountered
}
})
}

async fn get_set_from_file(path:impl AsRef<std::path::Path>)->std::io::Result<std::collections::HashSet<String>>{
read_dir_stream(tokio::fs::read_dir(path).await?)
.map(|dir_entry|async{
tokio::fs::read_to_string(dir_entry?.path()).await
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}

async fn get_allowed_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/allowed").await
}

async fn get_blocked_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/blocked").await
}

#[allow(dead_code)]
#[derive(Debug)]
enum GetMapError{
IO(std::io::Error),
FileStem,
ToStr,
ParseInt(std::num::ParseIntError),
}

async fn get_allowed_map()->Result<std::collections::HashMap::<u32,String>,GetMapError>{
read_dir_stream(tokio::fs::read_dir("scripts/allowed").await.map_err(GetMapError::IO)?)
.map(|dir_entry|async{
let path=dir_entry.map_err(GetMapError::IO)?.path();
let id:u32=path
.file_stem().ok_or(GetMapError::FileStem)?
.to_str().ok_or(GetMapError::ToStr)?
.parse().map_err(GetMapError::ParseInt)?;
let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
Ok((id,source))
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}

async fn get_replace_map()->Result<std::collections::HashMap::<String,u32>,GetMapError>{
read_dir_stream(tokio::fs::read_dir("scripts/replace").await.map_err(GetMapError::IO)?)
.map(|dir_entry|async{
let path=dir_entry.map_err(GetMapError::IO)?.path();
let id:u32=path
.file_stem().ok_or(GetMapError::FileStem)?
.to_str().ok_or(GetMapError::ToStr)?
.parse().map_err(GetMapError::ParseInt)?;
let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
Ok((source,id))
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}

fn hash_source(source:&str)->u64{
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
@@ -154,10 +332,11 @@ fn hash_format(hash:u64)->String{
format!("{:016x}",hash)
}

type GOCError=submissions_api::types::SingleItemError;
type GOCResult=Result<submissions_api::types::ScriptID,GOCError>;
type GOCScriptError=submissions_api::types::ScriptSingleItemError;
type GOCScriptPolicyError=submissions_api::types::ScriptPolicySingleItemError;
type GOCScriptResult=Result<submissions_api::types::ScriptID,GOCScriptError>;

async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCResult{
async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCScriptResult{
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:hash_format(hash_source(source)).as_str(),
}).await?;
@@ -167,8 +346,9 @@ async fn get_or_create_script(api:&submissions_api::external::Context,source:&st
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:"Script",
Source:source,
SubmissionID:None,
}).await.map_err(GOCError::Other)?.ID
ResourceType:submissions_api::types::ResourceType::Unknown,
ResourceID:None,
}).await.map_err(GOCScriptError::Other)?.ScriptID
})
}
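
Note: get_or_create_script and the other HashRequest lookups all key scripts by the same digest. Below is a self-contained sketch of that convention, mirroring hash_source/hash_format from this diff; siphasher's SipHasher::new() uses a fixed all-zero key, and the formatted digest is always 16 lowercase hex characters. It only needs the siphasher crate already listed in Cargo.toml.

use std::hash::Hasher;

fn hash_source(source:&str)->u64{
    let mut hasher=siphasher::sip::SipHasher::new();
    hasher.write(source.as_bytes());
    hasher.finish()
}

fn hash_format(hash:u64)->String{
    // zero-padded to a fixed 16-character width
    format!("{:016x}",hash)
}

fn main(){
    let hash=hash_format(hash_source("print(\"hello\")"));
    assert_eq!(hash.len(),16);
    println!("{hash}");
}
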
@@ -176,7 +356,7 @@ async fn check_or_create_script_poicy(
api:&submissions_api::external::Context,
hash:&str,
script_policy:submissions_api::types::CreateScriptPolicyRequest,
)->Result<(),GOCError>{
)->Result<(),GOCScriptPolicyError>{
let script_policy_result=api.get_script_policy_from_hash(submissions_api::types::HashRequest{
hash,
}).await?;
@@ -184,54 +364,53 @@ async fn check_or_create_script_poicy(
match script_policy_result{
Some(script_policy_reponse)=>{
// check that everything matches the expectation
assert!(hash==script_policy_reponse.FromScriptHash);
assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID);
assert!(script_policy.Policy==script_policy_reponse.Policy);
assert_eq!(hash,script_policy_reponse.FromScriptHash);
assert_eq!(script_policy.ToScriptID,script_policy_reponse.ToScriptID);
assert_eq!(script_policy.Policy,script_policy_reponse.Policy);
},
None=>{
// create a new policy
api.create_script_policy(script_policy).await.map_err(GOCError::Other)?;
api.create_script_policy(script_policy).await.map_err(GOCScriptPolicyError::Other)?;
}
}

Ok(())
}

async fn do_policy(
api:&submissions_api::external::Context,
script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
source:&str,
to_script_id:submissions_api::types::ScriptID,
policy:submissions_api::types::Policy,
)->Result<(),GOCError>{
let hash=hash_format(hash_source(source));
check_or_create_script_poicy(api,hash.as_str(),submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source],
ToScriptID:to_script_id,
Policy:policy,
}).await
struct UploadConfig{
session_id:String,
api_url:String,
}

async fn upload_scripts(session_id:PathBuf)->Result<()>{
let cookie={
let mut cookie=String::new();
std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
submissions_api::Cookie::new(&cookie)?
};
let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;
async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ScriptUploadError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ScriptUploadError::Reqwest)?;

let allowed_set=get_allowed_set()?;
let allowed_map=get_allowed_map()?;
let replace_map=get_replace_map()?;
let blocked=get_blocked()?;
// load all script files
let (
allowed_set_result,
allowed_map_result,
replace_map_result,
blocked_set_result,
)=tokio::join!(
get_allowed_set(),
get_allowed_map(),
get_replace_map(),
get_blocked_set(),
);

let allowed_set=allowed_set_result.map_err(ScriptUploadError::AllowedSet)?;
let allowed_map=allowed_map_result.map_err(ScriptUploadError::AllowedMap)?;
let replace_map=replace_map_result.map_err(ScriptUploadError::ReplaceMap)?;
let blocked_set=blocked_set_result.map_err(ScriptUploadError::BlockedSet)?;

// create a unified deduplicated set of all scripts
let script_set:std::collections::HashSet<&str>=allowed_set.iter()
.map(|s|s.as_str())
.map(String::as_str)
.chain(
replace_map.keys().map(|s|s.as_str())
replace_map.keys().map(String::as_str)
).chain(
blocked.iter().map(|s|s.as_str())
blocked_set.iter().map(String::as_str)
).collect();

// get or create every unique script
@@ -239,48 +418,332 @@ async fn upload_scripts(session_id:PathBuf)->Result<()>{
futures::stream::iter(script_set)
.map(|source|async move{
let script_id=get_or_create_script(api,source).await?;
Ok::<_,GOCError>((source,script_id))
Ok((source,script_id))
})
.buffer_unordered(16)
.try_collect().await?;
.buffer_unordered(REMOTE_CONCURRENCY)
.try_collect().await.map_err(ScriptUploadError::GetOrCreate)?;

// get or create policy for each script in each category
//
// replace
futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(16),|(source,id)|async{
do_policy(
let replace_fut=futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|(source,id)|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[allowed_map[id].as_str()],
submissions_api::types::Policy::Replace
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[allowed_map[id].as_str()],
Policy:submissions_api::types::Policy::Replace,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyReplace)
});

// allowed
futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
let allowed_fut=futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Allowed
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Allowed,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyAllowed)
});

// blocked
futures::stream::iter(blocked.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
let blocked_fut=futures::stream::iter(blocked_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Blocked
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Blocked,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyBlocked)
});

// run futures
tokio::try_join!(replace_fut,allowed_fut,blocked_fut)?;

Ok(())
}

#[allow(dead_code)]
#[derive(Debug)]
enum RepairPoliciesError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetPolicies(submissions_api::Error),
GetScripts(submissions_api::types::ScriptSingleItemError),
UpdateScriptPolicy(submissions_api::Error),
}

struct RepairPoliciesConfig{
session_id:String,
api_url:String,
}
async fn repair_policies(config:RepairPoliciesConfig)->Result<(),RepairPoliciesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairPoliciesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairPoliciesError::Reqwest)?;

const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
let policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:page,
Limit:LIMIT,
FromScriptHash:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::Replace),
}).await.map_err(RepairPoliciesError::GetPolicies)?;

futures::stream::iter(policies.iter().map(Ok)).try_for_each_concurrent(REMOTE_CONCURRENCY,async|policy|{
let from_script=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:policy.FromScriptHash.as_str(),
}).await.map_err(RepairPoliciesError::GetScripts)?;

if let Some(from_script)=from_script{
if policy.ToScriptID==from_script.ID{
// invalid policy. Reset the policy to None
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ID:policy.ID,
FromScriptID:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(RepairPoliciesError::UpdateScriptPolicy)?;
println!("Policy updated! {:?}",policy.ID);
}
}else{
println!("Script did not exist! hash={}",policy.FromScriptHash);
}
Ok(())
}).await?;

if policies.len()<LIMIT as usize{
// We scanned all policies
println!("Done!");
break;
}else{
page+=1;
}
}

Ok(())
}

#[allow(dead_code)]
#[derive(Debug)]
enum RepairDuplicatesError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetScripts(submissions_api::Error),
DeleteScript(submissions_api::Error),
}

struct RepairDuplicatesConfig{
session_id:String,
api_url:String,
}
async fn repair_duplicates(config:RepairDuplicatesConfig)->Result<(),RepairDuplicatesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairDuplicatesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairDuplicatesError::Reqwest)?;

let mut sources=std::collections::HashSet::new();

const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
let scripts=api.get_scripts(submissions_api::types::GetScriptsRequest{
Page:page,
Limit:LIMIT,
Name:None,
Hash:None,
Source:None,
ResourceType:None,
ResourceID:None,
}).await.map_err(RepairDuplicatesError::GetScripts)?;

let done=scripts.len()<LIMIT as usize;

for script in scripts{
if !sources.insert(script.Source){
println!("Deleting duplicate script {:?}",script.ID);
api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script.ID,
}).await.map_err(RepairDuplicatesError::DeleteScript)?;
}
}

if done{
// We scanned all policies
println!("Done!");
break;
}else{
page+=1;
}
}

Ok(())
}

#[allow(dead_code)]
#[derive(Debug)]
enum ReleaseError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetSubmissions(submissions_api::Error),
GetMaps(submissions_api::Error),
Io(std::io::Error),
Release(submissions_api::Error),
}

struct ReleaseConfig{
session_id:String,
api_url:String,
}
async fn release(config:ReleaseConfig)->Result<(),ReleaseError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReleaseError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ReleaseError::Reqwest)?;

const LIMIT:u32=100;
const ONE_HOUR:i64=60*60;
const ONE_DAY:i64=24*ONE_HOUR;
const ONE_WEEK:i64=7*ONE_DAY;
const FRIDAY:i64=2*ONE_DAY;
const PEAK_HOURS:i64=-7*ONE_HOUR;

// determine maps ready to be released
let mut submissions_pending_release=std::collections::BTreeMap::new();
{
println!("Downloading submissions pending release...");
let mut page=1;
loop{
let submissions=api.get_submissions(submissions_api::types::GetSubmissionsRequest{
Page:page,
Limit:LIMIT,
DisplayName:None,
Creator:None,
GameID:None,
Sort:None,
Submitter:None,
AssetID:None,
UploadedAssetID:None,
StatusID:Some(submissions_api::types::SubmissionStatus::Uploaded),
}).await.map_err(ReleaseError::GetSubmissions)?;
let len=submissions.Submissions.len();
for submission in submissions.Submissions{
submissions_pending_release.entry(submission.GameID).or_insert(Vec::new()).push(submission);
}
if len<LIMIT as usize{
break;
}else{
page+=1;
}
}
}
// If there is nothing to release, exit immediately
if submissions_pending_release.is_empty(){
println!("Nothing to release!");
return Ok(());
}

// determine the most recent map release date
// if it's in the past, generate a Friday 10AM timestamp instead
let it={
println!("Determining most recent release dates...");
let mut latest_date=std::collections::HashMap::new();
let mut page=1;
loop{
let maps=api.get_maps(submissions_api::types::GetMapsRequest{
Page:page,
Limit:LIMIT,
DisplayName:None,
Creator:None,
GameID:None,
Sort:None,//TODO: sort by date to cut down requests
}).await.map_err(ReleaseError::GetMaps)?;
let len=maps.len();
for map in maps{
latest_date
.entry(map.GameID)
.and_modify(|date|
*date=map.Date.max(*date)
)
.or_insert(map.Date);
}
if len<LIMIT as usize{
break;
}else{
page+=1;
}
}

// breaks on Sun 4 Dec 292277026596
let now=std::time::UNIX_EPOCH.elapsed().unwrap().as_secs() as i64;

// If the date is in the past, unset it
latest_date.retain(|_,&mut date|now<date);

submissions_pending_release.into_iter().map(move|(game,pending)|{
let start_date=match latest_date.get(&game){
Some(&date)=>{
// round to friday
(date+(ONE_WEEK>>1)-FRIDAY)/ONE_WEEK*ONE_WEEK+FRIDAY+PEAK_HOURS
// add a week
+ONE_WEEK
},
// find soonest friday
None=>((now-FRIDAY) as u64).next_multiple_of(ONE_WEEK as u64) as i64+FRIDAY+PEAK_HOURS
};

(game,start_date,pending)
})
};

let mut rng=rand::rng();

for (game,start_date,mut pending) in it{
// shuffle maps
pending.shuffle(&mut rng);

// schedule one per week
let schedule:&Vec<_>=&pending.into_iter().enumerate().map(|(i,submission)|{
let release_date=(std::time::UNIX_EPOCH+std::time::Duration::from_secs((
start_date+i as i64*ONE_WEEK
) as u64)).into();
println!("Schedule {:?} {} at {}",submission.ID,submission.DisplayName,release_date);
submissions_api::types::ReleaseInfo{
Date:release_date,
SubmissionID:submission.ID,
}
}).collect();

// ask to confirm schedule
print!("Accept this release schedule for {game:?}? [y/N]: ");
std::io::stdout().flush().map_err(ReleaseError::Io)?;

let mut input=String::new();
std::io::stdin().read_line(&mut input).map_err(ReleaseError::Io)?;
match input.trim(){
"y"|"Y"=>(),
_=>{
println!("Quitting.");
return Ok(());
},
}

// send it
api.release_submissions(submissions_api::types::ReleaseRequest{
schedule,
}).await.map_err(ReleaseError::Release)?;
}

Ok(())
}
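
Note: the start-date arithmetic in release() is dense, so here is a worked example of the "find soonest friday" branch. The interpretation is inferred from the constants rather than documented anywhere: FRIDAY=2*ONE_DAY lands on Saturday 00:00 UTC (the Unix epoch was a Thursday), and PEAK_HOURS=-7*ONE_HOUR pulls that back to Friday 17:00 UTC, i.e. Friday 10 AM in a fixed UTC-7 zone.

const ONE_HOUR:i64=60*60;
const ONE_DAY:i64=24*ONE_HOUR;
const ONE_WEEK:i64=7*ONE_DAY;
const FRIDAY:i64=2*ONE_DAY;       // epoch (Thu 00:00 UTC) + 2 days = Sat 00:00 UTC
const PEAK_HOURS:i64=-7*ONE_HOUR; // Sat 00:00 UTC - 7h = Fri 17:00 UTC

// same expression as the None branch above, extracted into a function
fn soonest_friday(now:i64)->i64{
    ((now-FRIDAY) as u64).next_multiple_of(ONE_WEEK as u64) as i64+FRIDAY+PEAK_HOURS
}

fn main(){
    let now=1_704_067_200; // 2024-01-01 00:00:00 UTC, a Monday
    assert_eq!(soonest_friday(now),1_704_474_000); // 2024-01-05 17:00:00 UTC
    println!("{}",soonest_friday(now));
}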