Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 4b99f2028d | |
| | e9e51d455b | |
| | 3252927df7 | |

Cargo.lock (generated): 918 lines changed. File diff suppressed because it is too large.
Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "rreview"
-version = "1.2.2"
+version = "1.0.0"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -8,9 +8,8 @@ edition = "2021"
 [dependencies]
 clap = { version = "4.4.2", features = ["derive"] }
 futures = "0.3.31"
-rand = "0.9.1"
 siphasher = "1.0.1"
-submissions-api = { version = "0.8.1", registry = "strafesnet" }
+submissions-api = { version = "0.3.0", features = ["external"], default-features = false, registry = "strafesnet" }
 tokio = { version = "1.42.0", features = ["fs", "macros", "rt-multi-thread"] }
 
 [profile.release]
src/cmd/mod.rs (normal file): 26 lines added

@@ -0,0 +1,26 @@
+use clap::{Args,Parser,Subcommand};
+
+#[derive(Parser)]
+#[command(author,version,about,long_about=None)]
+#[command(propagate_version=true)]
+pub struct Cli{
+	#[command(subcommand)]
+	command:Commands,
+}
+
+#[derive(Subcommand)]
+pub enum Commands{
+	Review(ReviewCommand),
+	UploadScripts(UploadScriptsCommand),
+}
+
+#[derive(Args)]
+struct ReviewCommand{
+	#[arg(long)]
+	cookie:String,
+}
+#[derive(Args)]
+struct UploadScriptsCommand{
+	#[arg(long)]
+	session_id:PathBuf,
+}
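The new src/cmd/mod.rs moves the CLI definition out of main.rs. As a rough orientation (not part of the commits), the sketch below shows how these derives surface on the command line: clap turns the enum variants into kebab-case subcommands and the #[arg(long)] fields into flags, so the two entry points become `rreview review --cookie ...` and `rreview upload-scripts --session-id ...`. The test module is hypothetical and assumes it is appended inside src/cmd/mod.rs, since the structs and fields above are module-private.

```rust
// Hypothetical check, not part of the commits: appended inside src/cmd/mod.rs
// so the module-private types above are in scope.
#[cfg(test)]
mod tests{
	use super::*;
	use clap::Parser;

	#[test]
	fn review_subcommand_parses_cookie_flag(){
		// corresponds to `rreview review --cookie <value>`
		let cli=Cli::try_parse_from(["rreview","review","--cookie","SESSION"]).unwrap();
		match cli.command{
			Commands::Review(cmd)=>assert_eq!(cmd.cookie,"SESSION"),
			_=>panic!("expected Commands::Review"),
		}
	}
}
```

Note that the added module uses PathBuf in UploadScriptsCommand without a `use std::path::PathBuf;` import, so the sketch assumes that import is also present.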
src/main.rs: 792 lines changed

@@ -1,78 +1,16 @@
-use clap::{Args,Parser,Subcommand};
+mod cmd;
 
+use cmd::{Cli,Commands};
 use futures::{StreamExt,TryStreamExt};
-use rand::seq::SliceRandom;
-use submissions_api::types::{Policy,ScriptResponse,ScriptPolicyResponse};
-use std::io::Write;
-use std::path::PathBuf;
 
-const READ_CONCURRENCY:usize=16;
-const REMOTE_CONCURRENCY:usize=16;
-
-#[derive(Parser)]
-#[command(author,version,about,long_about=None)]
-#[command(propagate_version=true)]
-struct Cli{
-	#[command(subcommand)]
-	command:Commands,
-}
-
-#[derive(Subcommand)]
-enum Commands{
-	Release(ReleaseCommand),
-	Repair(RepairCommand),
-	Review(ReviewCommand),
-	UploadScripts(UploadScriptsCommand),
-}
-
-#[derive(Args)]
-struct ReleaseCommand{
-	#[arg(long)]
-	session_id_file:PathBuf,
-	#[arg(long)]
-	api_url:String,
-}
-#[derive(Args)]
-struct RepairCommand{
-	#[arg(long)]
-	session_id_file:PathBuf,
-	#[arg(long)]
-	api_url:String,
-}
-#[derive(Args)]
-struct ReviewCommand{
-	#[arg(long)]
-	session_id_file:PathBuf,
-	#[arg(long)]
-	api_url:String,
-}
-#[derive(Args)]
-struct UploadScriptsCommand{
-	#[arg(long)]
-	session_id_file:PathBuf,
-	#[arg(long)]
-	api_url:String,
-}
-
 #[tokio::main]
 async fn main(){
 	let cli=Cli::parse();
 	match cli.command{
-		Commands::Release(command)=>release(ReleaseConfig{
-			session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
-			api_url:command.api_url,
-		}).await.unwrap(),
-		Commands::Repair(command)=>repair(RepairConfig{
-			session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
-			api_url:command.api_url,
-		}).await.unwrap(),
 		Commands::Review(command)=>review(ReviewConfig{
-			session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
-			api_url:command.api_url,
-		}).await.unwrap(),
-		Commands::UploadScripts(command)=>upload_scripts(UploadConfig{
-			session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
-			api_url:command.api_url,
+			cookie:command.cookie,
 		}).await.unwrap(),
+		Commands::UploadScripts(command)=>upload_scripts(command.session_id).await.unwrap(),
 	}
 }
+
@@ -81,8 +19,6 @@ enum ScriptActionParseResult{
 	Block,
 	Exit,
 	Delete,
-	Purge,
-	Skip,
 }
 struct ParseScriptActionErr;
 impl std::str::FromStr for ScriptActionParseResult{
@@ -93,8 +29,6 @@ impl std::str::FromStr for ScriptActionParseResult{
 			"block\n"=>Ok(Self::Block),
 			"exit\n"=>Ok(Self::Exit),
 			"delete\n"=>Ok(Self::Delete),
-			"purge\n"=>Ok(Self::Purge),
-			"skip\n"=>Ok(Self::Skip),
 			_=>Err(ParseScriptActionErr),
 		}
 	}
@@ -106,39 +40,35 @@ enum ReviewError{
 	Cookie(submissions_api::CookieError),
 	Reqwest(submissions_api::ReqwestError),
 	GetPolicies(submissions_api::Error),
-	GetScriptFromHash(submissions_api::types::ScriptSingleItemError),
+	GetScriptFromHash(submissions_api::types::SingleItemError),
 	NoScript,
 	WriteCurrent(std::io::Error),
 	ActionIO(std::io::Error),
-	PurgeScript(submissions_api::Error),
 	ReadCurrent(std::io::Error),
-	DeduplicateModified(submissions_api::types::ScriptSingleItemError),
+	DeduplicateModified(submissions_api::types::SingleItemError),
 	UploadModified(submissions_api::Error),
-	UploadModifiedPolicy(submissions_api::Error),
 	UpdateScriptPolicy(submissions_api::Error),
 }
 
 struct ReviewConfig{
-	session_id:String,
-	api_url:String,
+	cookie:String,
 }
 
 async fn review(config:ReviewConfig)->Result<(),ReviewError>{
 	// download unreviewed policies
 	// review them
-	let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReviewError::Cookie)?;
-	let api=submissions_api::external::Context::new(config.api_url,cookie).map_err(ReviewError::Reqwest)?;
+	let cookie=submissions_api::Cookie::new(&config.cookie).map_err(ReviewError::Cookie)?;
+	let api=submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie).map_err(ReviewError::Reqwest)?;
 
 	let unreviewed_policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
 		Page:1,
 		Limit:100,
 		FromScriptHash:None,
 		ToScriptID:None,
-		Policy:Some(Policy::None),
+		Policy:Some(submissions_api::types::Policy::None),
 	}).await.map_err(ReviewError::GetPolicies)?;
 
-	let unreviewed_policy_count=unreviewed_policies.len();
-	for (i,unreviewed_policy) in unreviewed_policies.into_iter().enumerate(){
+	for unreviewed_policy in unreviewed_policies{
 		// download source code
 		let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
 			hash:unreviewed_policy.FromScriptHash.as_str(),
@@ -150,10 +80,6 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
 		//load source into current.lua
 		tokio::fs::write("current.lua",source.as_str()).await.map_err(ReviewError::WriteCurrent)?;
 
-		// print script name
-		println!("policy {}/{unreviewed_policy_count}",i+1);
-		println!("script name: {}",script_response.Name);
-
 		//prompt action in terminal
 		//wait for input
 		let script_action;
@@ -171,25 +97,16 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
 		let mut to_script_id=None;
 		// interpret action
 		let reviewed_policy=match script_action{
-			ScriptActionParseResult::Purge=>{
-				// remove script and policy from the database.
-				let remove_script_fut=api.delete_script(submissions_api::types::GetScriptRequest{
-					ScriptID:script_response.ID,
-				});
-				let remove_script_policy_fut=api.delete_script_policy(submissions_api::types::GetScriptPolicyRequest{
-					ScriptPolicyID:unreviewed_policy.ID,
-				});
-				tokio::try_join!(remove_script_fut,remove_script_policy_fut).map_err(ReviewError::PurgeScript)?;
-				continue;
-			},
 			ScriptActionParseResult::Pass=>{
 				//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
 				let modified_source=tokio::fs::read_to_string("current.lua").await.map_err(ReviewError::ReadCurrent)?;
 				if modified_source==source{
-					Policy::Allowed
+					submissions_api::types::Policy::Allowed
 				}else{
 					// compute hash
-					let hash=hash_source(modified_source.as_str());
+					let mut hasher=siphasher::sip::SipHasher::new();
+					std::hash::Hasher::write(&mut hasher,source.as_bytes());
+					let hash=std::hash::Hasher::finish(&hasher);
 
 					// check if modified script already exists
 					let maybe_script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
@@ -199,132 +116,34 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
 					// write to_script_id, uploading modified script if necessary
 					to_script_id=Some(match maybe_script_response{
 						Some(script_response)=>script_response.ID,
-						None=>{
-							// create a new script on the fly
-							let new_script=api.create_script(submissions_api::types::CreateScriptRequest{
-								Name:script_response.Name.as_str(),
-								Source:modified_source.as_str(),
-								ResourceType:script_response.ResourceType,
-								ResourceID:Some(script_response.ResourceID),
-							}).await.map_err(ReviewError::UploadModified)?;
-
-							// create an "allowed" policy for this script to auto-pass it
-							// if it shows up in a map fix
-							api.create_script_policy(submissions_api::types::CreateScriptPolicyRequest{
-								FromScriptID:new_script.ScriptID,
-								ToScriptID:new_script.ScriptID,
-								Policy:Policy::Allowed,
-							}).await.map_err(ReviewError::UploadModifiedPolicy)?;
-
-							new_script.ScriptID
-						}
+						None=>api.create_script(submissions_api::types::CreateScriptRequest{
+							Name:script_response.Name.as_str(),
+							Source:modified_source.as_str(),
+							SubmissionID:Some(script_response.SubmissionID),
+						}).await.map_err(ReviewError::UploadModified)?.ID
 					});
 
 					// use replace policy
-					Policy::Replace
+					submissions_api::types::Policy::Replace
 				}
 			},
-			ScriptActionParseResult::Block=>Policy::Blocked,
+			ScriptActionParseResult::Block=>submissions_api::types::Policy::Blocked,
 			ScriptActionParseResult::Exit=>break,
-			ScriptActionParseResult::Delete=>Policy::Delete,
-			ScriptActionParseResult::Skip=>continue,
+			ScriptActionParseResult::Delete=>submissions_api::types::Policy::Delete,
 		};
 
 		// update policy
 		api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
-			ID:unreviewed_policy.ID,
+			ScriptPolicyID:unreviewed_policy.ID,
 			FromScriptID:None,
 			ToScriptID:to_script_id,
 			Policy:Some(reviewed_policy),
 		}).await.map_err(ReviewError::UpdateScriptPolicy)?;
-
-		println!("updated {:?} From: {:?} To: {:?} with policy {reviewed_policy:?}",unreviewed_policy.ID,script_response.ID,to_script_id);
 	}
 
 	Ok(())
 }
 
-#[allow(dead_code)]
-#[derive(Debug)]
-enum ScriptUploadError{
-	Cookie(submissions_api::CookieError),
-	Reqwest(submissions_api::ReqwestError),
-	AllowedSet(std::io::Error),
-	AllowedMap(GetMapError),
-	ReplaceMap(GetMapError),
-	BlockedSet(std::io::Error),
-	GetOrCreate(GOCScriptError),
-	GetOrCreatePolicyReplace(GOCScriptPolicyError),
-	GetOrCreatePolicyAllowed(GOCScriptPolicyError),
-	GetOrCreatePolicyBlocked(GOCScriptPolicyError),
-}
-
-fn read_dir_stream(dir:tokio::fs::ReadDir)->impl futures::stream::Stream<Item=std::io::Result<tokio::fs::DirEntry>>{
-	futures::stream::unfold(dir,|mut dir|async{
-		match dir.next_entry().await{
-			Ok(Some(entry))=>Some((Ok(entry),dir)),
-			Ok(None)=>None, // End of directory
-			Err(e)=>Some((Err(e),dir)), // Error encountered
-		}
-	})
-}
-
-async fn get_set_from_file(path:impl AsRef<std::path::Path>)->std::io::Result<std::collections::HashSet<String>>{
-	read_dir_stream(tokio::fs::read_dir(path).await?)
-	.map(|dir_entry|async{
-		tokio::fs::read_to_string(dir_entry?.path()).await
-	})
-	.buffer_unordered(READ_CONCURRENCY)
-	.try_collect().await
-}
-
-async fn get_allowed_set()->std::io::Result<std::collections::HashSet<String>>{
-	get_set_from_file("scripts/allowed").await
-}
-
-async fn get_blocked_set()->std::io::Result<std::collections::HashSet<String>>{
-	get_set_from_file("scripts/blocked").await
-}
-
-#[allow(dead_code)]
-#[derive(Debug)]
-enum GetMapError{
-	IO(std::io::Error),
-	FileStem,
-	ToStr,
-	ParseInt(std::num::ParseIntError),
-}
-
-async fn get_allowed_map()->Result<std::collections::HashMap::<u32,String>,GetMapError>{
-	read_dir_stream(tokio::fs::read_dir("scripts/allowed").await.map_err(GetMapError::IO)?)
-	.map(|dir_entry|async{
-		let path=dir_entry.map_err(GetMapError::IO)?.path();
-		let id:u32=path
-			.file_stem().ok_or(GetMapError::FileStem)?
-			.to_str().ok_or(GetMapError::ToStr)?
-			.parse().map_err(GetMapError::ParseInt)?;
-		let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
-		Ok((id,source))
-	})
-	.buffer_unordered(READ_CONCURRENCY)
-	.try_collect().await
-}
-
-async fn get_replace_map()->Result<std::collections::HashMap::<String,u32>,GetMapError>{
-	read_dir_stream(tokio::fs::read_dir("scripts/replace").await.map_err(GetMapError::IO)?)
-	.map(|dir_entry|async{
-		let path=dir_entry.map_err(GetMapError::IO)?.path();
-		let id:u32=path
-			.file_stem().ok_or(GetMapError::FileStem)?
-			.to_str().ok_or(GetMapError::ToStr)?
-			.parse().map_err(GetMapError::ParseInt)?;
-		let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
-		Ok((source,id))
-	})
-	.buffer_unordered(READ_CONCURRENCY)
-	.try_collect().await
-}
-
 fn hash_source(source:&str)->u64{
 	let mut hasher=siphasher::sip::SipHasher::new();
 	std::hash::Hasher::write(&mut hasher,source.as_bytes());
@@ -335,11 +154,10 @@ fn hash_format(hash:u64)->String{
 	format!("{:016x}",hash)
 }
 
-type GOCScriptError=submissions_api::types::ScriptSingleItemError;
-type GOCScriptPolicyError=submissions_api::types::ScriptPolicySingleItemError;
-type GOCScriptResult=Result<submissions_api::types::ScriptID,GOCScriptError>;
+type GOCError=submissions_api::types::SingleItemError;
+type GOCResult=Result<submissions_api::types::ScriptID,GOCError>;
 
-async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCScriptResult{
+async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCResult{
 	let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
 		hash:hash_format(hash_source(source)).as_str(),
 	}).await?;
@@ -349,9 +167,8 @@ async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCScriptResult{
 		None=>api.create_script(submissions_api::types::CreateScriptRequest{
 			Name:"Script",
 			Source:source,
-			ResourceType:submissions_api::types::ResourceType::Unknown,
-			ResourceID:None,
-		}).await.map_err(GOCScriptError::Other)?.ScriptID
+			SubmissionID:None,
+		}).await.map_err(GOCError::Other)?.ID
 	})
 }
 
@@ -359,7 +176,7 @@ async fn check_or_create_script_poicy(
 	api:&submissions_api::external::Context,
 	hash:&str,
 	script_policy:submissions_api::types::CreateScriptPolicyRequest,
-)->Result<(),GOCScriptPolicyError>{
+)->Result<(),GOCError>{
 	let script_policy_result=api.get_script_policy_from_hash(submissions_api::types::HashRequest{
 		hash,
 	}).await?;
@@ -367,53 +184,54 @@ async fn check_or_create_script_poicy(
 	match script_policy_result{
 		Some(script_policy_reponse)=>{
 			// check that everything matches the expectation
-			assert_eq!(hash,script_policy_reponse.FromScriptHash);
-			assert_eq!(script_policy.ToScriptID,script_policy_reponse.ToScriptID);
-			assert_eq!(script_policy.Policy,script_policy_reponse.Policy);
+			assert!(hash==script_policy_reponse.FromScriptHash);
+			assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID);
+			assert!(script_policy.Policy==script_policy_reponse.Policy);
 		},
 		None=>{
 			// create a new policy
-			api.create_script_policy(script_policy).await.map_err(GOCScriptPolicyError::Other)?;
+			api.create_script_policy(script_policy).await.map_err(GOCError::Other)?;
 		}
 	}
 
 	Ok(())
 }
 
-struct UploadConfig{
-	session_id:String,
-	api_url:String,
+async fn do_policy(
+	api:&submissions_api::external::Context,
+	script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
+	source:&str,
+	to_script_id:submissions_api::types::ScriptID,
+	policy:submissions_api::types::Policy,
+)->Result<(),GOCError>{
+	let hash=hash_format(hash_source(source));
+	check_or_create_script_poicy(api,hash.as_str(),submissions_api::types::CreateScriptPolicyRequest{
+		FromScriptID:script_ids[source],
+		ToScriptID:to_script_id,
+		Policy:policy,
+	}).await
 }
 
-async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
-	let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ScriptUploadError::Cookie)?;
-	let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ScriptUploadError::Reqwest)?;
+async fn upload_scripts(session_id:PathBuf)->Result<()>{
+	let cookie={
+		let mut cookie=String::new();
+		std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
+		submissions_api::Cookie::new(&cookie)?
+	};
+	let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;
 
-	// load all script files
-	let (
-		allowed_set_result,
-		allowed_map_result,
-		replace_map_result,
-		blocked_set_result,
-	)=tokio::join!(
-		get_allowed_set(),
-		get_allowed_map(),
-		get_replace_map(),
-		get_blocked_set(),
-	);
-
-	let allowed_set=allowed_set_result.map_err(ScriptUploadError::AllowedSet)?;
-	let allowed_map=allowed_map_result.map_err(ScriptUploadError::AllowedMap)?;
-	let replace_map=replace_map_result.map_err(ScriptUploadError::ReplaceMap)?;
-	let blocked_set=blocked_set_result.map_err(ScriptUploadError::BlockedSet)?;
+	let allowed_set=get_allowed_set()?;
+	let allowed_map=get_allowed_map()?;
+	let replace_map=get_replace_map()?;
+	let blocked=get_blocked()?;
 
 	// create a unified deduplicated set of all scripts
 	let script_set:std::collections::HashSet<&str>=allowed_set.iter()
-		.map(String::as_str)
+		.map(|s|s.as_str())
 		.chain(
-			replace_map.keys().map(String::as_str)
+			replace_map.keys().map(|s|s.as_str())
 		).chain(
-			blocked_set.iter().map(String::as_str)
+			blocked.iter().map(|s|s.as_str())
 		).collect();
 
 	// get or create every unique script
@@ -421,474 +239,48 @@ async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
 	futures::stream::iter(script_set)
 	.map(|source|async move{
 		let script_id=get_or_create_script(api,source).await?;
-		Ok((source,script_id))
+		Ok::<_,GOCError>((source,script_id))
 	})
-	.buffer_unordered(REMOTE_CONCURRENCY)
-	.try_collect().await.map_err(ScriptUploadError::GetOrCreate)?;
+	.buffer_unordered(16)
+	.try_collect().await?;
 
 	// get or create policy for each script in each category
 	//
 	// replace
-	let replace_fut=futures::stream::iter(replace_map.iter().map(Ok))
-	.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|(source,id)|async{
-		check_or_create_script_poicy(
+	futures::stream::iter(replace_map.iter().map(Ok))
+	.try_for_each_concurrent(Some(16),|(source,id)|async{
+		do_policy(
 			api,
-			hash_format(hash_source(source)).as_str(),
-			submissions_api::types::CreateScriptPolicyRequest{
-				FromScriptID:script_ids[source.as_str()],
-				ToScriptID:script_ids[allowed_map[id].as_str()],
-				Policy:Policy::Replace,
-			}
-		).await.map_err(ScriptUploadError::GetOrCreatePolicyReplace)
-	});
+			&script_ids,
+			source,
+			script_ids[allowed_map[id].as_str()],
+			submissions_api::types::Policy::Replace
+		).await
+	}).await?;
 
 	// allowed
-	let allowed_fut=futures::stream::iter(allowed_set.iter().map(Ok))
-	.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
-		check_or_create_script_poicy(
+	futures::stream::iter(allowed_set.iter().map(Ok))
+	.try_for_each_concurrent(Some(16),|source|async{
+		do_policy(
 			api,
-			hash_format(hash_source(source)).as_str(),
-			submissions_api::types::CreateScriptPolicyRequest{
-				FromScriptID:script_ids[source.as_str()],
-				ToScriptID:script_ids[source.as_str()],
-				Policy:Policy::Allowed,
-			}
-		).await.map_err(ScriptUploadError::GetOrCreatePolicyAllowed)
-	});
+			&script_ids,
+			source,
+			script_ids[source.as_str()],
+			submissions_api::types::Policy::Allowed
+		).await
+	}).await?;
 
 	// blocked
-	let blocked_fut=futures::stream::iter(blocked_set.iter().map(Ok))
-	.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
-		check_or_create_script_poicy(
+	futures::stream::iter(blocked.iter().map(Ok))
+	.try_for_each_concurrent(Some(16),|source|async{
+		do_policy(
 			api,
-			hash_format(hash_source(source)).as_str(),
-			submissions_api::types::CreateScriptPolicyRequest{
-				FromScriptID:script_ids[source.as_str()],
-				ToScriptID:script_ids[source.as_str()],
-				Policy:Policy::Blocked,
-			}
-		).await.map_err(ScriptUploadError::GetOrCreatePolicyBlocked)
-	});
-
-	// run futures
-	tokio::try_join!(replace_fut,allowed_fut,blocked_fut)?;
-
-	Ok(())
-}
-
-#[allow(dead_code)]
-#[derive(Debug)]
-enum RepairError{
-	Io(std::io::Error),
-	Cookie(submissions_api::CookieError),
-	Reqwest(submissions_api::ReqwestError),
-	GetPolicies(submissions_api::Error),
-	GetScripts(submissions_api::types::Error),
-	UpdateScriptPolicy(submissions_api::Error),
-	DeleteScript(submissions_api::Error),
-	DeleteScriptPolicy(submissions_api::Error),
-	CreateScriptPolicy(submissions_api::Error),
-}
-struct RepairConfig{
-	session_id:String,
-	api_url:String,
-}
-async fn download_scripts(api:&submissions_api::external::Context)->Result<Vec<ScriptResponse>,RepairError>{
-	let mut scripts=Vec::new();
-	const LIMIT:u32=100;
-	let mut page=1;
-	loop{
-		println!("Downloading scripts page {page}...");
-		let new_scripts=api.get_scripts(submissions_api::types::GetScriptsRequest{
-			Page:page,
-			Limit:LIMIT,
-			Name:None,
-			Hash:None,
-			Source:None,
-			ResourceType:None,
-			ResourceID:None,
-		}).await.map_err(RepairError::GetScripts)?;
-
-		let done=new_scripts.len()<LIMIT as usize;
-		scripts.extend(new_scripts);
-
-		if done{
-			// We scanned all scripts
-			println!("Downloaded all scripts!");
-			break;
-		}else{
-			page+=1;
-		}
-	}
-	Ok(scripts)
-}
-async fn download_policies(api:&submissions_api::external::Context)->Result<Vec<ScriptPolicyResponse>,RepairError>{
-	let mut policies=Vec::new();
-	const LIMIT:u32=100;
-	let mut page=1;
-	loop{
-		println!("Downloading policies page {page}...");
-		let new_policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
-			Page:page,
-			Limit:LIMIT,
-			FromScriptHash:None,
-			ToScriptID:None,
-			Policy:None,
-		}).await.map_err(RepairError::GetPolicies)?;
-
-		let done=new_policies.len()<LIMIT as usize;
-		policies.extend(new_policies);
-
-		if done{
-			// We scanned all policies
-			println!("Downloaded all policies!");
-			break;
-		}else{
-			page+=1;
-		}
-	}
-	Ok(policies)
-}
-async fn repair(config:RepairConfig)->Result<(),RepairError>{
-	let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairError::Cookie)?;
-	let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairError::Reqwest)?;
-
-	let (scripts,policies)=tokio::try_join!(
-		download_scripts(api),
-		download_policies(api),
-	)?;
-
-	let mut script_from_id=std::collections::HashMap::new();
-	let mut script_from_hash=std::collections::HashMap::new();
-	let mut unique_sources=std::collections::HashSet::new();
-
-	let mut duplicate_scripts=Vec::new();
-
-	for script in &scripts{
-		// if not unique
-		if !unique_sources.insert(script.Source.as_str()){
-			println!("Identified duplicate script {:?}",script.ID);
-			duplicate_scripts.push(submissions_api::types::GetScriptRequest{
-				ScriptID:script.ID,
-			});
-		}else{
-			script_from_id.insert(script.ID,script);
-			script_from_hash.insert(script.Hash.as_str(),script);
-		}
-	}
-
-	if !duplicate_scripts.is_empty(){
-		// ask to confirm delete scripts
-		print!("Delete {} duplicate scripts? [y/N]: ",duplicate_scripts.len());
-		std::io::stdout().flush().map_err(RepairError::Io)?;
-
-		let mut input=String::new();
-		std::io::stdin().read_line(&mut input).map_err(RepairError::Io)?;
-		match input.trim(){
-			"y"|"Y"=>(),
-			_=>{
-				println!("Quitting.");
-				return Ok(());
-			},
-		}
-		futures::stream::iter(duplicate_scripts).map(Ok).try_for_each_concurrent(REMOTE_CONCURRENCY,|request|{
-			api.delete_script(request)
-		}).await.map_err(RepairError::DeleteScript)?;
-	}
-
-	let mut policy_from_hash=std::collections::HashMap::new();
-
-	let mut update_policies=Vec::new();
-	let mut policies_not_unique=Vec::new();
-	let mut policies_missing_from_script=Vec::new();
-	let mut check_policy_exists=Vec::new();
-
-	for policy in &policies{
-		let from_script=script_from_hash.get(policy.FromScriptHash.as_str());
-
-		if let Some(&from_script)=from_script{
-			if policy.Policy==Policy::Replace&&policy.ToScriptID==from_script.ID{
-				// invalid policy. Reset the policy to None
-				println!("Invalid policy {:?}, queueing update...",policy.ID);
-				update_policies.push(submissions_api::types::UpdateScriptPolicyRequest{
-					ID:policy.ID,
-					FromScriptID:None,
-					ToScriptID:None,
-					Policy:Some(Policy::None),
-				});
-			}else{
-				// if not unique
-				if let Some(old_policy)=policy_from_hash.insert(policy.FromScriptHash.as_str(),policy){
-					println!("Policy is not unique! hash={} {:?} {:?}",policy.FromScriptHash,policy.ID,old_policy.ID);
-					policies_not_unique.push(submissions_api::types::GetScriptPolicyRequest{
-						ScriptPolicyID:policy.ID,
-					});
-				}else{
-					// if policy is replace, but destination script is not allowed
-					if let Some(&to_script)=script_from_id.get(&policy.ToScriptID){
-						check_policy_exists.push((policy,to_script));
-					}else{
-						println!("ToScript does not exist! {:?} {:?} DOING NOTHING",policy.ToScriptID,policy.ID);
-					}
-				}
-			}
-		}else{
-			println!("FromScript does not exist! hash={} id={:?}",policy.FromScriptHash,policy.ID);
-			policies_missing_from_script.push(submissions_api::types::GetScriptPolicyRequest{
-				ScriptPolicyID:policy.ID,
-			});
-		}
-	}
-
-	let mut create_allow_policy=std::collections::HashSet::new();
-	for (policy,to_script) in check_policy_exists{
-		if let Some(&allow_policy)=policy_from_hash.get(to_script.Hash.as_str()){
-			if policy.Policy==Policy::Replace&&allow_policy.Policy!=Policy::Allowed{
-				println!("Policy {:?} ToScript {:?} Policy {:?} was expected to be Allowed, but was {:?}!",policy.ID,to_script.ID,allow_policy.ID,allow_policy.Policy);
-			}
-		}else{
-			if policy.Policy==Policy::Replace{
-				println!("Policy {:?} ToScript {:?} has no Allowed policy!",policy.ID,to_script.ID);
-				create_allow_policy.insert(to_script.ID);
-			}
-		}
-	}
-
-	// All scripts should have exactly one policy
-	for script in &scripts{
-		if !policy_from_hash.contains_key(script.Hash.as_str()){
-			println!("Script {:?} has no policy!",script.ID);
-		}
-	}
-
-	if !update_policies.is_empty(){
-		print!("Update {} policies? [y/N]: ",update_policies.len());
-		std::io::stdout().flush().map_err(RepairError::Io)?;
-
-		let mut input=String::new();
-		std::io::stdin().read_line(&mut input).map_err(RepairError::Io)?;
-		match input.trim(){
-			"y"|"Y"=>(),
-			_=>{
-				println!("Quitting.");
-				return Ok(());
-			},
-		}
-		futures::stream::iter(update_policies).map(Ok).try_for_each_concurrent(REMOTE_CONCURRENCY,|request|{
-			api.update_script_policy(request)
-		}).await.map_err(RepairError::UpdateScriptPolicy)?;
-	}
-	if !policies_not_unique.is_empty(){
-		print!("Delete {} duplicate policies? [y/N]: ",policies_not_unique.len());
-		std::io::stdout().flush().map_err(RepairError::Io)?;
-
-		let mut input=String::new();
-		std::io::stdin().read_line(&mut input).map_err(RepairError::Io)?;
-		match input.trim(){
-			"y"|"Y"=>(),
-			_=>{
-				println!("Quitting.");
-				return Ok(());
-			},
-		}
-		futures::stream::iter(policies_not_unique).map(Ok).try_for_each_concurrent(REMOTE_CONCURRENCY,|request|{
-			api.delete_script_policy(request)
-		}).await.map_err(RepairError::DeleteScriptPolicy)?;
-	}
-	if !policies_missing_from_script.is_empty(){
-		print!("Delete {} orphaned policies? [y/N]: ",policies_missing_from_script.len());
-		std::io::stdout().flush().map_err(RepairError::Io)?;
-
-		let mut input=String::new();
-		std::io::stdin().read_line(&mut input).map_err(RepairError::Io)?;
-		match input.trim(){
-			"y"|"Y"=>(),
-			_=>{
-				println!("Quitting.");
-				return Ok(());
-			},
-		}
-		futures::stream::iter(policies_missing_from_script).map(Ok).try_for_each_concurrent(REMOTE_CONCURRENCY,|request|{
-			api.delete_script_policy(request)
-		}).await.map_err(RepairError::DeleteScriptPolicy)?;
-	}
-	if !create_allow_policy.is_empty(){
-		print!("Create {} missing Allowed policies? [y/N]: ",create_allow_policy.len());
-		std::io::stdout().flush().map_err(RepairError::Io)?;
-
-		let mut input=String::new();
-		std::io::stdin().read_line(&mut input).map_err(RepairError::Io)?;
-		match input.trim(){
-			"y"|"Y"=>(),
-			_=>{
-				println!("Quitting.");
-				return Ok(());
-			},
-		}
-		futures::stream::iter(create_allow_policy).map(Ok).try_for_each_concurrent(REMOTE_CONCURRENCY,async|script_id|{
-			api.create_script_policy(submissions_api::types::CreateScriptPolicyRequest{
-				FromScriptID:script_id,
-				ToScriptID:script_id,
-				Policy:Policy::Allowed,
-			}).await?;
-			Ok(())
-		}).await.map_err(RepairError::CreateScriptPolicy)?;
-	}
-
-	Ok(())
-}
-
-#[allow(dead_code)]
-#[derive(Debug)]
-enum ReleaseError{
-	Cookie(submissions_api::CookieError),
-	Reqwest(submissions_api::ReqwestError),
-	GetSubmissions(submissions_api::Error),
-	GetMaps(submissions_api::Error),
-	Io(std::io::Error),
-	Release(submissions_api::Error),
-}
-
-struct ReleaseConfig{
-	session_id:String,
-	api_url:String,
-}
-async fn release(config:ReleaseConfig)->Result<(),ReleaseError>{
-	let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReleaseError::Cookie)?;
-	let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ReleaseError::Reqwest)?;
-
-	const LIMIT:u32=100;
-	const ONE_HOUR:i64=60*60;
-	const ONE_DAY:i64=24*ONE_HOUR;
-	const ONE_WEEK:i64=7*ONE_DAY;
-	const FRIDAY:i64=2*ONE_DAY;
-	const PEAK_HOURS:i64=-7*ONE_HOUR;
-
-	// determine maps ready to be released
-	let mut submissions_pending_release=std::collections::BTreeMap::new();
-	{
-		println!("Downloading submissions pending release...");
-		let mut page=1;
-		loop{
-			let submissions=api.get_submissions(submissions_api::types::GetSubmissionsRequest{
-				Page:page,
-				Limit:LIMIT,
-				DisplayName:None,
-				Creator:None,
-				GameID:None,
-				Sort:None,
-				Submitter:None,
-				AssetID:None,
-				UploadedAssetID:None,
-				StatusID:Some(submissions_api::types::SubmissionStatus::Uploaded),
-			}).await.map_err(ReleaseError::GetSubmissions)?;
-			let len=submissions.Submissions.len();
-			for submission in submissions.Submissions{
-				submissions_pending_release.entry(submission.GameID).or_insert(Vec::new()).push(submission);
-			}
-			if len<LIMIT as usize{
-				break;
-			}else{
-				page+=1;
-			}
-		}
-	}
-	// If there is nothing to release, exit immediately
-	if submissions_pending_release.is_empty(){
-		println!("Nothing to release!");
-		return Ok(());
-	}
-
-	// determine the most recent map release date
-	// if it's in the past, generate a Friday 10AM timestamp instead
-	let it={
-		println!("Determining most recent release dates...");
-		let mut latest_date=std::collections::HashMap::new();
-		let mut page=1;
-		loop{
-			let maps=api.get_maps(submissions_api::types::GetMapsRequest{
-				Page:page,
-				Limit:LIMIT,
-				DisplayName:None,
-				Creator:None,
-				GameID:None,
-				Sort:None,//TODO: sort by date to cut down requests
-			}).await.map_err(ReleaseError::GetMaps)?;
-			let len=maps.len();
-			for map in maps{
-				latest_date
-					.entry(map.GameID)
-					.and_modify(|date|
-						*date=map.Date.max(*date)
-					)
-					.or_insert(map.Date);
-			}
-			if len<LIMIT as usize{
-				break;
-			}else{
-				page+=1;
-			}
-		}
-
-		// breaks on Sun 4 Dec 292277026596
-		let now=std::time::UNIX_EPOCH.elapsed().unwrap().as_secs() as i64;
-
-		// If the date is in the past, unset it
-		latest_date.retain(|_,&mut date|now<date);
-
-		submissions_pending_release.into_iter().map(move|(game,pending)|{
-			let start_date=match latest_date.get(&game){
-				Some(&date)=>{
-					// round to friday
-					(date+(ONE_WEEK>>1)-FRIDAY)/ONE_WEEK*ONE_WEEK+FRIDAY+PEAK_HOURS
-					// add a week
-					+ONE_WEEK
-				},
-				// find soonest friday
-				None=>((now-FRIDAY) as u64).next_multiple_of(ONE_WEEK as u64) as i64+FRIDAY+PEAK_HOURS
-			};
-
-			(game,start_date,pending)
-		})
-	};
-
-	let mut rng=rand::rng();
-
-	for (game,start_date,mut pending) in it{
-		// shuffle maps
-		pending.shuffle(&mut rng);
-
-		// schedule one per week
-		let schedule:&Vec<_>=&pending.into_iter().enumerate().map(|(i,submission)|{
-			let release_date=(std::time::UNIX_EPOCH+std::time::Duration::from_secs((
-				start_date+i as i64*ONE_WEEK
-			) as u64)).into();
-			println!("Schedule {:?} {} at {}",submission.ID,submission.DisplayName,release_date);
-			submissions_api::types::ReleaseInfo{
-				Date:release_date,
-				SubmissionID:submission.ID,
-			}
-		}).collect();
-
-		// ask to confirm schedule
-		print!("Accept this release schedule for {game:?}? [y/N]: ");
-		std::io::stdout().flush().map_err(ReleaseError::Io)?;
-
-		let mut input=String::new();
-		std::io::stdin().read_line(&mut input).map_err(ReleaseError::Io)?;
-		match input.trim(){
-			"y"|"Y"=>(),
-			_=>{
-				println!("Quitting.");
-				return Ok(());
-			},
-		}
-
-		// send it
-		api.release_submissions(submissions_api::types::ReleaseRequest{
-			schedule,
-		}).await.map_err(ReleaseError::Release)?;
-	}
-
 	Ok(())
 }
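
Both sides of this diff key scripts by content hash: hash_source computes a 64-bit SipHash of the script source and hash_format renders it as 16 lowercase hex digits, which is the value passed to get_script_from_hash. The standalone sketch below is not part of the commits (the sample source string is invented); it simply mirrors those two helpers to show what the lookup key looks like.

```rust
// Standalone sketch, not part of the diff: mirrors the unchanged
// hash_source/hash_format helpers to illustrate the script lookup key.
use std::hash::Hasher;

fn hash_source(source:&str)->u64{
	let mut hasher=siphasher::sip::SipHasher::new();
	hasher.write(source.as_bytes());
	hasher.finish()
}

fn hash_format(hash:u64)->String{
	// zero-padded lowercase hex, always 16 characters
	format!("{:016x}",hash)
}

fn main(){
	let key=hash_format(hash_source("print(\"example\")"));
	assert_eq!(key.len(),16);
	println!("script hash key: {key}");
}
```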