Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 4b99f2028d | |
| | e9e51d455b | |
| | 3252927df7 | |
Cargo.lock (generated, 611)

File diff suppressed because it is too large
Cargo.toml

@@ -9,7 +9,7 @@ edition = "2021"
clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.31"
siphasher = "1.0.1"
submissions-api = { version = "0.7.2", registry = "strafesnet" }
submissions-api = { version = "0.3.0", features = ["external"], default-features = false, registry = "strafesnet" }
tokio = { version = "1.42.0", features = ["fs", "macros", "rt-multi-thread"] }

[profile.release]
src/cmd/mod.rs (Normal file, 26)
@@ -0,0 +1,26 @@
use clap::{Args,Parser,Subcommand};

#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
pub struct Cli{
#[command(subcommand)]
command:Commands,
}

#[derive(Subcommand)]
pub enum Commands{
Review(ReviewCommand),
UploadScripts(UploadScriptsCommand),
}

#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
cookie:String,
}
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id:PathBuf,
}
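A minimal sketch of how this CLI surface could be exercised in-process with clap's `try_parse_from`; the binary name "maps-admin" is invented for the example, and it assumes the module is mounted as `cmd`, that `use std::path::PathBuf;` is in scope there, and that the command structs and their fields are public, none of which the hunk above shows:

```rust
// Hypothetical usage sketch, not part of the diff.
use clap::Parser;
use cmd::{Cli,Commands};

fn demo()->Result<(),clap::Error>{
	// clap derives kebab-case names from the Rust identifiers:
	// UploadScripts -> "upload-scripts", session_id -> "--session-id".
	let cli=Cli::try_parse_from(["maps-admin","upload-scripts","--session-id","session.txt"])?;
	match cli.command{
		Commands::Review(review)=>println!("review with a cookie of length {}",review.cookie.len()),
		Commands::UploadScripts(upload)=>println!("upload using session file {:?}",upload.session_id),
	}
	Ok(())
}
```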
src/main.rs (455)
@@ -1,75 +1,16 @@
use clap::{Args,Parser,Subcommand};
mod cmd;

use cmd::{Cli,Commands};
use futures::{StreamExt,TryStreamExt};
use std::path::PathBuf;

const READ_CONCURRENCY:usize=16;
const REMOTE_CONCURRENCY:usize=16;

#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
struct Cli{
#[command(subcommand)]
command:Commands,
}

#[derive(Subcommand)]
enum Commands{
RepairDuplicates(RepairDuplicatesCommand),
RepairPolicies(RepairPoliciesCommand),
Review(ReviewCommand),
UploadScripts(UploadScriptsCommand),
}

#[derive(Args)]
struct RepairDuplicatesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct RepairPoliciesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}

#[tokio::main]
async fn main(){
let cli=Cli::parse();
match cli.command{
Commands::RepairDuplicates(command)=>repair_duplicates(RepairDuplicatesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairPolicies(command)=>repair_policies(RepairPoliciesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::Review(command)=>review(ReviewConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::UploadScripts(command)=>upload_scripts(UploadConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
cookie:command.cookie,
}).await.unwrap(),
Commands::UploadScripts(command)=>upload_scripts(command.session_id).await.unwrap(),
}
}

@@ -78,8 +19,6 @@ enum ScriptActionParseResult{
Block,
Exit,
Delete,
Purge,
Skip,
}
struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult{
@@ -90,8 +29,6 @@ impl std::str::FromStr for ScriptActionParseResult{
"block\n"=>Ok(Self::Block),
"exit\n"=>Ok(Self::Exit),
"delete\n"=>Ok(Self::Delete),
"purge\n"=>Ok(Self::Purge),
"skip\n"=>Ok(Self::Skip),
_=>Err(ParseScriptActionErr),
}
}
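The lines that actually prompt for and read the reviewer's input fall outside these hunks. Purely as a hedged sketch, the FromStr impl above (whose patterns keep the trailing newline) could be driven like this:

```rust
// Hypothetical sketch, not part of the diff: read one line from stdin and
// parse it; read_line keeps the trailing '\n' that the patterns above expect.
fn read_action()->Result<ScriptActionParseResult,ParseScriptActionErr>{
	let mut action=String::new();
	std::io::stdin().read_line(&mut action).map_err(|_|ParseScriptActionErr)?;
	action.parse()
}
```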
@@ -107,7 +44,6 @@ enum ReviewError{
NoScript,
WriteCurrent(std::io::Error),
ActionIO(std::io::Error),
PurgeScript(submissions_api::Error),
ReadCurrent(std::io::Error),
DeduplicateModified(submissions_api::types::SingleItemError),
UploadModified(submissions_api::Error),
@@ -115,15 +51,14 @@
}

struct ReviewConfig{
session_id:String,
api_url:String,
cookie:String,
}

async fn review(config:ReviewConfig)->Result<(),ReviewError>{
// download unreviewed policies
// review them
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new(config.api_url,cookie).map_err(ReviewError::Reqwest)?;
let cookie=submissions_api::Cookie::new(&config.cookie).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie).map_err(ReviewError::Reqwest)?;

let unreviewed_policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:1,
@@ -133,8 +68,7 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(ReviewError::GetPolicies)?;

let unreviewed_policy_count=unreviewed_policies.len();
for (i,unreviewed_policy) in unreviewed_policies.into_iter().enumerate(){
for unreviewed_policy in unreviewed_policies{
// download source code
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:unreviewed_policy.FromScriptHash.as_str(),
@@ -146,10 +80,6 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
//load source into current.lua
tokio::fs::write("current.lua",source.as_str()).await.map_err(ReviewError::WriteCurrent)?;

// print script name
println!("policy {}/{unreviewed_policy_count}",i+1);
println!("script name: {}",script_response.Name);

//prompt action in terminal
//wait for input
let script_action;
@@ -167,17 +97,6 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
let mut to_script_id=None;
// interpret action
let reviewed_policy=match script_action{
ScriptActionParseResult::Purge=>{
// remove script and policy from the database.
let remove_script_fut=api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script_response.ID,
});
let remove_script_policy_fut=api.delete_script_policy(submissions_api::types::GetScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
});
tokio::try_join!(remove_script_fut,remove_script_policy_fut).map_err(ReviewError::PurgeScript)?;
continue;
},
ScriptActionParseResult::Pass=>{
//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
let modified_source=tokio::fs::read_to_string("current.lua").await.map_err(ReviewError::ReadCurrent)?;
@@ -185,7 +104,9 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
submissions_api::types::Policy::Allowed
}else{
// compute hash
let hash=hash_source(modified_source.as_str());
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
let hash=std::hash::Hasher::finish(&hasher);

// check if modified script already exists
let maybe_script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
@@ -198,9 +119,8 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:script_response.Name.as_str(),
Source:modified_source.as_str(),
ResourceType:script_response.ResourceType,
ResourceID:Some(script_response.ResourceID),
}).await.map_err(ReviewError::UploadModified)?.ScriptID
SubmissionID:Some(script_response.SubmissionID),
}).await.map_err(ReviewError::UploadModified)?.ID
});

// use replace policy
@@ -210,104 +130,20 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
ScriptActionParseResult::Block=>submissions_api::types::Policy::Blocked,
ScriptActionParseResult::Exit=>break,
ScriptActionParseResult::Delete=>submissions_api::types::Policy::Delete,
ScriptActionParseResult::Skip=>continue,
};

// update policy
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ID:unreviewed_policy.ID,
ScriptPolicyID:unreviewed_policy.ID,
FromScriptID:None,
ToScriptID:to_script_id,
Policy:Some(reviewed_policy),
}).await.map_err(ReviewError::UpdateScriptPolicy)?;

println!("updated {:?} From: {:?} To: {:?} with policy {reviewed_policy:?}",unreviewed_policy.ID,script_response.ID,to_script_id);
}

Ok(())
}

#[allow(dead_code)]
#[derive(Debug)]
enum ScriptUploadError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
AllowedSet(std::io::Error),
AllowedMap(GetMapError),
ReplaceMap(GetMapError),
BlockedSet(std::io::Error),
GetOrCreate(GOCError),
GetOrCreatePolicyReplace(GOCError),
GetOrCreatePolicyAllowed(GOCError),
GetOrCreatePolicyBlocked(GOCError),
}

fn read_dir_stream(dir:tokio::fs::ReadDir)->impl futures::stream::Stream<Item=std::io::Result<tokio::fs::DirEntry>>{
futures::stream::unfold(dir,|mut dir|async{
match dir.next_entry().await{
Ok(Some(entry))=>Some((Ok(entry),dir)),
Ok(None)=>None, // End of directory
Err(e)=>Some((Err(e),dir)), // Error encountered
}
})
}

async fn get_set_from_file(path:impl AsRef<std::path::Path>)->std::io::Result<std::collections::HashSet<String>>{
read_dir_stream(tokio::fs::read_dir(path).await?)
.map(|dir_entry|async{
tokio::fs::read_to_string(dir_entry?.path()).await
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}

async fn get_allowed_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/allowed").await
}

async fn get_blocked_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/blocked").await
}
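For illustration, a hedged sketch of driving `read_dir_stream` directly, which is essentially what `get_set_from_file` does before buffering the reads and collecting them into a set:

```rust
// Hypothetical sketch, not part of the diff: walk the unfold-based directory
// stream one entry at a time and print each path.
use futures::TryStreamExt;

async fn list_allowed()->std::io::Result<()>{
	read_dir_stream(tokio::fs::read_dir("scripts/allowed").await?)
		.try_for_each(|entry|async move{
			println!("{:?}",entry.path());
			Ok::<_,std::io::Error>(())
		}).await
}
```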

#[allow(dead_code)]
#[derive(Debug)]
enum GetMapError{
IO(std::io::Error),
FileStem,
ToStr,
ParseInt(std::num::ParseIntError),
}

async fn get_allowed_map()->Result<std::collections::HashMap::<u32,String>,GetMapError>{
read_dir_stream(tokio::fs::read_dir("scripts/allowed").await.map_err(GetMapError::IO)?)
.map(|dir_entry|async{
let path=dir_entry.map_err(GetMapError::IO)?.path();
let id:u32=path
.file_stem().ok_or(GetMapError::FileStem)?
.to_str().ok_or(GetMapError::ToStr)?
.parse().map_err(GetMapError::ParseInt)?;
let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
Ok((id,source))
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}

async fn get_replace_map()->Result<std::collections::HashMap::<String,u32>,GetMapError>{
read_dir_stream(tokio::fs::read_dir("scripts/replace").await.map_err(GetMapError::IO)?)
.map(|dir_entry|async{
let path=dir_entry.map_err(GetMapError::IO)?.path();
let id:u32=path
.file_stem().ok_or(GetMapError::FileStem)?
.to_str().ok_or(GetMapError::ToStr)?
.parse().map_err(GetMapError::ParseInt)?;
let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
Ok((source,id))
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}

fn hash_source(source:&str)->u64{
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
@@ -331,9 +167,8 @@ async fn get_or_create_script(api:&submissions_api::external::Context,source:&st
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:"Script",
Source:source,
ResourceType:submissions_api::types::ResourceType::Unknown,
ResourceID:None,
}).await.map_err(GOCError::Other)?.ScriptID
SubmissionID:None,
}).await.map_err(GOCError::Other)?.ID
})
}

@@ -349,9 +184,9 @@ async fn check_or_create_script_poicy(
match script_policy_result{
Some(script_policy_reponse)=>{
// check that everything matches the expectation
assert_eq!(hash,script_policy_reponse.FromScriptHash);
assert_eq!(script_policy.ToScriptID,script_policy_reponse.ToScriptID);
assert_eq!(script_policy.Policy,script_policy_reponse.Policy);
assert!(hash==script_policy_reponse.FromScriptHash);
assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID);
assert!(script_policy.Policy==script_policy_reponse.Policy);
},
None=>{
// create a new policy
@@ -362,40 +197,41 @@ async fn check_or_create_script_poicy(
Ok(())
}

struct UploadConfig{
session_id:String,
api_url:String,
async fn do_policy(
api:&submissions_api::external::Context,
script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
source:&str,
to_script_id:submissions_api::types::ScriptID,
policy:submissions_api::types::Policy,
)->Result<(),GOCError>{
let hash=hash_format(hash_source(source));
check_or_create_script_poicy(api,hash.as_str(),submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source],
ToScriptID:to_script_id,
Policy:policy,
}).await
}
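`hash_format` is called by `do_policy` above and by the upload futures below, but its definition falls outside this compare. Purely as an assumption, it would encode the 64-bit SipHash produced by `hash_source` as the fixed-width hex string the API exposes as `FromScriptHash`:

```rust
// Assumed sketch, not part of the diff: zero-padded lowercase hex encoding of
// the 64-bit script hash. The real hash_format may differ.
fn hash_format(hash:u64)->String{
	format!("{:016x}",hash)
}
```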
async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ScriptUploadError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ScriptUploadError::Reqwest)?;
async fn upload_scripts(session_id:PathBuf)->Result<()>{
let cookie={
let mut cookie=String::new();
std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
submissions_api::Cookie::new(&cookie)?
};
let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;

// load all script files
let (
allowed_set_result,
allowed_map_result,
replace_map_result,
blocked_set_result,
)=tokio::join!(
get_allowed_set(),
get_allowed_map(),
get_replace_map(),
get_blocked_set(),
);

let allowed_set=allowed_set_result.map_err(ScriptUploadError::AllowedSet)?;
let allowed_map=allowed_map_result.map_err(ScriptUploadError::AllowedMap)?;
let replace_map=replace_map_result.map_err(ScriptUploadError::ReplaceMap)?;
let blocked_set=blocked_set_result.map_err(ScriptUploadError::BlockedSet)?;
let allowed_set=get_allowed_set()?;
let allowed_map=get_allowed_map()?;
let replace_map=get_replace_map()?;
let blocked=get_blocked()?;

// create a unified deduplicated set of all scripts
let script_set:std::collections::HashSet<&str>=allowed_set.iter()
.map(String::as_str)
.map(|s|s.as_str())
.chain(
replace_map.keys().map(String::as_str)
replace_map.keys().map(|s|s.as_str())
).chain(
blocked_set.iter().map(String::as_str)
blocked.iter().map(|s|s.as_str())
).collect();

// get or create every unique script
@@ -403,177 +239,48 @@ async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
futures::stream::iter(script_set)
.map(|source|async move{
let script_id=get_or_create_script(api,source).await?;
Ok((source,script_id))
Ok::<_,GOCError>((source,script_id))
})
.buffer_unordered(REMOTE_CONCURRENCY)
.try_collect().await.map_err(ScriptUploadError::GetOrCreate)?;
.buffer_unordered(16)
.try_collect().await?;

// get or create policy for each script in each category
//
// replace
let replace_fut=futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|(source,id)|async{
check_or_create_script_poicy(
futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(16),|(source,id)|async{
do_policy(
api,
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[allowed_map[id].as_str()],
Policy:submissions_api::types::Policy::Replace,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyReplace)
});

// allowed
let allowed_fut=futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Allowed,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyAllowed)
});

// blocked
let blocked_fut=futures::stream::iter(blocked_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Blocked,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyBlocked)
});

// run futures
tokio::try_join!(replace_fut,allowed_fut,blocked_fut)?;

Ok(())
}

#[allow(dead_code)]
#[derive(Debug)]
enum RepairPoliciesError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetPolicies(submissions_api::Error),
GetScripts(submissions_api::types::SingleItemError),
UpdateScriptPolicy(submissions_api::Error),
}

struct RepairPoliciesConfig{
session_id:String,
api_url:String,
}
async fn repair_policies(config:RepairPoliciesConfig)->Result<(),RepairPoliciesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairPoliciesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairPoliciesError::Reqwest)?;

const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
let policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:page,
Limit:LIMIT,
FromScriptHash:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::Replace),
}).await.map_err(RepairPoliciesError::GetPolicies)?;

futures::stream::iter(policies.iter().map(Ok)).try_for_each_concurrent(REMOTE_CONCURRENCY,async|policy|{
let from_script=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:policy.FromScriptHash.as_str(),
}).await.map_err(RepairPoliciesError::GetScripts)?;

if let Some(from_script)=from_script{
if policy.ToScriptID==from_script.ID{
// invalid policy. Reset the policy to None
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ID:policy.ID,
FromScriptID:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(RepairPoliciesError::UpdateScriptPolicy)?;
println!("Policy updated! {:?}",policy.ID);
}
}else{
println!("Script did not exist! hash={}",policy.FromScriptHash);
}
Ok(())
&script_ids,
source,
script_ids[allowed_map[id].as_str()],
submissions_api::types::Policy::Replace
).await
}).await?;

if policies.len()<LIMIT as usize{
// We scanned all policies
println!("Done!");
break;
}else{
page+=1;
}
}

Ok(())
}

#[allow(dead_code)]
#[derive(Debug)]
enum RepairDuplicatesError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetScripts(submissions_api::Error),
DeleteScript(submissions_api::Error),
}

struct RepairDuplicatesConfig{
session_id:String,
api_url:String,
}
async fn repair_duplicates(config:RepairDuplicatesConfig)->Result<(),RepairDuplicatesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairDuplicatesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairDuplicatesError::Reqwest)?;

let mut sources=std::collections::HashSet::new();

const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
let scripts=api.get_scripts(submissions_api::types::GetScriptsRequest{
Page:page,
Limit:LIMIT,
Name:None,
Hash:None,
Source:None,
ResourceType:None,
ResourceID:None,
}).await.map_err(RepairDuplicatesError::GetScripts)?;

let done=scripts.len()<LIMIT as usize;

for script in scripts{
if !sources.insert(script.Source){
println!("Deleting duplicate script {:?}",script.ID);
api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script.ID,
}).await.map_err(RepairDuplicatesError::DeleteScript)?;
}
}

if done{
// We scanned all policies
println!("Done!");
break;
}else{
page+=1;
}
}
// allowed
futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Allowed
).await
}).await?;

// blocked
futures::stream::iter(blocked.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Blocked
).await
}).await?;

Ok(())
}