28 Commits

Author SHA1 Message Date
2ed3a6b11c fix scheduling bug 2025-06-12 19:41:47 -07:00
0398fb8d9d Implement Releaser ()
- Discover submissions in `Uploaded` status
- Discover most recent release date per game
- Shuffle order
- Create release schedule hardcoded to 1 map per week, Fridays at peak hours
- Confirm each schedule before release

Reviewed-on: 
Co-authored-by: Quaternions <krakow20@gmail.com>
Co-committed-by: Quaternions <krakow20@gmail.com>
2025-06-11 05:09:50 +00:00
0f4b57b349 add repair duplicates 2025-06-08 01:04:31 -07:00
726212cb64 update deps 2025-06-06 22:47:27 -07:00
1b1c20a9e4 add purge action 2025-06-06 22:33:09 -07:00
dcc31e46b3 update submissions-api 2025-06-06 22:32:53 -07:00
5230b8a371 database repair tool 2025-06-06 15:47:52 -07:00
bfe55c2af6 fix review tool 2025-06-06 15:04:50 -07:00
a9e838d445 update deps 2025-06-01 16:40:02 -07:00
b2e61016ad print script policy 2025-04-08 15:35:08 -07:00
01a1d8f0c5 skip review 2025-04-08 14:47:52 -07:00
e1e781ed25 inform how many reviews 2025-04-08 14:42:26 -07:00
c766b8b0cc print script name 2025-04-08 14:34:09 -07:00
cee52e31be update deps 2025-04-08 14:30:13 -07:00
2f25efe07e use assert_eq 2025-03-26 15:00:07 -07:00
834d67f8f6 update deps 2025-03-26 14:59:59 -07:00
e97ad7f102 use words 2025-03-18 13:40:23 -07:00
8786ff0c79 update submissions-api 2025-03-18 13:35:20 -07:00
d23cd57850 update deps 2025-03-18 13:11:29 -07:00
80e133c27a load session id from file 2025-03-18 13:10:55 -07:00
f9fb1fb23c inline very thin function 2024-12-30 00:40:36 -08:00
4116eaf829 comment code 2024-12-28 21:10:10 -08:00
c4508480c1 reuse hash source function 2024-12-26 19:59:27 -08:00
a6b8b326f1 tidy id from filename code 2024-12-26 19:44:44 -08:00
3eb39f2c6c publish api 2024-12-26 19:44:44 -08:00
af2cf4b7a8 fix api 2024-12-26 19:44:44 -08:00
bc11f918aa uniformity 2024-12-26 19:44:44 -08:00
a16e8faf8b upload scripts 2024-12-26 19:44:44 -08:00
4 changed files with 1060 additions and 387 deletions

783
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@ -1,6 +1,6 @@
[package]
name = "rreview"
version = "1.0.0"
version = "1.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -8,8 +8,9 @@ edition = "2021"
[dependencies]
clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.31"
rand = "0.9.1"
siphasher = "1.0.1"
submissions-api = { version = "0.3.0", features = ["external"], default-features = false, registry = "strafesnet" }
submissions-api = { version = "0.8.1", registry = "strafesnet" }
tokio = { version = "1.42.0", features = ["fs", "macros", "rt-multi-thread"] }
[profile.release]

@ -1,26 +0,0 @@
use clap::{Args,Parser,Subcommand};
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
pub struct Cli{
#[command(subcommand)]
command:Commands,
}
#[derive(Subcommand)]
pub enum Commands{
Review(ReviewCommand),
UploadScripts(UploadScriptsCommand),
}
#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
cookie:String,
}
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id:PathBuf,
}

@ -1,16 +1,89 @@
mod cmd;
use cmd::{Cli,Commands};
use clap::{Args,Parser,Subcommand};
use futures::{StreamExt,TryStreamExt};
use rand::seq::SliceRandom;
use std::io::Write;
use std::path::PathBuf;
const READ_CONCURRENCY:usize=16;
const REMOTE_CONCURRENCY:usize=16;
/// Top-level CLI definition; dispatched in `main` via `Cli::parse()`.
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
struct Cli{
// The selected subcommand (one of `Commands`).
#[command(subcommand)]
command:Commands,
}
/// One variant per tool mode; each carries its own argument struct.
#[derive(Subcommand)]
enum Commands{
// Schedule uploaded submissions for weekly release.
Release(ReleaseCommand),
// Delete scripts with duplicate sources from the database.
RepairDuplicates(RepairDuplicatesCommand),
// Reset self-referential Replace policies back to None.
RepairPolicies(RepairPoliciesCommand),
// Interactively review unreviewed script policies.
Review(ReviewCommand),
// Bulk-upload scripts and policies from the local `scripts/` tree.
UploadScripts(UploadScriptsCommand),
}
/// Arguments for the `release` subcommand.
/// `session_id_file` is a path whose contents are read to a string and used
/// as the session cookie; `api_url` is the submissions API base URL.
#[derive(Args)]
struct ReleaseCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
/// Arguments for the `repair-duplicates` subcommand (same shape as the others:
/// cookie file path + API base URL).
#[derive(Args)]
struct RepairDuplicatesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
/// Arguments for the `repair-policies` subcommand.
#[derive(Args)]
struct RepairPoliciesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
/// Arguments for the `review` subcommand.
#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
/// Arguments for the `upload-scripts` subcommand.
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[tokio::main]
async fn main(){
let cli=Cli::parse();
match cli.command{
Commands::Review(command)=>review(ReviewConfig{
cookie:command.cookie,
Commands::Release(command)=>release(ReleaseConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairDuplicates(command)=>repair_duplicates(RepairDuplicatesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairPolicies(command)=>repair_policies(RepairPoliciesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::Review(command)=>review(ReviewConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::UploadScripts(command)=>upload_scripts(UploadConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::UploadScripts(command)=>upload_scripts(command.session_id).await.unwrap(),
}
}
@ -19,6 +92,8 @@ enum ScriptActionParseResult{
Block,
Exit,
Delete,
Purge,
Skip,
}
struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult{
@ -29,6 +104,8 @@ impl std::str::FromStr for ScriptActionParseResult{
"block\n"=>Ok(Self::Block),
"exit\n"=>Ok(Self::Exit),
"delete\n"=>Ok(Self::Delete),
"purge\n"=>Ok(Self::Purge),
"skip\n"=>Ok(Self::Skip),
_=>Err(ParseScriptActionErr),
}
}
@ -40,25 +117,27 @@ enum ReviewError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetPolicies(submissions_api::Error),
GetScriptFromHash(submissions_api::types::SingleItemError),
GetScriptFromHash(submissions_api::types::ScriptSingleItemError),
NoScript,
WriteCurrent(std::io::Error),
ActionIO(std::io::Error),
PurgeScript(submissions_api::Error),
ReadCurrent(std::io::Error),
DeduplicateModified(submissions_api::types::SingleItemError),
DeduplicateModified(submissions_api::types::ScriptSingleItemError),
UploadModified(submissions_api::Error),
UpdateScriptPolicy(submissions_api::Error),
}
struct ReviewConfig{
cookie:String,
session_id:String,
api_url:String,
}
async fn review(config:ReviewConfig)->Result<(),ReviewError>{
// download unreviewed policies
// review them
let cookie=submissions_api::Cookie::new(&config.cookie).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie).map_err(ReviewError::Reqwest)?;
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new(config.api_url,cookie).map_err(ReviewError::Reqwest)?;
let unreviewed_policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:1,
@ -68,7 +147,8 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(ReviewError::GetPolicies)?;
for unreviewed_policy in unreviewed_policies{
let unreviewed_policy_count=unreviewed_policies.len();
for (i,unreviewed_policy) in unreviewed_policies.into_iter().enumerate(){
// download source code
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:unreviewed_policy.FromScriptHash.as_str(),
@ -80,6 +160,10 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
//load source into current.lua
tokio::fs::write("current.lua",source.as_str()).await.map_err(ReviewError::WriteCurrent)?;
// print script name
println!("policy {}/{unreviewed_policy_count}",i+1);
println!("script name: {}",script_response.Name);
//prompt action in terminal
//wait for input
let script_action;
@ -97,6 +181,17 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
let mut to_script_id=None;
// interpret action
let reviewed_policy=match script_action{
ScriptActionParseResult::Purge=>{
// remove script and policy from the database.
let remove_script_fut=api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script_response.ID,
});
let remove_script_policy_fut=api.delete_script_policy(submissions_api::types::GetScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
});
tokio::try_join!(remove_script_fut,remove_script_policy_fut).map_err(ReviewError::PurgeScript)?;
continue;
},
ScriptActionParseResult::Pass=>{
//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
let modified_source=tokio::fs::read_to_string("current.lua").await.map_err(ReviewError::ReadCurrent)?;
@ -104,9 +199,7 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
submissions_api::types::Policy::Allowed
}else{
// compute hash
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
let hash=std::hash::Hasher::finish(&hasher);
let hash=hash_source(modified_source.as_str());
// check if modified script already exists
let maybe_script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
@ -119,8 +212,9 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:script_response.Name.as_str(),
Source:modified_source.as_str(),
SubmissionID:Some(script_response.SubmissionID),
}).await.map_err(ReviewError::UploadModified)?.ID
ResourceType:script_response.ResourceType,
ResourceID:Some(script_response.ResourceID),
}).await.map_err(ReviewError::UploadModified)?.ScriptID
});
// use replace policy
@ -130,20 +224,104 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
ScriptActionParseResult::Block=>submissions_api::types::Policy::Blocked,
ScriptActionParseResult::Exit=>break,
ScriptActionParseResult::Delete=>submissions_api::types::Policy::Delete,
ScriptActionParseResult::Skip=>continue,
};
// update policy
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
ID:unreviewed_policy.ID,
FromScriptID:None,
ToScriptID:to_script_id,
Policy:Some(reviewed_policy),
}).await.map_err(ReviewError::UpdateScriptPolicy)?;
println!("updated {:?} From: {:?} To: {:?} with policy {reviewed_policy:?}",unreviewed_policy.ID,script_response.ID,to_script_id);
}
Ok(())
}
/// Errors produced by `upload_scripts`, one variant per failure site so the
/// Debug output pinpoints which stage failed.
#[allow(dead_code)]
#[derive(Debug)]
enum ScriptUploadError{
// Session cookie was malformed.
Cookie(submissions_api::CookieError),
// HTTP client construction failed.
Reqwest(submissions_api::ReqwestError),
// Reading `scripts/allowed` as a set failed.
AllowedSet(std::io::Error),
// Reading `scripts/allowed` as an id->source map failed.
AllowedMap(GetMapError),
// Reading `scripts/replace` as a source->id map failed.
ReplaceMap(GetMapError),
// Reading `scripts/blocked` as a set failed.
BlockedSet(std::io::Error),
// get-or-create of a script failed.
GetOrCreate(GOCScriptError),
// Policy creation failures, split per category.
GetOrCreatePolicyReplace(GOCScriptPolicyError),
GetOrCreatePolicyAllowed(GOCScriptPolicyError),
GetOrCreatePolicyBlocked(GOCScriptPolicyError),
}
/// Adapt a `tokio::fs::ReadDir` into an async `Stream` of directory entries.
/// The stream ends at the first `Ok(None)` from `next_entry`; errors are
/// yielded as items and iteration continues.
fn read_dir_stream(dir:tokio::fs::ReadDir)->impl futures::stream::Stream<Item=std::io::Result<tokio::fs::DirEntry>>{
    futures::stream::unfold(dir,|mut state|async move{
        let step=state.next_entry().await;
        match step{
            Err(e)=>Some((Err(e),state)),// surface the error, keep the stream alive
            Ok(Some(entry))=>Some((Ok(entry),state)),
            Ok(None)=>None,// directory exhausted
        }
    })
}
/// Read every file directly under `path` concurrently and collect the file
/// contents (whole files, as strings) into a `HashSet`.
async fn get_set_from_file(path:impl AsRef<std::path::Path>)->std::io::Result<std::collections::HashSet<String>>{
    let dir=tokio::fs::read_dir(path).await?;
    read_dir_stream(dir)
        .map(|entry|async{
            let entry=entry?;
            tokio::fs::read_to_string(entry.path()).await
        })
        .buffer_unordered(READ_CONCURRENCY)
        .try_collect().await
}
/// Contents of every file in `scripts/allowed`, as a set of sources.
async fn get_allowed_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/allowed").await
}
/// Contents of every file in `scripts/blocked`, as a set of sources.
async fn get_blocked_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/blocked").await
}
/// Errors from building the id<->source maps in `get_allowed_map` /
/// `get_replace_map` (directory scan + numeric-file-stem parse).
#[allow(dead_code)]
#[derive(Debug)]
enum GetMapError{
// Filesystem error while listing or reading a file.
IO(std::io::Error),
// Path had no file stem.
FileStem,
// File stem was not valid UTF-8.
ToStr,
// File stem did not parse as a u32 id.
ParseInt(std::num::ParseIntError),
}
/// Map each file in `scripts/allowed` from its numeric file stem (the id)
/// to the file's contents (the source).
async fn get_allowed_map()->Result<std::collections::HashMap::<u32,String>,GetMapError>{
    let dir=tokio::fs::read_dir("scripts/allowed").await.map_err(GetMapError::IO)?;
    read_dir_stream(dir)
        .map(|entry|async{
            let path=entry.map_err(GetMapError::IO)?.path();
            // file name without extension is the script id
            let stem=path.file_stem().ok_or(GetMapError::FileStem)?;
            let stem=stem.to_str().ok_or(GetMapError::ToStr)?;
            let id:u32=stem.parse().map_err(GetMapError::ParseInt)?;
            let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
            Ok((id,source))
        })
        .buffer_unordered(READ_CONCURRENCY)
        .try_collect().await
}
/// Map each file in `scripts/replace` from its contents (the source) to its
/// numeric file stem (the id) — the inverse orientation of `get_allowed_map`.
async fn get_replace_map()->Result<std::collections::HashMap::<String,u32>,GetMapError>{
    let dir=tokio::fs::read_dir("scripts/replace").await.map_err(GetMapError::IO)?;
    read_dir_stream(dir)
        .map(|entry|async{
            let path=entry.map_err(GetMapError::IO)?.path();
            // file name without extension is the script id
            let stem=path.file_stem().ok_or(GetMapError::FileStem)?;
            let stem=stem.to_str().ok_or(GetMapError::ToStr)?;
            let id:u32=stem.parse().map_err(GetMapError::ParseInt)?;
            let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
            Ok((source,id))
        })
        .buffer_unordered(READ_CONCURRENCY)
        .try_collect().await
}
fn hash_source(source:&str)->u64{
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
@ -154,10 +332,11 @@ fn hash_format(hash:u64)->String{
format!("{:016x}",hash)
}
type GOCError=submissions_api::types::SingleItemError;
type GOCResult=Result<submissions_api::types::ScriptID,GOCError>;
type GOCScriptError=submissions_api::types::ScriptSingleItemError;
type GOCScriptPolicyError=submissions_api::types::ScriptPolicySingleItemError;
type GOCScriptResult=Result<submissions_api::types::ScriptID,GOCScriptError>;
async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCResult{
async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCScriptResult{
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:hash_format(hash_source(source)).as_str(),
}).await?;
@ -167,8 +346,9 @@ async fn get_or_create_script(api:&submissions_api::external::Context,source:&st
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:"Script",
Source:source,
SubmissionID:None,
}).await.map_err(GOCError::Other)?.ID
ResourceType:submissions_api::types::ResourceType::Unknown,
ResourceID:None,
}).await.map_err(GOCScriptError::Other)?.ScriptID
})
}
@ -176,7 +356,7 @@ async fn check_or_create_script_poicy(
api:&submissions_api::external::Context,
hash:&str,
script_policy:submissions_api::types::CreateScriptPolicyRequest,
)->Result<(),GOCError>{
)->Result<(),GOCScriptPolicyError>{
let script_policy_result=api.get_script_policy_from_hash(submissions_api::types::HashRequest{
hash,
}).await?;
@ -184,54 +364,53 @@ async fn check_or_create_script_poicy(
match script_policy_result{
Some(script_policy_reponse)=>{
// check that everything matches the expectation
assert!(hash==script_policy_reponse.FromScriptHash);
assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID);
assert!(script_policy.Policy==script_policy_reponse.Policy);
assert_eq!(hash,script_policy_reponse.FromScriptHash);
assert_eq!(script_policy.ToScriptID,script_policy_reponse.ToScriptID);
assert_eq!(script_policy.Policy,script_policy_reponse.Policy);
},
None=>{
// create a new policy
api.create_script_policy(script_policy).await.map_err(GOCError::Other)?;
api.create_script_policy(script_policy).await.map_err(GOCScriptPolicyError::Other)?;
}
}
Ok(())
}
async fn do_policy(
api:&submissions_api::external::Context,
script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
source:&str,
to_script_id:submissions_api::types::ScriptID,
policy:submissions_api::types::Policy,
)->Result<(),GOCError>{
let hash=hash_format(hash_source(source));
check_or_create_script_poicy(api,hash.as_str(),submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source],
ToScriptID:to_script_id,
Policy:policy,
}).await
struct UploadConfig{
session_id:String,
api_url:String,
}
async fn upload_scripts(session_id:PathBuf)->Result<()>{
let cookie={
let mut cookie=String::new();
std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
submissions_api::Cookie::new(&cookie)?
};
let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;
async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ScriptUploadError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ScriptUploadError::Reqwest)?;
let allowed_set=get_allowed_set()?;
let allowed_map=get_allowed_map()?;
let replace_map=get_replace_map()?;
let blocked=get_blocked()?;
// load all script files
let (
allowed_set_result,
allowed_map_result,
replace_map_result,
blocked_set_result,
)=tokio::join!(
get_allowed_set(),
get_allowed_map(),
get_replace_map(),
get_blocked_set(),
);
let allowed_set=allowed_set_result.map_err(ScriptUploadError::AllowedSet)?;
let allowed_map=allowed_map_result.map_err(ScriptUploadError::AllowedMap)?;
let replace_map=replace_map_result.map_err(ScriptUploadError::ReplaceMap)?;
let blocked_set=blocked_set_result.map_err(ScriptUploadError::BlockedSet)?;
// create a unified deduplicated set of all scripts
let script_set:std::collections::HashSet<&str>=allowed_set.iter()
.map(|s|s.as_str())
.map(String::as_str)
.chain(
replace_map.keys().map(|s|s.as_str())
replace_map.keys().map(String::as_str)
).chain(
blocked.iter().map(|s|s.as_str())
blocked_set.iter().map(String::as_str)
).collect();
// get or create every unique script
@ -239,48 +418,332 @@ async fn upload_scripts(session_id:PathBuf)->Result<()>{
futures::stream::iter(script_set)
.map(|source|async move{
let script_id=get_or_create_script(api,source).await?;
Ok::<_,GOCError>((source,script_id))
Ok((source,script_id))
})
.buffer_unordered(16)
.try_collect().await?;
.buffer_unordered(REMOTE_CONCURRENCY)
.try_collect().await.map_err(ScriptUploadError::GetOrCreate)?;
// get or create policy for each script in each category
//
// replace
futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(16),|(source,id)|async{
do_policy(
let replace_fut=futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|(source,id)|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[allowed_map[id].as_str()],
submissions_api::types::Policy::Replace
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[allowed_map[id].as_str()],
Policy:submissions_api::types::Policy::Replace,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyReplace)
});
// allowed
futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
let allowed_fut=futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Allowed
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Allowed,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyAllowed)
});
// blocked
futures::stream::iter(blocked.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
let blocked_fut=futures::stream::iter(blocked_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Blocked
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Blocked,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyBlocked)
});
// run futures
tokio::try_join!(replace_fut,allowed_fut,blocked_fut)?;
Ok(())
}
/// Errors produced by `repair_policies`.
#[allow(dead_code)]
#[derive(Debug)]
enum RepairPoliciesError{
// Session cookie was malformed.
Cookie(submissions_api::CookieError),
// HTTP client construction failed.
Reqwest(submissions_api::ReqwestError),
// Listing script policies failed.
GetPolicies(submissions_api::Error),
// Looking up a script by hash failed.
GetScripts(submissions_api::types::ScriptSingleItemError),
// Resetting a policy failed.
UpdateScriptPolicy(submissions_api::Error),
}
/// Config for `repair_policies`: session cookie value + API base URL.
struct RepairPoliciesConfig{
session_id:String,
api_url:String,
}
/// Scan all `Replace` policies and reset any self-referential ones
/// (policy replaces a script with itself) back to `Policy::None` so they
/// re-enter the review queue.
async fn repair_policies(config:RepairPoliciesConfig)->Result<(),RepairPoliciesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairPoliciesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairPoliciesError::Reqwest)?;
const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
// Fetch one page of policies currently marked Replace.
let policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:page,
Limit:LIMIT,
FromScriptHash:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::Replace),
}).await.map_err(RepairPoliciesError::GetPolicies)?;
// Check each policy on this page concurrently.
futures::stream::iter(policies.iter().map(Ok)).try_for_each_concurrent(REMOTE_CONCURRENCY,async|policy|{
// Resolve the source script by its hash.
let from_script=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:policy.FromScriptHash.as_str(),
}).await.map_err(RepairPoliciesError::GetScripts)?;
if let Some(from_script)=from_script{
if policy.ToScriptID==from_script.ID{
// invalid policy. Reset the policy to None
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ID:policy.ID,
FromScriptID:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(RepairPoliciesError::UpdateScriptPolicy)?;
println!("Policy updated! {:?}",policy.ID);
}
}else{
println!("Script did not exist! hash={}",policy.FromScriptHash);
}
Ok(())
}).await?;
// NOTE(review): updated policies drop out of the Replace filter, so the
// result set shrinks while `page` advances — later pages may skip records
// until a re-run. Confirm whether a second pass is the intended fix-up.
if policies.len()<LIMIT as usize{
// We scanned all policies
println!("Done!");
break;
}else{
page+=1;
}
}
Ok(())
}
/// Errors produced by `repair_duplicates`.
#[allow(dead_code)]
#[derive(Debug)]
enum RepairDuplicatesError{
// Session cookie was malformed.
Cookie(submissions_api::CookieError),
// HTTP client construction failed.
Reqwest(submissions_api::ReqwestError),
// Listing scripts failed.
GetScripts(submissions_api::Error),
// Deleting a duplicate script failed.
DeleteScript(submissions_api::Error),
}
/// Config for `repair_duplicates`: session cookie value + API base URL.
struct RepairDuplicatesConfig{
session_id:String,
api_url:String,
}
/// Scan every script in the database and delete those whose source text has
/// already been seen, keeping the first occurrence in scan order.
async fn repair_duplicates(config:RepairDuplicatesConfig)->Result<(),RepairDuplicatesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairDuplicatesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairDuplicatesError::Reqwest)?;
// Sources seen so far; insert returns false on a repeat.
let mut sources=std::collections::HashSet::new();
const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
// Fetch one unfiltered page of scripts.
let scripts=api.get_scripts(submissions_api::types::GetScriptsRequest{
Page:page,
Limit:LIMIT,
Name:None,
Hash:None,
Source:None,
ResourceType:None,
ResourceID:None,
}).await.map_err(RepairDuplicatesError::GetScripts)?;
// A short page means this is the last one.
let done=scripts.len()<LIMIT as usize;
for script in scripts{
// HashSet::insert is false when the source was already present.
if !sources.insert(script.Source){
println!("Deleting duplicate script {:?}",script.ID);
api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script.ID,
}).await.map_err(RepairDuplicatesError::DeleteScript)?;
}
}
// NOTE(review): deleting while advancing `page` shifts later records into
// earlier pages, so some scripts may be skipped this pass — presumably a
// re-run converges; confirm that is acceptable.
if done{
// We scanned all scripts
println!("Done!");
break;
}else{
page+=1;
}
}
Ok(())
}
/// Errors produced by `release`.
#[allow(dead_code)]
#[derive(Debug)]
enum ReleaseError{
// Session cookie was malformed.
Cookie(submissions_api::CookieError),
// HTTP client construction failed.
Reqwest(submissions_api::ReqwestError),
// Listing submissions failed.
GetSubmissions(submissions_api::Error),
// Listing maps failed.
GetMaps(submissions_api::Error),
// Terminal prompt I/O failed.
Io(std::io::Error),
// Sending the release schedule failed.
Release(submissions_api::Error),
}
/// Config for `release`: session cookie value + API base URL.
struct ReleaseConfig{
session_id:String,
api_url:String,
}
/// Build and (after interactive confirmation, per game) submit a release
/// schedule: all submissions in `Uploaded` status are shuffled and scheduled
/// one per week starting from the week after that game's latest future
/// release date, or from the soonest upcoming Friday-peak slot otherwise.
async fn release(config:ReleaseConfig)->Result<(),ReleaseError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReleaseError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ReleaseError::Reqwest)?;
const LIMIT:u32=100;
// Durations in seconds for epoch arithmetic.
const ONE_HOUR:i64=60*60;
const ONE_DAY:i64=24*ONE_HOUR;
const ONE_WEEK:i64=7*ONE_DAY;
// Offset of the target weekday from a week boundary. The Unix epoch fell on
// a Thursday, so +2 days is Saturday 00:00 UTC; combined with PEAK_HOURS
// (-7h) this lands on Friday 17:00 UTC — presumably Friday 10AM US Pacific
// per the changelog ("Fridays at peak hours"); confirm the intended zone.
const FRIDAY:i64=2*ONE_DAY;
const PEAK_HOURS:i64=-7*ONE_HOUR;
// determine maps ready to be released
let mut submissions_pending_release=std::collections::BTreeMap::new();
{
println!("Downloading submissions pending release...");
let mut page=1;
loop{
// Page through submissions filtered to Uploaded status.
let submissions=api.get_submissions(submissions_api::types::GetSubmissionsRequest{
Page:page,
Limit:LIMIT,
DisplayName:None,
Creator:None,
GameID:None,
Sort:None,
Submitter:None,
AssetID:None,
UploadedAssetID:None,
StatusID:Some(submissions_api::types::SubmissionStatus::Uploaded),
}).await.map_err(ReleaseError::GetSubmissions)?;
let len=submissions.Submissions.len();
// Group pending submissions by game.
for submission in submissions.Submissions{
submissions_pending_release.entry(submission.GameID).or_insert(Vec::new()).push(submission);
}
// A short page means we reached the end.
if len<LIMIT as usize{
break;
}else{
page+=1;
}
}
}
// If there is nothing to release, exit immediately
if submissions_pending_release.is_empty(){
println!("Nothing to release!");
return Ok(());
}
// determine the most recent map release date
// if it's in the past, generate a Friday 10AM timestamp instead
let it={
println!("Determining most recent release dates...");
// game id -> latest known map release date
let mut latest_date=std::collections::HashMap::new();
let mut page=1;
loop{
let maps=api.get_maps(submissions_api::types::GetMapsRequest{
Page:page,
Limit:LIMIT,
DisplayName:None,
Creator:None,
GameID:None,
Sort:None,//TODO: sort by date to cut down requests
}).await.map_err(ReleaseError::GetMaps)?;
let len=maps.len();
// Keep the max date per game.
for map in maps{
latest_date
.entry(map.GameID)
.and_modify(|date|
*date=map.Date.max(*date)
)
.or_insert(map.Date);
}
if len<LIMIT as usize{
break;
}else{
page+=1;
}
}
// breaks on Sun 4 Dec 292277026596
let now=std::time::UNIX_EPOCH.elapsed().unwrap().as_secs() as i64;
// If the date is in the past, unset it
latest_date.retain(|_,&mut date|now<date);
// Lazily compute (game, first release timestamp, pending submissions).
submissions_pending_release.into_iter().map(move|(game,pending)|{
let start_date=match latest_date.get(&game){
Some(&date)=>{
// round to friday
// (nearest week boundary via the half-week bias, re-anchored to the
// Friday-peak offset)
(date+(ONE_WEEK>>1)-FRIDAY)/ONE_WEEK*ONE_WEEK+FRIDAY+PEAK_HOURS
// add a week
+ONE_WEEK
},
// find soonest friday
// (round `now` up to the next week boundary relative to FRIDAY)
None=>((now-FRIDAY) as u64).next_multiple_of(ONE_WEEK as u64) as i64+FRIDAY+PEAK_HOURS
};
(game,start_date,pending)
})
};
let mut rng=rand::rng();
for (game,start_date,mut pending) in it{
// shuffle maps
pending.shuffle(&mut rng);
// schedule one per week
let schedule:&Vec<_>=&pending.into_iter().enumerate().map(|(i,submission)|{
// i-th submission releases i weeks after start_date.
let release_date=(std::time::UNIX_EPOCH+std::time::Duration::from_secs((
start_date+i as i64*ONE_WEEK
) as u64)).into();
println!("Schedule {:?} {} at {}",submission.ID,submission.DisplayName,release_date);
submissions_api::types::ReleaseInfo{
Date:release_date,
SubmissionID:submission.ID,
}
}).collect();
// ask to confirm schedule
print!("Accept this release schedule for {game:?}? [y/N]: ");
std::io::stdout().flush().map_err(ReleaseError::Io)?;
let mut input=String::new();
std::io::stdin().read_line(&mut input).map_err(ReleaseError::Io)?;
// Anything other than y/Y aborts the whole run (no further games).
match input.trim(){
"y"|"Y"=>(),
_=>{
println!("Quitting.");
return Ok(());
},
}
// send it
api.release_submissions(submissions_api::types::ReleaseRequest{
schedule,
}).await.map_err(ReleaseError::Release)?;
}
Ok(())
}