Compare commits

..

15 Commits

Author SHA1 Message Date
0398fb8d9d Implement Releaser ()
- Discover submissions in `Uploaded` status
- Discover most recent release date per game
- Shuffle order
- Create release schedule hardcoded to 1 map per week, Fridays at peak hours
- Confirm each schedule before release

Reviewed-on: 
Co-authored-by: Quaternions <krakow20@gmail.com>
Co-committed-by: Quaternions <krakow20@gmail.com>
2025-06-11 05:09:50 +00:00
0f4b57b349
add repair duplicates 2025-06-08 01:04:31 -07:00
726212cb64
update deps 2025-06-06 22:47:27 -07:00
1b1c20a9e4
add purge action 2025-06-06 22:33:09 -07:00
dcc31e46b3
update submissions-api 2025-06-06 22:32:53 -07:00
5230b8a371
database repair tool 2025-06-06 15:47:52 -07:00
bfe55c2af6
fix review tool 2025-06-06 15:04:50 -07:00
a9e838d445
update deps 2025-06-01 16:40:02 -07:00
b2e61016ad
print script policy 2025-04-08 15:35:08 -07:00
01a1d8f0c5
skip review 2025-04-08 14:47:52 -07:00
e1e781ed25
inform how many reviews 2025-04-08 14:42:26 -07:00
c766b8b0cc
print script name 2025-04-08 14:34:09 -07:00
cee52e31be
update deps 2025-04-08 14:30:13 -07:00
2f25efe07e
use assert_eq 2025-03-26 15:00:07 -07:00
834d67f8f6
update deps 2025-03-26 14:59:59 -07:00
3 changed files with 739 additions and 296 deletions

650
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@ -1,6 +1,6 @@
[package] [package]
name = "rreview" name = "rreview"
version = "1.0.0" version = "1.1.0"
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -8,8 +8,9 @@ edition = "2021"
[dependencies] [dependencies]
clap = { version = "4.4.2", features = ["derive"] } clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.31" futures = "0.3.31"
rand = "0.9.1"
siphasher = "1.0.1" siphasher = "1.0.1"
submissions-api = { version = "0.4.0", registry = "strafesnet" } submissions-api = { version = "0.8.1", registry = "strafesnet" }
tokio = { version = "1.42.0", features = ["fs", "macros", "rt-multi-thread"] } tokio = { version = "1.42.0", features = ["fs", "macros", "rt-multi-thread"] }
[profile.release] [profile.release]

@ -1,5 +1,7 @@
use clap::{Args,Parser,Subcommand}; use clap::{Args,Parser,Subcommand};
use futures::{StreamExt,TryStreamExt}; use futures::{StreamExt,TryStreamExt};
use rand::seq::SliceRandom;
use std::io::Write;
use std::path::PathBuf; use std::path::PathBuf;
const READ_CONCURRENCY:usize=16; const READ_CONCURRENCY:usize=16;
@ -15,10 +17,34 @@ struct Cli{
#[derive(Subcommand)] #[derive(Subcommand)]
enum Commands{ enum Commands{
Release(ReleaseCommand),
RepairDuplicates(RepairDuplicatesCommand),
RepairPolicies(RepairPoliciesCommand),
Review(ReviewCommand), Review(ReviewCommand),
UploadScripts(UploadScriptsCommand), UploadScripts(UploadScriptsCommand),
} }
#[derive(Args)]
struct ReleaseCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct RepairDuplicatesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct RepairPoliciesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)] #[derive(Args)]
struct ReviewCommand{ struct ReviewCommand{
#[arg(long)] #[arg(long)]
@ -38,6 +64,18 @@ struct UploadScriptsCommand{
async fn main(){ async fn main(){
let cli=Cli::parse(); let cli=Cli::parse();
match cli.command{ match cli.command{
Commands::Release(command)=>release(ReleaseConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairDuplicates(command)=>repair_duplicates(RepairDuplicatesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairPolicies(command)=>repair_policies(RepairPoliciesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::Review(command)=>review(ReviewConfig{ Commands::Review(command)=>review(ReviewConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(), session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url, api_url:command.api_url,
@ -54,6 +92,8 @@ enum ScriptActionParseResult{
Block, Block,
Exit, Exit,
Delete, Delete,
Purge,
Skip,
} }
struct ParseScriptActionErr; struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult{ impl std::str::FromStr for ScriptActionParseResult{
@ -64,6 +104,8 @@ impl std::str::FromStr for ScriptActionParseResult{
"block\n"=>Ok(Self::Block), "block\n"=>Ok(Self::Block),
"exit\n"=>Ok(Self::Exit), "exit\n"=>Ok(Self::Exit),
"delete\n"=>Ok(Self::Delete), "delete\n"=>Ok(Self::Delete),
"purge\n"=>Ok(Self::Purge),
"skip\n"=>Ok(Self::Skip),
_=>Err(ParseScriptActionErr), _=>Err(ParseScriptActionErr),
} }
} }
@ -75,12 +117,13 @@ enum ReviewError{
Cookie(submissions_api::CookieError), Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError), Reqwest(submissions_api::ReqwestError),
GetPolicies(submissions_api::Error), GetPolicies(submissions_api::Error),
GetScriptFromHash(submissions_api::types::SingleItemError), GetScriptFromHash(submissions_api::types::ScriptSingleItemError),
NoScript, NoScript,
WriteCurrent(std::io::Error), WriteCurrent(std::io::Error),
ActionIO(std::io::Error), ActionIO(std::io::Error),
PurgeScript(submissions_api::Error),
ReadCurrent(std::io::Error), ReadCurrent(std::io::Error),
DeduplicateModified(submissions_api::types::SingleItemError), DeduplicateModified(submissions_api::types::ScriptSingleItemError),
UploadModified(submissions_api::Error), UploadModified(submissions_api::Error),
UpdateScriptPolicy(submissions_api::Error), UpdateScriptPolicy(submissions_api::Error),
} }
@ -104,7 +147,8 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
Policy:Some(submissions_api::types::Policy::None), Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(ReviewError::GetPolicies)?; }).await.map_err(ReviewError::GetPolicies)?;
for unreviewed_policy in unreviewed_policies{ let unreviewed_policy_count=unreviewed_policies.len();
for (i,unreviewed_policy) in unreviewed_policies.into_iter().enumerate(){
// download source code // download source code
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{ let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:unreviewed_policy.FromScriptHash.as_str(), hash:unreviewed_policy.FromScriptHash.as_str(),
@ -116,6 +160,10 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
//load source into current.lua //load source into current.lua
tokio::fs::write("current.lua",source.as_str()).await.map_err(ReviewError::WriteCurrent)?; tokio::fs::write("current.lua",source.as_str()).await.map_err(ReviewError::WriteCurrent)?;
// print script name
println!("policy {}/{unreviewed_policy_count}",i+1);
println!("script name: {}",script_response.Name);
//prompt action in terminal //prompt action in terminal
//wait for input //wait for input
let script_action; let script_action;
@ -133,6 +181,17 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
let mut to_script_id=None; let mut to_script_id=None;
// interpret action // interpret action
let reviewed_policy=match script_action{ let reviewed_policy=match script_action{
ScriptActionParseResult::Purge=>{
// remove script and policy from the database.
let remove_script_fut=api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script_response.ID,
});
let remove_script_policy_fut=api.delete_script_policy(submissions_api::types::GetScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
});
tokio::try_join!(remove_script_fut,remove_script_policy_fut).map_err(ReviewError::PurgeScript)?;
continue;
},
ScriptActionParseResult::Pass=>{ ScriptActionParseResult::Pass=>{
//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id) //if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
let modified_source=tokio::fs::read_to_string("current.lua").await.map_err(ReviewError::ReadCurrent)?; let modified_source=tokio::fs::read_to_string("current.lua").await.map_err(ReviewError::ReadCurrent)?;
@ -140,7 +199,7 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
submissions_api::types::Policy::Allowed submissions_api::types::Policy::Allowed
}else{ }else{
// compute hash // compute hash
let hash=hash_source(source.as_str()); let hash=hash_source(modified_source.as_str());
// check if modified script already exists // check if modified script already exists
let maybe_script_response=api.get_script_from_hash(submissions_api::types::HashRequest{ let maybe_script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
@ -153,8 +212,9 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
None=>api.create_script(submissions_api::types::CreateScriptRequest{ None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:script_response.Name.as_str(), Name:script_response.Name.as_str(),
Source:modified_source.as_str(), Source:modified_source.as_str(),
SubmissionID:Some(script_response.SubmissionID), ResourceType:script_response.ResourceType,
}).await.map_err(ReviewError::UploadModified)?.ID ResourceID:Some(script_response.ResourceID),
}).await.map_err(ReviewError::UploadModified)?.ScriptID
}); });
// use replace policy // use replace policy
@ -164,6 +224,7 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
ScriptActionParseResult::Block=>submissions_api::types::Policy::Blocked, ScriptActionParseResult::Block=>submissions_api::types::Policy::Blocked,
ScriptActionParseResult::Exit=>break, ScriptActionParseResult::Exit=>break,
ScriptActionParseResult::Delete=>submissions_api::types::Policy::Delete, ScriptActionParseResult::Delete=>submissions_api::types::Policy::Delete,
ScriptActionParseResult::Skip=>continue,
}; };
// update policy // update policy
@ -173,6 +234,8 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
ToScriptID:to_script_id, ToScriptID:to_script_id,
Policy:Some(reviewed_policy), Policy:Some(reviewed_policy),
}).await.map_err(ReviewError::UpdateScriptPolicy)?; }).await.map_err(ReviewError::UpdateScriptPolicy)?;
println!("updated {:?} From: {:?} To: {:?} with policy {reviewed_policy:?}",unreviewed_policy.ID,script_response.ID,to_script_id);
} }
Ok(()) Ok(())
@ -187,10 +250,10 @@ enum ScriptUploadError{
AllowedMap(GetMapError), AllowedMap(GetMapError),
ReplaceMap(GetMapError), ReplaceMap(GetMapError),
BlockedSet(std::io::Error), BlockedSet(std::io::Error),
GetOrCreate(GOCError), GetOrCreate(GOCScriptError),
GetOrCreatePolicyReplace(GOCError), GetOrCreatePolicyReplace(GOCScriptPolicyError),
GetOrCreatePolicyAllowed(GOCError), GetOrCreatePolicyAllowed(GOCScriptPolicyError),
GetOrCreatePolicyBlocked(GOCError), GetOrCreatePolicyBlocked(GOCScriptPolicyError),
} }
fn read_dir_stream(dir:tokio::fs::ReadDir)->impl futures::stream::Stream<Item=std::io::Result<tokio::fs::DirEntry>>{ fn read_dir_stream(dir:tokio::fs::ReadDir)->impl futures::stream::Stream<Item=std::io::Result<tokio::fs::DirEntry>>{
@ -269,10 +332,11 @@ fn hash_format(hash:u64)->String{
format!("{:016x}",hash) format!("{:016x}",hash)
} }
type GOCError=submissions_api::types::SingleItemError; type GOCScriptError=submissions_api::types::ScriptSingleItemError;
type GOCResult=Result<submissions_api::types::ScriptID,GOCError>; type GOCScriptPolicyError=submissions_api::types::ScriptPolicySingleItemError;
type GOCScriptResult=Result<submissions_api::types::ScriptID,GOCScriptError>;
async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCResult{ async fn get_or_create_script(api:&submissions_api::external::Context,source:&str)->GOCScriptResult{
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{ let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:hash_format(hash_source(source)).as_str(), hash:hash_format(hash_source(source)).as_str(),
}).await?; }).await?;
@ -282,8 +346,9 @@ async fn get_or_create_script(api:&submissions_api::external::Context,source:&st
None=>api.create_script(submissions_api::types::CreateScriptRequest{ None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:"Script", Name:"Script",
Source:source, Source:source,
SubmissionID:None, ResourceType:submissions_api::types::ResourceType::Unknown,
}).await.map_err(GOCError::Other)?.ID ResourceID:None,
}).await.map_err(GOCScriptError::Other)?.ScriptID
}) })
} }
@ -291,7 +356,7 @@ async fn check_or_create_script_poicy(
api:&submissions_api::external::Context, api:&submissions_api::external::Context,
hash:&str, hash:&str,
script_policy:submissions_api::types::CreateScriptPolicyRequest, script_policy:submissions_api::types::CreateScriptPolicyRequest,
)->Result<(),GOCError>{ )->Result<(),GOCScriptPolicyError>{
let script_policy_result=api.get_script_policy_from_hash(submissions_api::types::HashRequest{ let script_policy_result=api.get_script_policy_from_hash(submissions_api::types::HashRequest{
hash, hash,
}).await?; }).await?;
@ -299,13 +364,13 @@ async fn check_or_create_script_poicy(
match script_policy_result{ match script_policy_result{
Some(script_policy_reponse)=>{ Some(script_policy_reponse)=>{
// check that everything matches the expectation // check that everything matches the expectation
assert!(hash==script_policy_reponse.FromScriptHash); assert_eq!(hash,script_policy_reponse.FromScriptHash);
assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID); assert_eq!(script_policy.ToScriptID,script_policy_reponse.ToScriptID);
assert!(script_policy.Policy==script_policy_reponse.Policy); assert_eq!(script_policy.Policy,script_policy_reponse.Policy);
}, },
None=>{ None=>{
// create a new policy // create a new policy
api.create_script_policy(script_policy).await.map_err(GOCError::Other)?; api.create_script_policy(script_policy).await.map_err(GOCScriptPolicyError::Other)?;
} }
} }
@ -407,3 +472,278 @@ async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
Ok(()) Ok(())
} }
/// Errors produced by the `repair-policies` subcommand.
///
/// `Debug` is derived so `main` can surface failures via `unwrap`;
/// `dead_code` is allowed because variants are only ever constructed,
/// never matched on.
#[allow(dead_code)]
#[derive(Debug)]
enum RepairPoliciesError{
// session id could not be parsed into a cookie
Cookie(submissions_api::CookieError),
// HTTP client construction failed
Reqwest(submissions_api::ReqwestError),
// listing script policies failed
GetPolicies(submissions_api::Error),
// looking up a script by hash failed
GetScripts(submissions_api::types::ScriptSingleItemError),
// resetting a policy failed
UpdateScriptPolicy(submissions_api::Error),
}
/// Runtime configuration for `repair_policies`, built in `main` from
/// `RepairPoliciesCommand` (session id is read from the given file).
struct RepairPoliciesConfig{
// contents of the session id file, used to authenticate the API cookie
session_id:String,
// base URL of the submissions API
api_url:String,
}
/// Reset invalid `Replace` script policies back to `None`.
///
/// Pages through all script policies with `Policy::Replace` (`LIMIT` per
/// page), looking up each policy's source script by hash with at most
/// `REMOTE_CONCURRENCY` requests in flight. A replace policy whose
/// `ToScriptID` equals the ID of the very script it replaces is
/// self-referential and therefore meaningless, so it is reset to
/// `Policy::None` for re-review.
///
/// NOTE(review): resetting a policy removes it from the `Policy::Replace`
/// filtered result set while `page` still advances, so later pages can
/// shift backward and some policies may be missed in a single run —
/// confirm whether re-running until "no updates" is the intended workflow.
async fn repair_policies(config:RepairPoliciesConfig)->Result<(),RepairPoliciesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairPoliciesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairPoliciesError::Reqwest)?;
const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
// fetch one page of Replace policies
let policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:page,
Limit:LIMIT,
FromScriptHash:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::Replace),
}).await.map_err(RepairPoliciesError::GetPolicies)?;
// resolve each policy's source script concurrently
futures::stream::iter(policies.iter().map(Ok)).try_for_each_concurrent(REMOTE_CONCURRENCY,async|policy|{
let from_script=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:policy.FromScriptHash.as_str(),
}).await.map_err(RepairPoliciesError::GetScripts)?;
if let Some(from_script)=from_script{
if policy.ToScriptID==from_script.ID{
// invalid policy. Reset the policy to None
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ID:policy.ID,
FromScriptID:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(RepairPoliciesError::UpdateScriptPolicy)?;
println!("Policy updated! {:?}",policy.ID);
}
}else{
// the hash no longer resolves; report but keep going
println!("Script did not exist! hash={}",policy.FromScriptHash);
}
Ok(())
}).await?;
// a short page means this was the final page
if policies.len()<LIMIT as usize{
// We scanned all policies
println!("Done!");
break;
}else{
page+=1;
}
}
Ok(())
}
/// Errors produced by the `repair-duplicates` subcommand.
///
/// `Debug` is derived so `main` can surface failures via `unwrap`;
/// `dead_code` is allowed because variants are only ever constructed,
/// never matched on.
#[allow(dead_code)]
#[derive(Debug)]
enum RepairDuplicatesError{
// session id could not be parsed into a cookie
Cookie(submissions_api::CookieError),
// HTTP client construction failed
Reqwest(submissions_api::ReqwestError),
// listing scripts failed
GetScripts(submissions_api::Error),
// deleting a duplicate script failed
DeleteScript(submissions_api::Error),
}
/// Runtime configuration for `repair_duplicates`, built in `main` from
/// `RepairDuplicatesCommand` (session id is read from the given file).
struct RepairDuplicatesConfig{
// contents of the session id file, used to authenticate the API cookie
session_id:String,
// base URL of the submissions API
api_url:String,
}
/// Delete scripts whose `Source` duplicates an earlier script's.
///
/// Pages through every script (`LIMIT` per page), tracking each distinct
/// `Source` in an in-memory `HashSet`; whenever a source has been seen
/// before, the later script is deleted, keeping only the first occurrence.
/// The whole set of sources is held in memory for the duration of the scan.
///
/// NOTE(review): deleting a script shifts all later results backward while
/// `page` still advances, so some scripts may be skipped in a single run —
/// confirm whether re-running until "no deletions" is the intended workflow.
async fn repair_duplicates(config:RepairDuplicatesConfig)->Result<(),RepairDuplicatesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairDuplicatesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairDuplicatesError::Reqwest)?;
// every distinct script source seen so far
let mut sources=std::collections::HashSet::new();
const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
// fetch one unfiltered page of scripts
let scripts=api.get_scripts(submissions_api::types::GetScriptsRequest{
Page:page,
Limit:LIMIT,
Name:None,
Hash:None,
Source:None,
ResourceType:None,
ResourceID:None,
}).await.map_err(RepairDuplicatesError::GetScripts)?;
// a short page means this was the final page; decide before consuming
let done=scripts.len()<LIMIT as usize;
for script in scripts{
// insert returns false when the source was already present
if !sources.insert(script.Source){
println!("Deleting duplicate script {:?}",script.ID);
api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script.ID,
}).await.map_err(RepairDuplicatesError::DeleteScript)?;
}
}
if done{
// We scanned all scripts
println!("Done!");
break;
}else{
page+=1;
}
}
Ok(())
}
/// Errors produced by the `release` subcommand.
///
/// `Debug` is derived so `main` can surface failures via `unwrap`;
/// `dead_code` is allowed because variants are only ever constructed,
/// never matched on.
#[allow(dead_code)]
#[derive(Debug)]
enum ReleaseError{
// session id could not be parsed into a cookie
Cookie(submissions_api::CookieError),
// HTTP client construction failed
Reqwest(submissions_api::ReqwestError),
// listing uploaded submissions failed
GetSubmissions(submissions_api::Error),
// listing released maps failed
GetMaps(submissions_api::Error),
// stdin/stdout failed during interactive confirmation
Io(std::io::Error),
// submitting the release schedule failed
Release(submissions_api::Error),
}
/// Runtime configuration for `release`, built in `main` from
/// `ReleaseCommand` (session id is read from the given file).
struct ReleaseConfig{
// contents of the session id file, used to authenticate the API cookie
session_id:String,
// base URL of the submissions API
api_url:String,
}
/// Discover uploaded submissions and schedule them for weekly release.
///
/// Workflow:
/// 1. Collect every submission in `Uploaded` status, grouped by `GameID`.
/// 2. Per game, find the most recent already-scheduled release date; if it
///    is in the past, or the game has none, fall back to the soonest
///    upcoming Friday at peak hours instead.
/// 3. Shuffle each game's pending submissions and schedule one per week.
/// 4. Print the proposed schedule and require interactive confirmation
///    ("y"/"Y") before sending it to the API; any other answer aborts.
///
/// # Errors
/// Returns `ReleaseError` on cookie/client construction failure, on any
/// API request failure, or on stdin/stdout I/O failure during confirmation.
async fn release(config:ReleaseConfig)->Result<(),ReleaseError>{
	let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReleaseError::Cookie)?;
	let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ReleaseError::Reqwest)?;
	const LIMIT:u32=100;
	const ONE_HOUR:i64=60*60;
	const ONE_DAY:i64=24*ONE_HOUR;
	const ONE_WEEK:i64=7*ONE_DAY;
	// The unix epoch (1970-01-01) was a Thursday, so epoch+FRIDAY is a
	// Saturday 00:00 UTC; adding PEAK_HOURS (-7h) lands on Friday 17:00 UTC,
	// i.e. Friday 10AM Pacific (UTC-7).
	const FRIDAY:i64=2*ONE_DAY;
	const PEAK_HOURS:i64=-7*ONE_HOUR;
	// determine maps ready to be released, grouped by game
	let mut submissions_pending_release=std::collections::BTreeMap::new();
	{
		println!("Downloading submissions pending release...");
		let mut page=1;
		loop{
			let submissions=api.get_submissions(submissions_api::types::GetSubmissionsRequest{
				Page:page,
				Limit:LIMIT,
				DisplayName:None,
				Creator:None,
				GameID:None,
				Sort:None,
				Submitter:None,
				AssetID:None,
				UploadedAssetID:None,
				StatusID:Some(submissions_api::types::SubmissionStatus::Uploaded),
			}).await.map_err(ReleaseError::GetSubmissions)?;
			let len=submissions.Submissions.len();
			for submission in submissions.Submissions{
				submissions_pending_release.entry(submission.GameID).or_insert_with(Vec::new).push(submission);
			}
			// a short page means this was the final page
			if len<LIMIT as usize{
				break;
			}
			page+=1;
		}
	}
	// If there is nothing to release, exit immediately
	if submissions_pending_release.is_empty(){
		println!("Nothing to release!");
		return Ok(());
	}
	// determine the most recent scheduled release date per game;
	// if it's in the past, generate a Friday 10AM timestamp instead
	let it={
		println!("Determining most recent release dates...");
		let mut latest_date=std::collections::HashMap::new();
		let mut page=1;
		loop{
			let maps=api.get_maps(submissions_api::types::GetMapsRequest{
				Page:page,
				Limit:LIMIT,
				DisplayName:None,
				Creator:None,
				GameID:None,
				Sort:None,//TODO: sort by date to cut down requests
			}).await.map_err(ReleaseError::GetMaps)?;
			let len=maps.len();
			for map in maps{
				latest_date
					.entry(map.GameID)
					// keep the most recent date per game
					// (BUG FIX: was `map.Date.min(*date)`, which kept the
					// OLDEST date — contradicting the variable name and
					// causing new schedules to collide with, or ignore,
					// already-scheduled future releases)
					.and_modify(|date|
						*date=map.Date.max(*date)
					)
					.or_insert(map.Date);
			}
			if len<LIMIT as usize{
				break;
			}
			page+=1;
		}
		// breaks on Sun 4 Dec 292277026596
		let now=std::time::UNIX_EPOCH.elapsed().unwrap().as_secs() as i64;
		// If the latest date is already in the past, unset it so that game
		// falls back to the soonest upcoming Friday below
		latest_date.retain(|_,&mut date|now<date);
		submissions_pending_release.into_iter().map(move|(game,pending)|{
			let start_date=match latest_date.get(&game){
				Some(&date)=>{
					// round the existing date to its nearest Friday peak slot
					(date+(ONE_WEEK>>1)-FRIDAY)/ONE_WEEK*ONE_WEEK+FRIDAY+PEAK_HOURS
					// then add a week so the new schedule starts after it
					+ONE_WEEK
				},
				// no future release scheduled: find the soonest friday
				None=>((now-FRIDAY) as u64).next_multiple_of(ONE_WEEK as u64) as i64+FRIDAY+PEAK_HOURS
			};
			(game,start_date,pending)
		})
	};
	let mut rng=rand::rng();
	for (game,start_date,mut pending) in it{
		// shuffle maps so release order is independent of submission order
		pending.shuffle(&mut rng);
		// schedule one per week starting at start_date
		let schedule:&Vec<_>=&pending.into_iter().enumerate().map(|(i,submission)|{
			let release_date=(std::time::UNIX_EPOCH+std::time::Duration::from_secs((
				start_date+i as i64*ONE_WEEK
			) as u64)).into();
			println!("Schedule {:?} {} at {}",submission.ID,submission.DisplayName,release_date);
			submissions_api::types::ReleaseInfo{
				Date:release_date,
				SubmissionID:submission.ID,
			}
		}).collect();
		// ask to confirm the printed schedule before committing anything
		print!("Accept this release schedule for {game:?}? [y/N]: ");
		std::io::stdout().flush().map_err(ReleaseError::Io)?;
		let mut input=String::new();
		std::io::stdin().read_line(&mut input).map_err(ReleaseError::Io)?;
		match input.trim(){
			"y"|"Y"=>(),
			_=>{
				println!("Quitting.");
				return Ok(());
			},
		}
		// send it
		api.release_submissions(submissions_api::types::ReleaseRequest{
			schedule,
		}).await.map_err(ReleaseError::Release)?;
	}
	Ok(())
}