Compare commits

..

26 Commits

Author SHA1 Message Date
0f4b57b349
add repair duplicates 2025-06-08 01:04:31 -07:00
726212cb64
update deps 2025-06-06 22:47:27 -07:00
1b1c20a9e4
add purge action 2025-06-06 22:33:09 -07:00
dcc31e46b3
update submissions-api 2025-06-06 22:32:53 -07:00
5230b8a371
database repair tool 2025-06-06 15:47:52 -07:00
bfe55c2af6
fix review tool 2025-06-06 15:04:50 -07:00
a9e838d445
update deps 2025-06-01 16:40:02 -07:00
b2e61016ad
print script policy 2025-04-08 15:35:08 -07:00
01a1d8f0c5
skip review 2025-04-08 14:47:52 -07:00
e1e781ed25
inform how many reviews 2025-04-08 14:42:26 -07:00
c766b8b0cc
print script name 2025-04-08 14:34:09 -07:00
cee52e31be
update deps 2025-04-08 14:30:13 -07:00
2f25efe07e
use assert_eq 2025-03-26 15:00:07 -07:00
834d67f8f6
update deps 2025-03-26 14:59:59 -07:00
e97ad7f102 use words 2025-03-18 13:40:23 -07:00
8786ff0c79 update submissions-api 2025-03-18 13:35:20 -07:00
d23cd57850 update deps 2025-03-18 13:11:29 -07:00
80e133c27a load session id from file 2025-03-18 13:10:55 -07:00
f9fb1fb23c inline very thin function 2024-12-30 00:40:36 -08:00
4116eaf829 comment code 2024-12-28 21:10:10 -08:00
c4508480c1 reuse hash source function 2024-12-26 19:59:27 -08:00
a6b8b326f1 tidy id from filename code 2024-12-26 19:44:44 -08:00
3eb39f2c6c publish api 2024-12-26 19:44:44 -08:00
af2cf4b7a8 fix api 2024-12-26 19:44:44 -08:00
bc11f918aa uniformity 2024-12-26 19:44:44 -08:00
a16e8faf8b upload scripts 2024-12-26 19:44:44 -08:00
4 changed files with 719 additions and 375 deletions

617
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@ -9,7 +9,7 @@ edition = "2021"
clap = { version = "4.4.2", features = ["derive"] }
futures = "0.3.31"
siphasher = "1.0.1"
submissions-api = { version = "0.3.0", features = ["external"], default-features = false, registry = "strafesnet" }
submissions-api = { version = "0.7.2", registry = "strafesnet" }
tokio = { version = "1.42.0", features = ["fs", "macros", "rt-multi-thread"] }
[profile.release]

@ -1,26 +0,0 @@
// NOTE(review): this span is the old `cmd.rs` shown as fully deleted by the
// diff hunk header above (`@ -1,26 +0,0`); its contents were merged into main.rs.
use clap::{Args,Parser,Subcommand};
// Top-level CLI parser; clap derives argument parsing from this struct.
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
pub struct Cli{
#[command(subcommand)]
command:Commands,
}
// One variant per supported subcommand.
#[derive(Subcommand)]
pub enum Commands{
Review(ReviewCommand),
UploadScripts(UploadScriptsCommand),
}
// `review` took the auth cookie directly as a command-line argument.
#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
cookie:String,
}
// `upload-scripts` took a path to a file containing the session id.
// NOTE(review): `PathBuf` has no visible `use std::path::PathBuf;` in this
// deleted module — presumably imported elsewhere; cannot confirm from here.
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id:PathBuf,
}

@ -1,16 +1,75 @@
mod cmd;
use cmd::{Cli,Commands};
use clap::{Args,Parser,Subcommand};
use futures::{StreamExt,TryStreamExt};
use std::path::PathBuf;
const READ_CONCURRENCY:usize=16;
const REMOTE_CONCURRENCY:usize=16;
// Top-level CLI parser derived by clap; the chosen subcommand selects which
// maintenance tool runs in `main`.
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version=true)]
struct Cli{
#[command(subcommand)]
command:Commands,
}
// One variant per tool: two database-repair passes, the interactive review
// loop, and the bulk script uploader.
#[derive(Subcommand)]
enum Commands{
RepairDuplicates(RepairDuplicatesCommand),
RepairPolicies(RepairPoliciesCommand),
Review(ReviewCommand),
UploadScripts(UploadScriptsCommand),
}
// Every subcommand takes the same two flags:
//   --session-id-file  path to a file whose contents are used as the auth cookie
//   --api-url          base URL of the submissions API
#[derive(Args)]
struct RepairDuplicatesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct RepairPoliciesCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct ReviewCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
#[derive(Args)]
struct UploadScriptsCommand{
#[arg(long)]
session_id_file:PathBuf,
#[arg(long)]
api_url:String,
}
// Entry point: parse the CLI, read the session id from the given file, and
// dispatch to the selected tool. Each tool's config pairs the raw session-id
// string with the API base URL.
// NOTE(review): this span is diff residue — it interleaves removed and added
// lines from the same hunk. The two lines flagged below belong to the OLD
// version and do not parse together with the new arms.
#[tokio::main]
async fn main(){
let cli=Cli::parse();
match cli.command{
// NOTE(review): the next two lines are the old `Review` arm (cookie passed
// directly); superseded by the `Commands::Review` arm further down.
Commands::Review(command)=>review(ReviewConfig{
cookie:command.cookie,
Commands::RepairDuplicates(command)=>repair_duplicates(RepairDuplicatesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::RepairPolicies(command)=>repair_policies(RepairPoliciesConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::Review(command)=>review(ReviewConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
Commands::UploadScripts(command)=>upload_scripts(UploadConfig{
session_id:std::fs::read_to_string(command.session_id_file).unwrap(),
api_url:command.api_url,
}).await.unwrap(),
// NOTE(review): the next line is the old `UploadScripts` arm (PathBuf
// argument); superseded by the arm directly above.
Commands::UploadScripts(command)=>upload_scripts(command.session_id).await.unwrap(),
}
}
@ -19,6 +78,8 @@ enum ScriptActionParseResult{
Block,
Exit,
Delete,
Purge,
Skip,
}
struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult{
@ -29,6 +90,8 @@ impl std::str::FromStr for ScriptActionParseResult{
"block\n"=>Ok(Self::Block),
"exit\n"=>Ok(Self::Exit),
"delete\n"=>Ok(Self::Delete),
"purge\n"=>Ok(Self::Purge),
"skip\n"=>Ok(Self::Skip),
_=>Err(ParseScriptActionErr),
}
}
@ -44,6 +107,7 @@ enum ReviewError{
NoScript,
WriteCurrent(std::io::Error),
ActionIO(std::io::Error),
PurgeScript(submissions_api::Error),
ReadCurrent(std::io::Error),
DeduplicateModified(submissions_api::types::SingleItemError),
UploadModified(submissions_api::Error),
@ -51,14 +115,15 @@ enum ReviewError{
}
struct ReviewConfig{
cookie:String,
session_id:String,
api_url:String,
}
async fn review(config:ReviewConfig)->Result<(),ReviewError>{
// download unreviewed policies
// review them
let cookie=submissions_api::Cookie::new(&config.cookie).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie).map_err(ReviewError::Reqwest)?;
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ReviewError::Cookie)?;
let api=submissions_api::external::Context::new(config.api_url,cookie).map_err(ReviewError::Reqwest)?;
let unreviewed_policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:1,
@ -68,7 +133,8 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(ReviewError::GetPolicies)?;
for unreviewed_policy in unreviewed_policies{
let unreviewed_policy_count=unreviewed_policies.len();
for (i,unreviewed_policy) in unreviewed_policies.into_iter().enumerate(){
// download source code
let script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:unreviewed_policy.FromScriptHash.as_str(),
@ -80,6 +146,10 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
//load source into current.lua
tokio::fs::write("current.lua",source.as_str()).await.map_err(ReviewError::WriteCurrent)?;
// print script name
println!("policy {}/{unreviewed_policy_count}",i+1);
println!("script name: {}",script_response.Name);
//prompt action in terminal
//wait for input
let script_action;
@ -97,6 +167,17 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
let mut to_script_id=None;
// interpret action
let reviewed_policy=match script_action{
ScriptActionParseResult::Purge=>{
// remove script and policy from the database.
let remove_script_fut=api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script_response.ID,
});
let remove_script_policy_fut=api.delete_script_policy(submissions_api::types::GetScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
});
tokio::try_join!(remove_script_fut,remove_script_policy_fut).map_err(ReviewError::PurgeScript)?;
continue;
},
ScriptActionParseResult::Pass=>{
//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
let modified_source=tokio::fs::read_to_string("current.lua").await.map_err(ReviewError::ReadCurrent)?;
@ -104,9 +185,7 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
submissions_api::types::Policy::Allowed
}else{
// compute hash
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
let hash=std::hash::Hasher::finish(&hasher);
let hash=hash_source(modified_source.as_str());
// check if modified script already exists
let maybe_script_response=api.get_script_from_hash(submissions_api::types::HashRequest{
@ -119,8 +198,9 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:script_response.Name.as_str(),
Source:modified_source.as_str(),
SubmissionID:Some(script_response.SubmissionID),
}).await.map_err(ReviewError::UploadModified)?.ID
ResourceType:script_response.ResourceType,
ResourceID:Some(script_response.ResourceID),
}).await.map_err(ReviewError::UploadModified)?.ScriptID
});
// use replace policy
@ -130,20 +210,104 @@ async fn review(config:ReviewConfig)->Result<(),ReviewError>{
ScriptActionParseResult::Block=>submissions_api::types::Policy::Blocked,
ScriptActionParseResult::Exit=>break,
ScriptActionParseResult::Delete=>submissions_api::types::Policy::Delete,
ScriptActionParseResult::Skip=>continue,
};
// update policy
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ScriptPolicyID:unreviewed_policy.ID,
ID:unreviewed_policy.ID,
FromScriptID:None,
ToScriptID:to_script_id,
Policy:Some(reviewed_policy),
}).await.map_err(ReviewError::UpdateScriptPolicy)?;
println!("updated {:?} From: {:?} To: {:?} with policy {reviewed_policy:?}",unreviewed_policy.ID,script_response.ID,to_script_id);
}
Ok(())
}
// Failure modes of `upload_scripts`. Variants carry the underlying error and
// are named after the step that failed, so `Debug` output identifies the phase.
#[allow(dead_code)]
#[derive(Debug)]
enum ScriptUploadError{
// building the auth cookie / HTTP client
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
// loading the local script directories
AllowedSet(std::io::Error),
AllowedMap(GetMapError),
ReplaceMap(GetMapError),
BlockedSet(std::io::Error),
// remote get-or-create of scripts, then of each policy category
GetOrCreate(GOCError),
GetOrCreatePolicyReplace(GOCError),
GetOrCreatePolicyAllowed(GOCError),
GetOrCreatePolicyBlocked(GOCError),
}
// Adapts tokio's pull-style `ReadDir` into a `Stream` of directory entries.
// The stream terminates at the first `Ok(None)` from `next_entry`; an `Err`
// is yielded as a stream item and iteration continues with the same handle.
fn read_dir_stream(dir:tokio::fs::ReadDir)->impl futures::stream::Stream<Item=std::io::Result<tokio::fs::DirEntry>>{
futures::stream::unfold(dir,|mut dir|async{
match dir.next_entry().await{
Err(e)=>Some((Err(e),dir)),
Ok(next)=>next.map(|entry|(Ok(entry),dir)),
}
})
}
// Reads every file directly inside `path` concurrently (up to READ_CONCURRENCY
// at once) and collects their contents into a set, deduplicating identical
// sources. Any directory or file I/O error aborts the whole collection.
async fn get_set_from_file(path:impl AsRef<std::path::Path>)->std::io::Result<std::collections::HashSet<String>>{
read_dir_stream(tokio::fs::read_dir(path).await?)
.map(|dir_entry|async{
tokio::fs::read_to_string(dir_entry?.path()).await
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}
// Deduplicated sources of the locally stored allowed scripts.
async fn get_allowed_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/allowed").await
}
// Deduplicated sources of the locally stored blocked scripts.
async fn get_blocked_set()->std::io::Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/blocked").await
}
// Failure modes of the id->source / source->id directory loaders: directory or
// file I/O, a path with no file stem, a non-UTF-8 stem, or a stem that does not
// parse as a u32 id.
#[allow(dead_code)]
#[derive(Debug)]
enum GetMapError{
IO(std::io::Error),
FileStem,
ToStr,
ParseInt(std::num::ParseIntError),
}
// Loads `scripts/allowed`, mapping each file's numeric stem (e.g. `123.lua`
// -> 123) to its source text. Reads run concurrently up to READ_CONCURRENCY.
async fn get_allowed_map()->Result<std::collections::HashMap::<u32,String>,GetMapError>{
read_dir_stream(tokio::fs::read_dir("scripts/allowed").await.map_err(GetMapError::IO)?)
.map(|dir_entry|async{
let path=dir_entry.map_err(GetMapError::IO)?.path();
// file stem is the allowed-script id
let id:u32=path
.file_stem().ok_or(GetMapError::FileStem)?
.to_str().ok_or(GetMapError::ToStr)?
.parse().map_err(GetMapError::ParseInt)?;
let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
Ok((id,source))
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}
// Loads `scripts/replace`, keyed the opposite way to `get_allowed_map`:
// source text -> numeric file stem. The id names which allowed script this
// source should be replaced with (see the replace-policy pass in upload).
async fn get_replace_map()->Result<std::collections::HashMap::<String,u32>,GetMapError>{
read_dir_stream(tokio::fs::read_dir("scripts/replace").await.map_err(GetMapError::IO)?)
.map(|dir_entry|async{
let path=dir_entry.map_err(GetMapError::IO)?.path();
// file stem is the id of the replacement target
let id:u32=path
.file_stem().ok_or(GetMapError::FileStem)?
.to_str().ok_or(GetMapError::ToStr)?
.parse().map_err(GetMapError::ParseInt)?;
let source=tokio::fs::read_to_string(path).await.map_err(GetMapError::IO)?;
Ok((source,id))
})
.buffer_unordered(READ_CONCURRENCY)
.try_collect().await
}
fn hash_source(source:&str)->u64{
let mut hasher=siphasher::sip::SipHasher::new();
std::hash::Hasher::write(&mut hasher,source.as_bytes());
@ -167,8 +331,9 @@ async fn get_or_create_script(api:&submissions_api::external::Context,source:&st
None=>api.create_script(submissions_api::types::CreateScriptRequest{
Name:"Script",
Source:source,
SubmissionID:None,
}).await.map_err(GOCError::Other)?.ID
ResourceType:submissions_api::types::ResourceType::Unknown,
ResourceID:None,
}).await.map_err(GOCError::Other)?.ScriptID
})
}
@ -184,9 +349,9 @@ async fn check_or_create_script_poicy(
match script_policy_result{
Some(script_policy_reponse)=>{
// check that everything matches the expectation
assert!(hash==script_policy_reponse.FromScriptHash);
assert!(script_policy.ToScriptID==script_policy_reponse.ToScriptID);
assert!(script_policy.Policy==script_policy_reponse.Policy);
assert_eq!(hash,script_policy_reponse.FromScriptHash);
assert_eq!(script_policy.ToScriptID,script_policy_reponse.ToScriptID);
assert_eq!(script_policy.Policy,script_policy_reponse.Policy);
},
None=>{
// create a new policy
@ -197,41 +362,40 @@ async fn check_or_create_script_poicy(
Ok(())
}
async fn do_policy(
api:&submissions_api::external::Context,
script_ids:&std::collections::HashMap<&str,submissions_api::types::ScriptID>,
source:&str,
to_script_id:submissions_api::types::ScriptID,
policy:submissions_api::types::Policy,
)->Result<(),GOCError>{
let hash=hash_format(hash_source(source));
check_or_create_script_poicy(api,hash.as_str(),submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source],
ToScriptID:to_script_id,
Policy:policy,
}).await
// Inputs for `upload_scripts`: the raw session-id string (used as the auth
// cookie) and the API base URL.
struct UploadConfig{
session_id:String,
api_url:String,
}
async fn upload_scripts(session_id:PathBuf)->Result<()>{
let cookie={
let mut cookie=String::new();
std::fs::File::open(session_id)?.read_to_string(&mut cookie)?;
submissions_api::Cookie::new(&cookie)?
};
let api=&submissions_api::external::Context::new("http://localhost:8083".to_owned(),cookie)?;
async fn upload_scripts(config:UploadConfig)->Result<(),ScriptUploadError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(ScriptUploadError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(ScriptUploadError::Reqwest)?;
let allowed_set=get_allowed_set()?;
let allowed_map=get_allowed_map()?;
let replace_map=get_replace_map()?;
let blocked=get_blocked()?;
// load all script files
let (
allowed_set_result,
allowed_map_result,
replace_map_result,
blocked_set_result,
)=tokio::join!(
get_allowed_set(),
get_allowed_map(),
get_replace_map(),
get_blocked_set(),
);
let allowed_set=allowed_set_result.map_err(ScriptUploadError::AllowedSet)?;
let allowed_map=allowed_map_result.map_err(ScriptUploadError::AllowedMap)?;
let replace_map=replace_map_result.map_err(ScriptUploadError::ReplaceMap)?;
let blocked_set=blocked_set_result.map_err(ScriptUploadError::BlockedSet)?;
// create a unified deduplicated set of all scripts
let script_set:std::collections::HashSet<&str>=allowed_set.iter()
.map(|s|s.as_str())
.map(String::as_str)
.chain(
replace_map.keys().map(|s|s.as_str())
replace_map.keys().map(String::as_str)
).chain(
blocked.iter().map(|s|s.as_str())
blocked_set.iter().map(String::as_str)
).collect();
// get or create every unique script
@ -239,48 +403,177 @@ async fn upload_scripts(session_id:PathBuf)->Result<()>{
futures::stream::iter(script_set)
.map(|source|async move{
let script_id=get_or_create_script(api,source).await?;
Ok::<_,GOCError>((source,script_id))
Ok((source,script_id))
})
.buffer_unordered(16)
.try_collect().await?;
.buffer_unordered(REMOTE_CONCURRENCY)
.try_collect().await.map_err(ScriptUploadError::GetOrCreate)?;
// get or create policy for each script in each category
//
// replace
futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(16),|(source,id)|async{
do_policy(
let replace_fut=futures::stream::iter(replace_map.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|(source,id)|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[allowed_map[id].as_str()],
submissions_api::types::Policy::Replace
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[allowed_map[id].as_str()],
Policy:submissions_api::types::Policy::Replace,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyReplace)
});
// allowed
futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
let allowed_fut=futures::stream::iter(allowed_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Allowed
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Allowed,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyAllowed)
});
// blocked
futures::stream::iter(blocked.iter().map(Ok))
.try_for_each_concurrent(Some(16),|source|async{
do_policy(
let blocked_fut=futures::stream::iter(blocked_set.iter().map(Ok))
.try_for_each_concurrent(Some(REMOTE_CONCURRENCY),|source|async{
check_or_create_script_poicy(
api,
&script_ids,
source,
script_ids[source.as_str()],
submissions_api::types::Policy::Blocked
).await
}).await?;
hash_format(hash_source(source)).as_str(),
submissions_api::types::CreateScriptPolicyRequest{
FromScriptID:script_ids[source.as_str()],
ToScriptID:script_ids[source.as_str()],
Policy:submissions_api::types::Policy::Blocked,
}
).await.map_err(ScriptUploadError::GetOrCreatePolicyBlocked)
});
// run futures
tokio::try_join!(replace_fut,allowed_fut,blocked_fut)?;
Ok(())
}
// Failure modes of `repair_policies`, named after the failing step.
#[allow(dead_code)]
#[derive(Debug)]
enum RepairPoliciesError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetPolicies(submissions_api::Error),
GetScripts(submissions_api::types::SingleItemError),
UpdateScriptPolicy(submissions_api::Error),
}
// Inputs for `repair_policies`: raw session-id string and API base URL.
struct RepairPoliciesConfig{
session_id:String,
api_url:String,
}
// Scans all Replace policies page by page and resets any self-referential
// policy (one whose target script is the same script its source hash resolves
// to) back to Policy::None so it gets re-reviewed.
// NOTE(review): `async|policy|` closure syntax requires a recent Rust
// (2024-edition async closures) — confirm the toolchain supports it.
async fn repair_policies(config:RepairPoliciesConfig)->Result<(),RepairPoliciesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairPoliciesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairPoliciesError::Reqwest)?;
// page size; a short page signals the final page below
const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
// fetch the next page of Replace policies
let policies=api.get_script_policies(submissions_api::types::GetScriptPoliciesRequest{
Page:page,
Limit:LIMIT,
FromScriptHash:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::Replace),
}).await.map_err(RepairPoliciesError::GetPolicies)?;
// check each policy concurrently, at most REMOTE_CONCURRENCY in flight
futures::stream::iter(policies.iter().map(Ok)).try_for_each_concurrent(REMOTE_CONCURRENCY,async|policy|{
// resolve the policy's source hash to an actual script
let from_script=api.get_script_from_hash(submissions_api::types::HashRequest{
hash:policy.FromScriptHash.as_str(),
}).await.map_err(RepairPoliciesError::GetScripts)?;
if let Some(from_script)=from_script{
if policy.ToScriptID==from_script.ID{
// invalid policy. Reset the policy to None
api.update_script_policy(submissions_api::types::UpdateScriptPolicyRequest{
ID:policy.ID,
FromScriptID:None,
ToScriptID:None,
Policy:Some(submissions_api::types::Policy::None),
}).await.map_err(RepairPoliciesError::UpdateScriptPolicy)?;
println!("Policy updated! {:?}",policy.ID);
}
}else{
// dangling hash: report it but keep going
println!("Script did not exist! hash={}",policy.FromScriptHash);
}
Ok(())
}).await?;
if policies.len()<LIMIT as usize{
// We scanned all policies
println!("Done!");
break;
}else{
page+=1;
}
}
Ok(())
}
// Failure modes of `repair_duplicates`, named after the failing step.
#[allow(dead_code)]
#[derive(Debug)]
enum RepairDuplicatesError{
Cookie(submissions_api::CookieError),
Reqwest(submissions_api::ReqwestError),
GetScripts(submissions_api::Error),
DeleteScript(submissions_api::Error),
}
// Inputs for `repair_duplicates`: raw session-id string and API base URL.
struct RepairDuplicatesConfig{
session_id:String,
api_url:String,
}
// Walks every script in the database page by page, remembering each Source
// seen; any script whose Source was already seen is deleted as a duplicate.
// Deletions are sequential, so pagination stays consistent with the scan.
// NOTE(review): deleting while paginating assumes the server's page contents
// are stable enough across requests — confirm against the API's semantics.
async fn repair_duplicates(config:RepairDuplicatesConfig)->Result<(),RepairDuplicatesError>{
let cookie=submissions_api::Cookie::new(&config.session_id).map_err(RepairDuplicatesError::Cookie)?;
let api=&submissions_api::external::Context::new(config.api_url,cookie).map_err(RepairDuplicatesError::Reqwest)?;
// set of every Source seen so far; first occurrence wins
let mut sources=std::collections::HashSet::new();
// page size; a short page signals the final page below
const LIMIT:u32=100;
let mut page=1;
loop{
println!("Downloading page {page}...");
// fetch the next page of scripts, unfiltered
let scripts=api.get_scripts(submissions_api::types::GetScriptsRequest{
Page:page,
Limit:LIMIT,
Name:None,
Hash:None,
Source:None,
ResourceType:None,
ResourceID:None,
}).await.map_err(RepairDuplicatesError::GetScripts)?;
// decide before consuming `scripts` whether this was the last page
let done=scripts.len()<LIMIT as usize;
for script in scripts{
// `insert` returns false when the Source was already present
if !sources.insert(script.Source){
println!("Deleting duplicate script {:?}",script.ID);
api.delete_script(submissions_api::types::GetScriptRequest{
ScriptID:script.ID,
}).await.map_err(RepairDuplicatesError::DeleteScript)?;
}
}
if done{
// We scanned all scripts
println!("Done!");
break;
}else{
page+=1;
}
}
Ok(())
}