Compare commits

..

26 Commits

Author SHA1 Message Date
1f8a66638f
rewrite get_full_name function 2025-03-27 12:20:29 -07:00
0995ced783 skip non-files (directories) 2025-02-12 13:47:59 -08:00
49d071fd56 move writeattributes to map fixer 2025-01-27 07:06:24 -08:00
d9a39cc046 print output on failure 2024-12-23 19:53:08 -08:00
c9d58fb7ca dead code 2024-12-19 16:19:08 -08:00
684d7a1797 v1.1.2 remove flate2 2024-12-02 19:28:19 -08:00
6d54927c88 update deps 2024-12-02 19:28:19 -08:00
a2939fbc0f cleanup: remove flate2 - files are pre-unzipped by the downloader 2024-12-02 19:24:37 -08:00
c415ffbdab default to current name and confirm override 2024-08-24 19:40:09 -07:00
cbc818bd03 remove a panic 2024-08-13 16:20:32 -07:00
53d2f7a5e8 v1.1.1 fix displayname 2024-08-12 16:23:42 -07:00
96d1cc87a2 update deps 2024-08-12 16:23:42 -07:00
5915dd730f redo displayname test 2024-08-12 16:23:42 -07:00
e626131d95 v1.1.0 asset tool upgrade (TODO: directly use rbx_asset lib) 2024-07-15 20:24:28 -07:00
69ffbf4837 update deps 2024-07-15 20:24:28 -07:00
167be8f587 update asset tool to 0.4.x 2024-07-15 20:22:54 -07:00
e92528ad83 reject uncapitalized display names 2024-06-03 08:47:16 -07:00
8e9c76d6f8 use strafesnet deps 2024-05-30 02:02:46 -07:00
a5c48d4684 v1.0.1 use asset tool + prompt for model name 2024-04-25 19:02:11 -07:00
1b5eec9eaf if model name is illegal prompt for new name 2024-04-25 04:42:31 -07:00
ef5703f282 condense prompt logic 2024-04-25 04:34:31 -07:00
9685301b30 use asset tool for upload 2024-04-25 01:10:15 -07:00
d2b455c87b rename package and de-version to pre-map-tool functionality 2024-03-17 10:29:24 -07:00
9de2790cc8 remove map-tool deps 2024-03-17 10:29:24 -07:00
47e93325ad remove map-tool + asset-tool functions 2024-03-08 10:15:00 -08:00
de9712b7a1 clarify function name 2024-03-08 10:10:26 -08:00
5 changed files with 881 additions and 4057 deletions

3216
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@@ -1,6 +1,6 @@
[package]
name = "map-tool"
version = "1.7.0"
name = "mapfixer"
version = "1.1.2"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -8,30 +8,13 @@ edition = "2021"
[dependencies]
anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"] }
flate2 = "1.0.27"
futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
lazy-regex = "3.1.0"
rbx_asset = { version = "0.2.5", registry = "strafesnet" }
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.0", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.6.0", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.0", registry = "strafesnet" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
vbsp = "0.6.0"
vmdl = "0.2.0"
vmt-parser = "0.2.0"
vpk = "0.2.0"
vtf = "0.3.0"
rbx_binary = { version = "0.7.4", registry = "strafesnet"}
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet"}
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet"}
rbx_xml = { version = "0.13.3", registry = "strafesnet"}
#[profile.release]
#lto = true
#strip = true
#codegen-units = 1
[profile.release]
lto = true
strip = true
codegen-units = 1

@@ -1,30 +1,826 @@
mod roblox;
mod source;
use clap::{Parser,Subcommand};
use std::{io::{Read, Seek}, path::PathBuf};
use clap::{Args, Parser, Subcommand};
use anyhow::Result as AResult;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Cli {
#[arg(long)]
path:Option<PathBuf>,
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
enum Commands{
#[command(flatten)]
Roblox(roblox::Commands),
#[command(flatten)]
Source(source::Commands),
// Top-level subcommands of the map-fixer CLI; each variant dispatches to the
// function of the same (snake_case) name in main().
// NOTE(review): the #[derive(Subcommand)] attribute for this enum is outside
// the visible diff context — confirm it is present in the full file.
enum Commands {
    /// Dump each unique script source from the given files to scripts/extracted/.
    ExtractScripts(PathBufList),
    /// Manually triage every script in maps/unprocessed.
    Interactive,
    /// Re-apply scripts/replace substitutions to maps/blocked.
    Replace,
    /// Non-interactive scan of maps/unprocessed against the allow/block sets.
    Scan,
    /// Interactively upload maps/passed via the external asset-tool binary.
    Upload,
    /// Stamp Button<N> instances with a "Ref" attribute.
    WriteAttributes,
}
#[tokio::main]
async fn main()->AResult<()>{
let cli=Cli::parse();
match cli.command{
Commands::Roblox(commands)=>commands.run().await,
Commands::Source(commands)=>commands.run().await,
// Positional list of input file paths (used by ExtractScripts).
#[derive(Args)]
struct PathBufList {
    paths:Vec<PathBuf>
}
// Positional list of numeric map ids.
// NOTE(review): no visible subcommand uses MapList — possibly dead code.
#[derive(Args)]
struct MapList {
    maps: Vec<u64>,
}
// CLI entry point: parse arguments and dispatch to the matching routine.
// Every branch returns anyhow::Result, which propagates to the process exit.
fn main() -> AResult<()> {
    let cli = Cli::parse();
    match cli.command {
        Commands::ExtractScripts(pathlist)=>extract_scripts(pathlist.paths),
        Commands::Interactive=>interactive(),
        Commands::Replace=>replace(),
        Commands::Scan=>scan(),
        Commands::Upload=>upload(),
        Commands::WriteAttributes=>write_attributes(),
    }
}
fn class_is_a(class: &str, superclass: &str) -> bool {
if class==superclass {
return true
}
let class_descriptor=rbx_reflection_database::get().classes.get(class);
if let Some(descriptor) = &class_descriptor {
if let Some(class_super) = &descriptor.superclass {
return class_is_a(&class_super, superclass)
}
}
false
}
// Depth-first walk of `instance`'s descendants, pushing the referent of every
// instance whose class inherits from `superclass` into `objects`.
fn recursive_collect_superclass(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, superclass: &str){
    for &child_ref in instance.children() {
        match dom.get_by_ref(child_ref) {
            Some(child) => {
                if class_is_a(child.class.as_str(), superclass) {
                    // Store the referent (a cheap copyable handle), not the instance.
                    objects.push(child.referent());
                }
                recursive_collect_superclass(objects, dom, child, superclass);
            }
            // Dangling refs are silently skipped, as in the rest of the file.
            None => {}
        }
    }
}
// Dotted path of `instance` from the DOM root, e.g. "Model.Folder.Script",
// built by walking parent links upward and joining root-first.
fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)->String{
    let mut chain = Vec::new();
    let mut current = Some(instance);
    while let Some(node) = current {
        chain.push(node.name.as_str());
        // get_by_ref returns None once we step past the root, ending the walk.
        current = dom.get_by_ref(node.parent());
    }
    chain.reverse();
    chain.join(".")
}
//scan (scripts)
//iter maps/unprocessed
//passing moves to maps/verified
//failing moves to maps/blocked
//replace (edits & deletions)
//iter maps/blocked
//replace scripts and put in maps/unprocessed
//interactive
//iter maps/unprocessed
//for each unique script, load it into the file current.lua and have it open in sublime text
//I can edit the file and it will edit it in place
//I pass/fail(with comment)/allow each script
// Referents of every instance in the DOM whose class inherits from
// LuaSourceContainer, collected depth-first from the root.
fn get_script_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
    let mut scripts = std::vec::Vec::new();
    recursive_collect_superclass(&mut scripts, dom, dom.root(),"LuaSourceContainer");
    scripts
}
fn load_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
let mut first_8=[0u8;8];
if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_8),std::io::Seek::rewind(input)){
match &first_8[0..4]{
b"<rob"=>{
match &first_8[4..8]{
b"lox!"=>rbx_binary::from_reader(input).map_err(anyhow::Error::msg),
b"lox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(anyhow::Error::msg),
other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
}
},
_=>Err(anyhow::Error::msg("unsupported file type")),
}
}else{
Err(anyhow::Error::msg("peek failed"))
}
}
// Next unused script id, persisted as decimal text in the file `id`.
// A missing file means no ids have been handed out yet, so 0 is returned.
fn get_id() -> AResult<u32>{
    match std::fs::read_to_string("id") {
        Ok(contents) => Ok(contents.parse::<u32>()?),
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(0),
        Err(e) => Err(e.into()),
    }
}
// Read every file in directory `file` into a set of script sources
// (one file per source; duplicates collapse via the set).
fn get_set_from_file(file:&str) -> AResult<std::collections::HashSet<String>>{
    let mut sources = std::collections::HashSet::new();
    for dir_entry in std::fs::read_dir(file)? {
        let dir_entry = dir_entry?;
        sources.insert(std::fs::read_to_string(dir_entry.path())?);
    }
    Ok(sources)
}
// Script sources that have been manually approved (scripts/allowed/*).
fn get_allowed_set() -> AResult<std::collections::HashSet<String>>{
    get_set_from_file("scripts/allowed")
}
// Script sources that have been rejected (scripts/blocked/*).
fn get_blocked() -> AResult<std::collections::HashSet<String>>{
    get_set_from_file("scripts/blocked")
}
// Map of approved script id -> source text, where the id is the file stem of
// scripts/allowed/<id>.lua. Panics (unwrap) on non-numeric or non-UTF-8 stems.
fn get_allowed_map() -> AResult<std::collections::HashMap::<u32,String>>{
    let mut allowed_map = std::collections::HashMap::new();
    for entry in std::fs::read_dir("scripts/allowed")? {
        let entry = entry?;
        let script_id: u32 = entry.path().file_stem().unwrap().to_str().unwrap().parse()?;
        let source = std::fs::read_to_string(entry.path())?;
        allowed_map.insert(script_id, source);
    }
    Ok(allowed_map)
}
// Inverse lookup: original (blocked) source text -> approved script id, built
// from scripts/replace/<id>.lua. Panics (unwrap) on malformed file stems.
fn get_replace_map() -> AResult<std::collections::HashMap::<String,u32>>{
    let mut replace = std::collections::HashMap::new();
    for entry in std::fs::read_dir("scripts/replace")? {
        let entry = entry?;
        let script_id: u32 = entry.path().file_stem().unwrap().to_str().unwrap().parse()?;
        let original_source = std::fs::read_to_string(entry.path())?;
        replace.insert(original_source, script_id);
    }
    Ok(replace)
}
// True when `source` contains a keyword that instantly flags a script
// (environment manipulation via getfenv, or module loading via require).
// Takes &str instead of the original &String (callers coerce transparently)
// and uses str::contains instead of find(..).is_some().
fn check_source_illegal_keywords(source:&str)->bool{
    source.contains("getfenv")||source.contains("require")
}
// First direct child of `instance` whose name matches `name` exactly and
// whose class is `class` or a subclass of it; None when absent.
fn find_first_child_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&'a rbx_dom_weak::Instance,name:&'a str,class:&'a str) -> Option<&'a rbx_dom_weak::Instance> {
    instance.children()
        .iter()
        .filter_map(|&child_ref| dom.get_by_ref(child_ref))
        .find(|child| child.name == name && class_is_a(child.class.as_str(), class))
}
// Extract (model name, Creator value, DisplayName value, DisplayName referent)
// from a DOM that must contain exactly one top-level model, which in turn must
// have "Creator" and "DisplayName" StringValue children with string Values.
// The DisplayName referent lets callers rewrite the value in place.
fn get_mapinfo(dom:&rbx_dom_weak::WeakDom) -> AResult<(String,String,String,rbx_dom_weak::types::Ref)>{
    let workspace_children=dom.root().children();
    if workspace_children.len()!=1{
        return Err(anyhow::Error::msg("there can only be one model"));
    }
    if let Some(model_instance) = dom.get_by_ref(workspace_children[0]) {
        // Both marker children must exist...
        if let (Some(creator),Some(displayname))=(find_first_child_class(dom, model_instance, "Creator", "StringValue"),find_first_child_class(dom, model_instance, "DisplayName", "StringValue")){
            // ...and both must expose a String "Value" property.
            if let (
                Some(rbx_dom_weak::types::Variant::String(creator_string)),
                Some(rbx_dom_weak::types::Variant::String(displayname_string))
            )=(
                creator.properties.get("Value"),
                displayname.properties.get("Value")
            ){
                return Ok((model_instance.name.clone(),creator_string.clone(),displayname_string.clone(),displayname.referent()));
            }
        }
    }
    // Any missing piece collapses into this single error.
    Err(anyhow::Error::msg("no stuff in map"))
}
// Verdict for one scanned map; selects its destination folder below.
enum Scan{
    Passed,
    Blocked,
    Flagged,
}
// Non-interactive triage of maps/unprocessed: maps whose scripts are all in
// the allowed set move to maps/processed; maps containing flagged keywords
// move to maps/flagged; everything else moves to maps/blocked, with each
// previously-unseen source saved once as scripts/blocked/<id>.lua. The
// persistent id counter is read first and written back at the end.
fn scan() -> AResult<()>{
    let mut id = get_id()?;
    //Construct allowed scripts
    let allowed_set = get_allowed_set()?;
    let mut blocked = get_blocked()?;
    for entry in std::fs::read_dir("maps/unprocessed")? {
        let file_thing=entry?;
        let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
        let dom = load_dom(&mut input)?;
        let script_refs = get_script_refs(&dom);
        //check scribb
        let mut fail_count=0;
        let mut fail_type=Scan::Passed;
        for &script_ref in script_refs.iter() {
            if let Some(script)=dom.get_by_ref(script_ref){
                if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
                    //flag keywords and instantly fail
                    if check_source_illegal_keywords(s){
                        println!("{:?} - flagged.",file_thing.file_name());
                        fail_type=Scan::Flagged;
                        break;
                    }
                    if allowed_set.contains(s) {
                        continue;
                    }else{
                        fail_type=Scan::Blocked;//no need to check for Flagged, it breaks the loop.
                        fail_count+=1;
                        // Persist each new blocked source exactly once.
                        if !blocked.contains(s) {
                            blocked.insert(s.clone());//all fixed! just clone!
                            std::fs::write(format!("scripts/blocked/{}.lua",id),s)?;
                            id+=1;
                        }
                    }
                }else{
                    panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
                }
            }else{
                panic!("FATAL: failed to get_by_ref {:?}",script_ref);
            }
        }
        // Route the file by verdict, then move it.
        let mut dest=match fail_type {
            Scan::Passed => PathBuf::from("maps/processed"),
            Scan::Blocked => {
                println!("{:?} - {} {} not allowed.",file_thing.file_name(),fail_count,if fail_count==1 {"script"}else{"scripts"});
                PathBuf::from("maps/blocked")
            }
            Scan::Flagged => PathBuf::from("maps/flagged")
        };
        dest.push(file_thing.file_name());
        std::fs::rename(file_thing.path(), dest)?;
    }
    // Persist the id counter for the next run.
    std::fs::write("id",id.to_string())?;
    Ok(())
}
// Dump each unique script source (deduplicated by exact text) found in
// `paths` to scripts/extracted/<file>_<id>_<scriptname>.lua.
// NOTE(review): `file_name` is an Option<&OsStr> and is Debug-formatted into
// the output filename (producing e.g. `Some("map.rbxl")_0_Script.lua`) —
// likely unintended; confirm before relying on the naming scheme.
fn extract_scripts(paths: Vec<PathBuf>) -> AResult<()>{
    let mut id = 0;
    //Construct allowed scripts
    let mut script_set = std::collections::HashSet::<String>::new();
    for path in paths {
        let file_name=path.file_name();
        let mut input = std::io::BufReader::new(std::fs::File::open(&path)?);
        let dom = load_dom(&mut input)?;
        let script_refs = get_script_refs(&dom);
        //extract scribb
        for &script_ref in script_refs.iter() {
            if let Some(script)=dom.get_by_ref(script_ref){
                if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
                    if script_set.contains(s) {
                        continue;
                    }else{
                        script_set.insert(s.clone());
                        std::fs::write(format!("scripts/extracted/{:?}_{}_{}.lua",file_name,id,script.name),s)?;
                        id+=1;
                    }
                }else{
                    panic!("FATAL: failed to get source for {:?}",file_name);
                }
            }else{
                panic!("FATAL: failed to get_by_ref {:?}",script_ref);
            }
        }
    }
    println!("extracted {} {}",id,if id==1 {"script"}else{"scripts"});
    Ok(())
}
// Apply the scripts/replace -> scripts/allowed substitutions to every map in
// maps/blocked. Each script whose exact source appears in the replace map is
// swapped for the approved source with the matching id. If every script
// resolved, a repaired copy is written to maps/unprocessed for re-scanning;
// the original file stays in maps/blocked.
// NOTE(review): any script without a replace-map entry (including already
// allowed ones) marks the map as failed — confirm that is intended.
fn replace() -> AResult<()>{
    let allowed_map=get_allowed_map()?;
    let replace_map=get_replace_map()?;
    for entry in std::fs::read_dir("maps/blocked")? {
        let file_thing=entry?;
        let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
        let mut dom = load_dom(&mut input)?;
        let script_refs = get_script_refs(&dom);
        //check scribb
        let mut any_failed=false;
        for &script_ref in script_refs.iter() {
            if let Some(script)=dom.get_by_ref(script_ref){
                if let Some(rbx_dom_weak::types::Variant::String(source)) = script.properties.get("Source") {
                    if let (Some(replace_id),Some(replace_script))=(replace_map.get(source),dom.get_by_ref_mut(script.referent())) {
                        println!("replace {}",replace_id);
                        //replace the source
                        if let Some(replace_source)=allowed_map.get(replace_id){
                            replace_script.properties.insert("Source".to_string(), rbx_dom_weak::types::Variant::String(replace_source.clone()));
                        }else{
                            // replace/<id>.lua exists but allowed/<id>.lua is missing.
                            println!("failed to get replacement source {}",replace_id);
                            any_failed=true;
                        }
                    }else{
                        // Fixed duplicated wording ("failed to failed to").
                        println!("failed to get replace_id and replace_script");
                        any_failed=true;
                    }
                }else{
                    panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
                }
            }else{
                panic!("FATAL: failed to get_by_ref {:?}",script_ref);
            }
        }
        if any_failed {
            println!("One or more scripts failed to replace.");
        }else{
            let mut dest=PathBuf::from("maps/unprocessed");
            dest.push(file_thing.file_name());
            // BUGFIX: was File::open, which cannot create the output file and
            // fails with NotFound; File::create matches the other writers in
            // this file (interactive, write_attributes).
            let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
            //write workspace:GetChildren()[1]
            let workspace_children=dom.root().children();
            if workspace_children.len()!=1{
                return Err(anyhow::Error::msg("there can only be one model"));
            }
            rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
        }
    }
    Ok(())
}
// Operator's choice for one map file in `upload`.
enum UploadAction {
    /// Re-upload over the existing asset with this id.
    Upload(u64),
    /// Leave the file in place and move on.
    Skip,
    /// Create a brand-new asset.
    New,
    /// Remove the file from disk.
    Delete,
}
struct ParseUploadActionErr;
impl std::str::FromStr for UploadAction {
    type Err=ParseUploadActionErr;
    // Parse a raw stdin line. Whitespace (including the trailing newline from
    // read_line) is trimmed, which also accepts CRLF and newline-less EOF
    // input; the original exact-"\n" match rejected those and panicked
    // (`s[..s.len()-1]`) on an empty string.
    fn from_str(s: &str) -> Result<Self, Self::Err>{
        match s.trim() {
            "skip" => Ok(Self::Skip),
            "new" => Ok(Self::New),
            "delete" => Ok(Self::Delete),
            other => other.parse::<u64>().map(Self::Upload).map_err(|_| ParseUploadActionErr),
        }
    }
}
// Interactive uploader for maps/passed. For each map file the parsed metadata
// is printed and the operator enters an asset id (re-upload), "new" (create),
// "skip", or "delete". Uploads shell out to the external `asset-tool` binary
// (auth via the RBXCOOKIE env var, hard-coded group 6980477); on exit code 0
// the file moves to maps/uploaded, otherwise the tool's output is printed and
// the file stays put.
fn upload() -> AResult<()>{
    //interactive prompt per upload:
    for entry in std::fs::read_dir("maps/passed")? {
        let file_thing=entry?;
        // Skip directories and anything else that is not a regular file.
        if !file_thing.file_type()?.is_file(){
            println!("skipping non-file: {:?}",file_thing.file_name());
            continue;
        }
        println!("map file: {:?}",file_thing.file_name());
        let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
        let dom = load_dom(&mut input)?;
        let (modelname,creator,displayname,_) = get_mapinfo(&dom)?;
        //Creator: [auto fill creator]
        //DisplayName: [auto fill DisplayName]
        //id: ["New" for blank because of my double enter key]
        print!("Model name: {}\nCreator: {}\nDisplayName: {}\n",modelname,creator,displayname);
        // Re-prompt until the line parses as an UploadAction.
        let upload_action;
        loop{
            print!("Action or Upload Asset Id: ");
            std::io::Write::flush(&mut std::io::stdout())?;
            let mut upload_action_string = String::new();
            std::io::stdin().read_line(&mut upload_action_string)?;
            if let Ok(parsed_upload_action)=upload_action_string.parse::<UploadAction>(){
                upload_action=parsed_upload_action;
                break;
            }
        }
        match upload_action {
            UploadAction::Upload(asset_id) => {
                let output=std::process::Command::new("asset-tool")
                    .args([
                        "upload-asset",
                        "--cookie-envvar","RBXCOOKIE",
                        "--group-id","6980477"
                    ])
                    .arg("--asset-id").arg(asset_id.to_string())
                    .arg("--input-file").arg(file_thing.path().into_os_string().into_string().unwrap())
                    .output()?;
                match output.status.code() {
                    Some(0)=>{
                        //move file
                        let mut dest=PathBuf::from("maps/uploaded");
                        dest.push(file_thing.file_name());
                        std::fs::rename(file_thing.path(), dest)?;
                    }
                    other=>{
                        println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
                    },
                }
            }
            UploadAction::Skip => continue,
            UploadAction::New => {
                let output=std::process::Command::new("asset-tool")
                    .args([
                        "create-asset",
                        "--cookie-envvar","RBXCOOKIE",
                        "--group-id","6980477"
                    ])
                    .arg("--model-name").arg(modelname.as_str())
                    .arg("--input-file").arg(file_thing.path().into_os_string().into_string().unwrap())
                    .output()?;
                match output.status.code() {
                    Some(0)=>{
                        // print output (contains the new asset id from asset-tool)
                        println!("{}", std::str::from_utf8(output.stdout.as_slice())?);
                        //move file
                        let mut dest=PathBuf::from("maps/uploaded");
                        dest.push(file_thing.file_name());
                        std::fs::rename(file_thing.path(), dest)?;
                    },
                    other=>{
                        println!("upload failed! code={:?}\noutput={}\nerr={}",other,String::from_utf8_lossy(&output.stdout),String::from_utf8_lossy(&output.stderr));
                    },
                }
            }
            UploadAction::Delete => std::fs::remove_file(file_thing.path())?,
        }
    }
    Ok(())
}
// Per-map verdict for `interactive`; selects the destination folder.
enum Interactive{
    Passed,
    Blocked,
    Flagged,
}
// Resolved action for a single script inside `interactive`.
enum ScriptAction {
    /// Source is (now) allowed as-is.
    Pass,
    /// Swap the source for scripts/allowed/<id>.lua.
    Replace(u32),
    /// Contains an instantly-flagged keyword.
    Flag,
    /// Source is (now) blocked.
    Block,
    /// Remove the script instance from the DOM.
    Delete,
}
// Operator input for one unresolved script in `interactive`.
// ("1" is accepted as a shorthand for "pass"; "exit" aborts the whole run.)
enum ScriptActionParseResult {
    Pass,
    Block,
    Exit,
    Delete,
}
struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult {
    type Err=ParseScriptActionErr;
    // Parse a raw stdin line. Whitespace (including the trailing newline from
    // read_line) is trimmed, which also accepts CRLF and newline-less EOF
    // input; the original exact-"\n" comparisons rejected those.
    fn from_str(s: &str) -> Result<Self, Self::Err>{
        match s.trim() {
            "pass" | "1" => Ok(Self::Pass),
            "block" => Ok(Self::Block),
            "exit" => Ok(Self::Exit),
            "delete" => Ok(Self::Delete),
            _ => Err(ParseScriptActionErr),
        }
    }
}
// True when the first character of `s` is Unicode-lowercase; false for an
// empty string or a non-lowercase first character (digits, symbols, uppercase).
fn is_first_letter_lowercase(s:&str)->bool{
    match s.chars().next() {
        Some(first) => first.is_lowercase(),
        None => false,
    }
}
// True when `display_name` is non-empty, does not start lowercase, and every
// whitespace-separated word either is a lowercase stop word (a/an/and/the/of)
// or does not start with a lowercase letter.
// Idiom changes only: !is_empty() over len()!=0, matches! over a match with
// five identical `true` arms — behavior unchanged.
fn is_title_case(display_name:&str)->bool{
    !display_name.is_empty()
    &&!is_first_letter_lowercase(display_name)
    &&{
        let display_name_pattern=lazy_regex::regex!(r"\b\S+");
        display_name_pattern.find_iter(display_name)
            .all(|capture|
                matches!(capture.as_str(),"a"|"an"|"and"|"the"|"of")
                ||!is_first_letter_lowercase(capture.as_str())
            )
    }
}
// Manual triage of maps/unprocessed. For each map:
//   1. Validate/repair the model name (must match ^[a-z0-9_]+$) and the
//      DisplayName (title case), prompting the operator for replacements.
//   2. For each script, decide Pass/Replace/Flag/Block/Delete — automatically
//      when the source is already known, otherwise by writing the source to
//      current.lua (for external editing) and prompting on stdin.
//   3. Route the map to maps/passed, maps/blocked, or maps/flagged; when any
//      edit happened a new file is generated and the original is kept in
//      maps/unaltered. The persistent id counter is written back at the end.
fn interactive() -> AResult<()>{
    let mut id=get_id()?;
    //Construct allowed scripts
    let mut allowed_set=get_allowed_set()?;
    let mut allowed_map=get_allowed_map()?;
    let mut replace_map=get_replace_map()?;
    let mut blocked = get_blocked()?;
    let model_name_pattern=lazy_regex::regex!(r"^[a-z0-9_]+$");
    'map_loop: for entry in std::fs::read_dir("maps/unprocessed")? {
        let file_thing=entry?;
        println!("processing map={:?}",file_thing.file_name());
        let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
        let mut dom = load_dom(&mut input)?;
        let (modelname,creator,displayname,displayname_ref)=get_mapinfo(&dom)?;
        let mut script_count=0;
        // replace_count doubles as an "edited" flag: any nonzero value makes
        // the Passed branch below generate a new file.
        let mut replace_count=0;
        let mut block_count=0;
        //if model name is illegal prompt for new name
        print!("Model name: {}\nCreator: {}\nDisplayName: {}\n",modelname,creator,displayname);
        if !model_name_pattern.is_match(modelname.as_str()){
            //illegal
            let new_model_name;
            loop{
                print!("Enter new model name: ");
                std::io::Write::flush(&mut std::io::stdout())?;
                let mut input_string=String::new();
                std::io::stdin().read_line(&mut input_string)?;
                // Empty input keeps the current (illegal) name as the candidate.
                let input_final=match input_string.trim(){
                    ""=>modelname.as_str(),
                    other=>other,
                };
                if model_name_pattern.is_match(input_final)
                ||{
                    //If you entered a new model name and it still doesn't like it, allow override
                    println!("Final model name: {}",input_final);
                    print!("Are you sure you want this model name? [y/N]:");
                    std::io::Write::flush(&mut std::io::stdout())?;
                    let mut input_string=String::new();
                    std::io::stdin().read_line(&mut input_string)?;
                    match input_string.trim(){
                        "y"=>true,
                        _=>false,
                    }
                }{
                    new_model_name=input_final.to_owned();
                    break;
                }
            }
            let model_instance=dom.get_by_ref_mut(dom.root().children()[0]).unwrap();
            model_instance.name=new_model_name;
            //mark file as edited so a new file is generated
            replace_count+=1;
        }
        if !is_title_case(displayname.as_str()){
            //illegal
            let new_display_name;
            loop{
                print!("Enter new display name: ");
                std::io::Write::flush(&mut std::io::stdout())?;
                let mut input_string=String::new();
                std::io::stdin().read_line(&mut input_string)?;
                let input_final=match input_string.trim(){
                    ""=>displayname.as_str(),
                    other=>other,
                };
                // NOTE(review): this checks input_string.trim() where the
                // model-name branch above checks input_final — on empty input
                // the existing display name is never re-validated. Confirm
                // whether input_final was intended here.
                if is_title_case(input_string.trim())
                ||{
                    //If you entered a new display name and it still doesn't like it, allow override
                    println!("Final display name: {}",input_final);
                    print!("Are you sure you want this display name? [y/N]:");
                    std::io::Write::flush(&mut std::io::stdout())?;
                    let mut input_string=String::new();
                    std::io::stdin().read_line(&mut input_string)?;
                    match input_string.trim(){
                        "y"=>true,
                        _=>false,
                    }
                }{
                    new_display_name=input_final.to_owned();
                    break;
                }
            }
            let displayname_instance=dom.get_by_ref_mut(displayname_ref).unwrap();
            // insert returns the previous value; asserting Some verifies the
            // Value property existed before the overwrite.
            assert!(displayname_instance.properties.insert("Value".to_owned(),new_display_name.into()).is_some(),"StringValue we have a problem");
            //mark file as edited so a new file is generated
            replace_count+=1;
        }
        let script_refs = get_script_refs(&dom)
            //grab the full path to the object in case it's deleted by another operation
            .into_iter()
            .filter_map(|referent|
                dom.get_by_ref(referent)
                .map(|script|
                    (referent,get_full_name(&dom,script))
                )
            ).collect::<Vec<_>>();
        //check scribb
        let mut fail_type=Interactive::Passed;
        for (script_ref,script_full_name) in script_refs{
            if let Some(script)=dom.get_by_ref(script_ref){
                if let Some(rbx_dom_weak::types::Variant::String(source)) = script.properties.get("Source") {
                    script_count+=1;
                    // Decide automatically from the known sets; otherwise
                    // fall through to the interactive prompt.
                    let source_action=if check_source_illegal_keywords(source) {
                        ScriptAction::Flag//script triggers flagging -> Flag
                    } else if blocked.contains(source) {
                        ScriptAction::Block//script is blocked -> Block
                    } else if allowed_set.contains(source) {
                        ScriptAction::Pass//script is allowed -> Pass
                    }else if let Some(replace_id)=replace_map.get(source) {
                        ScriptAction::Replace(*replace_id)
                    }else{
                        //interactive logic goes here
                        print!("unresolved source location={}\n",get_full_name(&dom, script));
                        //load source into current.lua
                        std::fs::write("current.lua",source)?;
                        //prompt action in terminal
                        //wait for input
                        let script_action;
                        loop{
                            print!("action: ");
                            std::io::Write::flush(&mut std::io::stdout())?;
                            let mut action_string = String::new();
                            std::io::stdin().read_line(&mut action_string)?;
                            if let Ok(parsed_script_action)=action_string.parse::<ScriptActionParseResult>(){
                                script_action=parsed_script_action;
                                break;
                            }
                        }
                        //update allowed/replace/blocked
                        match script_action{
                            ScriptActionParseResult::Pass => {
                                //if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
                                let modified_source=std::fs::read_to_string("current.lua")?;
                                if &modified_source==source{
                                    //it's always new.
                                    //insert allowed_set
                                    allowed_set.insert(modified_source.clone());
                                    //insert allowed_map
                                    allowed_map.insert(id,modified_source.clone());
                                    //write allowed/id.lua
                                    std::fs::write(format!("scripts/allowed/{}.lua",id),modified_source)?;
                                    id+=1;
                                    ScriptAction::Pass
                                }else{
                                    // Operator edited current.lua: record the
                                    // edited text as allowed and the original
                                    // as its replace key.
                                    //insert allowed_set
                                    allowed_set.insert(modified_source.clone());
                                    //insert allowed_map
                                    allowed_map.insert(id,modified_source.clone());
                                    //insert replace_map
                                    replace_map.insert(source.clone(),id);//this cannot be reached if it already exists
                                    //write allowed/id.lua
                                    std::fs::write(format!("scripts/allowed/{}.lua",id),modified_source)?;
                                    //write replace/id.lua
                                    std::fs::write(format!("scripts/replace/{}.lua",id),source)?;
                                    let ret=ScriptAction::Replace(id);
                                    id+=1;
                                    ret
                                }
                            },
                            ScriptActionParseResult::Block => {
                                blocked.insert(source.clone());
                                std::fs::write(format!("scripts/blocked/{}.lua",id),source)?;
                                id+=1;
                                ScriptAction::Block
                            },
                            ScriptActionParseResult::Exit => break 'map_loop,
                            ScriptActionParseResult::Delete => ScriptAction::Delete,
                        }
                    };
                    let location=get_full_name(&dom, script);
                    match source_action{
                        ScriptAction::Pass => println!("passed source location={}",location),
                        ScriptAction::Replace(replace_id)=>{
                            //replace the source
                            if let (Some(replace_source),Some(replace_script))=(allowed_map.get(&replace_id),dom.get_by_ref_mut(script.referent())){
                                replace_count+=1;
                                println!("replaced source id={} location={}",replace_id,location);
                                replace_script.properties.insert("Source".to_string(), rbx_dom_weak::types::Variant::String(replace_source.clone()));
                            }else{
                                panic!("failed to get replacement source id={} location={}",replace_id,location);
                            }
                        },
                        ScriptAction::Delete => {
                            println!("deleted source location={}",location);
                            replace_count+=1;//trigger a new file generation
                            dom.destroy(script.referent());
                        },
                        ScriptAction::Flag => {
                            println!("flagged source location={}",location);
                            fail_type=Interactive::Flagged;
                        },
                        ScriptAction::Block => {
                            block_count+=1;
                            println!("blocked source location={}",location);
                            // Flagged outranks Blocked; only upgrade from Passed.
                            match fail_type{
                                Interactive::Passed => fail_type=Interactive::Blocked,
                                _=>(),
                            }
                        },
                    }
                }else{
                    panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
                }
            }else{
                // Ref vanished (e.g. parent deleted earlier in this loop).
                println!("WARNING: script was deleted: {}",script_full_name);
            }
        }
        let mut dest=match fail_type{
            Interactive::Passed => {
                println!("map={:?} passed with {} {}",file_thing.file_name(),script_count,if script_count==1 {"script"}else{"scripts"});
                if replace_count==0{
                    PathBuf::from("maps/passed")
                }else{
                    //create new file
                    println!("{} {} replaced - generating new file...",replace_count,if replace_count==1 {"script was"}else{"scripts were"});
                    let mut dest=PathBuf::from("maps/passed");
                    dest.push(file_thing.file_name());
                    let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
                    //write workspace:GetChildren()[1]
                    let workspace_children=dom.root().children();
                    if workspace_children.len()!=1{
                        return Err(anyhow::Error::msg("there can only be one model"));
                    }
                    rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
                    //move original to processed folder
                    PathBuf::from("maps/unaltered")
                }
            },//write map into maps/processed
            Interactive::Blocked => {
                println!("map={:?} blocked with {}/{} {} blocked",file_thing.file_name(),block_count,script_count,if script_count==1 {"script"}else{"scripts"});
                PathBuf::from("maps/blocked")
            },//write map into maps/blocked
            Interactive::Flagged => {
                println!("map={:?} flagged",file_thing.file_name());
                PathBuf::from("maps/flagged")
            },//write map into maps/flagged
        };
        dest.push(file_thing.file_name());
        std::fs::rename(file_thing.path(), dest)?;
    }
    // Persist the id counter for the next run.
    std::fs::write("id",id.to_string())?;
    Ok(())
}
// Depth-first walk of `instance`'s descendants, pushing the referent of every
// instance whose name matches `regex` into `objects`.
fn recursive_collect_regex(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, regex: &lazy_regex::Lazy<lazy_regex::Regex>){
    for &child_ref in instance.children() {
        match dom.get_by_ref(child_ref) {
            Some(child) => {
                // is_match is the boolean form of captures(..).is_some().
                if regex.is_match(child.name.as_str()) {
                    objects.push(child.referent());
                }
                recursive_collect_regex(objects, dom, child, regex);
            }
            None => {}
        }
    }
}
// Referents of every instance whose name ends in "Button<digits>",
// collected depth-first from the root.
fn get_button_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
    let mut buttons = std::vec::Vec::new();
    recursive_collect_regex(&mut buttons, dom, dom.root(),lazy_regex::regex!(r"Button(\d+)$"));
    buttons
}
// For every map in maps/unprocessed: stamp each Button<N> descendant with a
// "Ref" attribute holding its own referent string, write the modified model
// to maps/attributes/<name>, and move the original to maps/unaltered.
fn write_attributes() -> AResult<()>{
    for entry in std::fs::read_dir("maps/unprocessed")? {
        let file_thing=entry?;
        println!("processing map={:?}",file_thing.file_name());
        let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
        let mut dom = load_dom(&mut input)?;
        let button_refs = get_button_refs(&dom);
        for &button_ref in &button_refs {
            if let Some(button)=dom.get_by_ref_mut(button_ref){
                match button.properties.get_mut("Attributes"){
                    // Existing attribute container: add/overwrite Ref in place.
                    Some(rbx_dom_weak::types::Variant::Attributes(attributes))=>{
                        println!("Appending Ref={} to existing attributes for {}",button_ref,button.name);
                        attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
                    },
                    // No container yet: create one holding only Ref.
                    None=>{
                        println!("Creating new attributes with Ref={} for {}",button_ref,button.name);
                        let mut attributes=rbx_dom_weak::types::Attributes::new();
                        attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
                        button.properties.insert("Attributes".to_string(),rbx_dom_weak::types::Variant::Attributes(attributes));
                    }
                    // An "Attributes" property holding a non-Attributes variant.
                    _=>unreachable!("Fetching attributes did not return attributes."),
                }
            }
        }
        // Write the edited DOM to maps/attributes/<name>; the block then
        // yields maps/unaltered as the destination for the original file.
        let mut dest={
            let mut dest=PathBuf::from("maps/attributes");
            dest.push(file_thing.file_name());
            let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
            //write workspace:GetChildren()[1]
            let workspace_children=dom.root().children();
            if workspace_children.len()!=1{
                return Err(anyhow::Error::msg("there can only be one model"));
            }
            rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
            //move original to processed folder
            PathBuf::from("maps/unaltered")
        };
        dest.push(file_thing.file_name());
        std::fs::rename(file_thing.path(), dest)?;
    }
    Ok(())
}

@ -1,431 +0,0 @@
use std::path::{Path,PathBuf};
use std::io::{Cursor,Read,Seek};
use std::collections::HashSet;
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use rbx_dom_weak::Instance;
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
use rbxassetid::RobloxAssetId;
use tokio::io::AsyncReadExt;
const DOWNLOAD_LIMIT:usize=16;
// Subcommands of this (since-removed) roblox module.
#[derive(Subcommand)]
pub enum Commands{
    RobloxToSNF(RobloxToSNFSubcommand),
    DownloadAssets(DownloadAssetsSubcommand),
}
// Convert Roblox place/model files into SNF files written to `output_folder`.
#[derive(Args)]
pub struct RobloxToSNFSubcommand {
    #[arg(long)]
    output_folder:PathBuf,
    #[arg(required=true)]
    input_files:Vec<PathBuf>,
}
// Download every asset referenced by the given Roblox files.
#[derive(Args)]
pub struct DownloadAssetsSubcommand{
    #[arg(required=true)]
    roblox_files:Vec<PathBuf>,
    // #[arg(long)]
    // cookie_file:Option<String>,
}
impl Commands{
    // Dispatch to the async implementation for each subcommand.
    // NOTE: DownloadAssets passes an empty cookie string, so downloads run
    // unauthenticated (see the commented-out cookie_file arg above).
    pub async fn run(self)->AResult<()>{
        match self{
            Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
            Commands::DownloadAssets(subcommand)=>download_assets(
                subcommand.roblox_files,
                rbx_asset::cookie::Cookie::new("".to_string()),
            ).await,
        }
    }
}
// Failure modes of load_dom below; variants are only read via Debug,
// hence the allow(unused).
#[allow(unused)]
#[derive(Debug)]
enum LoadDomError{
    IO(std::io::Error),
    Binary(rbx_binary::DecodeError),
    Xml(rbx_xml::DecodeError),
    UnknownFormat,
}
// Decode a Roblox place/model, choosing binary vs XML from the exact 8-byte
// magic ("<roblox!" or "<roblox "). The stream is rewound after peeking so
// the chosen decoder reads from the start.
fn load_dom<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{
    let mut magic = [0u8; 8];
    input.read_exact(&mut magic).map_err(LoadDomError::IO)?;
    input.rewind().map_err(LoadDomError::IO)?;
    if &magic == b"<roblox!" {
        rbx_binary::from_reader(input).map_err(LoadDomError::Binary)
    } else if &magic == b"<roblox " {
        rbx_xml::from_reader(input, rbx_xml::DecodeOptions::default()).map_err(LoadDomError::Xml)
    } else {
        Err(LoadDomError::UnknownFormat)
    }
}
/* The ones I'm interested in:
Beam.Texture
Decal.Texture
FileMesh.MeshId
FileMesh.TextureId
MaterialVariant.ColorMap
MaterialVariant.MetalnessMap
MaterialVariant.NormalMap
MaterialVariant.RoughnessMap
MeshPart.MeshId
MeshPart.TextureID
ParticleEmitter.Texture
Sky.MoonTextureId
Sky.SkyboxBk
Sky.SkyboxDn
Sky.SkyboxFt
Sky.SkyboxLf
Sky.SkyboxRt
Sky.SkyboxUp
Sky.SunTextureId
SurfaceAppearance.ColorMap
SurfaceAppearance.MetalnessMap
SurfaceAppearance.NormalMap
SurfaceAppearance.RoughnessMap
SurfaceAppearance.TexturePack
*/
// Parse the Content value of `object.property` as a RobloxAssetId and insert
// it into `content_list`; unparsable urls are logged and skipped.
// NOTE(review): the else branch also fires when the property exists but holds
// a non-Content variant, yet the message claims it does not exist.
fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&str){
    if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
        let url:&str=content.as_ref();
        if let Ok(asset_id)=url.parse(){
            content_list.insert(asset_id);
        }else{
            println!("Content failed to parse into AssetID: {:?}",content);
        }
    }else{
        println!("property={} does not exist for class={}",property,object.class.as_str());
    }
}
// Read the whole file at `path` into a seekable in-memory cursor.
// tokio::fs::read sizes the buffer from file metadata up front, replacing the
// manual File::open + read_to_end (which grows the Vec incrementally).
async fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
    let data=tokio::fs::read(path).await?;
    Ok(Cursor::new(data))
}
// Asset ids referenced by one place file, bucketed by what they are
// (meshes, CSG unions, textures) so each kind downloads to its own folder.
#[derive(Default)]
struct UniqueAssets{
    meshes:HashSet<RobloxAssetId>,
    unions:HashSet<RobloxAssetId>,
    textures:HashSet<RobloxAssetId>,
}
impl UniqueAssets{
    // Record every asset-referencing property of `object`, keyed by class
    // name. Classes are matched by exact string, not by inheritance.
    fn collect(&mut self,object:&Instance){
        match object.class.as_str(){
            "Beam"=>accumulate_content_id(&mut self.textures,object,"Texture"),
            "Decal"=>accumulate_content_id(&mut self.textures,object,"Texture"),
            "Texture"=>accumulate_content_id(&mut self.textures,object,"Texture"),
            "FileMesh"=>accumulate_content_id(&mut self.textures,object,"TextureId"),
            // MeshPart uses "TextureID" (capital D), unlike FileMesh's "TextureId".
            "MeshPart"=>{
                accumulate_content_id(&mut self.textures,object,"TextureID");
                accumulate_content_id(&mut self.meshes,object,"MeshId");
            },
            "SpecialMesh"=>accumulate_content_id(&mut self.meshes,object,"MeshId"),
            "ParticleEmitter"=>accumulate_content_id(&mut self.textures,object,"Texture"),
            // Skyboxes carry eight separate texture properties.
            "Sky"=>{
                accumulate_content_id(&mut self.textures,object,"MoonTextureId");
                accumulate_content_id(&mut self.textures,object,"SkyboxBk");
                accumulate_content_id(&mut self.textures,object,"SkyboxDn");
                accumulate_content_id(&mut self.textures,object,"SkyboxFt");
                accumulate_content_id(&mut self.textures,object,"SkyboxLf");
                accumulate_content_id(&mut self.textures,object,"SkyboxRt");
                accumulate_content_id(&mut self.textures,object,"SkyboxUp");
                accumulate_content_id(&mut self.textures,object,"SunTextureId");
            },
            "UnionOperation"=>accumulate_content_id(&mut self.unions,object,"AssetId"),
            _=>(),
        }
    }
}
#[allow(unused)]
#[derive(Debug)]
// Failure modes of unique_assets: reading the file, or decoding it into a dom.
enum UniqueAssetError{
IO(std::io::Error),
LoadDom(LoadDomError),
}
/// Decode one map file and return the set of asset ids it references.
async fn unique_assets(path:&Path)->Result<UniqueAssets,UniqueAssetError>{
	// slurp the file, then sniff + decode it into a weak dom
	let data=read_entire_file(path).await.map_err(UniqueAssetError::IO)?;
	let dom=load_dom(data).map_err(UniqueAssetError::LoadDom)?;
	// into_raw() hands back (root ref, instance map); we only need the instances
	let (_root,instances)=dom.into_raw();
	let mut assets=UniqueAssets::default();
	for object in instances.into_values(){
		assets.collect(&object);
	}
	Ok(assets)
}
// One queued download job; the variant selects the cache folder the asset is
// written into.
enum DownloadType{
Texture(RobloxAssetId),
Mesh(RobloxAssetId),
Union(RobloxAssetId),
}
impl DownloadType{
	/// On-disk cache location for this asset (folder depends on the kind).
	fn path(&self)->PathBuf{
		// u64 formats directly; the previous `.0.to_string()` allocated an
		// intermediate String for nothing (clippy: unnecessary to_owned).
		match self{
			DownloadType::Texture(asset_id)=>format!("downloaded_textures/{}",asset_id.0).into(),
			DownloadType::Mesh(asset_id)=>format!("meshes/{}",asset_id.0).into(),
			DownloadType::Union(asset_id)=>format!("unions/{}",asset_id.0).into(),
		}
	}
	/// The raw numeric asset id, independent of kind.
	fn asset_id(&self)->u64{
		// all variants wrap the same payload, so one or-pattern suffices
		match self{
			DownloadType::Texture(asset_id)
			|DownloadType::Mesh(asset_id)
			|DownloadType::Union(asset_id)=>asset_id.0,
		}
	}
}
// Outcome of download_retry: the asset was already cached on disk, was freshly
// downloaded (bytes returned and also written to disk), or failed for good.
enum DownloadResult{
Cached(PathBuf),
Data(Vec<u8>),
Failed,
}
#[derive(Default,Debug)]
// Counters for one download run; dumped with dbg! when the download thread
// finishes.
struct Stats{
// incremented once per download_retry call
total_assets:u32,
// file already existed on disk
cached_assets:u32,
// fetched successfully from the asset endpoint
downloaded_assets:u32,
// non-rate-limit request failures
failed_downloads:u32,
// gave up after exhausting 429 retries
timed_out_downloads:u32,
}
/// Download one asset, retrying HTTP 429 with exponential backoff, and cache
/// it on disk.
///
/// Returns `Cached(path)` when the file already exists, `Data(bytes)` after a
/// successful download (the bytes are also written to the cache path), or
/// `Failed` when the request errored or the 12 rate-limit retries ran out.
/// Only local filesystem errors propagate as `Err`; network failures are
/// counted in `stats` and folded into `DownloadResult::Failed`.
///
/// Fix: dropped the dead self-rebind `let download_instruction=download_instruction;`.
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::CookieContext,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
	stats.total_assets+=1;
	// check if file exists on disk
	let path=download_instruction.path();
	if tokio::fs::try_exists(path.as_path()).await?{
		stats.cached_assets+=1;
		return Ok(DownloadResult::Cached(path));
	}
	let asset_id=download_instruction.asset_id();
	// if not, download with backoff: 1000ms * exp(retry/3)
	let mut retry=0;
	const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
	let mut backoff=1000f32;
	loop{
		let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{
			asset_id,
			version:None,
		}).await;
		match asset_result{
			Ok(asset_result)=>{
				stats.downloaded_assets+=1;
				// persist to the cache so future runs take the Cached path
				tokio::fs::write(path,&asset_result).await?;
				break Ok(DownloadResult::Data(asset_result));
			},
			Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
				if scwuab.status_code.as_u16()==429{
					// rate limited: retry up to 12 times before surrendering
					if retry==12{
						println!("Giving up asset download {asset_id}");
						stats.timed_out_downloads+=1;
						break Ok(DownloadResult::Failed);
					}
					println!("Hit roblox rate limit, waiting {:.0}ms...",backoff);
					tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
					backoff*=BACKOFF_MUL;
					retry+=1;
				}else{
					// any other HTTP status is treated as a permanent failure
					stats.failed_downloads+=1;
					println!("weird scuwab error: {scwuab:?}");
					break Ok(DownloadResult::Failed);
				}
			},
			Err(e)=>{
				// transport-level or decode error: permanent failure
				stats.failed_downloads+=1;
				println!("sadly error: {e}");
				break Ok(DownloadResult::Failed);
			},
		}
	}
}
#[derive(Debug,thiserror::Error)]
// Failure modes of the texture -> DDS conversion pipeline.
enum ConvertTextureError{
#[error("Io error {0:?}")]
Io(#[from]std::io::Error),
#[error("Image error {0:?}")]
Image(#[from]image::ImageError),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
async fn convert_texture(asset_id:RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
let data=match download_result{
DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
DownloadResult::Data(data)=>data,
DownloadResult::Failed=>return Ok(()),
};
// image::ImageFormat::Png
// image::ImageFormat::Jpeg
let image=image::load_from_memory(&data)?.to_rgba8();
// pick format
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
//this fails if the image dimensions are not a multiple of 4
let dds=image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
let file_name=format!("textures/{}.dds",asset_id.0);
let mut file=std::fs::File::create(file_name)?;
dds.write(&mut file)?;
Ok(())
}
/// Scan the given map files for referenced assets, download each globally
/// unique texture/mesh/union into its cache folder, and convert textures to
/// DDS on a bounded pool of worker tasks.
async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{
tokio::try_join!(
tokio::fs::create_dir_all("downloaded_textures"),
tokio::fs::create_dir_all("textures"),
tokio::fs::create_dir_all("meshes"),
tokio::fs::create_dir_all("unions"),
)?;
// use mpsc
let thread_limit=std::thread::available_parallelism()?.get();
// decode results queue (bounded by DOWNLOAD_LIMIT) and texture-conversion queue
let (send_assets,mut recv_assets)=tokio::sync::mpsc::channel(DOWNLOAD_LIMIT);
let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
// map decode dispatcher
// read files multithreaded
// produce UniqueAssetsResult per file
tokio::spawn(async move{
// move send so it gets dropped when all maps have been decoded
// closing the channel
let mut it=paths.into_iter();
// semaphore caps concurrent decode tasks at thread_limit
// NOTE(review): SEM is a process-wide static; calling download_assets twice
// in one process would add permits again — assumed single-use, TODO confirm.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let send=send_assets.clone();
tokio::spawn(async move{
let result=unique_assets(path.as_path()).await;
_=send.send(result).await;
drop(permit);
});
}
});
// download manager
// insert into global unique assets guy
// add to download queue if the asset is globally unique and does not already exist on disk
let mut stats=Stats::default();
let context=rbx_asset::cookie::CookieContext::new(cookie);
let mut globally_unique_assets=UniqueAssets::default();
// pop a job = retry_queue.pop_front() or ingest(recv.recv().await)
// SLOW MODE:
// acquire all permits
// drop all permits
// pop one job
// if it succeeds go into fast mode
// FAST MODE:
// acquire one permit
// pop a job
let download_thread=tokio::spawn(async move{
// drains decode results; HashSet::insert returning true means first sighting,
// so each asset is downloaded at most once across all maps
while let Some(result)=recv_assets.recv().await{
let unique_assets=match result{
Ok(unique_assets)=>unique_assets,
Err(e)=>{
println!("error: {e:?}");
continue;
},
};
for texture_id in unique_assets.textures{
if globally_unique_assets.textures.insert(texture_id){
let data=download_retry(&mut stats,&context,DownloadType::Texture(texture_id)).await?;
// textures additionally go through DDS conversion
send_texture.send((texture_id,data)).await?;
}
}
for mesh_id in unique_assets.meshes{
if globally_unique_assets.meshes.insert(mesh_id){
download_retry(&mut stats,&context,DownloadType::Mesh(mesh_id)).await?;
}
}
for union_id in unique_assets.unions{
if globally_unique_assets.unions.insert(union_id){
download_retry(&mut stats,&context,DownloadType::Union(union_id)).await?;
}
}
}
dbg!(stats);
Ok::<(),anyhow::Error>(())
});
// texture conversion pool: this SEM shadows (is distinct from) the one inside
// the decode task above; it caps concurrent DDS conversions at thread_limit
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){
tokio::spawn(async move{
let result=convert_texture(asset_id,download_result).await;
drop(permit);
result.unwrap();
});
}
download_thread.await??;
// reacquiring every permit waits for all in-flight conversion tasks to finish
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
Ok(())
}
#[derive(Debug)]
#[allow(dead_code)]
// Failure modes of the roblox -> SNF conversion pipeline.
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
RobloxRead(strafesnet_rbx_loader::ReadError),
RobloxLoad(strafesnet_rbx_loader::LoadError),
}
// Display just forwards to Debug; these errors only surface through anyhow
// logging, where the Debug rendering is adequate.
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let model=strafesnet_rbx_loader::read(
std::io::Cursor::new(entire_file)
).map_err(ConvertError::RobloxRead)?;
let mut place=model.into_place();
place.run_scripts();
let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(())
}
/// Convert many Roblox map files to SNF concurrently, capping in-flight
/// conversions at the machine's available parallelism, and print the elapsed
/// wall time when done.
async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
let mut it=paths.into_iter();
// semaphore bounds concurrency; each spawned task holds one permit
// NOTE(review): SEM is a process-wide static, so calling this function twice
// in one process would add permits twice — assumed single-use, TODO confirm.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),output_folder).await;
drop(permit);
match result{
Ok(())=>(),
// per-file failures are logged, not fatal to the batch
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
// reacquiring all permits waits for every in-flight conversion to finish
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
println!("elapsed={:?}", start.elapsed());
Ok(())
}

@ -1,428 +0,0 @@
use std::path::{Path,PathBuf};
use std::borrow::Cow;
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use futures::StreamExt;
use strafesnet_bsp_loader::loader::BspFinder;
use strafesnet_deferred_loader::loader::Loader;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
#[derive(Subcommand)]
// CLI subcommands for the Source-engine side of the tool.
pub enum Commands{
SourceToSNF(SourceToSNFSubcommand),
ExtractTextures(ExtractTexturesSubcommand),
VPKContents(VPKContentsSubcommand),
BSPContents(BSPContentsSubcommand),
}
#[derive(Args)]
// Arguments for converting .bsp maps to SNF.
pub struct SourceToSNFSubcommand {
// directory the .snfm files are written into
#[arg(long)]
output_folder:PathBuf,
// one or more .bsp files to convert
#[arg(required=true)]
input_files:Vec<PathBuf>,
// optional VPK archives searched for referenced resources
#[arg(long)]
vpks:Vec<PathBuf>,
}
#[derive(Args)]
// Arguments for extracting and converting the textures referenced by .bsp maps.
pub struct ExtractTexturesSubcommand{
// maps whose textures should be extracted
#[arg(required=true)]
bsp_files:Vec<PathBuf>,
// optional VPK archives searched for referenced resources
#[arg(long)]
vpks:Vec<PathBuf>,
}
#[derive(Args)]
// Arguments for listing the entries of one VPK archive.
pub struct VPKContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
#[derive(Args)]
// Arguments for listing the files packed inside one BSP's embedded zip.
pub struct BSPContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
impl Commands{
// Dispatch the selected subcommand to its implementation.
pub async fn run(self)->AResult<()>{
match self{
Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder,subcommand.vpks).await,
Commands::ExtractTextures(subcommand)=>extract_textures(subcommand.bsp_files,subcommand.vpks).await,
Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
}
}
}
// Result of probing a VMT material for a usable texture reference:
// either another material to follow (VMT), a texture path (VTF), a patch
// material to resolve, or a dead end.
enum VMTContent{
VMT(String),
VTF(String),
Patch(vmt_parser::material::PatchMaterial),
Unsupported,//don't want to deal with whatever vmt variant
Unresolved,//could not locate a texture because of vmt content
}
impl VMTContent{
	/// Wrap an optional texture path: `Some` becomes `VTF`, `None` becomes
	/// `Unresolved`.
	fn vtf(opt:Option<String>)->Self{
		opt.map_or(Self::Unresolved,Self::VTF)
	}
}
// Pick a representative texture reference out of any VMT material variant.
// Most variants surface their base texture; SubRect recurses into another
// material and Patch defers to patch resolution.
fn get_some_texture(material:vmt_parser::material::Material)->VMTContent{
//just grab some texture from somewhere for now
match material{
vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
// NOTE(review): this panics if vmt_parser adds a variant — presumably the
// enum is #[non_exhaustive] upstream, forcing a catch-all; returning
// Unsupported would be safer. TODO confirm.
_=>unreachable!(),
}
}
#[derive(Debug,thiserror::Error)]
// Failure modes of get_vmt: locating, decoding, or parsing the material file.
enum GetVMTError{
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("Utf8 error {0:?}")]
Utf8(#[from]std::str::Utf8Error),
#[error("Vdf error {0:?}")]
Vdf(#[from]vmt_parser::VdfError),
#[error("Vmt not found")]
NotFound,
}
/// Locate `search_name` via the bsp/vpk finder and parse its bytes as a VMT
/// material description.
fn get_vmt(finder:BspFinder,search_name:&str)->Result<vmt_parser::material::Material,GetVMTError>{
	// a missing entry is an error here, unlike the raw finder API
	let vmt_data=finder.find(search_name)?.ok_or(GetVMTError::NotFound)?;
	// VMT files are textual VDF; decode as UTF-8 then parse
	let vmt_str=core::str::from_utf8(&vmt_data)?;
	Ok(vmt_parser::from_str(vmt_str)?)
}
#[derive(Debug,thiserror::Error)]
// Failure modes of the recursive VMT/texture resolution chain.
enum LoadVMTError{
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("GetVMT error {0:?}")]
GetVMT(#[from]GetVMTError),
#[error("FromUtf8 error {0:?}")]
FromUtf8(#[from]std::string::FromUtf8Error),
#[error("Vdf error {0:?}")]
Vdf(#[from]vmt_parser::VdfError),
#[error("Vmt unsupported")]
Unsupported,
#[error("Vmt unresolved")]
Unresolved,
#[error("Vmt not found")]
NotFound,
}
// Follow a VMT material until it yields texture bytes: VMT references recurse
// into the referenced material, VTF references are looked up under
// materials/<name>.vtf, and Patch materials are resolved then recursed.
// The 'bsp:'a and 'vpk:'a bounds let the returned Cow borrow from either
// the bsp pak or a vpk archive.
fn recursive_vmt_loader<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,material:vmt_parser::material::Material)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
where
'bsp:'a,
'vpk:'a,
{
match get_some_texture(material){
VMTContent::VMT(s)=>recursive_vmt_loader(finder,get_vmt(finder,s.as_str())?),
VMTContent::VTF(s)=>{
let mut texture_file_name=PathBuf::from("materials");
texture_file_name.push(s);
texture_file_name.set_extension("vtf");
Ok(finder.find(texture_file_name.to_str().unwrap())?)
},
// patch materials reference a base material plus overrides; resolve the
// base through the finder, then recurse on the merged material
VMTContent::Patch(mat)=>recursive_vmt_loader(finder,
mat.resolve(|search_name|
match finder.find(search_name)?{
Some(bytes)=>Ok(String::from_utf8(bytes.into_owned())?),
None=>Err(LoadVMTError::NotFound),
}
)?
),
VMTContent::Unsupported=>Err(LoadVMTError::Unsupported),
VMTContent::Unresolved=>Err(LoadVMTError::Unresolved),
}
}
/// Resolve `texture_name` to raw texture bytes, probing (in order) the bare
/// lowercased name under `materials/`, the `.vtf` file directly, and finally
/// the `.vmt` material chain.
///
/// Fixes: removed a redundant `.clone()` of the lowercased name, and actually
/// probes the `.vtf` path — previously `set_extension("vtf")` was called but
/// the result was never searched, despite the "search for both files" intent.
fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
where
	'bsp:'a,
	'vpk:'a,
{
	let mut texture_file_name=PathBuf::from("materials");
	// source texture references are case-insensitive; normalize to lowercase
	texture_file_name.push(texture_name.to_lowercase());
	// strip any extension so both .vtf and .vmt can be probed from the stem
	let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
	texture_file_name.pop();
	texture_file_name.push(stem);
	// a bare-name hit wins outright
	if let Some(stuff)=finder.find(texture_file_name.to_str().unwrap())?{
		return Ok(Some(stuff))
	}
	let mut texture_file_name_vmt=texture_file_name.clone();
	texture_file_name.set_extension("vtf");
	texture_file_name_vmt.set_extension("vmt");
	// try the texture file directly...
	if let Some(stuff)=finder.find(texture_file_name.to_str().unwrap())?{
		return Ok(Some(stuff))
	}
	// ...otherwise resolve it through the material description
	recursive_vmt_loader(finder,get_vmt(finder,texture_file_name_vmt.to_str().unwrap())?)
}
#[derive(Debug,thiserror::Error)]
// Failure modes of gimme_them_textures.
enum ExtractTextureError{
#[error("Io error {0:?}")]
Io(#[from]std::io::Error),
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("MeshLoad error {0:?}")]
MeshLoad(#[from]strafesnet_bsp_loader::loader::MeshError),
#[error("Load VMT error {0:?}")]
LoadVMT(#[from]LoadVMTError),
}
/// Collect every texture a BSP map references — both world textures and the
/// textures of its static-prop models — load their bytes, and send each
/// (bytes, texture_path) pair down `send_texture` for conversion.
async fn gimme_them_textures(path:&Path,vpk_list:&[vpk::VPK],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{
let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?;
let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp);
let bsp=loader_bsp.as_ref();
// deferred loaders de-duplicate texture/mesh names before any I/O happens
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
for texture in bsp.textures(){
texture_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(texture.name())));
}
let mut mesh_deferred_loader=MeshDeferredLoader::new();
for prop in bsp.static_props(){
mesh_deferred_loader.acquire_mesh_id(prop.model());
}
// finder searches the bsp's own pak first, then the supplied vpk archives
let finder=BspFinder{
bsp:&loader_bsp,
vpks:vpk_list
};
let mut mesh_loader=strafesnet_bsp_loader::loader::ModelLoader::new(finder);
// load models and collect requested textures
for model_path in mesh_deferred_loader.into_indices(){
let model:vmdl::Model=match mesh_loader.load(model_path){
Ok(model)=>model,
Err(e)=>{
// a missing model only loses its textures; keep going
println!("Model={model_path} Load model error: {e}");
continue;
},
};
for texture in model.textures(){
// each texture may live under any of its declared search paths
for search_path in &texture.search_paths{
let mut path=PathBuf::from(search_path.as_str());
path.push(texture.name.as_str());
let path=path.to_str().unwrap().to_owned();
texture_deferred_loader.acquire_render_config_id(Some(Cow::Owned(path)));
}
}
}
// resolve every unique texture name and ship the bytes off for conversion
for texture_path in texture_deferred_loader.into_indices(){
match load_texture(finder,&texture_path){
Ok(Some(texture))=>send_texture.send(
(texture.into_owned(),texture_path.into_owned())
).await.unwrap(),
// Ok(None): the name resolved to nothing; silently skipped
Ok(None)=>(),
Err(e)=>println!("Texture={texture_path} Load error: {e}"),
}
}
Ok(())
}
#[derive(Debug,thiserror::Error)]
// Failure modes of the VTF -> DDS conversion.
enum ConvertTextureError{
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("Vtf error {0:?}")]
Vtf(#[from]vtf::Error),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
#[error("Io error {0:?}")]
Io(#[from]std::io::Error),
}
async fn convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<Path>)->Result<(),ConvertTextureError>{
let image=vtf::from_bytes(&texture)?.highres_image.decode(0)?.to_rgba8();
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
//this fails if the image dimensions are not a multiple of 4
let dds = image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
//write dds
let mut dest=PathBuf::from("textures");
dest.push(write_file_name);
dest.set_extension("dds");
std::fs::create_dir_all(dest.parent().unwrap())?;
let mut writer=std::io::BufWriter::new(std::fs::File::create(dest)?);
dds.write(&mut writer)?;
Ok(())
}
/// Load every VPK index concurrently (at most `thread_limit` at once).
/// VPK parsing is blocking, so each read runs on the blocking thread pool.
async fn read_vpks(vpk_paths:Vec<PathBuf>,thread_limit:usize)->Vec<vpk::VPK>{
futures::stream::iter(vpk_paths).map(|vpk_path|async{
// both unwraps are deliberate: a missing/corrupt VPK is fatal for the run,
// and the JoinError only occurs if the blocking task panicked
tokio::task::spawn_blocking(move||vpk::VPK::read(&vpk_path)).await.unwrap().unwrap()
})
.buffer_unordered(thread_limit)
.collect().await
}
/// Extract every texture referenced by the given BSP maps and convert each to
/// DDS under `textures/`, with map decoding and image conversion both bounded
/// by the machine's available parallelism.
async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
tokio::try_join!(
tokio::fs::create_dir_all("extracted_textures"),
tokio::fs::create_dir_all("textures"),
tokio::fs::create_dir_all("meshes"),
)?;
let thread_limit=std::thread::available_parallelism()?.get();
// load vpk list
let vpk_list=read_vpks(vpk_paths,thread_limit).await;
// leak vpk_list for static lifetime?
// (leaking sidesteps 'static bounds on spawned tasks; the list lives for
// the remainder of the process anyway)
let vpk_list:&[vpk::VPK]=vpk_list.leak();
let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
let mut it=paths.into_iter();
// map decode pool: sends (texture bytes, dest name) pairs down the channel
let extract_thread=tokio::spawn(async move{
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let send=send_texture.clone();
tokio::spawn(async move{
let result=gimme_them_textures(&path,vpk_list,send).await;
drop(permit);
match result{
Ok(())=>(),
// per-map failures are logged, not fatal to the batch
Err(e)=>println!("Map={path:?} Decode error: {e:?}"),
}
});
}
});
// convert images
// (this SEM is distinct from the one inside extract_thread; it bounds
// concurrent DDS conversions)
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){
// TODO: dedup dest?
tokio::spawn(async move{
let result=convert_texture(data,dest).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
extract_thread.await?;
// reacquiring every permit waits for all in-flight conversions to finish
_=SEM.acquire_many(thread_limit as u32).await?;
Ok(())
}
/// Print every (label, entry) pair contained in a VPK index file.
fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
	let vpk_index=vpk::VPK::read(&vpk_path)?;
	for (label,entry) in vpk_index.tree.into_iter(){
		// debug-print the entry; there is no Display impl to rely on
		println!("vpk label={label} entry={entry:?}");
	}
	Ok(())
}
/// List the file names stored in a BSP's embedded pak zip.
fn bsp_contents(path:PathBuf)->AResult<()>{
	let data=std::fs::read(path)?;
	let bsp=vbsp::Bsp::read(&data)?;
	// the pak lump is a zip archive; unwrap its inner reader to enumerate names
	let zip=bsp.pack.into_zip();
	for file_name in zip.into_inner().unwrap().file_names(){
		println!("file_name={file_name:?}");
	}
	Ok(())
}
#[derive(Debug)]
#[allow(dead_code)]
// Failure modes of the bsp -> SNF conversion pipeline.
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
BspRead(strafesnet_bsp_loader::ReadError),
BspLoad(strafesnet_bsp_loader::LoadError),
}
// Display just forwards to Debug; these errors only surface through anyhow
// logging, where the Debug rendering is adequate.
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
async fn convert_to_snf(path:&Path,vpk_list:&[vpk::VPK],output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let bsp=strafesnet_bsp_loader::read(
std::io::Cursor::new(entire_file)
).map_err(ConvertError::BspRead)?;
let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(())
}
/// Convert many BSP maps to SNF concurrently, capping in-flight conversions at
/// the machine's available parallelism, and print the elapsed wall time.
async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
// load vpk list
let vpk_list=read_vpks(vpk_paths,thread_limit).await;
// leak vpk_list for static lifetime?
// (leaking sidesteps 'static bounds on spawned tasks; the list lives for
// the remainder of the process anyway)
let vpk_list:&[vpk::VPK]=vpk_list.leak();
let mut it=paths.into_iter();
// semaphore bounds concurrency; each spawned task holds one permit
// NOTE(review): SEM is a process-wide static, so calling this function twice
// in one process would add permits twice — assumed single-use, TODO confirm.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),vpk_list,output_folder).await;
drop(permit);
match result{
Ok(())=>(),
// per-file failures are logged, not fatal to the batch
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
// reacquiring all permits waits for every in-flight conversion to finish
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
println!("elapsed={:?}", start.elapsed());
Ok(())
}