use std::{io::{Read, Seek}, path::PathBuf};

use clap::{Args, Parser, Subcommand};

use anyhow::Result as AResult;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Cli {
	#[arg(long)]
	path:Option<PathBuf>,
	#[command(subcommand)]
	command: Commands,
}

#[derive(Subcommand)]
enum Commands {
	Download(MapList),
	DownloadTextures(PathBufList),
	ExtractTextures(PathBufList),
	ConvertTextures,
	VPKContents,
	BSPContents,
	DownloadMeshes(PathBufList),
	ExtractScripts(PathBufList),
	WriteAttributes,
	Interactive,
	Replace,
	Scan,
	UnzipAll,
	Upload,
}

#[derive(Args)]
struct PathBufList {
	paths:Vec<PathBuf>
}

#[derive(Args)]
struct MapList {
	maps: Vec<u64>,
}
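/// Parse the command line and dispatch to the matching subcommand handler.
/// Note: `--path` is unwrapped by ExtractTextures/VPKContents/BSPContents, so those panic if it is omitted.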
fn main() -> AResult<()> {
	let cli = Cli::parse();
	match cli.command {
		Commands::Download(map_list)=>download(map_list.maps),
		Commands::DownloadTextures(pathlist)=>download_textures(pathlist.paths),
		Commands::ExtractTextures(pathlist)=>extract_textures(vec![cli.path.unwrap()],pathlist.paths),
		Commands::VPKContents=>vpk_contents(cli.path.unwrap()),
		Commands::BSPContents=>bsp_contents(cli.path.unwrap()),
		Commands::ConvertTextures=>convert_textures(),
		Commands::DownloadMeshes(pathlist)=>download_meshes(pathlist.paths),
		Commands::ExtractScripts(pathlist)=>extract_scripts(pathlist.paths),
		Commands::WriteAttributes=>write_attributes(),
		Commands::Interactive=>interactive(),
		Commands::Replace=>replace(),
		Commands::Scan=>scan(),
		Commands::UnzipAll=>unzip_all(),
		Commands::Upload=>upload(),
	}
}
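/// Returns true if `class` equals `superclass` or inherits from it,
/// walking the superclass chain in the bundled rbx_reflection_database.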
fn class_is_a(class: &str, superclass: &str) -> bool {
	if class==superclass {
		return true
	}
	let class_descriptor=rbx_reflection_database::get().classes.get(class);
	if let Some(descriptor) = &class_descriptor {
		if let Some(class_super) = &descriptor.superclass {
			return class_is_a(&class_super, superclass)
		}
	}
	false
}
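/// Depth-first walk of the DOM below `instance`, pushing the referent of
/// every descendant whose class is (or inherits from) `superclass`.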
fn recursive_collect_superclass(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, superclass: &str){
	for &referent in instance.children() {
		if let Some(c) = dom.get_by_ref(referent) {
			if class_is_a(c.class.as_str(), superclass) {
				objects.push(c.referent());//copy ref
			}
			recursive_collect_superclass(objects,dom,c,superclass);
		}
	}
}

fn recursive_collect_regex(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, regex: &lazy_regex::Lazy<lazy_regex::Regex>){
	for &referent in instance.children() {
		if let Some(c) = dom.get_by_ref(referent) {
			if regex.captures(c.name.as_str()).is_some(){
				objects.push(c.referent());//copy ref
			}
			recursive_collect_regex(objects,dom,c,regex);
		}
	}
}

fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance) -> String{
	let mut full_name=instance.name.clone();
	let mut pref=instance.parent();
	while let Some(parent)=dom.get_by_ref(pref){
		full_name.insert(0, '.');
		full_name.insert_str(0, &parent.name);
		pref=parent.parent();
	}
	full_name
}

//download
	//download list of maps to maps/unprocessed
//scan (scripts)
	//iter maps/unprocessed
	//passing moves to maps/verified
	//failing moves to maps/blocked
//replace (edits & deletions)
	//iter maps/blocked
	//replace scripts and put in maps/unprocessed
//upload
	//iter maps/verified
	//interactively print DisplayName/Creator and ask for target upload ids
//interactive
	//iter maps/unprocessed
	//for each unique script, load it into the file current.lua and have it open in sublime text
	//I can edit the file and it will edit it in place
	//I pass/fail(with comment)/allow each script
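/// Collect every LuaSourceContainer (Script/LocalScript/ModuleScript) referent in the DOM.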
fn get_script_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
	let mut scripts = std::vec::Vec::new();
	recursive_collect_superclass(&mut scripts, dom, dom.root(),"LuaSourceContainer");
	scripts
}

fn get_button_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
	let mut buttons = std::vec::Vec::new();
	recursive_collect_regex(&mut buttons, dom, dom.root(),lazy_regex::regex!(r"Button(\d+)$"));
	buttons
}

fn get_texture_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
	let mut objects = std::vec::Vec::new();
	recursive_collect_superclass(&mut objects, dom, dom.root(),"Decal");
	//get ids
	//clear vec
	//next class
	objects
}

fn get_mesh_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
	let mut objects = std::vec::Vec::new();
	recursive_collect_superclass(&mut objects, dom, dom.root(),"FileMesh");
	recursive_collect_superclass(&mut objects, dom, dom.root(),"MeshPart");
	//get ids
	//clear vec
	//next class
	objects
}

enum ReaderType<'a, R:Read+Seek>{
	GZip(flate2::read::GzDecoder<&'a mut R>),
	Raw(&'a mut R),
}
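/// Peek the first two bytes and wrap the reader in a gzip decoder if they match
/// the gzip magic (0x1f 0x8b); otherwise hand the rewound reader back untouched.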
fn maybe_gzip_decode<R:Read+Seek>(input:&mut R)->AResult<ReaderType<R>>{
	let mut first_2=[0u8;2];
	if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_2),std::io::Seek::rewind(input)){
		match &first_2{
			b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(input))),
			_=>Ok(ReaderType::Raw(input)),
		}
	}else{
		Err(anyhow::Error::msg("failed to peek"))
	}
}
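/// Sniff the first eight bytes: "<roblox!" is the binary place/model format,
/// "<roblox " is the XML format; anything else is rejected.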
fn load_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
	let mut first_8=[0u8;8];
	if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_8),std::io::Seek::rewind(input)){
		match &first_8[0..4]{
			b"<rob"=>{
				match &first_8[4..8]{
					b"lox!"=>rbx_binary::from_reader(input).map_err(anyhow::Error::msg),
					b"lox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(anyhow::Error::msg),
					other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
				}
			},
			_=>Err(anyhow::Error::msg("unsupported file type")),
		}
	}else{
		Err(anyhow::Error::msg("peek failed"))
	}
}
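/// Transparently gunzip the input if needed, then parse it into a WeakDom with load_dom.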
fn get_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
	match maybe_gzip_decode(input){
		Ok(ReaderType::GZip(mut readable)) => {
			//gzip
			let mut extracted:Vec<u8>=Vec::new();
			readable.read_to_end(&mut extracted)?;
			Ok(load_dom(&mut std::io::Cursor::new(extracted))?)
		},
		Ok(ReaderType::Raw(readable)) => Ok(load_dom(readable)?),
		Err(e) => Err(e)?,
	}
}

fn get_id() -> AResult<u32>{
	match std::fs::read_to_string("id"){
		Ok(id_file)=>Ok(id_file.parse::<u32>()?),
		Err(e) => match e.kind() {
			std::io::ErrorKind::NotFound => Ok(0),//implicitly take on id=0
			_ => Err(e)?,
		}
	}
}

fn get_set_from_file(dir:&str) -> AResult<std::collections::HashSet<String>>{
	let mut set=std::collections::HashSet::<String>::new();
	for entry in std::fs::read_dir(dir)? {
		set.insert(std::fs::read_to_string(entry?.path())?);
	}
	Ok(set)
}

fn get_allowed_set() -> AResult<std::collections::HashSet<String>>{
	get_set_from_file("scripts/allowed")
}

fn get_blocked() -> AResult<std::collections::HashSet<String>>{
	get_set_from_file("scripts/blocked")
}

fn get_allowed_map() -> AResult<std::collections::HashMap::<u32,String>>{
	let mut allowed_map = std::collections::HashMap::<u32,String>::new();
	for entry in std::fs::read_dir("scripts/allowed")? {
		let entry=entry?;
		allowed_map.insert(entry.path().file_stem().unwrap().to_str().unwrap().parse::<u32>()?,std::fs::read_to_string(entry.path())?);
	}
	Ok(allowed_map)
}

fn get_replace_map() -> AResult<std::collections::HashMap::<String,u32>>{
	let mut replace = std::collections::HashMap::<String,u32>::new();
	for entry in std::fs::read_dir("scripts/replace")? {
		let entry=entry?;
		replace.insert(std::fs::read_to_string(entry.path())?,entry.path().file_stem().unwrap().to_str().unwrap().parse::<u32>()?);
	}
	Ok(replace)
}

fn check_source_illegal_keywords(source:&String)->bool{
	source.contains("getfenv")||source.contains("require")
}

fn find_first_child_class<'a>(dom:&'a rbx_dom_weak::WeakDom,instance:&'a rbx_dom_weak::Instance,name:&'a str,class:&'a str) -> Option<&'a rbx_dom_weak::Instance> {
	for &referent in instance.children() {
		if let Some(c) = dom.get_by_ref(referent) {
			if c.name==name&&class_is_a(c.class.as_str(),class) {
				return Some(c);
			}
		}
	}
	None
}

fn get_mapinfo(dom:&rbx_dom_weak::WeakDom) -> AResult<(String,String,String)>{
	let workspace_children=dom.root().children();
	if workspace_children.len()!=1{
		return Err(anyhow::Error::msg("there can only be one model"));
	}
	if let Some(model_instance) = dom.get_by_ref(workspace_children[0]) {
		if let (Some(creator),Some(displayname))=(find_first_child_class(dom, model_instance, "Creator", "StringValue"),find_first_child_class(dom, model_instance, "DisplayName", "StringValue")){
			if let (
				Some(rbx_dom_weak::types::Variant::String(creator_string)),
				Some(rbx_dom_weak::types::Variant::String(displayname_string))
			)=(
				creator.properties.get("Value"),
				displayname.properties.get("Value")
			){
				return Ok((model_instance.name.clone(),creator_string.clone(),displayname_string.clone()));
			}
		}
	}
	Err(anyhow::Error::msg("no Creator/DisplayName StringValues in map"))
}
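/// Download each asset id into maps/unprocessed/ by spawning one `wget` per map,
/// authenticated with the .ROBLOSECURITY cookie taken from the RBXCOOKIE environment variable.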
fn download(map_list: Vec<u64>) -> AResult<()>{
	let header=format!("Cookie: .ROBLOSECURITY={}",std::env::var("RBXCOOKIE")?);
	let shared_args=&[
		"-q",
		"--header",
		header.as_str(),
		"-O",
	];
	let processes_result:Result<Vec<_>, _>=map_list.iter().map(|map_id|{
		std::process::Command::new("wget")
			.args(shared_args)
			.arg(format!("maps/unprocessed/{}.rbxm",map_id))
			.arg(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",map_id))
			.spawn()
	}).collect();
	//naively wait for all because idk how to make an async progress bar lmao
	for child in processes_result?{
		let output=child.wait_with_output()?;
		println!("map exit_success:{}",output.status.success());
	}
	Ok(())
}
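/// Asset id parsed from the trailing digits of a Content string
/// (for example an asset url ending in "?id=1234" or "rbxassetid://1234").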
struct RobloxAssetId(u64);
struct RobloxAssetIdParseErr;
impl std::str::FromStr for RobloxAssetId {
	type Err=RobloxAssetIdParseErr;
	fn from_str(s: &str) -> Result<Self, Self::Err>{
		let regman=lazy_regex::regex!(r"(\d+)$");
		if let Some(captures) = regman.captures(s) {
			if captures.len()==2{//captures[0] is the full match, captures[1] is the digit group; with this regex they are the same digits
				if let Ok(id) = captures[0].parse::<u64>() {
					return Ok(Self(id));
				}
			}
		}
		Err(RobloxAssetIdParseErr)
	}
}

/* The ones I'm interested in:
Beam.Texture
Decal.Texture
FileMesh.MeshId
FileMesh.TextureId
MaterialVariant.ColorMap
MaterialVariant.MetalnessMap
MaterialVariant.NormalMap
MaterialVariant.RoughnessMap
MeshPart.MeshId
MeshPart.TextureID
ParticleEmitter.Texture
Sky.MoonTextureId
Sky.SkyboxBk
Sky.SkyboxDn
Sky.SkyboxFt
Sky.SkyboxLf
Sky.SkyboxRt
Sky.SkyboxUp
Sky.SunTextureId
SurfaceAppearance.ColorMap
SurfaceAppearance.MetalnessMap
SurfaceAppearance.NormalMap
SurfaceAppearance.RoughnessMap
SurfaceAppearance.TexturePack
*/
fn download_textures(paths: Vec<PathBuf>) -> AResult<()>{
	println!("download_textures paths:{:?}",paths);
	let header=format!("Cookie: .ROBLOSECURITY={}",std::env::var("RBXCOOKIE")?);
	let shared_args=&[
		"-q",
		"--header",
		header.as_str(),
		"-O",
	];
	let mut texture_list=std::collections::HashSet::new();
	for path in paths {
		let mut input = std::io::BufReader::new(std::fs::File::open(path.clone())?);

		match get_dom(&mut input){
			Ok(dom)=>{
				let object_refs = get_texture_refs(&dom);
				for &object_ref in object_refs.iter() {
					if let Some(object)=dom.get_by_ref(object_ref){
						if let Some(rbx_dom_weak::types::Variant::Content(content)) = object.properties.get("Texture") {
							println!("Texture content:{:?}",content);
							if let Ok(asset_id)=content.clone().into_string().parse::<RobloxAssetId>(){
								texture_list.insert(asset_id.0);
							}
						}
					}
				}
			},
			Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
		}
	}
	println!("Texture list:{:?}",texture_list);
	let processes_result:Result<Vec<_>, _>=texture_list.iter().map(|asset_id|{
		std::process::Command::new("wget")
			.args(shared_args)
			.arg(format!("textures/unprocessed/{}",asset_id))
			.arg(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
			.spawn()
	}).collect();
	//naively wait for all because idk how to make an async progress bar lmao
	for child in processes_result?{
		let output=child.wait_with_output()?;
		println!("texture exit_success:{}",output.status.success());
	}
	Ok(())
}

fn download_meshes(paths: Vec<PathBuf>) -> AResult<()>{
	println!("download_meshes paths:{:?}",paths);
	let header=format!("Cookie: .ROBLOSECURITY={}",std::env::var("RBXCOOKIE")?);
	let shared_args=&[
		"-q",
		"--header",
		header.as_str(),
		"-O",
	];
	let mut mesh_list=std::collections::HashSet::new();
	for path in paths {
		let mut input = std::io::BufReader::new(std::fs::File::open(path.clone())?);

		match get_dom(&mut input){
			Ok(dom)=>{
				let object_refs = get_mesh_refs(&dom);
				for &object_ref in object_refs.iter() {
					if let Some(object)=dom.get_by_ref(object_ref){
						if let Some(rbx_dom_weak::types::Variant::Content(content)) = object.properties.get("MeshId") {
							println!("Mesh content:{:?}",content);
							if let Ok(asset_id)=content.clone().into_string().parse::<RobloxAssetId>(){
								mesh_list.insert(asset_id.0);
							}
						}
					}
				}
			},
			Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
		}
	}
	println!("Mesh list:{:?}",mesh_list);
	let processes_result:Result<Vec<_>, _>=mesh_list.iter().map(|asset_id|{
		std::process::Command::new("wget")
			.args(shared_args)
			.arg(format!("meshes/unprocessed/{}",asset_id))
			.arg(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",asset_id))
			.spawn()
	}).collect();
	//naively wait for all because idk how to make an async progress bar lmao
	for child in processes_result?{
		let output=child.wait_with_output()?;
		println!("Mesh exit_success:{}",output.status.success());
	}
	Ok(())
}

fn load_image<R:Read+Seek+std::io::BufRead>(input:&mut R)->AResult<image::DynamicImage>{
	let mut fourcc=[0u8;4];
	input.read_exact(&mut fourcc)?;
	input.rewind()?;
	match &fourcc{
		b"\x89PNG"=>Ok(image::load(input,image::ImageFormat::Png)?),
		b"\xFF\xD8\xFF\xE0"=>Ok(image::load(input,image::ImageFormat::Jpeg)?),//JFIF
		b"<rob"=>Err(anyhow::Error::msg("Roblox xml garbage is not supported yet")),
		other=>Err(anyhow::Error::msg(format!("Unknown texture format {:?}",other))),
	}
}
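/// Convert one downloaded texture in textures/unprocessed into a DDS file in textures/dds
/// (BC7 when both dimensions are multiples of 4, raw RGBA8 otherwise), then move the
/// decompressed original into textures/processed.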
fn convert(file_thing:std::fs::DirEntry) -> AResult<()>{
	let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);

	let mut extracted_input=None;
	let image=match maybe_gzip_decode(&mut input){
		Ok(ReaderType::GZip(mut readable)) => {
			//gzip
			let mut extracted:Vec<u8>=Vec::new();
			//read the entire thing to the end so that I can clone the data and write a png to processed images
			readable.read_to_end(&mut extracted)?;
			extracted_input=Some(extracted.clone());
			load_image(&mut std::io::Cursor::new(extracted))
		},
		Ok(ReaderType::Raw(readable)) => load_image(readable),
		Err(e) => Err(e)?,
	}?.to_rgba8();//this sets a=255, arcane is actually supposed to look like that

	let format=if image.width()%4!=0||image.height()%4!=0{
		image_dds::ImageFormat::R8G8B8A8Srgb
	}else{
		image_dds::ImageFormat::BC7Srgb
	};
	//this fails if the image dimensions are not a multiple of 4
	let dds = image_dds::dds_from_image(
		&image,
		format,
		image_dds::Quality::Slow,
		image_dds::Mipmaps::GeneratedAutomatic,
	)?;

	//write dds
	let mut dest=PathBuf::from("textures/dds");
	dest.push(file_thing.file_name());
	dest.set_extension("dds");
	let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
	dds.write(&mut writer)?;

	if let Some(mut extracted)=extracted_input{
		//write extracted to processed
		let mut dest=PathBuf::from("textures/processed");
		dest.push(file_thing.file_name());
		std::fs::write(dest, &mut extracted)?;
		//delete ugly gzip file
		std::fs::remove_file(file_thing.path())?;
	}else{
		//move file to processed
		let mut dest=PathBuf::from("textures/processed");
		dest.push(file_thing.file_name());
		std::fs::rename(file_thing.path(), dest)?;
	}
	Ok(())
}

fn convert_textures() -> AResult<()>{
	let start = std::time::Instant::now();
	let mut threads=Vec::new();
	for entry in std::fs::read_dir("textures/unprocessed")? {
		let file_thing=entry?;
		threads.push(std::thread::spawn(move ||{
			let file_name=format!("{:?}",file_thing);
			let result=convert(file_thing);
			if let Err(e)=result{
				println!("error processing file:{:?} error message:{:?}",file_name,e);
			}
		}));
	}
	let mut i=0;
	let n_threads=threads.len();
	for thread in threads{
		i+=1;
		if let Err(e)=thread.join(){
			println!("thread error: {:?}",e);
		}else{
			println!("{}/{}",i,n_threads);
		}
	}
	println!("{:?}", start.elapsed());
	Ok(())
}

enum Scan{
	Passed,
	Blocked,
	Flagged,
}
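/// Scan every map in maps/unprocessed: flag maps whose scripts contain getfenv/require,
/// block maps with scripts that are not in the allowed set (saving each new blocked script
/// under scripts/blocked/), and sort the files into maps/processed, maps/blocked or maps/flagged.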
fn scan() -> AResult<()>{
	let mut id = get_id()?;
	//Construct allowed scripts
	let allowed_set = get_allowed_set()?;
	let mut blocked = get_blocked()?;

	for entry in std::fs::read_dir("maps/unprocessed")? {
		let file_thing=entry?;
		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);

		let dom = get_dom(&mut input)?;

		let script_refs = get_script_refs(&dom);

		//check scribb
		let mut fail_count=0;
		let mut fail_type=Scan::Passed;
		for &script_ref in script_refs.iter() {
			if let Some(script)=dom.get_by_ref(script_ref){
				if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
					//flag keywords and instantly fail
					if check_source_illegal_keywords(s){
						println!("{:?} - flagged.",file_thing.file_name());
						fail_type=Scan::Flagged;
						break;
					}
					if allowed_set.contains(s) {
						continue;
					}else{
						fail_type=Scan::Blocked;//no need to check for Flagged, it breaks the loop.
						fail_count+=1;
						if !blocked.contains(s) {
							blocked.insert(s.clone());//all fixed! just clone!
							std::fs::write(format!("scripts/blocked/{}.lua",id),s)?;
							id+=1;
						}
					}
				}else{
					panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
				}
			}else{
				panic!("FATAL: failed to get_by_ref {:?}",script_ref);
			}
		}
		let mut dest=match fail_type {
			Scan::Passed => PathBuf::from("maps/processed"),
			Scan::Blocked => {
				println!("{:?} - {} {} not allowed.",file_thing.file_name(),fail_count,if fail_count==1 {"script"}else{"scripts"});
				PathBuf::from("maps/blocked")
			}
			Scan::Flagged => PathBuf::from("maps/flagged")
		};
		dest.push(file_thing.file_name());
		std::fs::rename(file_thing.path(), dest)?;
	}
	std::fs::write("id",id.to_string())?;
	Ok(())
}
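/// Dump every unique script source from the given map files into scripts/extracted/ for manual review.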
fn extract_scripts(paths: Vec<PathBuf>) -> AResult<()>{
	let mut id = 0;
	//deduplicate scripts across all the given maps
	let mut script_set = std::collections::HashSet::<String>::new();

	for path in paths {
		let file_name=path.file_name();
		let mut input = std::io::BufReader::new(std::fs::File::open(&path)?);

		let dom = get_dom(&mut input)?;

		let script_refs = get_script_refs(&dom);

		//extract scribb
		for &script_ref in script_refs.iter() {
			if let Some(script)=dom.get_by_ref(script_ref){
				if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
					if script_set.contains(s) {
						continue;
					}else{
						script_set.insert(s.clone());
						std::fs::write(format!("scripts/extracted/{:?}_{}_{}.lua",file_name,id,script.name),s)?;
						id+=1;
					}
				}else{
					panic!("FATAL: failed to get source for {:?}",file_name);
				}
			}else{
				panic!("FATAL: failed to get_by_ref {:?}",script_ref);
			}
		}
	}
	println!("extracted {} {}",id,if id==1 {"script"}else{"scripts"});
	Ok(())
}
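/// For every map in maps/blocked, swap each blocked script source for its approved replacement
/// (scripts/replace/<id>.lua -> scripts/allowed/<id>.lua) and write the patched model back into maps/unprocessed.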
fn replace() -> AResult<()>{
	let allowed_map=get_allowed_map()?;
	let replace_map=get_replace_map()?;

	for entry in std::fs::read_dir("maps/blocked")? {
		let file_thing=entry?;

		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
		let mut dom = get_dom(&mut input)?;

		let script_refs = get_script_refs(&dom);

		//check scribb
		let mut any_failed=false;
		for &script_ref in script_refs.iter() {
			if let Some(script)=dom.get_by_ref(script_ref){
				if let Some(rbx_dom_weak::types::Variant::String(source)) = script.properties.get("Source") {
					if let (Some(replace_id),Some(replace_script))=(replace_map.get(source),dom.get_by_ref_mut(script.referent())) {
						println!("replace {}",replace_id);
						//replace the source
						if let Some(replace_source)=allowed_map.get(replace_id){
							replace_script.properties.insert("Source".to_string(), rbx_dom_weak::types::Variant::String(replace_source.clone()));
						}else{
							println!("failed to get replacement source {}",replace_id);
							any_failed=true;
						}
					}else{
						println!("failed to get replace_id and replace_script");
						any_failed=true;
					}
				}else{
					panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
				}
			}else{
				panic!("FATAL: failed to get_by_ref {:?}",script_ref);
			}
		}
		if any_failed {
			println!("One or more scripts failed to replace.");
		}else{
			let mut dest=PathBuf::from("maps/unprocessed");
			dest.push(file_thing.file_name());
			let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
			//write workspace:GetChildren()[1]
			let workspace_children=dom.root().children();
			if workspace_children.len()!=1{
				return Err(anyhow::Error::msg("there can only be one model"));
			}
			rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
		}
	}
	Ok(())
}

enum UploadAction {
	Upload(u64),
	Skip,
	New,
	Delete,
}
struct ParseUploadActionErr;
impl std::str::FromStr for UploadAction {
	type Err=ParseUploadActionErr;
	fn from_str(s: &str) -> Result<Self, Self::Err>{
		if s=="skip\n"{
			Ok(Self::Skip)
		}else if s=="new\n"{
			Ok(Self::New)
		}else if s=="delete\n"{
			Ok(Self::Delete)
		}else if let Ok(asset_id)=s[..s.len()-1].parse::<u64>(){
			Ok(Self::Upload(asset_id))
		}else{
			Err(ParseUploadActionErr)
		}
	}
}
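/// Interactively upload every map in maps/passed with the external rbxcompiler tool:
/// type an existing asset id to overwrite it, "new" to create a new asset,
/// "skip" to leave the file alone, or "delete" to remove it.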
fn upload() -> AResult<()>{
	//interactive prompt per upload:
	for entry in std::fs::read_dir("maps/passed")? {
		let file_thing=entry?;
		println!("map file: {:?}",file_thing.file_name());
		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);

		let dom = get_dom(&mut input)?;
		let (modelname,creator,displayname) = get_mapinfo(&dom)?;

		//Creator: [auto fill creator]
		//DisplayName: [auto fill DisplayName]
		//id: ["New" for blank because of my double enter key]
		print!("Model name: {}\nCreator: {}\nDisplayName: {}\nAction or Upload Asset Id: ",modelname,creator,displayname);
		std::io::Write::flush(&mut std::io::stdout())?;
		let upload_action;
		loop{
			let mut upload_action_string = String::new();
			std::io::stdin().read_line(&mut upload_action_string)?;
			if let Ok(parsed_upload_action)=upload_action_string.parse::<UploadAction>(){
				upload_action=parsed_upload_action;
				break;
			}else{
				print!("Action or Upload Asset Id: ");
				std::io::Write::flush(&mut std::io::stdout())?;
			}
		}
		match upload_action {
			UploadAction::Upload(asset_id) => {
				let status=std::process::Command::new("../rbxcompiler-linux-amd64")
					.arg("--compile=false")
					.arg("--group=6980477")
					.arg(format!("--asset={}",asset_id))
					.arg(format!("--input={}",file_thing.path().into_os_string().into_string().unwrap()))
					.status()?;
				match status.code() {
					Some(0)=>{
						//move file
						let mut dest=PathBuf::from("maps/uploaded");
						dest.push(file_thing.file_name());
						std::fs::rename(file_thing.path(), dest)?;
					}
					Some(code)=>println!("upload failed! code={}",code),
					None => println!("no status code!"),
				}
			}
			UploadAction::Skip => continue,
			UploadAction::New => {
				let output=std::process::Command::new("../rbxcompiler-linux-amd64")
					.arg("--compile=false")
					.arg("--group=6980477")
					.arg("--new-asset=true")
					.arg(format!("--input={}",file_thing.path().into_os_string().into_string().unwrap()))
					.output()?;
				match output.status.code() {
					Some(0)=>{
						//print output
						println!("{}", std::str::from_utf8(output.stdout.as_slice())?);
						//move file
						let mut dest=PathBuf::from("maps/uploaded");
						dest.push(file_thing.file_name());
						std::fs::rename(file_thing.path(), dest)?;
					}
					Some(code)=>println!("upload failed! code={}",code),
					None => println!("no status code!"),
				}
			}
			UploadAction::Delete => std::fs::remove_file(file_thing.path())?,
		}
	}
	Ok(())
}

enum Interactive{
	Passed,
	Blocked,
	Flagged,
}
enum ScriptAction {
	Pass,
	Replace(u32),
	Flag,
	Block,
	Delete,
}
enum ScriptActionParseResult {
	Pass,
	Block,
	Exit,
	Delete,
}
struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult {
	type Err=ParseScriptActionErr;
	fn from_str(s: &str) -> Result<Self, Self::Err>{
		if s=="pass\n"||s=="1\n"{
			Ok(Self::Pass)
		}else if s=="block\n"{
			Ok(Self::Block)
		}else if s=="exit\n"{
			Ok(Self::Exit)
		}else if s=="delete\n"{
			Ok(Self::Delete)
		}else{
			Err(ParseScriptActionErr)
		}
	}
}
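/// Manual review loop over maps/unprocessed: each unknown script is written to current.lua for editing,
/// then "pass"/"1" allows it (or records a replacement if the file was edited), "block" blocks it,
/// "delete" removes the instance, and "exit" stops the run. Maps end up in maps/passed, maps/blocked,
/// maps/flagged or maps/unaltered depending on the outcome.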
fn interactive() -> AResult<()>{
	let mut id=get_id()?;
	//Construct allowed scripts
	let mut allowed_set=get_allowed_set()?;
	let mut allowed_map=get_allowed_map()?;
	let mut replace_map=get_replace_map()?;
	let mut blocked = get_blocked()?;

	'map_loop: for entry in std::fs::read_dir("maps/unprocessed")? {
		let file_thing=entry?;
		println!("processing map={:?}",file_thing.file_name());
		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
		let mut dom = get_dom(&mut input)?;

		let script_refs = get_script_refs(&dom);

		//check scribb
		let mut script_count=0;
		let mut replace_count=0;
		let mut block_count=0;
		let mut fail_type=Interactive::Passed;
		for &script_ref in script_refs.iter() {
			if let Some(script)=dom.get_by_ref(script_ref){
				if let Some(rbx_dom_weak::types::Variant::String(source)) = script.properties.get("Source") {
					script_count+=1;
					let source_action=if check_source_illegal_keywords(source) {
						ScriptAction::Flag//script triggers flagging -> Flag
					} else if blocked.contains(source) {
						ScriptAction::Block//script is blocked -> Block
					} else if allowed_set.contains(source) {
						ScriptAction::Pass//script is allowed -> Pass
					}else if let Some(replace_id)=replace_map.get(source) {
						ScriptAction::Replace(*replace_id)
					}else{
						//interactive logic goes here
						print!("unresolved source location={}\naction: ",get_full_name(&dom, script));
						std::io::Write::flush(&mut std::io::stdout())?;
						//load source into current.lua
						std::fs::write("current.lua",source)?;
						//prompt action in terminal
						//wait for input
						let script_action;
						loop{
							let mut action_string = String::new();
							std::io::stdin().read_line(&mut action_string)?;
							if let Ok(parsed_script_action)=action_string.parse::<ScriptActionParseResult>(){
								script_action=parsed_script_action;
								break;
							}else{
								print!("action: ");
								std::io::Write::flush(&mut std::io::stdout())?;
							}
						}
						//update allowed/replace/blocked
						match script_action{
							ScriptActionParseResult::Pass => {
								//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
								let modified_source=std::fs::read_to_string("current.lua")?;
								if &modified_source==source{
									//it's always new.
									//insert allowed_set
									allowed_set.insert(modified_source.clone());
									//insert allowed_map
									allowed_map.insert(id,modified_source.clone());
									//write allowed/id.lua
									std::fs::write(format!("scripts/allowed/{}.lua",id),modified_source)?;
									id+=1;
									ScriptAction::Pass
								}else{
									//insert allowed_set
									allowed_set.insert(modified_source.clone());
									//insert allowed_map
									allowed_map.insert(id,modified_source.clone());
									//insert replace_map
									replace_map.insert(source.clone(),id);//this cannot be reached if it already exists
									//write allowed/id.lua
									std::fs::write(format!("scripts/allowed/{}.lua",id),modified_source)?;
									//write replace/id.lua
									std::fs::write(format!("scripts/replace/{}.lua",id),source)?;
									let ret=ScriptAction::Replace(id);
									id+=1;
									ret
								}
							},
							ScriptActionParseResult::Block => {
								blocked.insert(source.clone());
								std::fs::write(format!("scripts/blocked/{}.lua",id),source)?;
								id+=1;
								ScriptAction::Block
							},
							ScriptActionParseResult::Exit => break 'map_loop,
							ScriptActionParseResult::Delete => ScriptAction::Delete,
						}
					};

					let location=get_full_name(&dom, script);
					match source_action{
						ScriptAction::Pass => println!("passed source location={}",location),
						ScriptAction::Replace(replace_id)=>{
							//replace the source
							if let (Some(replace_source),Some(replace_script))=(allowed_map.get(&replace_id),dom.get_by_ref_mut(script.referent())){
								replace_count+=1;
								println!("replaced source id={} location={}",replace_id,location);
								replace_script.properties.insert("Source".to_string(), rbx_dom_weak::types::Variant::String(replace_source.clone()));
							}else{
								panic!("failed to get replacement source id={} location={}",replace_id,location);
							}
						},
						ScriptAction::Delete => {
							println!("deleted source location={}",location);
							replace_count+=1;//trigger a new file generation
							dom.destroy(script.referent());
						},
						ScriptAction::Flag => {
							println!("flagged source location={}",location);
							fail_type=Interactive::Flagged;
						},
						ScriptAction::Block => {
							block_count+=1;
							println!("blocked source location={}",location);
							match fail_type{
								Interactive::Passed => fail_type=Interactive::Blocked,
								_=>(),
							}
						},
					}
				}else{
					panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
				}
			}else{
				panic!("FATAL: failed to get_by_ref {:?}",script_ref);
			}
		}
		let mut dest=match fail_type{
			Interactive::Passed => {
				println!("map={:?} passed with {} {}",file_thing.file_name(),script_count,if script_count==1 {"script"}else{"scripts"});
				if replace_count==0{
					PathBuf::from("maps/passed")
				}else{
					//create new file
					println!("{} {} replaced - generating new file...",replace_count,if replace_count==1 {"script was"}else{"scripts were"});
					let mut dest=PathBuf::from("maps/passed");
					dest.push(file_thing.file_name());
					let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
					//write workspace:GetChildren()[1]
					let workspace_children=dom.root().children();
					if workspace_children.len()!=1{
						return Err(anyhow::Error::msg("there can only be one model"));
					}
					rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
					//move original to unaltered folder
					PathBuf::from("maps/unaltered")
				}
			},//write map into maps/passed
			Interactive::Blocked => {
				println!("map={:?} blocked with {}/{} {} blocked",file_thing.file_name(),block_count,script_count,if script_count==1 {"script"}else{"scripts"});
				PathBuf::from("maps/blocked")
			},//write map into maps/blocked
			Interactive::Flagged => {
				println!("map={:?} flagged",file_thing.file_name());
				PathBuf::from("maps/flagged")
			},//write map into maps/flagged
		};
		dest.push(file_thing.file_name());
		std::fs::rename(file_thing.path(), dest)?;
	}
	std::fs::write("id",id.to_string())?;
	Ok(())
}

fn unzip_all()->AResult<()>{
	for entry in std::fs::read_dir("maps/unprocessed")? {
		let file_thing=entry?;
		println!("processing map={:?}",file_thing.file_name());
		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
		match maybe_gzip_decode(&mut input){
			Ok(ReaderType::GZip(mut readable)) => {
				//gzip
				let mut extracted:Vec<u8>=Vec::new();
				readable.read_to_end(&mut extracted)?;
				//write extracted
				let mut dest=PathBuf::from("maps/unzipped");
				dest.push(file_thing.file_name());
				std::fs::write(dest, &mut extracted)?;
				//delete ugly gzip file
				std::fs::remove_file(file_thing.path())?;
			},
			Ok(ReaderType::Raw(_)) => (),
			Err(e) => Err(e)?,
		}
	}
	Ok(())
}
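/// Stamp every Button<N> instance with an Attributes entry "Ref" holding its DOM referent,
/// write the modified model to maps/attributes and move the original to maps/unaltered.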
fn write_attributes() -> AResult<()>{
	for entry in std::fs::read_dir("maps/unprocessed")? {
		let file_thing=entry?;
		println!("processing map={:?}",file_thing.file_name());
		let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
		let mut dom = get_dom(&mut input)?;

		let button_refs = get_button_refs(&dom);

		for &button_ref in &button_refs {
			if let Some(button)=dom.get_by_ref_mut(button_ref){
				match button.properties.get_mut("Attributes"){
					Some(rbx_dom_weak::types::Variant::Attributes(attributes))=>{
						println!("Appending Ref={} to existing attributes for {}",button_ref,button.name);
						attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
					},
					None=>{
						println!("Creating new attributes with Ref={} for {}",button_ref,button.name);
						let mut attributes=rbx_dom_weak::types::Attributes::new();
						attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
						button.properties.insert("Attributes".to_string(),rbx_dom_weak::types::Variant::Attributes(attributes));
					}
					_=>unreachable!("Fetching attributes did not return attributes."),
				}
			}
		}
		let mut dest={
			let mut dest=PathBuf::from("maps/attributes");
			dest.push(file_thing.file_name());
			let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
			//write workspace:GetChildren()[1]
			let workspace_children=dom.root().children();
			if workspace_children.len()!=1{
				return Err(anyhow::Error::msg("there can only be one model"));
			}
			rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
			//move original to processed folder
			PathBuf::from("maps/unaltered")
		};
		dest.push(file_thing.file_name());
		std::fs::rename(file_thing.path(), dest)?;
	}
	Ok(())
}

enum VMTContent{
	VMT(String),
	VTF(String),
	Patch(vmt_parser::material::PatchMaterial),
	Unsupported,//don't want to deal with whatever vmt variant
	Unresolved,//could not locate a texture because of vmt content
}
impl VMTContent{
	fn vtf(opt:Option<String>)->Self{
		match opt{
			Some(s)=>Self::VTF(s),
			None=>Self::Unresolved,
		}
	}
}

fn get_some_texture(material:vmt_parser::material::Material)->AResult<VMTContent>{
	//just grab some texture from somewhere for now
	Ok(match material{
		vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
		vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
		vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
		vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
		vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
		vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
		vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
		vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
		_=>return Err(anyhow::Error::msg("vmt failed to parse")),
	})
}

fn get_vmt<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,search_name:String)->AResult<vmt_parser::material::Material>{
	if let Some(stuff)=find_stuff(search_name)?{
		//decode vmt and then write
		let stuff=String::from_utf8(stuff)?;
		let material=vmt_parser::from_str(stuff.as_str())?;
		println!("vmt material={:?}",material);
		return Ok(material);
	}
	Err(anyhow::Error::msg("vmt not found"))
}

fn recursive_vmt_loader<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,material:vmt_parser::material::Material)->AResult<Option<Vec<u8>>>{
	match get_some_texture(material)?{
		VMTContent::VMT(s)=>recursive_vmt_loader(find_stuff,get_vmt(find_stuff,s)?),
		VMTContent::VTF(s)=>{
			let mut texture_file_name=PathBuf::from("materials");
			texture_file_name.push(s);
			texture_file_name.set_extension("vtf");
			find_stuff(texture_file_name.into_os_string().into_string().unwrap())
		},
		VMTContent::Patch(mat)=>recursive_vmt_loader(find_stuff,
			mat.resolve(|search_name|{
				match find_stuff(search_name.to_string())?{
					Some(bytes)=>Ok(String::from_utf8(bytes)?),
					None=>Err(anyhow::Error::msg("could not find vmt")),
				}
			})?
		),
		VMTContent::Unsupported=>{println!("Unsupported vmt");Ok(None)},//print and move on
		VMTContent::Unresolved=>{println!("Unresolved vmt");Ok(None)},
	}
}
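/// Collect texture names from a BSP (world textures plus textures referenced by packed static prop models),
/// resolve each one through the map's pakfile and the given VPK archives (following VMT -> VTF indirection),
/// and convert every VTF found to DDS under textures/dds using one thread per texture.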
fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
	let vpk_list:Vec<vpk::VPK>=vpk_paths.into_iter().map(|vpk_path|vpk::VPK::read(&vpk_path).expect("vpk file does not exist")).collect();
	for path in paths{
		let mut deduplicate=std::collections::HashSet::new();
		let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
		for texture in bsp.textures(){
			deduplicate.insert(PathBuf::from(texture.name()));
		}
		//dedupe prop models
		let mut model_dedupe=std::collections::HashSet::new();
		for prop in bsp.static_props(){
			model_dedupe.insert(prop.model());
		}

		//grab texture names from props
		for model_name in model_dedupe{
			//.mdl, .vvd, .dx90.vtx
			let mut path=PathBuf::from(model_name);
			let file_name=PathBuf::from(path.file_stem().unwrap());
			path.pop();
			path.push(file_name);
			let mut vvd_path=path.clone();
			let mut vtx_path=path.clone();
			vvd_path.set_extension("vvd");
			vtx_path.set_extension("dx90.vtx");
			match (bsp.pack.get(model_name),bsp.pack.get(vvd_path.as_os_str().to_str().unwrap()),bsp.pack.get(vtx_path.as_os_str().to_str().unwrap())){
				(Ok(Some(mdl_file)),Ok(Some(vvd_file)),Ok(Some(vtx_file)))=>{
					match (vmdl::mdl::Mdl::read(mdl_file.as_ref()),vmdl::vvd::Vvd::read(vvd_file.as_ref()),vmdl::vtx::Vtx::read(vtx_file.as_ref())){
						(Ok(mdl),Ok(vvd),Ok(vtx))=>{
							let model=vmdl::Model::from_parts(mdl,vtx,vvd);
							for texture in model.textures(){
								for search_path in &texture.search_paths{
									let mut path=PathBuf::from(search_path.as_str());
									path.push(texture.name.as_str());
									deduplicate.insert(path);
								}
							}
						},
						_=>println!("model_name={} error",model_name),
					}
				},
				_=>println!("no model name={}",model_name),
			}
		}

		let pack=&bsp.pack;
		let vpk_list=&vpk_list;
		std::thread::scope(move|s|{
			let mut thread_handles=Vec::new();
			for texture_name in deduplicate{
				let mut found_texture=false;
				//LMAO imagine having to write type names
				let write_image=|mut stuff,write_file_name|{
					let image=vtf::from_bytes(&mut stuff)?.highres_image.decode(0)?.to_rgba8();

					let format=if image.width()%4!=0||image.height()%4!=0{
						image_dds::ImageFormat::R8G8B8A8Srgb
					}else{
						image_dds::ImageFormat::BC7Srgb
					};
					//this fails if the image dimensions are not a multiple of 4
					let dds = image_dds::dds_from_image(
						&image,
						format,
						image_dds::Quality::Slow,
						image_dds::Mipmaps::GeneratedAutomatic,
					)?;

					//write dds
					let mut dest=PathBuf::from("textures/dds");
					dest.push(write_file_name);
					dest.set_extension("dds");
					std::fs::create_dir_all(dest.parent().unwrap())?;
					let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
					dds.write(&mut writer)?;
					Ok::<(),anyhow::Error>(())
				};
				let find_stuff=|search_file_name:String|{
					println!("search_file_name={}",search_file_name);
					match pack.get(search_file_name.as_str())?{
						Some(file)=>return Ok(Some(file)),
						_=>(),
					}
					//search pak list
					for vpk_index in vpk_list{
						if let Some(vpk_entry)=vpk_index.tree.get(search_file_name.as_str()){
							return Ok(Some(match vpk_entry.get()?{
								std::borrow::Cow::Borrowed(bytes)=>bytes.to_vec(),
								std::borrow::Cow::Owned(bytes)=>bytes,
							}));
						}
					}
					Ok::<Option<Vec<u8>>,anyhow::Error>(None)
				};
				let loader=|texture_name:String|{
					let mut texture_file_name=PathBuf::from("materials");
					//lower case
					let texture_file_name_lowercase=texture_name.to_lowercase();
					texture_file_name.push(texture_file_name_lowercase.clone());
					//remove stem and search for both vtf and vmt files
					let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
					texture_file_name.pop();
					texture_file_name.push(stem);
					//somehow search for both files
					let mut texture_file_name_vmt=texture_file_name.clone();
					texture_file_name.set_extension("vtf");
					texture_file_name_vmt.set_extension("vmt");
					if let Some(stuff)=find_stuff(texture_file_name.to_string_lossy().to_string())?{
						return Ok(Some(stuff))
					}
					recursive_vmt_loader(&find_stuff,get_vmt(&find_stuff,texture_file_name_vmt.to_string_lossy().to_string())?)
				};
				if let Some(stuff)=loader(texture_name.to_string_lossy().to_string())?{
					found_texture=true;
					let texture_name=texture_name.clone();
					thread_handles.push(s.spawn(move||write_image(stuff,texture_name)));
				}
				if !found_texture{
					println!("no data");
				}
			}
			for thread in thread_handles{
				match thread.join(){
					Ok(Err(e))=>println!("write error: {:?}",e),
					Err(e)=>println!("thread error: {:?}",e),
					Ok(_)=>(),
				}
			}
			Ok::<(),anyhow::Error>(())
		})?
	}
	Ok(())
}

fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
	let vpk_index=vpk::VPK::read(&vpk_path)?;
	for (label,entry) in vpk_index.tree.into_iter(){
		println!("vpk label={} entry={:?}",label,entry);
	}
	Ok(())
}

fn bsp_contents(path:PathBuf)->AResult<()>{
	let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
	for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
		println!("file_name={:?}",file_name);
	}
	Ok(())
}