Compare commits

..

113 Commits

Author SHA1 Message Date
165e6cfae8 Add strafesnet registry 2024-10-11 09:21:55 -07:00
41b28fa7d2 v1.5.7 update rbx_loader 2024-10-04 20:04:13 -07:00
a2ab23097b update rbx_loader 2024-10-04 20:03:11 -07:00
602061b44c v1.5.6 update rbx_loader 2024-10-03 20:35:54 -07:00
1989369956 update rbx_loader 2024-10-03 20:33:59 -07:00
a18aea828c v1.5.5 update deps 2024-10-01 17:24:43 -07:00
b7000ee9af update deps 2024-10-01 17:20:43 -07:00
2b77ea5712 v1.5.4 update to asset-tool 0.4.X + improve asset id parsing 2024-10-01 13:15:48 -07:00
cf98f8e7bb use rbxassetid parser from deferred_loader 2024-10-01 13:02:54 -07:00
a56c114d08
Automatically create sub-directories when downloading assets; move dds files into the textures folder
the latter change is because roblox-to-snf expects the files in textures, not textures/dds
2024-09-25 23:48:59 +01:00
b6a5324ae7
Fix asset-tool invocations when downloading assets 2024-09-25 23:43:54 +01:00
6f5a3c5176 v1.5.3 roblox emulator 2024-09-21 13:48:23 -07:00
6bab31f3b3 silence dead code 2024-09-21 13:47:40 -07:00
9cdeed160f update rbx_loader & run scripts 2024-09-21 13:45:32 -07:00
d0c59b51a4 v1.5.2 2024-07-31 11:51:38 -07:00
451f3ccecb source to snf 2024-07-31 11:51:18 -07:00
ed9701981d limit parallel threads by waiting for the first thread to complete 2024-07-30 12:24:23 -07:00
60e0197344 v1.5.1 2024-07-29 16:48:02 -07:00
4d97a490c1 convert snf 2024-07-29 16:48:02 -07:00
52ba44c6be named args 2024-04-19 00:44:05 -07:00
95b6272b18 more texture sources + use asset tool to download meshes & textures 2024-04-19 00:44:05 -07:00
0172675b04 v1.5.0 rewrite clap usage + remove mapfixer stuff 2024-03-08 10:43:36 -08:00
982b4aecac rewrite clap usage 2024-03-08 10:43:36 -08:00
c1ddcdb0c5 remove mapfixer + asset-tool functions 2024-03-08 10:43:36 -08:00
c2d0a4487c misc edits 2024-03-08 10:01:54 -08:00
dc9fd2c442 import PathBuf 2024-03-08 09:55:17 -08:00
4199d41d3f timeless License 2024-01-30 18:38:47 -08:00
7fbcb206ff probably was wrong but idc about testing it 2024-01-30 16:39:57 -08:00
a17901d473 v1.4.0 valve maps 2024-01-12 11:34:09 -08:00
b88c6b899a commands for valve maps 2024-01-12 11:32:12 -08:00
835d4bbecd add valve map deps 2024-01-12 11:32:12 -08:00
b756dc979c move main to top 2024-01-12 11:32:12 -08:00
1e888ebb01 tabs 2024-01-09 01:36:14 -08:00
b9dccb1af5 update deps 2023-11-18 16:13:24 -08:00
c6d293cc6b write "Ref" attribute based on internal roblox part id 2023-11-18 15:53:37 -08:00
a386f90f51 switch to lazy_regex 2023-11-18 15:47:54 -08:00
43115cbac6 mesh downloader 2023-11-09 15:51:23 -08:00
35b5aff9a7 stop being cringe 2023-11-09 15:51:11 -08:00
36419af870 update deps 2023-10-27 18:10:11 -07:00
a7518bef46 print full path 2023-10-27 18:10:05 -07:00
6df1f41599 not a bug 2023-10-19 03:00:30 -07:00
422d0a160d print special message for roblox xml 2023-10-19 03:00:30 -07:00
1727f9213c it's actually jfif 2023-10-19 03:00:30 -07:00
afa9e7447d print fourcc on unrecognized image format 2023-10-19 03:00:30 -07:00
ff85efa54f add unzip-all command 2023-10-19 03:00:30 -07:00
fa69c53cfc JPEG support 2023-10-19 03:00:30 -07:00
a57c228580 styling 2023-10-19 03:00:30 -07:00
5dc69db885 extremely fancy code to transparently handle gzip 2023-10-19 03:00:30 -07:00
e54400a436 alphabetize for no reason 2023-10-19 03:00:30 -07:00
e2a5edf8df Fill in placeholder values
Nobody told me I was chilling with placeholder values in my license
2023-10-02 03:46:17 -04:00
d6dd1b8abd drop Jeftai Error, convert errors to anyhow 2023-10-01 20:08:43 -07:00
a2b793fcd3 add anyhow dep 2023-10-01 20:07:11 -07:00
9cb34f14c8 fixme 2023-09-30 03:38:34 -07:00
bd2e3aa2d3 v1.3.0 srgb textures + non mod 4 images export to uncompressed + rbxmx support 2023-09-30 03:15:57 -07:00
07f6053839 multiplex roblox format 2023-09-30 03:14:06 -07:00
0d5b918ea1 add rbx_xml dep 2023-09-30 03:14:05 -07:00
20a568220a use plain format if image is not the right shape 2023-09-29 18:30:01 -07:00
d670d4129e use srgb 2023-09-29 13:27:23 -07:00
de7b0bd5cc stop failing when maps are corrupted while downloading textures 2023-09-29 03:14:29 -07:00
01524146c7 fixme 2023-09-29 02:58:49 -07:00
45e8e415d0 v1.2.0 convert-textures
gzip no workie yet
some pngs upset the decoder lib
2023-09-29 02:58:49 -07:00
4417bafc5c naive multithreaded convert downloaded textures to dds 2023-09-29 02:58:32 -07:00
8553625738 change texture download path 2023-09-28 22:05:25 -07:00
3a3749eaeb add deps image, gzip (flate2), image_dds 2023-09-28 22:05:00 -07:00
53539f290b print map file 2023-09-23 13:07:52 -07:00
479dd37f53 v1.1.0 download-textures 2023-09-22 14:27:06 -07:00
34b6a869f0 add download textures command 2023-09-22 14:24:28 -07:00
19a455ee5e print wget exit status 2023-09-22 14:23:29 -07:00
9904b7a044 switch extract to pathbuf 2023-09-22 14:22:44 -07:00
6efa811eb6 generalize recursive_collect 2023-09-22 14:21:22 -07:00
81e4a201bd add delete to interactive upload 2023-09-15 17:10:33 -07:00
8fd5618af2 tweak prompt 2023-09-14 17:05:22 -07:00
54c26d6e1e ARE YOU STUPID 2023-09-14 16:23:53 -07:00
110ec94a08 v1.0.0 map tool implements some subset of operations at every step the map verification process from start to finish 2023-09-14 16:23:53 -07:00
980da5a6a7 write approximate code for uploading new asset 2023-09-14 16:22:15 -07:00
1cd77984d4 wait for downloads before exiting 2023-09-14 14:32:25 -07:00
b0fe231388 Merge pull request 'stop macro from returning' (#2) from Jeftaei/map-tool:master into master
Reviewed-on: StrafesNET/map-tool#2
2023-09-14 17:19:01 -04:00
5a4a39ab75 add upload action + wait for status code 2023-09-14 14:17:00 -07:00
Jeft
1b2324deeb stop macro from returning 2023-09-14 16:15:39 -05:00
4c485e76e4 implement upload 2023-09-14 13:54:08 -07:00
7bbb9ca24f have to write the model ref to make a valid model file 2023-09-14 13:53:52 -07:00
eff55af1b4 models are rbxm 2023-09-14 12:06:02 -07:00
0d05cc9996 comment on laziness 2023-09-13 21:22:28 -07:00
2a55ef90df don't replace std Result 2023-09-13 21:04:33 -07:00
1a6202ae66 Merge pull request 'Nice looking errors' (#1) from Jeftaei/map-tool:master into master
Reviewed-on: StrafesNET/map-tool#1
2023-09-13 16:51:24 -04:00
Jeft
742f7b4ec0 we Love errors 2023-09-13 09:13:23 -05:00
2cb346f49a re-prompt action 2023-09-13 00:28:59 -07:00
e5cca9ed04 common location 2023-09-12 19:29:51 -07:00
52d911a25a implement Exit/Delete 2023-09-12 19:29:51 -07:00
7ab20f36a7 fix parse 2023-09-12 19:05:01 -07:00
a7554da1c5 flush prompt 2023-09-12 19:05:01 -07:00
37f0dad7a1 ref cannot refer to object in another dom 2023-09-12 19:05:01 -07:00
e309f15cb8 implement extract 2023-09-12 19:04:46 -07:00
29374e4ff5 write edited file and report how many scripts were replaced 2023-09-12 19:04:46 -07:00
b7d04d1f40 this is wrong 2023-09-12 18:17:04 -07:00
432ec11ea6 fix write_dom 2023-09-12 18:17:04 -07:00
01449b1850 fix int parsing 2023-09-12 18:17:04 -07:00
327d0a4992 print map script stats on completion 2023-09-12 18:17:04 -07:00
420dbaa022 implement blocked in interactive 2023-09-12 18:17:04 -07:00
cad29af4bb panic lol 2023-09-12 18:17:04 -07:00
e0e8744bfd interactive mode v1 2023-09-12 18:17:04 -07:00
b434dce0f6 helper funcs 2023-09-12 18:17:04 -07:00
6ef8fd2f69 get_full_name 2023-09-12 18:17:04 -07:00
7234065bd8 fail better 2023-09-12 18:17:04 -07:00
41d8e700c5 Commands::Interactive 2023-09-12 18:17:04 -07:00
4ca3d56f0f upload comments 2023-09-12 18:17:04 -07:00
593b6902fd Scan::Flagged 2023-09-12 18:17:04 -07:00
7523c4313a scan enum, Source property missing if fatal 2023-09-12 18:17:04 -07:00
694440bd29 use wget to download maps 2023-09-12 18:17:04 -07:00
755e1d4d5b optimize release binary size 2023-09-12 18:17:04 -07:00
4334a6f330 v2 with clap 2023-09-12 18:17:04 -07:00
553ad2cca5 add clap dep 2023-09-12 18:17:03 -07:00
3f15d2f5a8 delete recursive function 2023-09-12 18:16:45 -07:00
10 changed files with 3012 additions and 13628 deletions

.cargo/config.toml (new file, 2 additions)

@@ -0,0 +1,2 @@
+[registries.strafesnet]
+index = "sparse+https://git.itzana.me/api/packages/strafesnet/cargo/"

Cargo.lock (generated, 2280 changed lines)

File diff suppressed because it is too large.

Cargo.toml

@@ -1,11 +1,32 @@
 [package]
 name = "map-tool"
-version = "0.1.0"
+version = "1.5.7"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
-rbx_binary = "0.7.1"
-rbx_dom_weak = "2.5.0"
-rbx_reflection_database = "0.2.7"
+anyhow = "1.0.75"
+clap = { version = "4.4.2", features = ["derive"] }
+flate2 = "1.0.27"
+image = "0.25.2"
+image_dds = "0.6.0"
+lazy-regex = "3.1.0"
+rbx_binary = { version = "0.7.4", registry = "strafesnet" }
+rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
+rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
+rbx_xml = { version = "0.13.3", registry = "strafesnet" }
+strafesnet_bsp_loader = { version = "0.2.1", registry = "strafesnet" }
+strafesnet_deferred_loader = { version = "0.4.0", features = ["legacy"], registry = "strafesnet" }
+strafesnet_rbx_loader = { version = "0.5.1", registry = "strafesnet" }
+strafesnet_snf = { version = "0.2.0", registry = "strafesnet" }
+vbsp = "0.6.0"
+vmdl = "0.2.0"
+vmt-parser = "0.2.0"
+vpk = "0.2.0"
+vtf = "0.3.0"
+#[profile.release]
+#lto = true
+#strip = true
+#codegen-units = 1
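
The [profile.release] keys at the bottom ship commented out; they appear to correspond to the earlier "optimize release binary size" commit (755e1d4d5b). As a sketch only (the checked-in manifest keeps them disabled), enabling them would look like:

[profile.release]
lto = true
strip = true
codegen-units = 1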

LICENSE (28 changed lines)

@@ -1,9 +1,23 @@
 MIT License
-Copyright (c) <year> <copyright holders>
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.

Binary file not shown.


@@ -1,126 +0,0 @@
--local Model=game:GetService'InsertService':LoadAsset(1079831188):GetChildren()[1] Model:SetModelCFrame(CFrame.new(0,Model:GetModelSize().y/2,0))
--[[ Load ID list
local ids={5692157375}
local ServerStorage=game:GetService'ServerStorage'
local function load(id)
local Model=game:GetObjects("rbxassetid://"..id)[1]
Model.Parent=workspace
Model:MoveTo(Vector3.new(0,Model:GetExtentsSize().y/2,0))
wait()
Model.Parent=ServerStorage
end
for i=1,#ids do
local succ,err=ypcall(load,ids[i])
if not succ then
print(ids[i],"error",err)
end
end
--]]
--[[ Format map names
local c=game:GetService'ServerStorage':GetChildren()
for i=1,#c do
local le_name=c[i].Name:gsub("%s+","_"):lower()
c[i].Name=le_name
local DisplayName=c[i]:FindFirstChild("DisplayName",true)
if DisplayName and DisplayName.ClassName=="StringValue" then
local dn=DisplayName.Value
local ndn={}
for w in dn:gmatch'%S+' do
ndn[#ndn+1]=w:sub(1,1):upper()..w:sub(2)
end
if table.concat(ndn," ")~=dn then
print("Fix name:",le_name)
end
end
end
--]]
--[[ Duplicate script labeler
local IsA=game.IsA
local ID=0
local SourceHash={}
local SourceHashCount={}
local NameHash={}
local IDHash={}
local c=game:GetService'ServerStorage':GetDescendants()
for i=1,#c do
local s=c[i]
if IsA(s,"LuaSourceContainer") then
local src=s.Source
NameHash[s]=s.Name
local id=SourceHash[src]
if id then
s.Name="copy "..id
SourceHashCount[id]=SourceHashCount[id]+1
else
ID=ID+1
IDHash[ID]=s
SourceHash[src]=ID
SourceHashCount[ID]=1
if src:find'getfenv' or src:find'require' then
s.Name="flagged "..ID
else
s.Name="unique "..ID
end
end
end
end
for i=1,ID do
local s=IDHash[i]
local hc=SourceHashCount[i]
s.Name=s.Name..(hc==1 and " (1 copy)" or " ("..hc.." copies)")
end
_G.NameHash=NameHash
--]]
--[[ Undo labeler
local NameHash=_G.NameHash
for s,n in next,NameHash do
s.Name=n
end
--]]
local IsA=workspace.IsA
local GetChildren=workspace.GetChildren
local function rsearch(search,cond1,cond2)
local found={}
for _,thing in next,GetChildren(search) do
if not cond1 or cond1(thing) then
found[#found+1]=thing
end
if not cond2 or cond2(thing) then
local nfound=#found
local r=rsearch(thing,cond1,cond2)
for i=1,#r do
found[nfound+i]=r[i]
end
end
end
return found
end
local function cond1(thing)
return IsA(thing,"LuaSourceContainer")
end
local Maps=GetChildren(game:GetService'ServerStorage')
for i=1,#Maps do
local Map=Maps[i]
if Map.ClassName=="Model" then
local Scripts=rsearch(Map,cond1)
if #Scripts>0 then
local ScriptHolder=Instance.new("Model",workspace)
ScriptHolder.Name=Map.Name.."("..#Scripts..")"
for i=1,#Scripts do
local sc=Scripts[i]
local scd
if sc.ClassName~="ModuleScript" then
sc.Disabled=true
end
local s=sc:Clone()
s.Name=sc:GetFullName()
s.Parent=ScriptHolder
if sc.ClassName~="ModuleScript" then
sc.Disabled=scd
end
end
end
end
end

Binary file not shown.

File diff suppressed because it is too large.


@@ -1,126 +0,0 @@
--local Model=game:GetService'InsertService':LoadAsset(1079831188):GetChildren()[1] Model:SetModelCFrame(CFrame.new(0,Model:GetModelSize().y/2,0))
--[[ Load ID list
local ids={5692157375}
local ServerStorage=game:GetService'ServerStorage'
local function load(id)
local Model=game:GetObjects("rbxassetid://"..id)[1]
Model.Parent=workspace
Model:MoveTo(Vector3.new(0,Model:GetExtentsSize().y/2,0))
wait()
Model.Parent=ServerStorage
end
for i=1,#ids do
local succ,err=ypcall(load,ids[i])
if not succ then
print(ids[i],"error",err)
end
end
--]]
--[[ Format map names
local c=game:GetService'ServerStorage':GetChildren()
for i=1,#c do
local le_name=c[i].Name:gsub("%s+","_"):lower()
c[i].Name=le_name
local DisplayName=c[i]:FindFirstChild("DisplayName",true)
if DisplayName and DisplayName.ClassName=="StringValue" then
local dn=DisplayName.Value
local ndn={}
for w in dn:gmatch'%S+' do
ndn[#ndn+1]=w:sub(1,1):upper()..w:sub(2)
end
if table.concat(ndn," ")~=dn then
print("Fix name:",le_name)
end
end
end
--]]
--[[ Duplicate script labeler
local IsA=game.IsA
local ID=0
local SourceHash={}
local SourceHashCount={}
local NameHash={}
local IDHash={}
local c=game:GetService'ServerStorage':GetDescendants()
for i=1,#c do
local s=c[i]
if IsA(s,"LuaSourceContainer") then
local src=s.Source
NameHash[s]=s.Name
local id=SourceHash[src]
if id then
s.Name="copy "..id
SourceHashCount[id]=SourceHashCount[id]+1
else
ID=ID+1
IDHash[ID]=s
SourceHash[src]=ID
SourceHashCount[ID]=1
if src:find'getfenv' or src:find'require' then
s.Name="flagged "..ID
else
s.Name="unique "..ID
end
end
end
end
for i=1,ID do
local s=IDHash[i]
local hc=SourceHashCount[i]
s.Name=s.Name..(hc==1 and " (1 copy)" or " ("..hc.." copies)")
end
_G.NameHash=NameHash
--]]
--[[ Undo labeler
local NameHash=_G.NameHash
for s,n in next,NameHash do
s.Name=n
end
--]]
local IsA=workspace.IsA
local GetChildren=workspace.GetChildren
local function rsearch(search,cond1,cond2)
local found={}
for _,thing in next,GetChildren(search) do
if not cond1 or cond1(thing) then
found[#found+1]=thing
end
if not cond2 or cond2(thing) then
local nfound=#found
local r=rsearch(thing,cond1,cond2)
for i=1,#r do
found[nfound+i]=r[i]
end
end
end
return found
end
local function cond1(thing)
return IsA(thing,"LuaSourceContainer")
end
local Maps=GetChildren(game:GetService'ServerStorage')
for i=1,#Maps do
local Map=Maps[i]
if Map.ClassName=="Model" then
local Scripts=rsearch(Map,cond1)
if #Scripts>0 then
local ScriptHolder=Instance.new("Model",workspace)
ScriptHolder.Name=Map.Name.."("..#Scripts..")"
for i=1,#Scripts do
local sc=Scripts[i]
local scd
if sc.ClassName~="ModuleScript" then
sc.Disabled=true
end
local s=sc:Clone()
s.Name=sc:GetFullName()
s.Parent=ScriptHolder
if sc.ClassName~="ModuleScript" then
sc.Disabled=scd
end
end
end
end
end

src/main.rs

@@ -1,73 +1,806 @@
fn class_is_a(class: &str, superclass: &str) -> bool {
if class==superclass {
return true
}
let class_descriptor=rbx_reflection_database::get().classes.get(class);
if let Some(descriptor) = &class_descriptor {
if let Some(class_super) = &descriptor.superclass {
return class_is_a(&class_super, superclass)
}
}
return false
use std::{collections::HashSet,io::{Read,Seek},path::PathBuf};
use clap::{Args,Parser,Subcommand};
use anyhow::Result as AResult;
use rbx_dom_weak::Instance;
use strafesnet_deferred_loader::rbxassetid::RobloxAssetId;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Cli {
#[command(subcommand)]
command: Commands,
}
fn recursive_collect_scripts(scripts: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance){
#[derive(Subcommand)]
enum Commands {
RobloxToSNF(RobloxToSNFSubcommand),
SourceToSNF(SourceToSNFSubcommand),
DownloadTextures(DownloadTexturesSubcommand),
ExtractTextures(ExtractTexturesSubcommand),
ConvertTextures(ConvertTexturesSubcommand),
VPKContents(VPKContentsSubcommand),
BSPContents(BSPContentsSubcommand),
DownloadMeshes(DownloadMeshesSubcommand),
WriteAttributes(WriteAttributesSubcommand),
}
#[derive(Args)]
struct RobloxToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
}
#[derive(Args)]
struct SourceToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
}
#[derive(Args)]
struct DownloadTexturesSubcommand {
#[arg(long,required=true)]
roblox_files:Vec<PathBuf>
}
#[derive(Args)]
struct ExtractTexturesSubcommand {
#[arg(long)]
bsp_file:PathBuf,
#[arg(long)]
vpk_dir_files:Vec<PathBuf>
}
#[derive(Args)]
struct ConvertTexturesSubcommand {
}
#[derive(Args)]
struct VPKContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
#[derive(Args)]
struct BSPContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
#[derive(Args)]
struct DownloadMeshesSubcommand {
#[arg(long,required=true)]
roblox_files:Vec<PathBuf>
}
#[derive(Args)]
struct WriteAttributesSubcommand {
}
fn main() -> AResult<()> {
let cli = Cli::parse();
match cli.command {
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder),
Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder),
Commands::DownloadTextures(subcommand)=>download_textures(subcommand.roblox_files),
Commands::ExtractTextures(subcommand)=>extract_textures(vec![subcommand.bsp_file],subcommand.vpk_dir_files),
Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
Commands::ConvertTextures(_subcommand)=>convert_textures(),
Commands::DownloadMeshes(subcommand)=>download_meshes(subcommand.roblox_files),
Commands::WriteAttributes(_subcommand)=>write_attributes(),
}
}
fn recursive_collect_regex(objects: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance, regex: &lazy_regex::Lazy<lazy_regex::Regex>){
for &referent in instance.children() {
if let Some(c) = dom.get_by_ref(referent) {
if class_is_a(c.class.as_str(), "LuaSourceContainer") {
scripts.push(c.referent());//copy ref
if regex.captures(c.name.as_str()).is_some(){
objects.push(c.referent());//copy ref
}
recursive_collect_scripts(scripts,dom,c);
recursive_collect_regex(objects,dom,c,regex);
}
}
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Using buffered I/O is recommended with rbx_binary
let input = std::io::BufReader::new(std::fs::File::open("map.rbxm")?);
let dom = rbx_binary::from_reader(input)?;
//Construct allowed scripts
let mut allowed = std::collections::HashSet::<String>::new();
for entry in std::fs::read_dir("allowed")? {
allowed.insert(std::fs::read_to_string(entry?.path())?);
fn get_button_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
let mut buttons = std::vec::Vec::new();
recursive_collect_regex(&mut buttons, dom, dom.root(),lazy_regex::regex!(r"Button(\d+)$"));
buttons
}
let mut scripts = std::vec::Vec::<rbx_dom_weak::types::Ref>::new();
recursive_collect_scripts(&mut scripts, &dom, dom.root());
enum ReaderType<'a, R:Read+Seek>{
GZip(flate2::read::GzDecoder<&'a mut R>),
Raw(&'a mut R),
}
//check scribb
let mut any_failed=false;
for (i,&referent) in scripts.iter().enumerate() {
if let Some(script) = dom.get_by_ref(referent) {
if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
if allowed.contains(s) {
println!("pass");
}else{
println!("fail");
any_failed=true;
std::fs::write(format!("blocked/{}.lua",i),s)?;
fn maybe_gzip_decode<R:Read+Seek>(input:&mut R)->AResult<ReaderType<R>>{
let mut first_2=[0u8;2];
if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_2),std::io::Seek::rewind(input)){
match &first_2{
b"\x1f\x8b"=>Ok(ReaderType::GZip(flate2::read::GzDecoder::new(input))),
_=>Ok(ReaderType::Raw(input)),
}
}else{
println!("failed to get source");
any_failed=true;
Err(anyhow::Error::msg("failed to peek"))
}
}
fn load_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
let mut first_8=[0u8;8];
if let (Ok(()),Ok(()))=(std::io::Read::read_exact(input, &mut first_8),std::io::Seek::rewind(input)){
match &first_8[0..4]{
b"<rob"=>{
match &first_8[4..8]{
b"lox!"=>rbx_binary::from_reader(input).map_err(anyhow::Error::msg),
b"lox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(anyhow::Error::msg),
other=>Err(anyhow::Error::msg(format!("Unknown Roblox file type {:?}",other))),
}
},
_=>Err(anyhow::Error::msg("unsupported file type")),
}
}else{
println!("failed to deref script");
any_failed=true;
Err(anyhow::Error::msg("peek failed"))
}
}
if any_failed {
println!("One or more scripts are not allowed.");
return Ok(())//everything is not ok but idk how to return an error LMAO
fn get_dom<R:Read+Seek>(input:&mut R)->AResult<rbx_dom_weak::WeakDom>{
match maybe_gzip_decode(input){
Ok(ReaderType::GZip(mut readable)) => {
//gzip
let mut extracted:Vec<u8>=Vec::new();
readable.read_to_end(&mut extracted)?;
Ok(load_dom(&mut std::io::Cursor::new(extracted))?)
},
Ok(ReaderType::Raw(readable)) => Ok(load_dom(readable)?),
Err(e) => Err(e)?,
}
println!("All scripts passed!");
// std::process::Command::new("rbxcompiler")
// .arg("--compile=false")
// .arg("--group=6980477")
// .arg("--asset=5692139100")
// .arg("--input=map.rbxm")
// .spawn()?;
}
/* The ones I'm interested in:
Beam.Texture
Decal.Texture
FileMesh.MeshId
FileMesh.TextureId
MaterialVariant.ColorMap
MaterialVariant.MetalnessMap
MaterialVariant.NormalMap
MaterialVariant.RoughnessMap
MeshPart.MeshId
MeshPart.TextureID
ParticleEmitter.Texture
Sky.MoonTextureId
Sky.SkyboxBk
Sky.SkyboxDn
Sky.SkyboxFt
Sky.SkyboxLf
Sky.SkyboxRt
Sky.SkyboxUp
Sky.SunTextureId
SurfaceAppearance.ColorMap
SurfaceAppearance.MetalnessMap
SurfaceAppearance.NormalMap
SurfaceAppearance.RoughnessMap
SurfaceAppearance.TexturePack
*/
fn accumulate_content_id(content_list:&mut HashSet<u64>,object:&Instance,property:&str){
if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
if let Ok(asset_id)=AsRef::<str>::as_ref(content).parse::<RobloxAssetId>(){
content_list.insert(asset_id.0);
}else{
println!("Content failed to parse into AssetID: {:?}",content);
}
}else{
println!("property={} does not exist for class={}",object.class.as_str(),property);
}
}
fn download_textures(paths:Vec<PathBuf>)->AResult<()>{
println!("Reading files, this could take a hot minute...");
let mut texture_list=HashSet::new();
for path in paths{
let file=match std::fs::File::open(path.as_path()){
Ok(file)=>file,
Err(e)=>{
println!("file error {e}");
continue;
}
};
let mut input=std::io::BufReader::new(file);
match get_dom(&mut input){
Ok(dom)=>{
for object in dom.into_raw().1.into_values(){
match object.class.as_str(){
"Beam"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
"Decal"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
"Texture"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
"FileMesh"=>accumulate_content_id(&mut texture_list,&object,"TextureId"),
"MeshPart"=>accumulate_content_id(&mut texture_list,&object,"TextureID"),
"ParticleEmitter"=>accumulate_content_id(&mut texture_list,&object,"Texture"),
"Sky"=>{
accumulate_content_id(&mut texture_list,&object,"MoonTextureId");
accumulate_content_id(&mut texture_list,&object,"SkyboxBk");
accumulate_content_id(&mut texture_list,&object,"SkyboxDn");
accumulate_content_id(&mut texture_list,&object,"SkyboxFt");
accumulate_content_id(&mut texture_list,&object,"SkyboxLf");
accumulate_content_id(&mut texture_list,&object,"SkyboxRt");
accumulate_content_id(&mut texture_list,&object,"SkyboxUp");
accumulate_content_id(&mut texture_list,&object,"SunTextureId");
},
_=>(),
}
}
},
Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
}
}
let texture_list_string=texture_list.into_iter().map(|id|id.to_string()).collect::<Vec<String>>();
println!("Texture list:{:?}",texture_list_string.join(" "));
std::fs::create_dir_all("textures/unprocessed")?;
let output=std::process::Command::new("asset-tool")
.args(["download","--cookie-literal","","--output-folder","textures/unprocessed/"])
.args(texture_list_string)
.spawn()?
.wait_with_output()?;
println!("Asset tool exit_success:{}",output.status.success());
Ok(())
}
fn download_meshes(paths:Vec<PathBuf>)->AResult<()>{
println!("Reading files, this could take a hot minute...");
let mut mesh_list=HashSet::new();
for path in paths{
let file=match std::fs::File::open(path.as_path()){
Ok(file)=>file,
Err(e)=>{
println!("file error {e}");
continue;
}
};
let mut input=std::io::BufReader::new(file);
match get_dom(&mut input){
Ok(dom)=>{
for object in dom.into_raw().1.into_values(){
match object.class.as_str(){
"MeshPart"=>accumulate_content_id(&mut mesh_list,&object,"MeshId"),
"SpecialMesh"=>accumulate_content_id(&mut mesh_list,&object,"MeshId"),
_=>(),
}
}
},
Err(e)=>println!("error loading map {:?}: {:?}",path.file_name(),e),
}
}
let mesh_list_string=mesh_list.into_iter().map(|id|id.to_string()).collect::<Vec<String>>();
println!("Mesh list:{:?}",mesh_list_string.join(" "));
std::fs::create_dir_all("meshes/")?;
let output=std::process::Command::new("asset-tool")
.args(["download","--cookie-literal","","--output-folder","meshes/"])
.args(mesh_list_string)
.spawn()?
.wait_with_output()?;
println!("Asset tool exit_success:{}",output.status.success());
Ok(())
}
fn load_image<R:Read+Seek+std::io::BufRead>(input:&mut R)->AResult<image::DynamicImage>{
let mut fourcc=[0u8;4];
input.read_exact(&mut fourcc)?;
input.rewind()?;
match &fourcc{
b"\x89PNG"=>Ok(image::load(input,image::ImageFormat::Png)?),
b"\xFF\xD8\xFF\xE0"=>Ok(image::load(input,image::ImageFormat::Jpeg)?),//JFIF
b"<rob"=>Err(anyhow::Error::msg("Roblox xml garbage is not supported yet")),
other=>Err(anyhow::Error::msg(format!("Unknown texture format {:?}",other))),
}
}
fn convert(file_thing:std::fs::DirEntry) -> AResult<()>{
let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
let mut extracted_input=None;
let image=match maybe_gzip_decode(&mut input){
Ok(ReaderType::GZip(mut readable)) => {
//gzip
let mut extracted:Vec<u8>=Vec::new();
//read the entire thing to the end so that I can clone the data and write a png to processed images
readable.read_to_end(&mut extracted)?;
extracted_input=Some(extracted.clone());
load_image(&mut std::io::Cursor::new(extracted))
},
Ok(ReaderType::Raw(readable)) => load_image(readable),
Err(e) => Err(e)?,
}?.to_rgba8();//this sets a=255, arcane is actually supposed to look like that
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
//this fails if the image dimensions are not a multiple of 4
let dds = image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
//write dds
let mut dest=PathBuf::from("textures");
dest.push(file_thing.file_name());
dest.set_extension("dds");
let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
dds.write(&mut writer)?;
if let Some(mut extracted)=extracted_input{
//write extracted to processed
let mut dest=PathBuf::from("textures/processed");
dest.push(file_thing.file_name());
std::fs::write(dest, &mut extracted)?;
//delete ugly gzip file
std::fs::remove_file(file_thing.path())?;
}else{
//move file to processed
let mut dest=PathBuf::from("textures/processed");
dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?;
}
Ok(())
}
fn convert_textures() -> AResult<()>{
std::fs::create_dir_all("textures/unprocessed")?;
std::fs::create_dir_all("textures/processed")?;
let start = std::time::Instant::now();
let mut threads=Vec::new();
for entry in std::fs::read_dir("textures/unprocessed")? {
let file_thing=entry?;
threads.push(std::thread::spawn(move ||{
let file_name=format!("{:?}",file_thing);
let result=convert(file_thing);
if let Err(e)=result{
println!("error processing file:{:?} error message:{:?}",file_name,e);
}
}));
}
let mut i=0;
let n_threads=threads.len();
for thread in threads{
i+=1;
if let Err(e)=thread.join(){
println!("thread error: {:?}",e);
}else{
println!("{}/{}",i,n_threads);
}
}
println!("{:?}", start.elapsed());
Ok(())
}
fn write_attributes() -> AResult<()>{
for entry in std::fs::read_dir("maps/unprocessed")? {
let file_thing=entry?;
println!("processing map={:?}",file_thing.file_name());
let mut input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
let mut dom = get_dom(&mut input)?;
let button_refs = get_button_refs(&dom);
for &button_ref in &button_refs {
if let Some(button)=dom.get_by_ref_mut(button_ref){
match button.properties.get_mut("Attributes"){
Some(rbx_dom_weak::types::Variant::Attributes(attributes))=>{
println!("Appending Ref={} to existing attributes for {}",button_ref,button.name);
attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
},
None=>{
println!("Creating new attributes with Ref={} for {}",button_ref,button.name);
let mut attributes=rbx_dom_weak::types::Attributes::new();
attributes.insert("Ref".to_string(),rbx_dom_weak::types::Variant::String(button_ref.to_string()));
button.properties.insert("Attributes".to_string(),rbx_dom_weak::types::Variant::Attributes(attributes));
}
_=>unreachable!("Fetching attributes did not return attributes."),
}
}
}
let mut dest={
let mut dest=PathBuf::from("maps/attributes");
dest.push(file_thing.file_name());
let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
//write workspace:GetChildren()[1]
let workspace_children=dom.root().children();
if workspace_children.len()!=1{
return Err(anyhow::Error::msg("there can only be one model"));
}
rbx_binary::to_writer(output, &dom, &[workspace_children[0]])?;
//move original to processed folder
PathBuf::from("maps/unaltered")
};
dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?;
}
Ok(())
}
enum VMTContent{
VMT(String),
VTF(String),
Patch(vmt_parser::material::PatchMaterial),
Unsupported,//don't want to deal with whatever vmt variant
Unresolved,//could not locate a texture because of vmt content
}
impl VMTContent{
fn vtf(opt:Option<String>)->Self{
match opt{
Some(s)=>Self::VTF(s),
None=>Self::Unresolved,
}
}
}
fn get_some_texture(material:vmt_parser::material::Material)->AResult<VMTContent>{
//just grab some texture from somewhere for now
Ok(match material{
vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//this just dies if there is none
vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
_=>return Err(anyhow::Error::msg("vmt failed to parse")),
})
}
fn get_vmt<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,search_name:String)->AResult<vmt_parser::material::Material>{
if let Some(stuff)=find_stuff(search_name)?{
//decode vmt and then write
let stuff=String::from_utf8(stuff)?;
let material=vmt_parser::from_str(stuff.as_str())?;
println!("vmt material={:?}",material);
return Ok(material);
}
Err(anyhow::Error::msg("vmt not found"))
}
fn recursive_vmt_loader<F:Fn(String)->AResult<Option<Vec<u8>>>>(find_stuff:&F,material:vmt_parser::material::Material)->AResult<Option<Vec<u8>>>{
match get_some_texture(material)?{
VMTContent::VMT(s)=>recursive_vmt_loader(find_stuff,get_vmt(find_stuff,s)?),
VMTContent::VTF(s)=>{
let mut texture_file_name=PathBuf::from("materials");
texture_file_name.push(s);
texture_file_name.set_extension("vtf");
find_stuff(texture_file_name.into_os_string().into_string().unwrap())
},
VMTContent::Patch(mat)=>recursive_vmt_loader(find_stuff,
mat.resolve(|search_name|{
match find_stuff(search_name.to_string())?{
Some(bytes)=>Ok(String::from_utf8(bytes)?),
None=>Err(anyhow::Error::msg("could not find vmt")),
}
})?
),
VMTContent::Unsupported=>{println!("Unsupported vmt");Ok(None)},//print and move on
VMTContent::Unresolved=>{println!("Unresolved vmt");Ok(None)},
}
}
fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
std::fs::create_dir_all("textures")?;
let vpk_list:Vec<vpk::VPK>=vpk_paths.into_iter().map(|vpk_path|vpk::VPK::read(&vpk_path).expect("vpk file does not exist")).collect();
for path in paths{
let mut deduplicate=std::collections::HashSet::new();
let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
for texture in bsp.textures(){
deduplicate.insert(PathBuf::from(texture.name()));
}
//dedupe prop models
let mut model_dedupe=std::collections::HashSet::new();
for prop in bsp.static_props(){
model_dedupe.insert(prop.model());
}
//grab texture names from props
for model_name in model_dedupe{
//.mdl, .vvd, .dx90.vtx
let mut path=PathBuf::from(model_name);
let file_name=PathBuf::from(path.file_stem().unwrap());
path.pop();
path.push(file_name);
let mut vvd_path=path.clone();
let mut vtx_path=path.clone();
vvd_path.set_extension("vvd");
vtx_path.set_extension("dx90.vtx");
match (bsp.pack.get(model_name),bsp.pack.get(vvd_path.as_os_str().to_str().unwrap()),bsp.pack.get(vtx_path.as_os_str().to_str().unwrap())){
(Ok(Some(mdl_file)),Ok(Some(vvd_file)),Ok(Some(vtx_file)))=>{
match (vmdl::mdl::Mdl::read(mdl_file.as_ref()),vmdl::vvd::Vvd::read(vvd_file.as_ref()),vmdl::vtx::Vtx::read(vtx_file.as_ref())){
(Ok(mdl),Ok(vvd),Ok(vtx))=>{
let model=vmdl::Model::from_parts(mdl,vtx,vvd);
for texture in model.textures(){
for search_path in &texture.search_paths{
let mut path=PathBuf::from(search_path.as_str());
path.push(texture.name.as_str());
deduplicate.insert(path);
}
}
},
_=>println!("model_name={} error",model_name),
}
},
_=>println!("no model name={}",model_name),
}
}
let pack=&bsp.pack;
let vpk_list=&vpk_list;
std::thread::scope(move|s|{
let mut thread_handles=Vec::new();
for texture_name in deduplicate{
let mut found_texture=false;
//LMAO imagine having to write type names
let write_image=|mut stuff,write_file_name|{
let image=vtf::from_bytes(&mut stuff)?.highres_image.decode(0)?.to_rgba8();
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
//this fails if the image dimensions are not a multiple of 4
let dds = image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
//write dds
let mut dest=PathBuf::from("textures");
dest.push(write_file_name);
dest.set_extension("dds");
std::fs::create_dir_all(dest.parent().unwrap())?;
let mut writer = std::io::BufWriter::new(std::fs::File::create(dest)?);
dds.write(&mut writer)?;
Ok::<(),anyhow::Error>(())
};
let find_stuff=|search_file_name:String|{
println!("search_file_name={}",search_file_name);
match pack.get(search_file_name.as_str())?{
Some(file)=>return Ok(Some(file)),
_=>(),
}
//search pak list
for vpk_index in vpk_list{
if let Some(vpk_entry)=vpk_index.tree.get(search_file_name.as_str()){
return Ok(Some(match vpk_entry.get()?{
std::borrow::Cow::Borrowed(bytes)=>bytes.to_vec(),
std::borrow::Cow::Owned(bytes)=>bytes,
}));
}
}
Ok::<Option<Vec<u8>>,anyhow::Error>(None)
};
let loader=|texture_name:String|{
let mut texture_file_name=PathBuf::from("materials");
//lower case
let texture_file_name_lowercase=texture_name.to_lowercase();
texture_file_name.push(texture_file_name_lowercase.clone());
//remove stem and search for both vtf and vmt files
let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
texture_file_name.pop();
texture_file_name.push(stem);
//somehow search for both files
let mut texture_file_name_vmt=texture_file_name.clone();
texture_file_name.set_extension("vtf");
texture_file_name_vmt.set_extension("vmt");
if let Some(stuff)=find_stuff(texture_file_name.to_string_lossy().to_string())?{
return Ok(Some(stuff))
}
recursive_vmt_loader(&find_stuff,get_vmt(&find_stuff,texture_file_name_vmt.to_string_lossy().to_string())?)
};
if let Some(stuff)=loader(texture_name.to_string_lossy().to_string())?{
found_texture=true;
let texture_name=texture_name.clone();
thread_handles.push(s.spawn(move||write_image(stuff,texture_name)));
}
if !found_texture{
println!("no data");
}
}
for thread in thread_handles{
match thread.join(){
Ok(Err(e))=>println!("write error: {:?}",e),
Err(e)=>println!("thread error: {:?}",e),
Ok(_)=>(),
}
}
Ok::<(),anyhow::Error>(())
})?
}
Ok(())
}
fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
let vpk_index=vpk::VPK::read(&vpk_path)?;
for (label,entry) in vpk_index.tree.into_iter(){
println!("vpk label={} entry={:?}",label,entry);
}
Ok(())
}
fn bsp_contents(path:PathBuf)->AResult<()>{
let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
println!("file_name={:?}",file_name);
}
Ok(())
}
#[derive(Debug)]
#[allow(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
RbxLoader(strafesnet_rbx_loader::ReadError),
BspLoader(strafesnet_bsp_loader::ReadError),
}
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
type MapThread=std::thread::JoinHandle<Result<(),ConvertError>>;
fn roblox_to_snf(pathlist:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let n_paths=pathlist.len();
let start = std::time::Instant::now();
let mut threads:std::collections::VecDeque<MapThread>=std::collections::VecDeque::new();
let mut i=0;
let mut join_thread=|thread:MapThread|{
i+=1;
if let Err(e)=thread.join(){
println!("thread error: {:?}",e);
}else{
println!("{}/{}",i,n_paths);
}
};
for path in pathlist{
if 32<=threads.len(){
join_thread(threads.pop_front().unwrap());
}
let output_folder=output_folder.clone();
threads.push_back(std::thread::spawn(move ||{
let model=strafesnet_rbx_loader::read(
std::fs::File::open(path.as_path())
.map_err(ConvertError::IO)?
).map_err(ConvertError::RbxLoader)?;
let mut place=model.into_place();
place.run_scripts();
let mut loader=strafesnet_deferred_loader::roblox_legacy();
let (texture_loader,mesh_loader)=loader.get_inner_mut();
let map_step1=strafesnet_rbx_loader::convert(
&place,
|name|texture_loader.acquire_render_config_id(name),
|name|mesh_loader.acquire_mesh_id(name),
);
let meshpart_meshes=mesh_loader.load_meshes().map_err(ConvertError::IO)?;
let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(
meshpart_meshes.into_iter().map(|(mesh_id,loader_model)|
(mesh_id,strafesnet_rbx_loader::data::RobloxMeshBytes::new(loader_model.get()))
)
);
let (textures,render_configs)=loader.into_render_configs().map_err(ConvertError::IO)?.consume();
let map=map_step2.add_render_configs_and_textures(
render_configs.into_iter(),
textures.into_iter().map(|(texture_id,texture)|
(texture_id,match texture{
strafesnet_deferred_loader::texture::Texture::ImageDDS(data)=>data,
})
)
);
let mut dest=output_folder.clone();
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(())
}));
}
for thread in threads{
join_thread(thread);
}
println!("{:?}", start.elapsed());
Ok(())
}
fn source_to_snf(pathlist:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let n_paths=pathlist.len();
let start = std::time::Instant::now();
let mut threads:std::collections::VecDeque<MapThread>=std::collections::VecDeque::new();
let mut i=0;
let mut join_thread=|thread:MapThread|{
i+=1;
if let Err(e)=thread.join(){
println!("thread error: {:?}",e);
}else{
println!("{}/{}",i,n_paths);
}
};
for path in pathlist{
if 32<=threads.len(){
join_thread(threads.pop_front().unwrap());
}
let output_folder=output_folder.clone();
threads.push_back(std::thread::spawn(move ||{
let bsp=strafesnet_bsp_loader::read(
std::fs::File::open(path.as_path())
.map_err(ConvertError::IO)?
).map_err(ConvertError::BspLoader)?;
let mut loader=strafesnet_deferred_loader::source_legacy();
let (texture_loader,mesh_loader)=loader.get_inner_mut();
let map_step1=strafesnet_bsp_loader::convert(
&bsp,
|name|texture_loader.acquire_render_config_id(name),
|name|mesh_loader.acquire_mesh_id(name),
);
let prop_meshes=mesh_loader.load_meshes(&bsp.as_ref());
let map_step2=map_step1.add_prop_meshes(
//the type conflagulator 9000
prop_meshes.into_iter().map(|(mesh_id,loader_model)|
(mesh_id,strafesnet_bsp_loader::data::ModelData{
mdl:strafesnet_bsp_loader::data::MdlData::new(loader_model.mdl.get()),
vtx:strafesnet_bsp_loader::data::VtxData::new(loader_model.vtx.get()),
vvd:strafesnet_bsp_loader::data::VvdData::new(loader_model.vvd.get()),
})
),
|name|texture_loader.acquire_render_config_id(name),
);
let (textures,render_configs)=loader.into_render_configs().map_err(ConvertError::IO)?.consume();
let map=map_step2.add_render_configs_and_textures(
render_configs.into_iter(),
textures.into_iter().map(|(texture_id,texture)|
(texture_id,match texture{
strafesnet_deferred_loader::texture::Texture::ImageDDS(data)=>data,
})
),
);
let mut dest=output_folder.clone();
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(())
}));
}
for thread in threads{
join_thread(thread);
}
println!("{:?}", start.elapsed());
Ok(())
}