diff --git a/map-tool/Cargo.toml b/map-tool/Cargo.toml
new file mode 100644
index 0000000..0f541f5
--- /dev/null
+++ b/map-tool/Cargo.toml
@@ -0,0 +1,37 @@
+[package]
+name = "map-tool"
+version = "1.7.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = "1.0.75"
+clap = { version = "4.4.2", features = ["derive"] }
+flate2 = "1.0.27"
+futures = "0.3.31"
+image = "0.25.2"
+image_dds = "0.7.1"
+lazy-regex = "3.1.0"
+rbx_asset = { version = "0.2.5", registry = "strafesnet" }
+rbx_binary = { version = "0.7.4", registry = "strafesnet" }
+rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
+rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
+rbx_xml = { version = "0.13.3", registry = "strafesnet" }
+rbxassetid = { version = "0.1.0", registry = "strafesnet" }
+strafesnet_bsp_loader = { version = "0.3.0", registry = "strafesnet" }
+strafesnet_deferred_loader = { version = "0.5.0", registry = "strafesnet" }
+strafesnet_rbx_loader = { version = "0.6.0", registry = "strafesnet" }
+strafesnet_snf = { version = "0.3.0", registry = "strafesnet" }
+thiserror = "2.0.11"
+tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
+vbsp = "0.6.0"
+vmdl = "0.2.0"
+vmt-parser = "0.2.0"
+vpk = "0.2.0"
+vtf = "0.3.0"
+
+#[profile.release]
+#lto = true
+#strip = true
+#codegen-units = 1
diff --git a/map-tool/LICENSE b/map-tool/LICENSE
new file mode 100644
index 0000000..468cd79
--- /dev/null
+++ b/map-tool/LICENSE
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/map-tool/README.md b/map-tool/README.md
new file mode 100644
index 0000000..dc0184d
--- /dev/null
+++ b/map-tool/README.md
@@ -0,0 +1,3 @@
+# map-tool
+
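+Command line tool for converting Roblox and Source engine maps to the SNF format and for downloading or extracting the assets they reference.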
diff --git a/map-tool/src/main.rs b/map-tool/src/main.rs
new file mode 100644
index 0000000..da5352b
--- /dev/null
+++ b/map-tool/src/main.rs
@@ -0,0 +1,31 @@
+mod roblox;
+mod source;
+
+use clap::{Parser,Subcommand};
+use anyhow::Result as AResult;
+
+#[derive(Parser)]
+#[command(author, version, about, long_about = None)]
+#[command(propagate_version = true)]
+struct Cli {
+	#[command(subcommand)]
+	command: Commands,
+}
+
+#[derive(Subcommand)]
+enum Commands{
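+	// #[command(flatten)] merges each module's subcommands into this top-level command list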
+	#[command(flatten)]
+	Roblox(roblox::Commands),
+	#[command(flatten)]
+	Source(source::Commands),
+}
+
+#[tokio::main]
+async fn main()->AResult<()>{
+	let cli=Cli::parse();
+	match cli.command{
+		Commands::Roblox(commands)=>commands.run().await,
+		Commands::Source(commands)=>commands.run().await,
+	}
+}
diff --git a/map-tool/src/roblox.rs b/map-tool/src/roblox.rs
new file mode 100644
index 0000000..da2c0d1
--- /dev/null
+++ b/map-tool/src/roblox.rs
@@ -0,0 +1,435 @@
+use std::path::{Path,PathBuf};
+use std::io::{Cursor,Read,Seek};
+use std::collections::HashSet;
+use clap::{Args,Subcommand};
+use anyhow::Result as AResult;
+use rbx_dom_weak::Instance;
+use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
+use rbxassetid::RobloxAssetId;
+use tokio::io::AsyncReadExt;
+
+const DOWNLOAD_LIMIT:usize=16;
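+// capacity of the channel that feeds per-file asset lists to the download task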
+
+#[derive(Subcommand)]
+pub enum Commands{
+	RobloxToSNF(RobloxToSNFSubcommand),
+	DownloadAssets(DownloadAssetsSubcommand),
+}
+
+#[derive(Args)]
+pub struct RobloxToSNFSubcommand {
+	#[arg(long)]
+	output_folder:PathBuf,
+	#[arg(required=true)]
+	input_files:Vec<PathBuf>,
+}
+#[derive(Args)]
+pub struct DownloadAssetsSubcommand{
+	#[arg(required=true)]
+	roblox_files:Vec<PathBuf>,
+	// #[arg(long)]
+	// cookie_file:Option<String>,
+}
+
+impl Commands{
+	pub async fn run(self)->AResult<()>{
+		match self{
+			Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
+			Commands::DownloadAssets(subcommand)=>download_assets(
+				subcommand.roblox_files,
+				rbx_asset::cookie::Cookie::new("".to_string()),
+			).await,
+		}
+	}
+}
+
+#[allow(unused)]
+#[derive(Debug)]
+enum LoadDomError{
+	IO(std::io::Error),
+	Binary(rbx_binary::DecodeError),
+	Xml(rbx_xml::DecodeError),
+	UnknownFormat,
+}
+fn load_dom<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{
+	let mut first_8=[0u8;8];
+	input.read_exact(&mut first_8).map_err(LoadDomError::IO)?;
+	input.rewind().map_err(LoadDomError::IO)?;
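+	// Roblox binary places start with the magic b"<roblox!"; the XML format starts with b"<roblox "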
+	match &first_8{
+		b"<roblox!"=>rbx_binary::from_reader(input).map_err(LoadDomError::Binary),
+		b"<roblox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(LoadDomError::Xml),
+		_=>Err(LoadDomError::UnknownFormat),
+	}
+}
+
+/* The ones I'm interested in:
+Beam.Texture
+Decal.Texture
+FileMesh.MeshId
+FileMesh.TextureId
+MaterialVariant.ColorMap
+MaterialVariant.MetalnessMap
+MaterialVariant.NormalMap
+MaterialVariant.RoughnessMap
+MeshPart.MeshId
+MeshPart.TextureID
+ParticleEmitter.Texture
+Sky.MoonTextureId
+Sky.SkyboxBk
+Sky.SkyboxDn
+Sky.SkyboxFt
+Sky.SkyboxLf
+Sky.SkyboxRt
+Sky.SkyboxUp
+Sky.SunTextureId
+SurfaceAppearance.ColorMap
+SurfaceAppearance.MetalnessMap
+SurfaceAppearance.NormalMap
+SurfaceAppearance.RoughnessMap
+SurfaceAppearance.TexturePack
+*/
+fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&str){
+	if let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property){
+		let url:&str=content.as_ref();
+		if let Ok(asset_id)=url.parse(){
+			content_list.insert(asset_id);
+		}else{
+			println!("Content failed to parse into AssetID: {:?}",content);
+		}
+	}else{
+		println!("property={} is missing or not Content for class={}",property,object.class.as_str());
+	}
+}
+async fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
+	let mut file=tokio::fs::File::open(path).await?;
+	let mut data=Vec::new();
+	file.read_to_end(&mut data).await?;
+	Ok(Cursor::new(data))
+}
+#[derive(Default)]
+struct UniqueAssets{
+	meshes:HashSet<RobloxAssetId>,
+	unions:HashSet<RobloxAssetId>,
+	textures:HashSet<RobloxAssetId>,
+}
+impl UniqueAssets{
+	fn collect(&mut self,object:&Instance){
+		match object.class.as_str(){
+			"Beam"=>accumulate_content_id(&mut self.textures,object,"Texture"),
+			"Decal"=>accumulate_content_id(&mut self.textures,object,"Texture"),
+			"Texture"=>accumulate_content_id(&mut self.textures,object,"Texture"),
+			"FileMesh"=>accumulate_content_id(&mut self.textures,object,"TextureId"),
+			"MeshPart"=>{
+				accumulate_content_id(&mut self.textures,object,"TextureID");
+				accumulate_content_id(&mut self.meshes,object,"MeshId");
+			},
+			"SpecialMesh"=>accumulate_content_id(&mut self.meshes,object,"MeshId"),
+			"ParticleEmitter"=>accumulate_content_id(&mut self.textures,object,"Texture"),
+			"Sky"=>{
+				accumulate_content_id(&mut self.textures,object,"MoonTextureId");
+				accumulate_content_id(&mut self.textures,object,"SkyboxBk");
+				accumulate_content_id(&mut self.textures,object,"SkyboxDn");
+				accumulate_content_id(&mut self.textures,object,"SkyboxFt");
+				accumulate_content_id(&mut self.textures,object,"SkyboxLf");
+				accumulate_content_id(&mut self.textures,object,"SkyboxRt");
+				accumulate_content_id(&mut self.textures,object,"SkyboxUp");
+				accumulate_content_id(&mut self.textures,object,"SunTextureId");
+			},
+			"UnionOperation"=>accumulate_content_id(&mut self.unions,object,"AssetId"),
+			_=>(),
+		}
+	}
+}
+
+#[allow(unused)]
+#[derive(Debug)]
+enum UniqueAssetError{
+	IO(std::io::Error),
+	LoadDom(LoadDomError),
+}
+async fn unique_assets(path:&Path)->Result<UniqueAssets,UniqueAssetError>{
+	// read entire file
+	let mut assets=UniqueAssets::default();
+	let data=read_entire_file(path).await.map_err(UniqueAssetError::IO)?;
+	let dom=load_dom(data).map_err(UniqueAssetError::LoadDom)?;
+	for object in dom.into_raw().1.into_values(){
+		assets.collect(&object);
+	}
+	Ok(assets)
+}
+enum DownloadType{
+	Texture(RobloxAssetId),
+	Mesh(RobloxAssetId),
+	Union(RobloxAssetId),
+}
+impl DownloadType{
+	fn path(&self)->PathBuf{
+		match self{
+			DownloadType::Texture(asset_id)=>format!("downloaded_textures/{}",asset_id.0).into(),
+			DownloadType::Mesh(asset_id)=>format!("meshes/{}",asset_id.0).into(),
+			DownloadType::Union(asset_id)=>format!("unions/{}",asset_id.0).into(),
+		}
+	}
+	fn asset_id(&self)->u64{
+		match self{
+			DownloadType::Texture(asset_id)=>asset_id.0,
+			DownloadType::Mesh(asset_id)=>asset_id.0,
+			DownloadType::Union(asset_id)=>asset_id.0,
+		}
+	}
+}
+enum DownloadResult{
+	Cached(PathBuf),
+	Data(Vec<u8>),
+	Failed,
+}
+#[derive(Default,Debug)]
+struct Stats{
+	total_assets:u32,
+	cached_assets:u32,
+	downloaded_assets:u32,
+	failed_downloads:u32,
+	timed_out_downloads:u32,
+}
+async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::CookieContext,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
+	stats.total_assets+=1;
+	// check if file exists on disk
+	let path=download_instruction.path();
+	if tokio::fs::try_exists(path.as_path()).await?{
+		stats.cached_assets+=1;
+		return Ok(DownloadResult::Cached(path));
+	}
+	let asset_id=download_instruction.asset_id();
+	// if not, download file
+	let mut retry=0;
+	const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
+	let mut backoff=1000f32;
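+	// geometric backoff starting at one second; the delay multiplies by e every three retries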
+	loop{
+		let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{
+			asset_id,
+			version:None,
+		}).await;
+		match asset_result{
+			Ok(asset_result)=>{
+				stats.downloaded_assets+=1;
+				tokio::fs::write(path,&asset_result).await?;
+				break Ok(DownloadResult::Data(asset_result));
+			},
+			Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
+				if scwuab.status_code.as_u16()==429{
+					if retry==12{
+						println!("Giving up asset download {asset_id}");
+						stats.timed_out_downloads+=1;
+						break Ok(DownloadResult::Failed);
+					}
+					println!("Hit roblox rate limit, waiting {:.0}ms...",backoff);
+					tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
+					backoff*=BACKOFF_MUL;
+					retry+=1;
+				}else{
+					stats.failed_downloads+=1;
+					println!("unexpected status code: {scwuab:?}");
+					break Ok(DownloadResult::Failed);
+				}
+			},
+			Err(e)=>{
+				stats.failed_downloads+=1;
+				println!("download error: {e}");
+				break Ok(DownloadResult::Failed);
+			},
+		}
+	}
+}
+#[derive(Debug,thiserror::Error)]
+enum ConvertTextureError{
+	#[error("Io error {0:?}")]
+	Io(#[from]std::io::Error),
+	#[error("Image error {0:?}")]
+	Image(#[from]image::ImageError),
+	#[error("DDS create error {0:?}")]
+	DDS(#[from]image_dds::CreateDdsError),
+	#[error("DDS write error {0:?}")]
+	DDSWrite(#[from]image_dds::ddsfile::Error),
+}
+async fn convert_texture(asset_id:RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
+	let data=match download_result{
+		DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
+		DownloadResult::Data(data)=>data,
+		DownloadResult::Failed=>return Ok(()),
+	};
+	// image::ImageFormat::Png
+	// image::ImageFormat::Jpeg
+	let image=image::load_from_memory(&data)?.to_rgba8();
+
+	// BC7 compresses 4x4 blocks; use uncompressed RGBA when the dimensions are not multiples of 4
+	let format=if image.width()%4!=0||image.height()%4!=0{
+		image_dds::ImageFormat::Rgba8UnormSrgb
+	}else{
+		image_dds::ImageFormat::BC7RgbaUnormSrgb
+	};
+
+	//this fails if the image dimensions are not a multiple of 4
+	let dds=image_dds::dds_from_image(
+		&image,
+		format,
+		image_dds::Quality::Slow,
+		image_dds::Mipmaps::GeneratedAutomatic,
+	)?;
+
+	let file_name=format!("textures/{}.dds",asset_id.0);
+	let mut file=std::fs::File::create(file_name)?;
+	dds.write(&mut file)?;
+	Ok(())
+}
+async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{
+	tokio::try_join!(
+		tokio::fs::create_dir_all("downloaded_textures"),
+		tokio::fs::create_dir_all("textures"),
+		tokio::fs::create_dir_all("meshes"),
+		tokio::fs::create_dir_all("unions"),
+	)?;
+	// use mpsc
+	let thread_limit=std::thread::available_parallelism()?.get();
+	let (send_assets,mut recv_assets)=tokio::sync::mpsc::channel(DOWNLOAD_LIMIT);
+	let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
+	// map decode dispatcher
+	// read files multithreaded
+	// produce UniqueAssetsResult per file
+	tokio::spawn(async move{
+		// move send so it gets dropped when all maps have been decoded
+		// closing the channel
+		let mut it=paths.into_iter();
+		static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
+		SEM.add_permits(thread_limit);
+		while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
+			let send=send_assets.clone();
+			tokio::spawn(async move{
+				let result=unique_assets(path.as_path()).await;
+				_=send.send(result).await;
+				drop(permit);
+			});
+		}
+	});
+	// download manager
+	// insert into global unique assets guy
+	// add to download queue if the asset is globally unique and does not already exist on disk
+	let mut stats=Stats::default();
+	let context=rbx_asset::cookie::CookieContext::new(cookie);
+	let mut globally_unique_assets=UniqueAssets::default();
+	// pop a job = retry_queue.pop_front() or ingest(recv.recv().await)
+	// SLOW MODE:
+	// acquire all permits
+	// drop all permits
+	// pop one job
+	// if it succeeds go into fast mode
+	// FAST MODE:
+	// acquire one permit
+	// pop a job
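+	// (not implemented yet: the loop below simply ingests and downloads one job at a time)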
+	let download_thread=tokio::spawn(async move{
+		while let Some(result)=recv_assets.recv().await{
+			let unique_assets=match result{
+				Ok(unique_assets)=>unique_assets,
+				Err(e)=>{
+					println!("error: {e:?}");
+					continue;
+				},
+			};
+			for texture_id in unique_assets.textures{
+				if globally_unique_assets.textures.insert(texture_id){
+					let data=download_retry(&mut stats,&context,DownloadType::Texture(texture_id)).await?;
+					send_texture.send((texture_id,data)).await?;
+				}
+			}
+			for mesh_id in unique_assets.meshes{
+				if globally_unique_assets.meshes.insert(mesh_id){
+					download_retry(&mut stats,&context,DownloadType::Mesh(mesh_id)).await?;
+				}
+			}
+			for union_id in unique_assets.unions{
+				if globally_unique_assets.unions.insert(union_id){
+					download_retry(&mut stats,&context,DownloadType::Union(union_id)).await?;
+				}
+			}
+		}
+		dbg!(stats);
+		Ok::<(),anyhow::Error>(())
+	});
+	static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
+	SEM.add_permits(thread_limit);
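+	// each conversion task holds one permit; re-acquiring all of them below waits for every task to finish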
+	while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){
+		tokio::spawn(async move{
+			let result=convert_texture(asset_id,download_result).await;
+			drop(permit);
+			result.unwrap();
+		});
+	}
+	download_thread.await??;
+	_=SEM.acquire_many(thread_limit as u32).await.unwrap();
+	Ok(())
+}
+
+#[derive(Debug)]
+#[allow(dead_code)]
+enum ConvertError{
+	IO(std::io::Error),
+	SNFMap(strafesnet_snf::map::Error),
+	RobloxRead(strafesnet_rbx_loader::ReadError),
+	RobloxLoad(strafesnet_rbx_loader::LoadError),
+}
+impl std::fmt::Display for ConvertError{
+	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
+		write!(f,"{self:?}")
+	}
+}
+impl std::error::Error for ConvertError{}
+async fn convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
+	let entire_file=tokio::fs::read(path).await?;
+
+	let model=strafesnet_rbx_loader::read(
+		std::io::Cursor::new(entire_file)
+	).map_err(ConvertError::RobloxRead)?;
+
+	let mut place=model.into_place();
+	place.run_scripts();
+
+	let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
+
+	let mut dest=output_folder;
+	dest.push(path.file_stem().unwrap());
+	dest.set_extension("snfm");
+	let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
+
+	strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
+
+	Ok(())
+}
+
+async fn roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
+	let start=std::time::Instant::now();
+
+	let thread_limit=std::thread::available_parallelism()?.get();
+	let mut it=paths.into_iter();
+	static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
+	SEM.add_permits(thread_limit);
+
+	while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
+		let output_folder=output_folder.clone();
+		tokio::spawn(async move{
+			let result=convert_to_snf(path.as_path(),output_folder).await;
+			drop(permit);
+			match result{
+				Ok(())=>(),
+				Err(e)=>println!("Convert error: {e:?}"),
+			}
+		});
+	}
+	_=SEM.acquire_many(thread_limit as u32).await.unwrap();
+
+	println!("elapsed={:?}", start.elapsed());
+	Ok(())
+}
diff --git a/map-tool/src/source.rs b/map-tool/src/source.rs
new file mode 100644
index 0000000..5fcecea
--- /dev/null
+++ b/map-tool/src/source.rs
@@ -0,0 +1,430 @@
+use std::path::{Path,PathBuf};
+use std::borrow::Cow;
+use clap::{Args,Subcommand};
+use anyhow::Result as AResult;
+use futures::StreamExt;
+use strafesnet_bsp_loader::loader::BspFinder;
+use strafesnet_deferred_loader::loader::Loader;
+use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader};
+
+#[derive(Subcommand)]
+pub enum Commands{
+	SourceToSNF(SourceToSNFSubcommand),
+	ExtractTextures(ExtractTexturesSubcommand),
+	VPKContents(VPKContentsSubcommand),
+	BSPContents(BSPContentsSubcommand),
+}
+
+#[derive(Args)]
+pub struct SourceToSNFSubcommand {
+	#[arg(long)]
+	output_folder:PathBuf,
+	#[arg(required=true)]
+	input_files:Vec<PathBuf>,
+	#[arg(long)]
+	vpks:Vec<PathBuf>,
+}
+#[derive(Args)]
+pub struct ExtractTexturesSubcommand{
+	#[arg(required=true)]
+	bsp_files:Vec<PathBuf>,
+	#[arg(long)]
+	vpks:Vec<PathBuf>,
+}
+#[derive(Args)]
+pub struct VPKContentsSubcommand {
+	#[arg(long)]
+	input_file:PathBuf,
+}
+#[derive(Args)]
+pub struct BSPContentsSubcommand {
+	#[arg(long)]
+	input_file:PathBuf,
+}
+
+impl Commands{
+	pub async fn run(self)->AResult<()>{
+		match self{
+			Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder,subcommand.vpks).await,
+			Commands::ExtractTextures(subcommand)=>extract_textures(subcommand.bsp_files,subcommand.vpks).await,
+			Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
+			Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
+		}
+	}
+}
+
+
+enum VMTContent{
+	VMT(String),
+	VTF(String),
+	Patch(vmt_parser::material::PatchMaterial),
+	Unsupported,//don't want to deal with whatever vmt variant
+	Unresolved,//could not locate a texture because of vmt content
+}
+impl VMTContent{
+	fn vtf(opt:Option<String>)->Self{
+		match opt{
+			Some(s)=>Self::VTF(s),
+			None=>Self::Unresolved,
+		}
+	}
+}
+
+fn get_some_texture(material:vmt_parser::material::Material)->VMTContent{
+	//just grab some texture from somewhere for now
+	match material{
+		vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
+		vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),//ends up Unresolved if neither texture is present
+		vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
+		vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
+		vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
+		vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
+		vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
+		vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
+		vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),//recursive
+		vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
+		vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
+		vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
+		vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
+		vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
+		vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
+		vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
+		vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
+		vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
+		_=>unreachable!(),
+	}
+}
+
+#[derive(Debug,thiserror::Error)]
+enum GetVMTError{
+	#[error("Bsp error {0:?}")]
+	Bsp(#[from]vbsp::BspError),
+	#[error("Utf8 error {0:?}")]
+	Utf8(#[from]std::str::Utf8Error),
+	#[error("Vdf error {0:?}")]
+	Vdf(#[from]vmt_parser::VdfError),
+	#[error("Vmt not found")]
+	NotFound,
+}
+
+fn get_vmt(finder:BspFinder,search_name:&str)->Result<vmt_parser::material::Material,GetVMTError>{
+	let vmt_data=finder.find(search_name)?.ok_or(GetVMTError::NotFound)?;
+	//decode vmt and then write
+	let vmt_str=core::str::from_utf8(&vmt_data)?;
+	let material=vmt_parser::from_str(vmt_str)?;
+	//println!("vmt material={:?}",material);
+	Ok(material)
+}
+
+#[derive(Debug,thiserror::Error)]
+enum LoadVMTError{
+	#[error("Bsp error {0:?}")]
+	Bsp(#[from]vbsp::BspError),
+	#[error("GetVMT error {0:?}")]
+	GetVMT(#[from]GetVMTError),
+	#[error("FromUtf8 error {0:?}")]
+	FromUtf8(#[from]std::string::FromUtf8Error),
+	#[error("Vdf error {0:?}")]
+	Vdf(#[from]vmt_parser::VdfError),
+	#[error("Vmt unsupported")]
+	Unsupported,
+	#[error("Vmt unresolved")]
+	Unresolved,
+	#[error("Vmt not found")]
+	NotFound,
+}
+fn recursive_vmt_loader<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,material:vmt_parser::material::Material)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
+	where
+		'bsp:'a,
+		'vpk:'a,
+{
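+	// a material may reference another material (SubRect/Patch), so keep resolving until a texture path or a dead end is reached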
+	match get_some_texture(material){
+		VMTContent::VMT(s)=>recursive_vmt_loader(finder,get_vmt(finder,s.as_str())?),
+		VMTContent::VTF(s)=>{
+			let mut texture_file_name=PathBuf::from("materials");
+			texture_file_name.push(s);
+			texture_file_name.set_extension("vtf");
+			Ok(finder.find(texture_file_name.to_str().unwrap())?)
+		},
+		VMTContent::Patch(mat)=>recursive_vmt_loader(finder,
+			mat.resolve(|search_name|
+				match finder.find(search_name)?{
+					Some(bytes)=>Ok(String::from_utf8(bytes.into_owned())?),
+					None=>Err(LoadVMTError::NotFound),
+				}
+			)?
+		),
+		VMTContent::Unsupported=>Err(LoadVMTError::Unsupported),
+		VMTContent::Unresolved=>Err(LoadVMTError::Unresolved),
+	}
+}
+fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
+	where
+		'bsp:'a,
+		'vpk:'a,
+{
+	let mut texture_file_name=PathBuf::from("materials");
+	//lower case
+	let texture_file_name_lowercase=texture_name.to_lowercase();
+	texture_file_name.push(texture_file_name_lowercase.clone());
+	//strip the extension and search for both vtf and vmt files
+	let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
+	texture_file_name.pop();
+	texture_file_name.push(stem);
+	if let Some(stuff)=finder.find(texture_file_name.to_str().unwrap())?{
+		return Ok(Some(stuff))
+	}
+	//somehow search for both files
+	let mut texture_file_name_vmt=texture_file_name.clone();
+	texture_file_name.set_extension("vtf");
+	texture_file_name_vmt.set_extension("vmt");
+	recursive_vmt_loader(finder,get_vmt(finder,texture_file_name_vmt.to_str().unwrap())?)
+}
+#[derive(Debug,thiserror::Error)]
+enum ExtractTextureError{
+	#[error("Io error {0:?}")]
+	Io(#[from]std::io::Error),
+	#[error("Bsp error {0:?}")]
+	Bsp(#[from]vbsp::BspError),
+	#[error("MeshLoad error {0:?}")]
+	MeshLoad(#[from]strafesnet_bsp_loader::loader::MeshError),
+	#[error("Load VMT error {0:?}")]
+	LoadVMT(#[from]LoadVMTError),
+}
+async fn gimme_them_textures(path:&Path,vpk_list:&[vpk::VPK],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{
+	let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?;
+	let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp);
+	let bsp=loader_bsp.as_ref();
+
+	let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
+	for texture in bsp.textures(){
+		texture_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(texture.name())));
+	}
+
+	let mut mesh_deferred_loader=MeshDeferredLoader::new();
+	for prop in bsp.static_props(){
+		mesh_deferred_loader.acquire_mesh_id(prop.model());
+	}
+
+	let finder=BspFinder{
+		bsp:&loader_bsp,
+		vpks:vpk_list
+	};
+
+	let mut mesh_loader=strafesnet_bsp_loader::loader::ModelLoader::new(finder);
+	// load models and collect requested textures
+	for model_path in mesh_deferred_loader.into_indices(){
+		let model:vmdl::Model=match mesh_loader.load(model_path){
+			Ok(model)=>model,
+			Err(e)=>{
+				println!("Model={model_path} Load model error: {e}");
+				continue;
+			},
+		};
+		for texture in model.textures(){
+			for search_path in &texture.search_paths{
+				let mut path=PathBuf::from(search_path.as_str());
+				path.push(texture.name.as_str());
+				let path=path.to_str().unwrap().to_owned();
+				texture_deferred_loader.acquire_render_config_id(Some(Cow::Owned(path)));
+			}
+		}
+	}
+
+	for texture_path in texture_deferred_loader.into_indices(){
+		match load_texture(finder,&texture_path){
+			Ok(Some(texture))=>send_texture.send(
+				(texture.into_owned(),texture_path.into_owned())
+			).await.unwrap(),
+			Ok(None)=>(),
+			Err(e)=>println!("Texture={texture_path} Load error: {e}"),
+		}
+	}
+
+	Ok(())
+}
+
+
+#[derive(Debug,thiserror::Error)]
+enum ConvertTextureError{
+	#[error("Bsp error {0:?}")]
+	Bsp(#[from]vbsp::BspError),
+	#[error("Vtf error {0:?}")]
+	Vtf(#[from]vtf::Error),
+	#[error("DDS create error {0:?}")]
+	DDS(#[from]image_dds::CreateDdsError),
+	#[error("DDS write error {0:?}")]
+	DDSWrite(#[from]image_dds::ddsfile::Error),
+	#[error("Io error {0:?}")]
+	Io(#[from]std::io::Error),
+}
+
+async fn convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<Path>)->Result<(),ConvertTextureError>{
+	let image=vtf::from_bytes(&texture)?.highres_image.decode(0)?.to_rgba8();
+
+	let format=if image.width()%4!=0||image.height()%4!=0{
+		image_dds::ImageFormat::Rgba8UnormSrgb
+	}else{
+		image_dds::ImageFormat::BC7RgbaUnormSrgb
+	};
+	//this fails if the image dimensions are not a multiple of 4
+	let dds = image_dds::dds_from_image(
+		&image,
+		format,
+		image_dds::Quality::Slow,
+		image_dds::Mipmaps::GeneratedAutomatic,
+	)?;
+
+	//write dds
+	let mut dest=PathBuf::from("textures");
+	dest.push(write_file_name);
+	dest.set_extension("dds");
+	std::fs::create_dir_all(dest.parent().unwrap())?;
+	let mut writer=std::io::BufWriter::new(std::fs::File::create(dest)?);
+	dds.write(&mut writer)?;
+
+	Ok(())
+}
+
+async fn read_vpks(vpk_paths:Vec<PathBuf>,thread_limit:usize)->Vec<vpk::VPK>{
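+	// parse up to thread_limit VPK indexes concurrently on blocking threads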
+	futures::stream::iter(vpk_paths).map(|vpk_path|async{
+		// idk why it doesn't want to pass out the errors but this is fatal anyways
+		tokio::task::spawn_blocking(move||vpk::VPK::read(&vpk_path)).await.unwrap().unwrap()
+	})
+	.buffer_unordered(thread_limit)
+	.collect().await
+}
+
+async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
+	tokio::try_join!(
+		tokio::fs::create_dir_all("extracted_textures"),
+		tokio::fs::create_dir_all("textures"),
+		tokio::fs::create_dir_all("meshes"),
+	)?;
+	let thread_limit=std::thread::available_parallelism()?.get();
+
+	// load vpk list
+	let vpk_list=read_vpks(vpk_paths,thread_limit).await;
+
+	// leak vpk_list so the spawned tasks can borrow it with a 'static lifetime
+	let vpk_list:&[vpk::VPK]=vpk_list.leak();
+
+	let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
+	let mut it=paths.into_iter();
+	let extract_thread=tokio::spawn(async move{
+		static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
+		SEM.add_permits(thread_limit);
+		while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
+			let send=send_texture.clone();
+			tokio::spawn(async move{
+				let result=gimme_them_textures(&path,vpk_list,send).await;
+				drop(permit);
+				match result{
+					Ok(())=>(),
+					Err(e)=>println!("Map={path:?} Decode error: {e:?}"),
+				}
+			});
+		}
+	});
+
+	// convert images
+	static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
+	SEM.add_permits(thread_limit);
+	while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){
+		// TODO: dedup dest?
+		tokio::spawn(async move{
+			let result=convert_texture(data,dest).await;
+			drop(permit);
+			match result{
+				Ok(())=>(),
+				Err(e)=>println!("Convert error: {e:?}"),
+			}
+		});
+	}
+	extract_thread.await?;
+	_=SEM.acquire_many(thread_limit as u32).await?;
+	Ok(())
+}
+
+fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
+	let vpk_index=vpk::VPK::read(&vpk_path)?;
+	for (label,entry) in vpk_index.tree.into_iter(){
+		println!("vpk label={} entry={:?}",label,entry);
+	}
+	Ok(())
+}
+
+fn bsp_contents(path:PathBuf)->AResult<()>{
+	let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
+	for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
+		println!("file_name={:?}",file_name);
+	}
+	Ok(())
+}
+
+#[derive(Debug)]
+#[allow(dead_code)]
+enum ConvertError{
+	IO(std::io::Error),
+	SNFMap(strafesnet_snf::map::Error),
+	BspRead(strafesnet_bsp_loader::ReadError),
+	BspLoad(strafesnet_bsp_loader::LoadError),
+}
+impl std::fmt::Display for ConvertError{
+	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
+		write!(f,"{self:?}")
+	}
+}
+impl std::error::Error for ConvertError{}
+
+async fn convert_to_snf(path:&Path,vpk_list:&[vpk::VPK],output_folder:PathBuf)->AResult<()>{
+	let entire_file=tokio::fs::read(path).await?;
+
+	let bsp=strafesnet_bsp_loader::read(
+		std::io::Cursor::new(entire_file)
+	).map_err(ConvertError::BspRead)?;
+
+	let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;
+
+	let mut dest=output_folder;
+	dest.push(path.file_stem().unwrap());
+	dest.set_extension("snfm");
+	let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
+
+	strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
+
+	Ok(())
+}
+async fn source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
+	let start=std::time::Instant::now();
+
+	let thread_limit=std::thread::available_parallelism()?.get();
+
+	// load vpk list
+	let vpk_list=read_vpks(vpk_paths,thread_limit).await;
+
+	// leak vpk_list so the spawned tasks can borrow it with a 'static lifetime
+	let vpk_list:&[vpk::VPK]=vpk_list.leak();
+
+	let mut it=paths.into_iter();
+	static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
+	SEM.add_permits(thread_limit);
+
+	while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
+		let output_folder=output_folder.clone();
+		tokio::spawn(async move{
+			let result=convert_to_snf(path.as_path(),vpk_list,output_folder).await;
+			drop(permit);
+			match result{
+				Ok(())=>(),
+				Err(e)=>println!("Convert error: {e:?}"),
+			}
+		});
+	}
+	_=SEM.acquire_many(thread_limit as u32).await.unwrap();
+
+	println!("elapsed={:?}", start.elapsed());
+	Ok(())
+}