combobulator: use cached assets #333

Merged
Quaternions merged 1 commit from staging into master 2026-03-03 01:22:13 +00:00

View File

@@ -125,28 +125,29 @@ impl Processor{
let asset_id=id.0;
let dds_key=S3Cache::texture_dds_key(asset_id);
// skip if DDS already cached
if self.s3.get(&dds_key).await.map_err(Error::S3Get)?.is_some(){
println!("[combobulator] Texture {asset_id} already cached, skipping");
continue;
}
// fetch cached DDS
let dds=if let Some(dds)=self.s3.get(&dds_key).await.map_err(Error::S3Get)?{
dds
}else{
// check raw cache, download if missing
let raw_key=S3Cache::texture_raw_key(asset_id);
let raw_data=match self.s3.get(&raw_key).await.map_err(Error::S3Get)?{
Some(cached)=>cached,
None=>{
println!("[combobulator] Downloading texture {asset_id}");
let Some(data)=self.download_asset(asset_id).await? else{continue};
self.s3.put(&raw_key,data.clone()).await.map_err(Error::S3Put)?;
data
},
};
// check raw cache, download if missing
let raw_key=S3Cache::texture_raw_key(asset_id);
let raw_data=match self.s3.get(&raw_key).await.map_err(Error::S3Get)?{
Some(cached)=>cached,
None=>{
println!("[combobulator] Downloading texture {asset_id}");
let Some(data)=self.download_asset(asset_id).await? else{continue};
self.s3.put(&raw_key,data.clone()).await.map_err(Error::S3Put)?;
data
},
// convert to DDS and upload
let dds=map_tool::roblox::convert_texture_to_dds(&raw_data)
.map_err(Error::ConvertTexture)?;
self.s3.put(&dds_key,dds.clone()).await.map_err(Error::S3Put)?;
dds
};
// convert to DDS and upload
let dds=map_tool::roblox::convert_texture_to_dds(&raw_data)
.map_err(Error::ConvertTexture)?;
self.s3.put(&dds_key,dds.clone()).await.map_err(Error::S3Put)?;
println!("[combobulator] Texture {asset_id} processed");
texture_loader.insert(id,dds);
@@ -158,14 +159,14 @@ impl Processor{
let asset_id=id.0;
let mesh_key=S3Cache::mesh_key(asset_id);
if self.s3.get(&mesh_key).await.map_err(Error::S3Get)?.is_some(){
println!("[combobulator] Mesh {asset_id} already cached, skipping");
continue;
}
println!("[combobulator] Downloading mesh {asset_id}");
let Some(data)=self.download_asset(asset_id).await? else{continue};
self.s3.put(&mesh_key,data.clone()).await.map_err(Error::S3Put)?;
let data=if let Some(data)=self.s3.get(&mesh_key).await.map_err(Error::S3Get)?{
data
}else{
println!("[combobulator] Downloading mesh {asset_id}");
let Some(data)=self.download_asset(asset_id).await? else{continue};
self.s3.put(&mesh_key,data.clone()).await.map_err(Error::S3Put)?;
data
};
println!("[combobulator] Mesh {asset_id} processed");
mesh_loader.insert_mesh(id,data).map_err(Error::Mesh)?;
@@ -176,18 +177,18 @@ impl Processor{
let asset_id=id.0;
let union_key=S3Cache::union_key(asset_id);
if self.s3.get(&union_key).await.map_err(Error::S3Get)?.is_some(){
println!("[combobulator] Union {asset_id} already cached, skipping");
continue;
}
let union_result=if let Some(data)=self.s3.get(&union_key).await.map_err(Error::S3Get)?{
rbx_binary::from_reader(data.as_slice())
}else{
println!("[combobulator] Downloading union {asset_id}");
let Some(data)=self.download_asset(asset_id).await? else{continue};
println!("[combobulator] Downloading union {asset_id}");
let Some(data)=self.download_asset(asset_id).await? else{continue};
// decode the data while we have ownership
let union_result=rbx_binary::from_reader(data.as_slice());
// decode the data while we have ownership
let union_result=rbx_binary::from_reader(data.as_slice());
self.s3.put(&union_key,data).await.map_err(Error::S3Put)?;
self.s3.put(&union_key,data).await.map_err(Error::S3Put)?;
union_result
};
println!("[combobulator] Union {asset_id} processed");
// handle error after caching data