Compare commits
22 Commits
revive-coo ... container-
| SHA1 |
|---|
| fe326c8457 |
| 0d92221a27 |
| 9c862717a5 |
| 11fee65354 |
| c947691f75 |
| cb984a9f20 |
| e46ad7a6a8 |
| 4805f3bc08 |
| 9638672dde |
| c945036d60 |
| f9bdfd0e00 |
| d468113e51 |
| b72bed638d |
| 452c00d53e |
| b89a787af2 |
| 5085f6587f |
| c856301aa6 |
| d38152600e |
| c08ff63033 |
| 6720f6213f |
| db34436d64 |
| a6ae26a93e |
.drone.yml (new file, +24)
@@ -0,0 +1,24 @@
+---
+kind: pipeline
+type: docker
+
+platform:
+  os: linux
+  arch: amd64
+
+steps:
+- name: image
+  image: plugins/docker
+  settings:
+    registry: git.itzana.me
+    repo: git.itzana.me/strafesnet/asset-tool
+    tags:
+    - latest
+    username:
+      from_secret: GIT_USER
+    password:
+      from_secret: GIT_PASS
+    dockerfile: Containerfile
+  when:
+    branch:
+    - master
Cargo.lock (generated, ±4)
@@ -110,7 +110,7 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
 
 [[package]]
 name = "asset-tool"
-version = "0.4.0"
+version = "0.4.1"
 dependencies = [
  "anyhow",
  "clap",
@@ -1166,7 +1166,7 @@ dependencies = [
 
 [[package]]
 name = "rbx_asset"
-version = "0.2.0"
+version = "0.2.1"
 dependencies = [
  "chrono",
  "flate2",
Cargo.toml
@@ -1,7 +1,7 @@
 workspace = { members = ["rbx_asset", "rox_compiler"] }
 [package]
 name = "asset-tool"
-version = "0.4.0"
+version = "0.4.1"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
Containerfile
@@ -1,6 +1,6 @@
 # Using the `rust-musl-builder` as base image, instead of
 # the official Rust toolchain
-FROM clux/muslrust:stable AS chef
+FROM docker.io/clux/muslrust:stable AS chef
 USER root
 RUN cargo install cargo-chef
 WORKDIR /app
@@ -16,8 +16,8 @@ RUN cargo chef cook --release --target x86_64-unknown-linux-musl --recipe-path r
 COPY . .
 RUN cargo build --release --target x86_64-unknown-linux-musl --bin asset-tool
 
-FROM alpine AS runtime
+FROM docker.io/alpine:latest AS runtime
 RUN addgroup -S myuser && adduser -S myuser -G myuser
 COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/asset-tool /usr/local/bin/
 USER myuser
-CMD ["/usr/local/bin/asset-tool"]
+ENTRYPOINT ["/usr/local/bin/asset-tool"]
README.md
@@ -1,3 +1,5 @@
 # asset-tool
 
+[](https://ci.itzana.me/StrafesNET/asset-tool)
+
 For uploading and downloading roblox assets.
rbx_asset/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "rbx_asset"
-version = "0.2.0"
+version = "0.2.1"
 edition = "2021"
 publish = ["strafesnet"]
 
rbx_asset/src/cloud.rs
@@ -60,20 +60,6 @@ pub struct Preview{
 	pub asset:String,
 	pub altText:String,
 }
-#[derive(Debug,serde::Deserialize,serde::Serialize)]
-#[allow(nonstandard_style,dead_code)]
-pub struct AssetResponse{
-	pub assetId:u64,
-	pub creationContext:CreationContext,
-	pub description:String,
-	pub displayName:String,
-	pub path:String,
-	pub revisionId:u64,
-	pub revisionCreateTime:chrono::DateTime<chrono::Utc>,
-	pub moderationResult:ModerationResult,
-	pub icon:String,
-	pub previews:Vec<Preview>,
-}
 #[allow(nonstandard_style,dead_code)]
 pub struct UpdatePlaceRequest{
 	pub universeId:u64,
@@ -119,7 +105,7 @@ pub struct AssetVersionsRequest{
 	pub asset_id:u64,
 	pub cursor:Option<String>,
 }
-#[derive(serde::Deserialize,serde::Serialize)]
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
 #[allow(nonstandard_style,dead_code)]
 pub struct AssetVersion{
 	pub Id:u64,
@@ -131,7 +117,7 @@ pub struct AssetVersion{
 	pub created:chrono::DateTime<chrono::Utc>,
 	pub isPublished:bool,
 }
-#[derive(serde::Deserialize)]
+#[derive(Debug,serde::Deserialize)]
 #[allow(nonstandard_style,dead_code)]
 pub struct AssetVersionsResponse{
 	pub previousPageCursor:Option<String>,
@@ -154,13 +140,13 @@ pub struct InventoryPageRequest{
 	pub group:u64,
 	pub cursor:Option<String>,
 }
-#[derive(serde::Deserialize,serde::Serialize)]
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
 #[allow(nonstandard_style,dead_code)]
 pub struct InventoryItem{
 	pub id:u64,
 	pub name:String,
 }
-#[derive(serde::Deserialize,serde::Serialize)]
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
 #[allow(nonstandard_style,dead_code)]
 pub struct InventoryPageResponse{
 	pub totalResults:u64,//up to 50
@@ -185,6 +171,16 @@ impl std::fmt::Display for InventoryPageError{
 }
 impl std::error::Error for InventoryPageError{}
 
+#[derive(Debug,serde::Deserialize,serde::Serialize)]
+#[allow(nonstandard_style,dead_code)]
+pub struct RobloxOperation{
+	pub path:Option<std::path::PathBuf>,
+	pub metadata:Option<String>,
+	pub done:Option<bool>,
+	pub error:Option<String>,
+	pub response:Option<String>,
+}
+
 //idk how to do this better
 enum ReaderType<R:std::io::Read>{
 	GZip(flate2::read::GzDecoder<std::io::BufReader<R>>),
@@ -251,7 +247,7 @@ impl CloudContext{
 		.multipart(form)
 		.send().await
 	}
-	pub async fn create_asset(&self,config:CreateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<AssetResponse,CreateError>{
+	pub async fn create_asset(&self,config:CreateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,CreateError>{
 		let url=reqwest::Url::parse("https://apis.roblox.com/assets/v1/assets").map_err(CreateError::ParseError)?;
 
 		let request_config=serde_json::to_string(&config).map_err(CreateError::SerializeError)?;
@@ -260,11 +256,12 @@ impl CloudContext{
 		.text("request",request_config)
 		.part("fileContent",reqwest::multipart::Part::bytes(body));
 
-		let resp=self.post_form(url,form).await.map_err(CreateError::Reqwest)?;
+		let resp=self.post_form(url,form).await.map_err(CreateError::Reqwest)?
+		.error_for_status().map_err(CreateError::Reqwest)?;
 
-		Ok(resp.json::<AssetResponse>().await.map_err(CreateError::Reqwest)?)
+		Ok(resp.json::<RobloxOperation>().await.map_err(CreateError::Reqwest)?)
 	}
-	pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<AssetResponse,UpdateError>{
+	pub async fn update_asset(&self,config:UpdateAssetRequest,body:impl Into<std::borrow::Cow<'static,[u8]>>)->Result<RobloxOperation,UpdateError>{
 		let raw_url=format!("https://apis.roblox.com/assets/v1/assets/{}",config.assetId);
 		let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
 
@@ -274,9 +271,12 @@ impl CloudContext{
 		.text("request",request_config)
 		.part("fileContent",reqwest::multipart::Part::bytes(body));
 
-		let resp=self.patch_form(url,form).await.map_err(UpdateError::Reqwest)?;
+		let resp=self.patch_form(url,form).await
+		.map_err(UpdateError::Reqwest)?
+		//roblox api documentation is very poor, just give the status code and drop the json
+		.error_for_status().map_err(UpdateError::Reqwest)?;
 
-		Ok(resp.json::<AssetResponse>().await.map_err(UpdateError::Reqwest)?)
+		Ok(resp.json::<RobloxOperation>().await.map_err(UpdateError::Reqwest)?)
 	}
 	pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
 		let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
@@ -320,9 +320,15 @@ impl CloudContext{
 	}
 	pub async fn update_place(&self,config:UpdatePlaceRequest,body:impl Into<reqwest::Body>+Clone)->Result<UpdatePlaceResponse,UpdateError>{
 		let raw_url=format!("https://apis.roblox.com/universes/v1/{}/places/{}/versions",config.universeId,config.placeId);
-		let url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
+		let mut url=reqwest::Url::parse(raw_url.as_str()).map_err(UpdateError::ParseError)?;
+		//url borrow scope
+		{
+			let mut query=url.query_pairs_mut();//borrow here
+			query.append_pair("versionType","Published");
+		}
 
-		let resp=self.post(url,body).await.map_err(UpdateError::Reqwest)?;
+		let resp=self.post(url,body).await.map_err(UpdateError::Reqwest)?
+		.error_for_status().map_err(UpdateError::Reqwest)?;
 
 		Ok(resp.json::<UpdatePlaceResponse>().await.map_err(UpdateError::Reqwest)?)
 	}
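The uploader methods above now return the newly added `RobloxOperation` instead of the removed `AssetResponse`. As a rough illustration of what a caller might do with that value, here is a standalone sketch; the local struct only mirrors the shape shown in the diff so the example compiles on its own, and `summarize_operation` plus its success policy are invented assumptions, not part of this change.

```rust
// Standalone sketch: this RobloxOperation mirrors the struct added in the diff so the
// example compiles by itself; the real type lives in rbx_asset::cloud and derives serde.
#[allow(dead_code)]
#[derive(Debug)]
pub struct RobloxOperation{
	pub path:Option<std::path::PathBuf>,
	pub metadata:Option<String>,
	pub done:Option<bool>,
	pub error:Option<String>,
	pub response:Option<String>,
}

// Hypothetical helper: treat a finished operation without an error as success and
// surface everything else to the caller.
fn summarize_operation(op:&RobloxOperation)->Result<(),String>{
	if let Some(err)=&op.error{
		return Err(err.clone());
	}
	match op.done{
		Some(true)=>Ok(()),
		_=>Err(format!("operation not finished yet, poll path {:?}",op.path)),
	}
}

fn main(){
	let op=RobloxOperation{
		path:Some(std::path::PathBuf::from("operations/example")),
		metadata:None,
		done:Some(true),
		error:None,
		response:None,
	};
	println!("{:?}",summarize_operation(&op));
}
```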
rbx_asset/src/cookie.rs
@@ -62,22 +62,22 @@ pub struct UploadResponse{
 }
 
 #[allow(nonstandard_style,dead_code)]
-pub struct DownloadRequest{
+pub struct GetAssetRequest{
 	pub asset_id:u64,
 	pub version:Option<u64>,
 }
 #[derive(Debug)]
-pub enum DownloadError{
+pub enum GetError{
 	ParseError(url::ParseError),
 	Reqwest(reqwest::Error),
 	IO(std::io::Error)
 }
-impl std::fmt::Display for DownloadError{
+impl std::fmt::Display for GetError{
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		write!(f,"{self:?}")
 	}
 }
-impl std::error::Error for DownloadError{}
+impl std::error::Error for GetError{}
 
 pub struct AssetVersionsPageRequest{
 	pub asset_id:u64,
@@ -269,8 +269,8 @@ impl CookieContext{
 
 		Ok(resp.json::<UploadResponse>().await.map_err(UploadError::Reqwest)?)
 	}
-	pub async fn download(&self,config:DownloadRequest)->Result<Vec<u8>,DownloadError>{
-		let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(DownloadError::ParseError)?;
+	pub async fn get_asset(&self,config:GetAssetRequest)->Result<Vec<u8>,GetError>{
+		let mut url=reqwest::Url::parse("https://assetdelivery.roblox.com/v1/asset/").map_err(GetError::ParseError)?;
 		//url borrow scope
 		{
 			let mut query=url.query_pairs_mut();//borrow here
@@ -279,15 +279,15 @@ impl CookieContext{
 				query.append_pair("version",version.to_string().as_str());
 			}
 		}
-		let resp=self.get(url).await.map_err(DownloadError::Reqwest)?;
+		let resp=self.get(url).await.map_err(GetError::Reqwest)?;
 
-		let body=resp.bytes().await.map_err(DownloadError::Reqwest)?;
+		let body=resp.bytes().await.map_err(GetError::Reqwest)?;
 
 		match maybe_gzip_decode(&mut std::io::Cursor::new(body)){
 			Ok(ReaderType::GZip(readable))=>read_readable(readable),
 			Ok(ReaderType::Raw(readable))=>read_readable(readable),
 			Err(e)=>Err(e),
-		}.map_err(DownloadError::IO)
+		}.map_err(GetError::IO)
 	}
 	pub async fn get_asset_versions_page(&self,config:AssetVersionsPageRequest)->Result<AssetVersionsPageResponse,AssetVersionsPageError>{
 		let mut url=reqwest::Url::parse(format!("https://develop.roblox.com/v1/assets/{}/saved-versions",config.asset_id).as_str()).map_err(AssetVersionsPageError::ParseError)?;
@@ -305,7 +305,7 @@ impl CookieContext{
 		Ok(self.get(url).await.map_err(AssetVersionsPageError::Reqwest)?
 		.json::<AssetVersionsPageResponse>().await.map_err(AssetVersionsPageError::Reqwest)?)
 	}
-	pub async fn inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
+	pub async fn get_inventory_page(&self,config:InventoryPageRequest)->Result<InventoryPageResponse,InventoryPageError>{
 		let mut url=reqwest::Url::parse(format!("https://apis.roblox.com/toolbox-service/v1/creations/group/{}/10?limit=50",config.group).as_str()).map_err(InventoryPageError::ParseError)?;
 		//url borrow scope
 		{
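The rename from `download`/`DownloadError` to `get_asset`/`GetError` keeps the crate's usual error shape: a plain enum wrapping the underlying errors, with `Display` delegating to the `Debug` formatter. Below is a std-only stand-in for that pattern so it runs without `url`/`reqwest`; the inner error types and the `parse_version` helper are invented for illustration and are not part of the diff.

```rust
// Sketch of the error pattern used by GetError above, with std-only inner errors.
#[allow(dead_code)]
#[derive(Debug)]
enum GetError{
	ParseError(std::num::ParseIntError),
	IO(std::io::Error),
}
impl std::fmt::Display for GetError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for GetError{}

// Invented helper: map a failure into GetError the same way the real code maps
// url/reqwest errors into its variants.
fn parse_version(s:&str)->Result<u64,GetError>{
	s.trim().parse::<u64>().map_err(GetError::ParseError)
}

fn main(){
	match parse_version("not a number"){
		Ok(v)=>println!("version {v}"),
		Err(e)=>println!("failed: {e}"),
	}
}
```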
rox_compiler
@@ -19,15 +19,15 @@ impl PropertiesOverride{
 impl std::fmt::Display for PropertiesOverride{
 	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
 		if let Some(name)=self.name.as_deref(){
-			writeln!(f,"--!Properties.Name = \"{}\"",name)?;
+			writeln!(f,"-- Properties.Name = \"{}\"",name)?;
 		}
 		if let Some(class)=self.class.as_deref(){
-			writeln!(f,"--!Properties.ClassName = \"{}\"",class)?;
+			writeln!(f,"-- Properties.ClassName = \"{}\"",class)?;
 		}
 		Ok(())
 	}
 }
 
 pub(crate) fn sanitize<'a>(s:&'a str)->std::borrow::Cow<'a,str>{
-	lazy_regex::regex!(r"[^A-z0-9.-]").replace_all(s,"_")
+	lazy_regex::regex!(r"[^A-Za-z0-9.-]").replace_all(s,"_")
 }
@@ -203,7 +203,7 @@ impl ScriptWithOverrides{
 		let mut count=0;
 		for line in source.lines(){
 			//only string type properties are supported atm
-			if let Some(captures)=lazy_regex::regex!(r#"^\-\-\!\s*Properties\.([A-z]\w*)\s*\=\s*"(\w+)"$"#)
+			if let Some(captures)=lazy_regex::regex!(r#"^\-\-\s*Properties\.([A-Za-z]\w*)\s*\=\s*"(\w+)"$"#)
 			.captures(line){
 				count+=line.len();
 				match &captures[1]{
@@ -248,7 +248,7 @@ pub enum CompileNodeError{
 		extension:String,
 		style:Option<Style>,
 	},
-	NoExtension,
+	UnknownExtension,
 }
 impl std::fmt::Display for CompileNodeError{
 	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -343,7 +343,7 @@ impl CompileNode{
 			_=>false,
 		};
 		let (ext_len,file_discernment)={
-			if let Some(captures)=lazy_regex::regex!(r"^.*(.module.lua|.client.lua|.server.lua|.rbxmx|.lua)$")
+			if let Some(captures)=lazy_regex::regex!(r"^.*(\.module\.lua|\.client\.lua|\.server\.lua)$")
 			.captures(file_name.as_str()){
 				let ext=&captures[1];
 				(ext.len(),match ext{
@@ -355,6 +355,12 @@ impl CompileNode{
 					},
 					".client.lua"=>FileDiscernment::Script(ScriptHint::LocalScript),
 					".server.lua"=>FileDiscernment::Script(ScriptHint::Script),
+					_=>panic!("Regex failed"),
+				})
+			}else if let Some(captures)=lazy_regex::regex!(r"^.*(\.rbxmx|\.lua)$")
+			.captures(file_name.as_str()){
+				let ext=&captures[1];
+				(ext.len(),match ext{
 					".rbxmx"=>{
 						if is_goober{
 							Err(CompileNodeError::ExtensionNotSupportedInStyle{extension:ext.to_owned(),style})?;
@@ -365,7 +371,7 @@ impl CompileNode{
 					_=>panic!("Regex failed"),
 				})
 			}else{
-				return Err(CompileNodeError::NoExtension);
+				return Err(CompileNodeError::UnknownExtension);
 			}
 		};
 		file_name.truncate(file_name.len()-ext_len);
@@ -500,10 +506,19 @@ pub async fn compile(config:CompileConfig,mut dom:&mut rbx_dom_weak::WeakDom)->R
 			Ok(Some(entry))=>tokio::spawn(async move{
 				let met=entry.metadata().await.map_err(CompileError::IO)?;
 				//discern that bad boy
-				let compile_class=match met.is_dir(){
-					true=>CompileNode::from_folder(&entry,style).await,
-					false=>CompileNode::from_file(&entry,style).await,
-				}.map_err(CompileError::CompileNode)?;
+				let compile_class={
+					let result=match met.is_dir(){
+						true=>CompileNode::from_folder(&entry,style).await,
+						false=>CompileNode::from_file(&entry,style).await,
+					};
+					match result{
+						Ok(compile_class)=>compile_class,
+						Err(e)=>{
+							println!("Ignoring file {entry:?} due to error {e}");
+							return Ok(None);
+						},
+					}
+				};
 				//prepare data structure
 				Ok(Some((compile_class.blacklist,match compile_class.class{
 					CompileClass::Folder=>PreparedData::Builder(rbx_dom_weak::InstanceBuilder::new("Folder").with_name(compile_class.name.as_str())),
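The paired regex changes above switch the script property-override header from `--!Properties.Name = "Foo"` to `-- Properties.Name = "Foo"` on both the emitting (`Display`) and parsing sides. A std-only sketch of the new line shape follows; the real parser uses `lazy_regex` and tolerates variable whitespace, and `parse_property_override` is an invented stand-in, not code from the diff.

```rust
// Illustrative stand-in for the new override header format; only accepts the exact
// `-- Properties.<Key> = "<value>"` spelling emitted by PropertiesOverride's Display impl.
fn parse_property_override(line:&str)->Option<(&str,&str)>{
	let rest=line.strip_prefix("-- Properties.")?;
	let (key,rest)=rest.split_once('=')?;
	let value=rest.trim().strip_prefix('"')?.strip_suffix('"')?;
	Some((key.trim(),value))
}

fn main(){
	// new format
	assert_eq!(
		parse_property_override("-- Properties.ClassName = \"LocalScript\""),
		Some(("ClassName","LocalScript"))
	);
	// the old "--!" prefix is no longer produced or matched
	assert_eq!(parse_property_override("--!Properties.Name = \"Foo\""),None);
	println!("ok");
}
```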
src/main.rs (±96)
@@ -2,8 +2,8 @@ use std::{io::Read,path::PathBuf};
 use clap::{Args,Parser,Subcommand};
 use anyhow::Result as AResult;
 use futures::StreamExt;
-use rbx_asset::cloud::{ApiKey,CloudContext,InventoryItem};
-use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion};
+use rbx_asset::cloud::{ApiKey,CloudContext};
+use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion,InventoryItem};
 
 type AssetID=u64;
 type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
@@ -20,6 +20,7 @@ struct Cli{
 
 #[derive(Subcommand)]
 enum Commands{
+	Info(InfoSubcommand),
 	DownloadHistory(DownloadHistorySubcommand),
 	Download(DownloadSubcommand),
 	DownloadDecompile(DownloadDecompileSubcommand),
@@ -34,6 +35,11 @@ enum Commands{
 	DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
 	DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
 }
+#[derive(Args)]
+struct InfoSubcommand{
+	#[arg(long)]
+	path:PathBuf,
+}
 
 #[derive(Args)]
 struct DownloadHistorySubcommand{
@@ -149,8 +155,6 @@ struct CompileUploadAssetSubcommand{
 	#[arg(long,group="api_key",required=true)]
 	api_key_file:Option<PathBuf>,
-	#[arg(long)]
-	input_file:PathBuf,
 	#[arg(long)]
 	input_folder:Option<PathBuf>,
 	#[arg(long)]
 	style:Option<Style>,
@@ -170,8 +174,6 @@ struct CompileUploadPlaceSubcommand{
 	#[arg(long,group="api_key",required=true)]
 	api_key_file:Option<PathBuf>,
-	#[arg(long)]
-	input_file:PathBuf,
 	#[arg(long)]
 	input_folder:Option<PathBuf>,
 	#[arg(long)]
 	style:Option<Style>,
@@ -195,12 +197,12 @@ struct DecompileSubcommand{
 }
 #[derive(Args)]
 struct DownloadDecompileSubcommand{
-	#[arg(long,group="api_key",required=true)]
-	api_key_literal:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_envvar:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_file:Option<PathBuf>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_literal:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_envvar:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_file:Option<PathBuf>,
 	#[arg(long)]
 	output_folder:Option<PathBuf>,
 	#[arg(long)]
@@ -237,12 +239,12 @@ struct DecompileHistoryIntoGitSubcommand{
 struct DownloadAndDecompileHistoryIntoGitSubcommand{
 	#[arg(long)]
 	asset_id:AssetID,
-	#[arg(long,group="api_key",required=true)]
-	api_key_literal:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_envvar:Option<String>,
-	#[arg(long,group="api_key",required=true)]
-	api_key_file:Option<PathBuf>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_literal:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_envvar:Option<String>,
+	#[arg(long,group="cookie",required=true)]
+	cookie_file:Option<PathBuf>,
 	//currently output folder must be the current folder due to git2 limitations
 	//output_folder:cli.output.unwrap(),
 	#[arg(long)]
@@ -275,10 +277,22 @@ impl Style{
 	}
 }
 
+async fn info(path:PathBuf)->AResult<()>{
+	let dir=std::env::current_dir().unwrap();
+	println!("pwd={:?}",dir);
+	println!("path={path:?}");
+	let mut read_dir=tokio::fs::read_dir(path).await?;
+	while let Some(entry)=read_dir.next_entry().await?{
+		println!("{:?}",entry);
+	}
+	Ok(())
+}
+
 #[tokio::main]
 async fn main()->AResult<()>{
 	let cli=Cli::parse();
 	match cli.command{
+		Commands::Info(subcommand)=>info(subcommand.path).await,
 		Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
 			continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
 			end_version:subcommand.end_version,
@@ -308,10 +322,10 @@ async fn main()->AResult<()>{
 		},
 		Commands::DownloadDecompile(subcommand)=>{
 			download_decompile(DownloadDecompileConfig{
-				api_key:api_key_from_args(
-					subcommand.api_key_literal,
-					subcommand.api_key_envvar,
-					subcommand.api_key_file,
+				cookie:cookie_from_args(
+					subcommand.cookie_literal,
+					subcommand.cookie_envvar,
+					subcommand.cookie_file,
 				).await?,
 				asset_id:subcommand.asset_id,
 				output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
@@ -411,10 +425,10 @@ async fn main()->AResult<()>{
 		Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
 			git_committer_name:subcommand.git_committer_name,
 			git_committer_email:subcommand.git_committer_email,
-			api_key:api_key_from_args(
-				subcommand.api_key_literal,
-				subcommand.api_key_envvar,
-				subcommand.api_key_file,
+			cookie:cookie_from_args(
+				subcommand.cookie_literal,
+				subcommand.cookie_envvar,
+				subcommand.cookie_file,
 			).await?,
 			asset_id:subcommand.asset_id,
 			output_folder:std::env::current_dir()?,
@@ -433,7 +447,7 @@ async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file
 		(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
 		_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
 	};
-	Ok(Cookie::new(cookie))
+	Ok(Cookie::new(format!(".ROBLOSECURITY={cookie}")))
 }
 async fn api_key_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<ApiKey>{
 	let api_key=match (literal,environment,file){
@@ -469,7 +483,7 @@ async fn create(config:CreateConfig)->AResult<()>{
 			expectedPrice:0,
 		}
 	},tokio::fs::read(config.input_file).await?).await?;
-	println!("UploadResponse={:?}",resp);
+	println!("CreateResponse={:?}",resp);
 	Ok(())
 }
 
@@ -480,11 +494,12 @@ struct UploadAssetConfig{
 }
 async fn upload_asset(config:UploadAssetConfig)->AResult<()>{
 	let context=CloudContext::new(config.api_key);
-	context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
+	let resp=context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
 		assetId:config.asset_id,
 		displayName:None,
 		description:None,
 	},tokio::fs::read(config.input_file).await?).await?;
+	println!("UploadResponse={:?}",resp);
 	Ok(())
 }
 
@@ -531,7 +546,7 @@ async fn get_inventory_pages(context:&CookieContext,group:u64)->AResult<Vec<Inve
 	let mut cursor:Option<String>=None;
 	let mut asset_list=Vec::new();
 	loop{
-		let mut page=context.inventory_page(rbx_asset::cookie::InventoryPageRequest{group,cursor}).await?;
+		let mut page=context.get_inventory_page(rbx_asset::cookie::InventoryPageRequest{group,cursor}).await?;
 		asset_list.append(&mut page.data);
 		if page.nextPageCursor.is_none(){
 			break;
@@ -552,11 +567,11 @@ async fn download_group_inventory_json(cookie:Cookie,group:u64,output_folder:Pat
 	Ok(())
 }
 
-async fn get_version_history(context:&CloudContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
+async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
 	let mut cursor:Option<String>=None;
 	let mut asset_list=Vec::new();
 	loop{
-		let mut page=context.get_asset_versions(rbx_asset::cloud::AssetVersionsRequest{asset_id,cursor}).await?;
+		let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id,cursor}).await?;
 		asset_list.append(&mut page.data);
 		if page.nextPageCursor.is_none(){
 			break;
@@ -651,7 +666,7 @@ async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
 		let mut path=output_folder.clone();
 		path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
 		join_set.spawn(async move{
-			let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
+			let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
 
 			tokio::fs::write(path,file).await?;
 
@@ -746,7 +761,7 @@ async fn decompile(config:DecompileConfig)->AResult<()>{
 }
 
 struct DownloadDecompileConfig{
-	api_key:ApiKey,
+	cookie:Cookie,
 	asset_id:AssetID,
 	style:rox_compiler::Style,
 	output_folder:PathBuf,
@@ -756,8 +771,8 @@ struct DownloadDecompileConfig{
 }
 
 async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
-	let context=CloudContext::new(config.api_key);
-	let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
+	let context=CookieContext::new(config.cookie);
+	let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
 
 	let dom=load_dom(std::io::Cursor::new(file))?;
 	let context=rox_compiler::DecompiledContext::from_dom(dom);
@@ -914,7 +929,7 @@ async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
 }
 
 struct DownloadAndDecompileHistoryConfig{
-	api_key:ApiKey,
+	cookie:Cookie,
 	asset_id:AssetID,
 	git_committer_name:String,
 	git_committer_email:String,
@@ -926,7 +941,7 @@ struct DownloadAndDecompileHistoryConfig{
 }
 
 async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
-	let context=CloudContext::new(config.api_key);
+	let context=CookieContext::new(config.cookie);
 
 	//poll paged list of all asset versions
 	let asset_list=get_version_history(&context,config.asset_id).await?;
@@ -939,7 +954,7 @@ async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHist
 		.map(|asset_version|{
 			let context=context.clone();
 			tokio::task::spawn(async move{
-				let file=context.get_asset(rbx_asset::cloud::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
+				let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
 				let dom=load_dom(std::io::Cursor::new(file))?;
 				Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
 			})
@@ -1018,11 +1033,12 @@ async fn compile_upload_asset(config:CompileUploadAssetConfig)->AResult<()>{
 
 	//upload it
 	let context=CloudContext::new(config.api_key);
-	context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
+	let resp=context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
 		assetId:config.asset_id,
 		displayName:None,
 		description:None,
 	},data).await?;
+	println!("UploadResponse={:?}",resp);
 	Ok(())
 }
 
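One behavioral change in `cookie_from_args` is easy to miss: the raw secret read from `--cookie-literal`, the environment, or `--cookie-file` is now wrapped into a full `.ROBLOSECURITY=` cookie value before being handed to `Cookie::new`. A minimal sketch of that formatting step; the helper name is invented for illustration and mirrors, not reproduces, the code in the diff.

```rust
// Invented helper mirroring the new Cookie::new(format!(".ROBLOSECURITY={cookie}"))
// call in cookie_from_args; callers now pass only the bare secret.
fn make_roblosecurity_cookie(raw_secret:&str)->String{
	format!(".ROBLOSECURITY={raw_secret}")
}

fn main(){
	let raw_secret="SECRETVALUE"; // e.g. the value read from --cookie-envvar
	assert_eq!(make_roblosecurity_cookie(raw_secret),".ROBLOSECURITY=SECRETVALUE");
	println!("{}",make_roblosecurity_cookie(raw_secret));
}
```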