20 Commits

Author SHA1 Message Date
f8e53c7dfd add docs 2025-09-28 21:18:45 -07:00
1c3c400828 itertools default feature 2025-09-28 21:05:20 -07:00
a6e8402937 fix readme 2025-09-28 21:05:20 -07:00
0db4cfe03d itertools feature 2025-09-28 20:55:47 -07:00
362fc4e5a5 do not protect user from themselves, make it like normal take seek 2025-09-28 20:55:47 -07:00
8644fdf54d function thingy 2025-09-28 20:55:47 -07:00
dd08f536d9 supertrait 2025-09-28 20:55:47 -07:00
0bf0c9c5ee fix tests 2025-09-28 20:55:47 -07:00
a41c66480d create adapter 2025-09-28 20:19:07 -07:00
3b888f3181 rename fn 2025-09-28 20:12:03 -07:00
cc8899029f ta 2025-09-28 20:03:57 -07:00
f94ea9f7ee use constructors 2025-09-28 20:00:55 -07:00
2209b5218b itertools 2025-09-28 19:58:56 -07:00
6b472e81e3 different 2025-09-28 19:58:49 -07:00
7b27cc7135 simplify builder thing 2025-09-28 18:19:43 -07:00
9f162cc63f wrong 2025-09-26 19:33:18 -07:00
ee65eb6dfb yeah 2025-09-26 19:26:30 -07:00
80fa551cca builder 2025-09-26 18:44:54 -07:00
9884552b6a split header 2025-09-26 18:42:18 -07:00
5000d8885e binrw generic struct tech 2025-09-26 17:32:39 -07:00
6 changed files with 141 additions and 570 deletions

Cargo.lock generated

@@ -10,9 +10,9 @@ checksum = "3d62b7694a562cdf5a74227903507c56ab2cc8bdd1f781ed5cb4cf9c9f810bfc"
[[package]]
name = "binrw"
version = "0.15.0"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81419ff39e6ed10a92a7f125290859776ced35d9a08a665ae40b23e7ca702f30"
checksum = "7d4bca59c20d6f40c2cc0802afbe1e788b89096f61bdf7aeea6bf00f10c2909b"
dependencies = [
"array-init",
"binrw_derive",
@@ -21,9 +21,9 @@ dependencies = [
[[package]]
name = "binrw_derive"
version = "0.15.0"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "376404e55ec40d0d6f8b4b7df3f87b87954bd987f0cf9a7207ea3b6ea5c9add4"
checksum = "d8ba42866ce5bced2645bfa15e97eef2c62d2bdb530510538de8dd3d04efff3c"
dependencies = [
"either",
"owo-colors",
@@ -34,15 +34,15 @@ dependencies = [
[[package]]
name = "bitflags"
version = "2.10.0"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
[[package]]
name = "bytemuck"
version = "1.24.0"
version = "1.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4"
checksum = "b6b1fc10dbac614ebc03540c9dbd60e83887fda27794998c6528f1782047d540"
[[package]]
name = "either"
@@ -61,31 +61,31 @@ dependencies = [
[[package]]
name = "owo-colors"
version = "4.2.3"
version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52"
checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
[[package]]
name = "proc-macro2"
version = "1.0.103"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.42"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
[[package]]
name = "strafesnet_roblox_bot_file"
version = "0.8.1"
version = "0.3.1"
dependencies = [
"binrw",
"bitflags",
@@ -94,9 +94,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.111"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
@@ -105,6 +105,6 @@ dependencies = [
[[package]]
name = "unicode-ident"
version = "1.0.22"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"

Cargo.toml

@@ -1,10 +1,10 @@
[package]
name = "strafesnet_roblox_bot_file"
version = "0.8.1"
edition = "2024"
version = "0.3.1"
edition = "2021"
[dependencies]
binrw = "0.15.0"
binrw = "0.14.1"
bitflags = "2.6.0"
itertools = { version = "0.14.0", optional = true }

README.md

@@ -3,7 +3,7 @@ Roblox Bhop/Surf Bot File Format
## Example
Read the whole file and print each position:
Read the whole file with the itertools feature enabled:
```rust
use strafesnet_roblox_bot_file::v0::read_all_to_block;
@@ -11,10 +11,6 @@ let file=std::fs::read("bot_file")?;
let mut input=std::io::Cursor::new(file);
let block=read_all_to_block(&mut input)?;
for output_event in &block.output_events{
println!("{:?}",output_event.event.position);
}
```
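For context, a complete program around the snippet above might look like the following; the `main` wrapper and the boxed error type are illustrative assumptions, not part of the README, and `read_all_to_block` is gated on the itertools feature in src/v0.rs:
```rust
use strafesnet_roblox_bot_file::v0::read_all_to_block;

fn main()->Result<(),Box<dyn std::error::Error>>{
    // read_all_to_block merges every block in the file into a single Block value
    let file=std::fs::read("bot_file")?;
    let block=read_all_to_block(std::io::Cursor::new(file))?;
    for output_event in &block.output_events{
        println!("{:?}",output_event.event.position);
    }
    Ok(())
}
```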
Or decode individual blocks using block location info:
```rust
@@ -23,25 +19,21 @@ use strafesnet_roblox_bot_file::v0::{Block,BlockTimelines,FileHeader};
let file=std::fs::read("bot_file")?;
let mut input=std::io::Cursor::new(file);
// FileHeader is the first 16 bytes of the file.
let header=FileHeader::from_reader(&mut input)?;
// BlockTimelines is an index of the blocks within the file.
let timelines=BlockTimelines::from_reader(&header,&mut input)?;
// offline blocks include the following event types:
// World, Gravity, Run, Camera, Setting
for timed in timelines.offline_blocks(){
let block_info=timelines.block_info(timed.event)?;
let block_reader=block_info.take_seek(&mut input)?;
let block=Block::from_reader(block_reader)?;
let block=Block::from_reader(block_info.take_seek(&mut input)?)?;
}
// realtime blocks include the following event types:
// Input, Output, Sound
for timed in timelines.realtime_blocks(){
let block_info=timelines.block_info(timed.event)?;
let block_reader=block_info.take_seek(&mut input)?;
let block=Block::from_reader(block_reader)?;
let block=Block::from_reader(block_info.take_seek(&mut input)?)?;
}
```
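The src/v0.rs diff below also shows `read_offline_to_block` and `read_realtime_to_block`; a hedged sketch of splitting a file into its two timelines with them, assuming those functions are exported as shown (field names taken from the `Block` struct in that diff):
```rust
use strafesnet_roblox_bot_file::v0::{read_offline_to_block,read_realtime_to_block};

fn main()->Result<(),Box<dyn std::error::Error>>{
    let file=std::fs::read("bot_file")?;
    // offline blocks: World, Gravity, Run, Camera, Setting events
    let offline=read_offline_to_block(std::io::Cursor::new(file.as_slice()))?;
    // realtime blocks: Input, Output, Sound events
    let realtime=read_realtime_to_block(std::io::Cursor::new(file.as_slice()))?;
    println!("run events: {}",offline.run_events.len());
    println!("output events: {}",realtime.output_events.len());
    Ok(())
}
```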

src/lib.rs

@@ -1,5 +1,3 @@
pub use binrw::Error as BinrwError;
pub mod v0;
#[cfg(test)]

src/tests.rs

@@ -1,41 +1,42 @@
use crate::v0;
use crate::v0::{Block,BlockTimelines,FileHeader};
use crate::v0::{Block,BlockTimelines,FileHeader,Timed};
#[test]
fn deserialize_manual()->Result<(),binrw::Error>{
fn _1(){
let file=std::fs::read("files/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d").unwrap();
let mut input=std::io::Cursor::new(file);
let header=FileHeader::from_reader(&mut input)?;
let timelines=BlockTimelines::from_reader(&header,&mut input)?;
for block in timelines.offline_blocks(){
let block_info=timelines.block_info(block.event).unwrap();
let block_reader=block_info.take_seek(&mut input)?;
let _block=Block::from_reader(block_reader)?;
let header=FileHeader::from_reader(&mut input).unwrap();
let timelines=BlockTimelines::from_reader(&header,&mut input).unwrap();
println!("header={:?}",header);
for &Timed{time,event:block_id} in timelines.offline_blocks(){
println!("offline time={} block_id={:?}",time,block_id);
let take_seek=timelines.block_info(block_id).unwrap().take_seek(&mut input).unwrap();
let _block=Block::from_reader(take_seek).unwrap();
// offline blocks include the following event types:
// World, Gravity, Run, Camera, Setting
}
for block in timelines.realtime_blocks(){
let block_info=timelines.block_info(block.event).unwrap();
let block_reader=block_info.take_seek(&mut input)?;
let _block=Block::from_reader(block_reader)?;
for &Timed{time,event:block_id} in timelines.realtime_blocks(){
println!("realtime time={} block_id={:?}",time,block_id);
let take_seek=timelines.block_info(block_id).unwrap().take_seek(&mut input).unwrap();
let _block=Block::from_reader(take_seek).unwrap();
// realtime blocks include the following event types:
// Input, Output, Sound
}
Ok(())
}
#[test]
fn deserialize_all()->Result<(),v0::Error>{
let file=std::fs::read("files/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d").unwrap();
let _block=v0::read_all_to_block(std::io::Cursor::new(file))?;
Ok(())
}
#[cfg(feature="itertools")]
use crate::v0::{read_all_to_block,Error};
#[test]
#[cfg(feature="itertools")]
fn serialize_round_trip()->Result<(),binrw::Error>{
fn _2()->Result<(),Error>{
let file=std::fs::read("files/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d").unwrap();
let block=v0::read_all_to_block(std::io::Cursor::new(file.as_slice())).unwrap();
let mut data=Vec::with_capacity(file.len());
v0::serialize(&block,&mut std::io::Cursor::new(&mut data))?;
let t0=std::time::Instant::now();
let _block=read_all_to_block(std::io::Cursor::new(file))?;
println!("{:?}",t0.elapsed());
// TODO: It encodes, but is it equal? Test something! PartialEq?
Ok(())
}
// TODO: file serialization test

src/v0.rs

@@ -2,7 +2,6 @@ use std::io::{SeekFrom,Error as IoError};
use binrw::binrw;
use binrw::io::{TakeSeek,TakeSeekExt};
use binrw::BinReaderExt;
use crate::BinrwError;
// the IEEE-754 bit fields are stored in reverse order: sign in the lowest bit, then exponent, then mantissa
fn read_trey_float(bits:u32)->f32{
@@ -11,50 +10,29 @@ fn read_trey_float(bits:u32)->f32{
let m=(bits>>(1+8))&((1<<23)-1);
f32::from_bits(m|(e<<23)|(s<<31))
}
fn write_trey_float(value:&f32)->u32{
let bits=value.to_bits();
let s=(bits>>31)&1;
let e=(bits>>23)&((1<<8)-1);
let m=bits&((1<<23)-1);
m<<(1+8)|(e<<1)|s
}
fn read_trey_double(bits:u64)->f64{
let s=bits&1;
let e=(bits>>1)&((1<<11)-1);
let m=(bits>>(1+11))&((1<<52)-1);
f64::from_bits(m|(e<<52)|(s<<63))
}
fn write_trey_double(value:&f64)->u64{
let bits=value.to_bits();
let s=(bits>>63)&1;
let e=(bits>>52)&((1<<11)-1);
let m=bits&((1<<52)-1);
m<<(1+11)|(e<<1)|s
}
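// Illustration (not part of the diff): the trey encoding stores the IEEE-754
// fields in reverse order, so 1.0_f32, whose IEEE bits are 0x3F80_0000
// (s=0, e=127, m=0), is stored as 127<<1 == 0x0000_00FE. A hedged sanity-check
// sketch against read_trey_float as shown above:
#[cfg(test)]
#[test]
fn trey_float_examples(){
    assert_eq!(read_trey_float(0x0000_00FE),1.0); // s=0 e=127 m=0
    assert_eq!(read_trey_float(0x0000_00FF),-1.0); // s=1 e=127 m=0
    assert_eq!(read_trey_float(0x0000_0100),2.0); // s=0 e=128 m=0
}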
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct Vector2{
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub x:f32,
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub y:f32,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct Vector3{
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub x:f32,
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub y:f32,
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub z:f32,
}
@@ -97,45 +75,20 @@ impl GameControls{
// generic timed event
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
#[derive(Debug)]
pub struct Timed<E>
where
E:for<'a>binrw::BinRead<Args<'a>=()>,
E:for<'a>binrw::BinWrite<Args<'a>=()>,
{
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub time:f64,
pub event:E,
}
impl<A,B> PartialEq<Timed<B>> for Timed<A>
where
A:for<'a>binrw::BinRead<Args<'a>=()>,
A:for<'a>binrw::BinWrite<Args<'a>=()>,
B:for<'a>binrw::BinRead<Args<'a>=()>,
B:for<'a>binrw::BinWrite<Args<'a>=()>,
{
fn eq(&self,other:&Timed<B>)->bool{
self.time.eq(&other.time)
}
}
impl<A,B> PartialOrd<Timed<B>> for Timed<A>
where
A:for<'a>binrw::BinRead<Args<'a>=()>,
A:for<'a>binrw::BinWrite<Args<'a>=()>,
B:for<'a>binrw::BinRead<Args<'a>=()>,
B:for<'a>binrw::BinWrite<Args<'a>=()>,
{
fn partial_cmp(&self,other:&Timed<B>)->Option<core::cmp::Ordering>{
self.time.partial_cmp(&other.time)
}
}
// input
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct InputEvent{
#[br(try_map=GameControls::try_from_bits)]
#[bw(map=GameControls::bits)]
@@ -145,7 +98,6 @@ pub struct InputEvent{
// output
bitflags::bitflags!{
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
pub struct TickInfo:u32{
const TickEnd=1<<0;
const Jump=1<<1;
@@ -168,7 +120,6 @@ impl TickInfo{
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct OutputEvent{
#[br(try_map=TickInfo::try_from_bits)]
#[bw(map=TickInfo::bits)]
@@ -203,7 +154,6 @@ pub enum SoundType{
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct SoundEvent{
pub sound_type:SoundType,
/// Roblox enum
@@ -213,13 +163,11 @@ pub struct SoundEvent{
// world
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct WorldEventReset{
pub position:Vector3,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct WorldEventButton{
pub button_id:u32,
// This field does not exist in the final struct and
@@ -231,10 +179,8 @@ pub struct WorldEventButton{
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct WorldEventSetTime{
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub time:f64,
#[br(temp)]
#[bw(ignore)]
@@ -243,7 +189,6 @@ pub struct WorldEventSetTime{
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct WorldEventSetPaused{
#[br(map=|paused:u32|paused!=0)]
#[bw(map=|&paused:&bool|paused as u32)]
@@ -255,7 +200,6 @@ pub struct WorldEventSetPaused{
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub enum WorldEvent{
#[brw(magic=0u32)]
Reset(WorldEventReset),
@@ -270,7 +214,6 @@ pub enum WorldEvent{
// gravity
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct GravityEvent{
pub gravity:Vector3,
}
@@ -279,12 +222,30 @@ pub struct GravityEvent{
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
pub struct ModeID(pub u32);
pub enum RunEventType{
#[brw(magic=0u32)]
Prepare,
#[brw(magic=1u32)]
Start,
#[brw(magic=2u32)]
Finish,
#[brw(magic=3u32)]
Clear,
#[brw(magic=4u32)]
Flag,
#[brw(magic=5u32)]
LoadState,
#[brw(magic=6u32)]
SaveState,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
pub enum ModeSpec{
Exactly(ModeID),
pub enum Mode{
#[brw(magic=0i32)]
Main,
#[brw(magic=1i32)]
Bonus,
#[brw(magic=-1i32)]
All,
#[brw(magic=-2i32)]
@@ -295,39 +256,6 @@ pub enum ModeSpec{
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
pub enum Style{
#[brw(magic=1u32)]
Autohop,
#[brw(magic=2u32)]
Scroll,
#[brw(magic=3u32)]
Sideways,
#[brw(magic=4u32)]
HalfSideways,
#[brw(magic=5u32)]
WOnly,
#[brw(magic=6u32)]
AOnly,
#[brw(magic=7u32)]
Backwards,
#[brw(magic=8u32)]
Faste,
#[brw(magic=14u32)]
LowGravity,
#[brw(magic=501u32)]
Fly,
#[brw(magic=502u32)]
FlySustain,
#[brw(magic=503u32)]
Rocket,
#[brw(magic=504u32)]
Style3DStrafe,
#[brw(magic=505u32)]
RocketStrafe,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
pub enum FlagReason{
#[brw(magic=0u32)]
Anticheat,
@@ -349,67 +277,16 @@ pub enum FlagReason{
Teleport,
#[brw(magic=9u32)]
Practice,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct RunEventPrepare{
pub mode:ModeID,
pub style:Style,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct RunEventZone{
pub mode:ModeID,
#[br(temp)]
#[bw(ignore)]
#[brw(magic=b"data")]
_magic:(),
None,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct RunEventClear{
pub mode:ModeSpec,
#[br(temp)]
#[bw(ignore)]
#[brw(magic=b"data")]
_magic:(),
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct RunEventFlag{
pub mode:ModeSpec,
pub struct RunEvent{
pub run_event_type:RunEventType,
pub mode:Mode,
pub flag_reason:FlagReason,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct RunEventPractice{
pub mode:ModeSpec,
pub state_id:u32,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub enum RunEvent{
#[brw(magic=0u32)]
Prepare(RunEventPrepare),
#[brw(magic=1u32)]
Start(RunEventZone),
#[brw(magic=2u32)]
Finish(RunEventZone),
#[brw(magic=3u32)]
Clear(RunEventClear),
#[brw(magic=4u32)]
Flag(RunEventFlag),
#[brw(magic=5u32)]
LoadState(RunEventPractice),
#[brw(magic=6u32)]
SaveState(RunEventPractice),
}
// camera
#[binrw]
@@ -423,7 +300,6 @@ pub enum CameraEventType{
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct CameraEvent{
pub camera_event_type:CameraEventType,
pub value:Vector3,
@@ -447,16 +323,12 @@ pub enum SettingType{
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct SettingEvent{
pub setting_type:SettingType,
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub value:f64,
}
/// A segment of event timelines.
/// Timelines are always sorted.
#[derive(Default)]
pub struct Block{
pub input_events:Vec<Timed<InputEvent>>,
@@ -471,7 +343,7 @@ pub struct Block{
#[binrw]
#[brw(little)]
#[derive(Clone,Copy)]
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
enum EventType{
#[brw(magic=1u32)]
Input,
@@ -498,15 +370,15 @@ struct EventChunkHeader{
}
// binread args tech has been further refined
fn read_data_into_events<R,T,F>(
fn read_data_into_events<'a,R,T,F>(
data:&mut R,
events:&mut Vec<T>,
num_events:usize,
reserve_fn:F,
)->Result<(),BinrwError>
)->binrw::BinResult<()>
where
R:BinReaderExt,
T:for<'a> binrw::BinRead<Args<'a>=()>,
T:binrw::BinRead<Args<'a>=()>,
F:Fn(&mut Vec<T>,usize),
{
reserve_fn(events,num_events);
@@ -517,7 +389,7 @@ fn read_data_into_events<R,T,F>(
}
impl Block{
pub fn from_reader<R:BinReaderExt>(data:R)->Result<Block,BinrwError>{
pub fn from_reader<R:BinReaderExt>(data:R)->binrw::BinResult<Block>{
let mut block=Block::default();
// there is only supposed to be at most one of each type
// of event chunk per block, so allocate the size exactly.
@@ -526,7 +398,7 @@ impl Block{
}
/// Read a complete data block and append the elements to the timelines in this block.
/// Reserves exactly enough information for the new data.
pub fn extend_from_reader_exact<R:BinReaderExt>(&mut self,mut data:R)->Result<(),BinrwError>{
pub fn extend_from_reader_exact<R:BinReaderExt>(&mut self,mut data:R)->binrw::BinResult<()>{
// well... this looks error prone
while let Ok(event_chunk_header)=data.read_le::<EventChunkHeader>(){
match event_chunk_header.event_type{
@@ -543,7 +415,7 @@ impl Block{
Ok(())
}
/// Read a complete data block and append the elements to the timelines in this block.
pub fn extend_from_reader<R:BinReaderExt>(&mut self,mut data:R)->Result<(),BinrwError>{
pub fn extend_from_reader<R:BinReaderExt>(&mut self,mut data:R)->binrw::BinResult<()>{
// sad code duplication
while let Ok(event_chunk_header)=data.read_le::<EventChunkHeader>(){
match event_chunk_header.event_type{
@@ -559,23 +431,13 @@ impl Block{
}
Ok(())
}
fn extend_from_block_id_iter<'a,R:BinReaderExt>(&mut self,mut data:R,block_timelines:&BlockTimelines,blocks:impl IntoIterator<Item=&'a Timed<BlockId>>)->Result<(),Error>{
for timed in blocks{
let take_seek=block_timelines
.block_info(timed.event)?
.take_seek(&mut data)
.map_err(Error::Seek)?;
self.extend_from_reader(take_seek).map_err(Error::InvalidData)?;
}
Ok(())
}
}
#[derive(Debug)]
pub enum Error{
InvalidBlockId(InvalidBlockId),
InvalidBlockId(BlockId),
Seek(IoError),
InvalidData(BinrwError),
InvalidData(binrw::Error),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -587,383 +449,101 @@ impl std::error::Error for Error{}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy)]
pub struct BlockId(
#[br(map=|i:u32|i-1)]
#[bw(map=|&i:&u32|i+1)]
u32
);
pub struct BlockId(#[br(map=|i:u32|i-1)]u32);
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
struct BlockPosition(
#[br(map=|i:u32|i-1)]
#[bw(map=|&i:&u32|i+1)]
u32
);
#[derive(Debug,Clone,Copy)]
pub struct BlockPosition(#[br(map=|i:u32|i-1)]u32);
impl PartialEq for Timed<BlockId>{
fn eq(&self,other:&Self)->bool{
self.time.eq(&other.time)
}
}
impl PartialOrd for Timed<BlockId>{
fn partial_cmp(&self,other:&Self)->Option<core::cmp::Ordering>{
self.time.partial_cmp(&other.time)
}
}
#[binrw]
#[brw(little)]
#[derive(Debug)]
pub struct InvalidBlockId(pub BlockId);
impl std::fmt::Display for InvalidBlockId{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for InvalidBlockId{}
impl From<InvalidBlockId> for Error{
fn from(value:InvalidBlockId)->Self{
Self::InvalidBlockId(value)
}
}
/// The first 16 bytes of the file.
#[binrw]
#[brw(little)]
#[brw(magic=b"qbot")]
#[derive(Debug,Clone)]
pub struct FileHeader{
#[brw(magic=b"qbot")]
file_version:u32,
num_offline_blocks:u32,
num_realtime_blocks:u32,
}
impl FileHeader{
pub fn from_reader<R:BinReaderExt>(mut data:R)->Result<Self,BinrwError>{
pub fn from_reader<R:BinReaderExt>(mut data:R)->binrw::BinResult<Self>{
data.read_le()
}
fn block_position_count(&self)->u32{
self.num_offline_blocks+self.num_realtime_blocks+1
}
/// Get BlockInfo for the BlockTimelines.
/// BlockTimelines is not really a "Block" per se, but BlockInfo is just a byte range.
pub fn block_timelines_info(&self)->BlockInfo{
const BLOCK_POSITION_SIZE:u32=size_of::<BlockPosition>() as u32;
const TIMED_BLOCKID_SIZE:u32=8+4;
let size=BLOCK_POSITION_SIZE*self.block_position_count()
+TIMED_BLOCKID_SIZE*self.num_offline_blocks
+TIMED_BLOCKID_SIZE*self.num_realtime_blocks;
let start=16;
let end=start+size;
BlockInfo(start..end)
}
}
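// Worked example (illustration, not part of the diff): FileHeader itself is the
// 4-byte b"qbot" magic plus three u32 fields, i.e. bytes 0..16 of the file.
// With 2 offline and 3 realtime blocks, block_position_count()==6, so
// block_timelines_info() spans 4*6 + 12*2 + 12*3 = 84 bytes, covering bytes 16..100.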
/// Information about "Blocks" of data. Appears immediately after FileHeader.
/// Contains all the information required to implement streaming download, decode, and playback.
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
#[br(import_raw(header:&FileHeader))]
#[derive(Debug)]
#[br(import(num_offline_blocks:u32,num_realtime_blocks:u32))]
pub struct BlockTimelines{
#[br(count=header.block_position_count())]
#[br(count=num_offline_blocks+num_realtime_blocks+1)]
block_positions:Vec<BlockPosition>,
#[br(count=header.num_offline_blocks)]
#[br(count=num_offline_blocks)]
offline_blocks_timeline:Vec<Timed<BlockId>>,
#[br(count=header.num_realtime_blocks)]
#[br(count=num_realtime_blocks)]
realtime_blocks_timeline:Vec<Timed<BlockId>>,
}
impl BlockTimelines{
pub fn from_reader<R:BinReaderExt>(header:&FileHeader,mut data:R)->Result<Self,BinrwError>{
data.read_le_args(header)
}
/// "Offline" blocks (containing World, Gravity, Run, Camera, and Setting events) in chronological order.
pub fn offline_blocks(&self)->&[Timed<BlockId>]{
&self.offline_blocks_timeline
}
/// "Realtime" blocks (containing Input, Output, and Sound events) in chronological order.
pub fn realtime_blocks(&self)->&[Timed<BlockId>]{
&self.realtime_blocks_timeline
}
/// Get BlockInfo for a specific BlockId.
pub fn block_info(&self,block_id:BlockId)->Result<BlockInfo,InvalidBlockId>{
let BlockId(id)=block_id;
if self.block_positions.len() as u32<=id{
return Err(InvalidBlockId(block_id));
pub fn block_info(&self,BlockId(block_id):BlockId)->Result<BlockInfo,Error>{
if self.block_positions.len() as u32<=block_id{
return Err(Error::InvalidBlockId(BlockId(block_id)));
}
let BlockPosition(start)=self.block_positions[id as usize];
let BlockPosition(end)=self.block_positions[id as usize+1];
Ok(BlockInfo(start..end))
let BlockPosition(start)=self.block_positions[block_id as usize];
let BlockPosition(end)=self.block_positions[block_id as usize+1];
Ok(BlockInfo{start,length:end-start})
}
}
/// The range of data for a specific Block, relative to the start of the file.
#[derive(Debug,Clone)]
pub struct BlockInfo(core::ops::Range<u32>);
impl BlockTimelines{
pub fn from_reader<R:BinReaderExt>(header:&FileHeader,mut data:R)->binrw::BinResult<Self>{
data.read_le_args((header.num_offline_blocks,header.num_realtime_blocks))
}
}
pub struct BlockInfo{
start:u32,
length:u32,
}
impl BlockInfo{
pub fn start(&self)->u32{
self.start
}
pub fn length(&self)->u32{
self.length
}
/// Create an adapter which seeks to the block start and reads at most the block length.
pub fn take_seek<R:BinReaderExt>(&self,mut data:R)->Result<TakeSeek<R>,IoError>{
data.seek(SeekFrom::Start(self.start as u64))?;
Ok(data.take_seek(self.len() as u64))
data.seek(SeekFrom::Start(self.start() as u64))?;
Ok(data.take_seek(self.length() as u64))
}
}
impl core::ops::Deref for BlockInfo{
type Target=core::ops::Range<u32>;
fn deref(&self)->&Self::Target{
&self.0
}
}
/// Read offline blocks and combine the timelines into a single Block.
/// Note that this reads the blocks in chronological order, not the order they appear in the file, so there is some seeking involved.
pub fn read_offline_to_block<R:BinReaderExt>(mut data:R)->Result<Block,Error>{
let header=FileHeader::from_reader(&mut data).map_err(Error::InvalidData)?;
let block_timelines=BlockTimelines::from_reader(&header,&mut data).map_err(Error::InvalidData)?;
let mut block=Block::default();
block.extend_from_block_id_iter(data,&block_timelines,block_timelines.offline_blocks())?;
Ok(block)
}
/// Read realtime blocks and combine the timelines into a single Block.
/// Note that this reads the blocks in chronological order, not the order they appear in the file, so there is some seeking involved.
pub fn read_realtime_to_block<R:BinReaderExt>(mut data:R)->Result<Block,Error>{
let header=FileHeader::from_reader(&mut data).map_err(Error::InvalidData)?;
let block_timelines=BlockTimelines::from_reader(&header,&mut data).map_err(Error::InvalidData)?;
let mut block=Block::default();
block.extend_from_block_id_iter(data,&block_timelines,block_timelines.realtime_blocks())?;
Ok(block)
}
/// Read the entire file and combine the timelines into a single Block.
/// Note that this reads the blocks in chronological order, not the order they appear in the file, so there is some seeking involved.
#[cfg(feature="itertools")]
pub fn read_all_to_block<R:BinReaderExt>(mut data:R)->Result<Block,Error>{
let header=FileHeader::from_reader(&mut data).map_err(Error::InvalidData)?;
let block_timelines=BlockTimelines::from_reader(&header,&mut data).map_err(Error::InvalidData)?;
let mut block=Block::default();
block.extend_from_block_id_iter(&mut data,&block_timelines,block_timelines.offline_blocks())?;
block.extend_from_block_id_iter(&mut data,&block_timelines,block_timelines.realtime_blocks())?;
for timed in itertools::merge(block_timelines.offline_blocks(),block_timelines.realtime_blocks()){
let take_seek=block_timelines
.block_info(timed.event)?
.take_seek(&mut data)
.map_err(Error::Seek)?;
block.extend_from_reader(take_seek).map_err(Error::InvalidData)?;
}
Ok(block)
}
#[cfg(feature="itertools")]
pub fn serialize<W:binrw::BinWriterExt>(block:&Block,writer:&mut W)->Result<(),BinrwError>{
use std::ops::Range;
const MAX_BLOCK_SIZE:usize=1<<14;
const FILE_VERSION:u32=0;
const EVENT_TYPES:[EventType;8]=[
EventType::Input,
EventType::Output,
EventType::Sound,
EventType::World,
EventType::Gravity,
EventType::Run,
EventType::Camera,
EventType::Setting,
];
const EVENT_SIZE:[usize;8]=[
8+4+2*4, // Input
8+4+4*3*4, // Output
8+4+4, // Sound
8+4+12, // World
8+3*4, // Gravity
8+4+4+4, // Run
8+4+3*4, // Camera
8+4+8, // Setting
];
#[derive(Clone,Default)]
struct Plan<T>([T;8]);
// A plan of how many events of each type to include in a data block.
impl Plan<usize>{
/// Predict the size increment from adding a new event.
fn size_increase(&self,event_type:EventType)->usize{
let new_chunk_header=self.0[event_type as usize]==0;
let mask=(-(new_chunk_header as isize)) as usize;
EVENT_SIZE[event_type as usize]+(mask&size_of::<EventChunkHeader>())
}
/// Add the new event.
fn accumulate(&mut self,event_type:EventType){
self.0[event_type as usize]+=1;
}
fn range(&self,end:&Plan<usize>)->Plan<Range<usize>>{
Plan(core::array::from_fn(|i|self.0[i]..end.0[i]))
}
}
// A plan of what range of events to include in a data block.
impl Plan<Range<usize>>{
/// Calculate the predicted size of the planned block.
fn size(&self)->usize{
self.0.iter()
.zip(EVENT_SIZE)
.filter_map(|(range,event_size)|match range.len(){
0=>None,
other=>Some(other*event_size+size_of::<EventChunkHeader>()),
})
.sum()
}
}
// compare an event at the head of the plan to the best event collected so far.
fn collect_event<E>(
best:&mut Option<(f64,EventType)>,
list:&[Timed<E>],
plan:&Plan<usize>,
event_type:EventType,
)
where
E:for<'a>binrw::BinRead<Args<'a>=()>,
E:for<'a>binrw::BinWrite<Args<'a>=()>,
{
if let Some(event)=list.get(plan.0[event_type as usize])
&&best.is_none_or(|(time,_)|event.time<time)
{
*best=Some((event.time,event_type));
}
}
// plan a single block: collect events until the block is full
fn plan_block(plan:&mut Plan<usize>,next_event:impl Fn(&Plan<usize>)->Option<(f64,EventType)>)->Option<f64>{
let mut size=0;
let (start_time,first_event)=next_event(plan)?;
size+=plan.size_increase(first_event);
if MAX_BLOCK_SIZE<size{
return None;
}
plan.accumulate(first_event);
while let Some((_,event_type))=next_event(plan){
size+=plan.size_increase(event_type);
if MAX_BLOCK_SIZE<size{
break;
}
plan.accumulate(event_type);
}
Some(start_time)
}
struct PlannedBlock{
// index is not the same as BlockId.
// It is list-local for both plan_offline and plan_realtime.
index:usize,
time:f64,
plan:Plan<Range<usize>>,
}
fn plan_timeline<F>(next_event:F)->std::collections::VecDeque<PlannedBlock>
where
F:Copy,
F:Fn(&Plan<usize>)->Option<(f64,EventType)>
{
let mut timeline=std::collections::VecDeque::new();
let mut plan=Plan::default();
let mut last_plan=plan.clone();
let mut index=0;
while let Some(time)=plan_block(&mut plan,next_event){
timeline.push_back(PlannedBlock{
index,
time,
plan:last_plan.range(&plan),
});
last_plan=plan.clone();
index+=1;
}
timeline
}
// plan events into segments without spilling over max size threshold
// each plan describes the range of events included in the block.
let mut plan_offline=plan_timeline(|plan|{
let mut next_event=None;
collect_event(&mut next_event,&block.world_events,plan,EventType::World);
collect_event(&mut next_event,&block.gravity_events,plan,EventType::Gravity);
collect_event(&mut next_event,&block.run_events,plan,EventType::Run);
collect_event(&mut next_event,&block.camera_events,plan,EventType::Camera);
collect_event(&mut next_event,&block.setting_events,plan,EventType::Setting);
next_event
});
let mut plan_realtime=plan_timeline(|plan|{
let mut next_event=None;
collect_event(&mut next_event,&block.input_events,plan,EventType::Input);
collect_event(&mut next_event,&block.output_events,plan,EventType::Output);
collect_event(&mut next_event,&block.sound_events,plan,EventType::Sound);
next_event
});
let file_header=FileHeader{
file_version:FILE_VERSION,
num_offline_blocks:plan_offline.len() as u32,
num_realtime_blocks:plan_realtime.len() as u32,
};
let mut plan_order=Vec::with_capacity(plan_offline.len()+plan_realtime.len());
let mut block_positions=Vec::with_capacity(file_header.block_position_count() as usize);
// Fill the timelines with dummy values, we don't know the block ids yet.
// This can be done with Vec::spare_capacity_mut and unsafe, but whatever.
const DUMMY_BLOCK:Timed<BlockId>=Timed{time:0.0,event:BlockId(0)};
let mut offline_blocks_timeline=vec![DUMMY_BLOCK;plan_offline.len()];
let mut realtime_blocks_timeline=vec![DUMMY_BLOCK;plan_realtime.len()];
{
// position starts after the *predicted* end of the BlockTimelines
let mut position=file_header.block_timelines_info().end;
let mut block_id=0;
let mut push_block=|timeline:&mut Vec<Timed<BlockId>>,planned:PlannedBlock|{
block_positions.push(BlockPosition(position));
position+=planned.plan.size() as u32;
// write the block id to the correct index
timeline[planned.index]=Timed{
time:planned.time,
event:BlockId(block_id),
};
block_id+=1;
plan_order.push(planned.plan);
};
// the first block in the file is an offline block to
// initialize the state of things like the current style
if let Some(plan)=plan_offline.pop_front(){
push_block(&mut offline_blocks_timeline,plan);
}
// the second block is the first realtime block which
// includes the starting position of the replay
if let Some(plan)=plan_realtime.pop_front(){
push_block(&mut realtime_blocks_timeline,plan);
}
// the third block is the last realtime block which
// is used by the game client to determine the duration
if let Some(plan)=plan_realtime.pop_back(){
push_block(&mut realtime_blocks_timeline,plan);
}
// push the remaining blocks in chronological order
for either_plan in itertools::merge_join_by(
plan_offline,
plan_realtime,
|offline,realtime|offline.time<=realtime.time,
){
match either_plan{
itertools::Either::Left(offline)=>push_block(&mut offline_blocks_timeline,offline),
itertools::Either::Right(realtime)=>push_block(&mut realtime_blocks_timeline,realtime),
}
}
// final position
block_positions.push(BlockPosition(position));
}
let block_timelines=BlockTimelines{
block_positions,
offline_blocks_timeline,
realtime_blocks_timeline,
};
use binrw::BinWrite;
file_header.write_le(writer)?;
block_timelines.write_le(writer)?;
for plan in plan_order{
for (range,event_type) in plan.0.into_iter().zip(EVENT_TYPES){
let num_events=range.len();
if num_events==0{
continue;
}
let event_chunk_header=EventChunkHeader{
event_type,
num_events:num_events as u32,
};
event_chunk_header.write_le(writer)?;
match event_type{
EventType::Input=>block.input_events[range].write_le(writer)?,
EventType::Output=>block.output_events[range].write_le(writer)?,
EventType::Sound=>block.sound_events[range].write_le(writer)?,
EventType::World=>block.world_events[range].write_le(writer)?,
EventType::Gravity=>block.gravity_events[range].write_le(writer)?,
EventType::Run=>block.run_events[range].write_le(writer)?,
EventType::Camera=>block.camera_events[range].write_le(writer)?,
EventType::Setting=>block.setting_events[range].write_le(writer)?,
}
}
}
Ok(())
}
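// Sketch (not part of the diff): a round trip through serialize and
// read_all_to_block, mirroring the serialize_round_trip test in the tests diff
// above. The asserted invariant (event counts survive the round trip) is an
// illustrative assumption, not a documented guarantee; if these functions are
// gated on the itertools feature, the test needs that feature enabled.
#[cfg(test)]
#[test]
fn serialize_round_trips_event_counts()->Result<(),Error>{
    let file=std::fs::read("files/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d").unwrap();
    let block=read_all_to_block(std::io::Cursor::new(file))?;
    let mut data=Vec::new();
    serialize(&block,&mut std::io::Cursor::new(&mut data)).map_err(Error::InvalidData)?;
    let reread=read_all_to_block(std::io::Cursor::new(data))?;
    assert_eq!(block.output_events.len(),reread.output_events.len());
    assert_eq!(block.input_events.len(),reread.input_events.len());
    Ok(())
}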