framework.rs -> lib.rs, smaller binary flags, removed dependencies and standard main function #2

Closed
blocksrey wants to merge 1 commit from blocksrey/strafe-client:no_main into master
9 changed files with 603 additions and 535 deletions

.gitignore vendored

@@ -1 +1,2 @@
-/target
+Cargo.lock
+target/

Cargo.toml

@@ -1,23 +1,24 @@
[package]
name = "strafe-client"
-version = "0.2.0"
+version = "0.2.1"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-executor = "1.5.1"
-bytemuck = { version = "1.13.1", features = ["derive"] }
+bytemuck = { version = "1.14.0", features = ["derive"] }
ddsfile = "0.5.1"
-env_logger = "0.10.0"
glam = "0.24.1"
-log = "0.4.20"
+logga = "0.1.2"
obj = "0.10.2"
pollster = "0.3.0"
wgpu = "0.17.0"
winit = "0.28.6"
[profile.release]
-lto = true
-strip = true
codegen-units = 1
+lto = true
+opt-level = "z"
+panic = "abort"
+strip = true

framework.rs

@@ -1,516 +0,0 @@
use std::future::Future;
#[cfg(target_arch = "wasm32")]
use std::str::FromStr;
#[cfg(not(target_arch = "wasm32"))]
use std::time::Instant;
#[cfg(target_arch = "wasm32")]
use web_sys::{ImageBitmapRenderingContext, OffscreenCanvas};
use winit::{
event::{self, WindowEvent},
event_loop::{ControlFlow, EventLoop},
};
#[allow(dead_code)]
pub fn cast_slice<T>(data: &[T]) -> &[u8] {
use std::{mem::size_of, slice::from_raw_parts};
unsafe { from_raw_parts(data.as_ptr() as *const u8, data.len() * size_of::<T>()) }
}
#[allow(dead_code)]
pub enum ShaderStage {
Vertex,
Fragment,
Compute,
}
pub trait Example: 'static + Sized {
fn optional_features() -> wgpu::Features {
wgpu::Features::empty()
}
fn required_features() -> wgpu::Features {
wgpu::Features::empty()
}
fn required_downlevel_capabilities() -> wgpu::DownlevelCapabilities {
wgpu::DownlevelCapabilities {
flags: wgpu::DownlevelFlags::empty(),
shader_model: wgpu::ShaderModel::Sm5,
..wgpu::DownlevelCapabilities::default()
}
}
fn required_limits() -> wgpu::Limits {
wgpu::Limits::downlevel_webgl2_defaults() // These downlevel limits will allow the code to run on all possible hardware
}
fn init(
config: &wgpu::SurfaceConfiguration,
adapter: &wgpu::Adapter,
device: &wgpu::Device,
queue: &wgpu::Queue,
) -> Self;
fn resize(
&mut self,
config: &wgpu::SurfaceConfiguration,
device: &wgpu::Device,
queue: &wgpu::Queue,
);
fn update(&mut self, event: WindowEvent);
fn move_mouse(&mut self, delta: (f64,f64));
fn render(
&mut self,
view: &wgpu::TextureView,
device: &wgpu::Device,
queue: &wgpu::Queue,
spawner: &Spawner,
);
}
struct Setup {
window: winit::window::Window,
event_loop: EventLoop<()>,
instance: wgpu::Instance,
size: winit::dpi::PhysicalSize<u32>,
surface: wgpu::Surface,
adapter: wgpu::Adapter,
device: wgpu::Device,
queue: wgpu::Queue,
#[cfg(target_arch = "wasm32")]
offscreen_canvas_setup: Option<OffscreenCanvasSetup>,
}
#[cfg(target_arch = "wasm32")]
struct OffscreenCanvasSetup {
offscreen_canvas: OffscreenCanvas,
bitmap_renderer: ImageBitmapRenderingContext,
}
async fn setup<E: Example>(title: &str) -> Setup {
#[cfg(not(target_arch = "wasm32"))]
{
env_logger::init();
};
let event_loop = EventLoop::new();
let mut builder = winit::window::WindowBuilder::new();
builder = builder.with_title(title);
#[cfg(windows_OFF)] // TODO
{
use winit::platform::windows::WindowBuilderExtWindows;
builder = builder.with_no_redirection_bitmap(true);
}
let window = builder.build(&event_loop).unwrap();
#[cfg(target_arch = "wasm32")]
{
use winit::platform::web::WindowExtWebSys;
let query_string = web_sys::window().unwrap().location().search().unwrap();
let level: log::Level = parse_url_query_string(&query_string, "RUST_LOG")
.and_then(|x| x.parse().ok())
.unwrap_or(log::Level::Error);
console_log::init_with_level(level).expect("could not initialize logger");
std::panic::set_hook(Box::new(console_error_panic_hook::hook));
// On wasm, append the canvas to the document body
web_sys::window()
.and_then(|win| win.document())
.and_then(|doc| doc.body())
.and_then(|body| {
body.append_child(&web_sys::Element::from(window.canvas()))
.ok()
})
.expect("couldn't append canvas to document body");
}
#[cfg(target_arch = "wasm32")]
let mut offscreen_canvas_setup: Option<OffscreenCanvasSetup> = None;
#[cfg(target_arch = "wasm32")]
{
use wasm_bindgen::JsCast;
use winit::platform::web::WindowExtWebSys;
let query_string = web_sys::window().unwrap().location().search().unwrap();
if let Some(offscreen_canvas_param) =
parse_url_query_string(&query_string, "offscreen_canvas")
{
if FromStr::from_str(offscreen_canvas_param) == Ok(true) {
log::info!("Creating OffscreenCanvasSetup");
let offscreen_canvas =
OffscreenCanvas::new(1024, 768).expect("couldn't create OffscreenCanvas");
let bitmap_renderer = window
.canvas()
.get_context("bitmaprenderer")
.expect("couldn't create ImageBitmapRenderingContext (Result)")
.expect("couldn't create ImageBitmapRenderingContext (Option)")
.dyn_into::<ImageBitmapRenderingContext>()
.expect("couldn't convert into ImageBitmapRenderingContext");
offscreen_canvas_setup = Some(OffscreenCanvasSetup {
offscreen_canvas,
bitmap_renderer,
})
}
}
};
log::info!("Initializing the surface...");
let backends = wgpu::util::backend_bits_from_env().unwrap_or_else(wgpu::Backends::all);
let dx12_shader_compiler = wgpu::util::dx12_shader_compiler_from_env().unwrap_or_default();
let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
backends,
dx12_shader_compiler,
});
let (size, surface) = unsafe {
let size = window.inner_size();
#[cfg(any(not(target_arch = "wasm32"), target_os = "emscripten"))]
let surface = instance.create_surface(&window).unwrap();
#[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
let surface = {
if let Some(offscreen_canvas_setup) = &offscreen_canvas_setup {
log::info!("Creating surface from OffscreenCanvas");
instance.create_surface_from_offscreen_canvas(
offscreen_canvas_setup.offscreen_canvas.clone(),
)
} else {
instance.create_surface(&window)
}
}
.unwrap();
(size, surface)
};
let adapter = wgpu::util::initialize_adapter_from_env_or_default(&instance, Some(&surface))
.await
.expect("No suitable GPU adapters found on the system!");
#[cfg(not(target_arch = "wasm32"))]
{
let adapter_info = adapter.get_info();
println!("Using {} ({:?})", adapter_info.name, adapter_info.backend);
}
let optional_features = E::optional_features();
let required_features = E::required_features();
let adapter_features = adapter.features();
assert!(
adapter_features.contains(required_features),
"Adapter does not support required features for this example: {:?}",
required_features - adapter_features
);
let required_downlevel_capabilities = E::required_downlevel_capabilities();
let downlevel_capabilities = adapter.get_downlevel_capabilities();
assert!(
downlevel_capabilities.shader_model >= required_downlevel_capabilities.shader_model,
"Adapter does not support the minimum shader model required to run this example: {:?}",
required_downlevel_capabilities.shader_model
);
assert!(
downlevel_capabilities
.flags
.contains(required_downlevel_capabilities.flags),
"Adapter does not support the downlevel capabilities required to run this example: {:?}",
required_downlevel_capabilities.flags - downlevel_capabilities.flags
);
// Make sure we use the texture resolution limits from the adapter, so we can support images the size of the surface.
let needed_limits = E::required_limits().using_resolution(adapter.limits());
let trace_dir = std::env::var("WGPU_TRACE");
let (device, queue) = adapter
.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: (optional_features & adapter_features) | required_features,
limits: needed_limits,
},
trace_dir.ok().as_ref().map(std::path::Path::new),
)
.await
.expect("Unable to find a suitable GPU adapter!");
Setup {
window,
event_loop,
instance,
size,
surface,
adapter,
device,
queue,
#[cfg(target_arch = "wasm32")]
offscreen_canvas_setup,
}
}
fn start<E: Example>(
#[cfg(not(target_arch = "wasm32"))] Setup {
window,
event_loop,
instance,
size,
surface,
adapter,
device,
queue,
}: Setup,
#[cfg(target_arch = "wasm32")] Setup {
window,
event_loop,
instance,
size,
surface,
adapter,
device,
queue,
offscreen_canvas_setup,
}: Setup,
) {
let spawner = Spawner::new();
let mut config = surface
.get_default_config(&adapter, size.width, size.height)
.expect("Surface isn't supported by the adapter.");
let surface_view_format = config.format.add_srgb_suffix();
config.view_formats.push(surface_view_format);
surface.configure(&device, &config);
log::info!("Initializing the example...");
let mut example = E::init(&config, &adapter, &device, &queue);
#[cfg(not(target_arch = "wasm32"))]
let mut last_frame_inst = Instant::now();
#[cfg(not(target_arch = "wasm32"))]
let (mut frame_count, mut accum_time) = (0, 0.0);
log::info!("Entering render loop...");
event_loop.run(move |event, _, control_flow| {
let _ = (&instance, &adapter); // force ownership by the closure
*control_flow = if cfg!(feature = "metal-auto-capture") {
ControlFlow::Exit
} else {
ControlFlow::Poll
};
match event {
event::Event::RedrawEventsCleared => {
#[cfg(not(target_arch = "wasm32"))]
spawner.run_until_stalled();
window.request_redraw();
}
event::Event::WindowEvent {
event:
WindowEvent::Resized(size)
| WindowEvent::ScaleFactorChanged {
new_inner_size: &mut size,
..
},
..
} => {
// Once winit is fixed, the detection conditions here can be removed.
// https://github.com/rust-windowing/winit/issues/2876
let max_dimension = adapter.limits().max_texture_dimension_2d;
if size.width > max_dimension || size.height > max_dimension {
log::warn!(
"The resizing size {:?} exceeds the limit of {}.",
size,
max_dimension
);
} else {
log::info!("Resizing to {:?}", size);
config.width = size.width.max(1);
config.height = size.height.max(1);
example.resize(&config, &device, &queue);
surface.configure(&device, &config);
}
}
event::Event::WindowEvent { event, .. } => match event {
WindowEvent::KeyboardInput {
input:
event::KeyboardInput {
virtual_keycode: Some(event::VirtualKeyCode::Escape),
state: event::ElementState::Pressed,
..
},
..
}
| WindowEvent::CloseRequested => {
*control_flow = ControlFlow::Exit;
}
#[cfg(not(target_arch = "wasm32"))]
WindowEvent::KeyboardInput {
input:
event::KeyboardInput {
virtual_keycode: Some(event::VirtualKeyCode::R),
state: event::ElementState::Pressed,
..
},
..
} => {
println!("{:#?}", instance.generate_report());
}
_ => {
example.update(event);
}
},
event::Event::DeviceEvent {
event:
winit::event::DeviceEvent::MouseMotion {
delta,
},
..
} => {
example.move_mouse(delta);
},
event::Event::RedrawRequested(_) => {
#[cfg(not(target_arch = "wasm32"))]
{
accum_time += last_frame_inst.elapsed().as_secs_f32();
last_frame_inst = Instant::now();
frame_count += 1;
if frame_count == 100 {
println!(
"Avg frame time {}ms",
accum_time * 1000.0 / frame_count as f32
);
accum_time = 0.0;
frame_count = 0;
}
}
let frame = match surface.get_current_texture() {
Ok(frame) => frame,
Err(_) => {
surface.configure(&device, &config);
surface
.get_current_texture()
.expect("Failed to acquire next surface texture!")
}
};
let view = frame.texture.create_view(&wgpu::TextureViewDescriptor {
format: Some(surface_view_format),
..wgpu::TextureViewDescriptor::default()
});
example.render(&view, &device, &queue, &spawner);
frame.present();
#[cfg(target_arch = "wasm32")]
{
if let Some(offscreen_canvas_setup) = &offscreen_canvas_setup {
let image_bitmap = offscreen_canvas_setup
.offscreen_canvas
.transfer_to_image_bitmap()
.expect("couldn't transfer offscreen canvas to image bitmap.");
offscreen_canvas_setup
.bitmap_renderer
.transfer_from_image_bitmap(&image_bitmap);
log::info!("Transferring OffscreenCanvas to ImageBitmapRenderer");
}
}
}
_ => {}
}
});
}
#[cfg(not(target_arch = "wasm32"))]
pub struct Spawner<'a> {
executor: async_executor::LocalExecutor<'a>,
}
#[cfg(not(target_arch = "wasm32"))]
impl<'a> Spawner<'a> {
fn new() -> Self {
Self {
executor: async_executor::LocalExecutor::new(),
}
}
#[allow(dead_code)]
pub fn spawn_local(&self, future: impl Future<Output = ()> + 'a) {
self.executor.spawn(future).detach();
}
fn run_until_stalled(&self) {
while self.executor.try_tick() {}
}
}
#[cfg(target_arch = "wasm32")]
pub struct Spawner {}
#[cfg(target_arch = "wasm32")]
impl Spawner {
fn new() -> Self {
Self {}
}
#[allow(dead_code)]
pub fn spawn_local(&self, future: impl Future<Output = ()> + 'static) {
wasm_bindgen_futures::spawn_local(future);
}
}
#[cfg(not(target_arch = "wasm32"))]
pub fn run<E: Example>(title: &str) {
let setup = pollster::block_on(setup::<E>(title));
start::<E>(setup);
}
#[cfg(target_arch = "wasm32")]
pub fn run<E: Example>(title: &str) {
use wasm_bindgen::prelude::*;
let title = title.to_owned();
wasm_bindgen_futures::spawn_local(async move {
let setup = setup::<E>(&title).await;
let start_closure = Closure::once_into_js(move || start::<E>(setup));
// make sure to handle JS exceptions thrown inside start.
// Otherwise wasm_bindgen_futures Queue would break and never handle any tasks again.
// This is required, because winit uses JS exception for control flow to escape from `run`.
if let Err(error) = call_catch(&start_closure) {
let is_control_flow_exception = error.dyn_ref::<js_sys::Error>().map_or(false, |e| {
e.message().includes("Using exceptions for control flow", 0)
});
if !is_control_flow_exception {
web_sys::console::error_1(&error);
}
}
#[wasm_bindgen]
extern "C" {
#[wasm_bindgen(catch, js_namespace = Function, js_name = "prototype.call.call")]
fn call_catch(this: &JsValue) -> Result<(), JsValue>;
}
});
}
#[cfg(target_arch = "wasm32")]
/// Parse the query string as returned by `web_sys::window()?.location().search()?` and get a
/// specific key out of it.
pub fn parse_url_query_string<'a>(query: &'a str, search_key: &str) -> Option<&'a str> {
let query_string = query.strip_prefix('?')?;
for pair in query_string.split('&') {
let mut pair = pair.split('=');
let key = pair.next()?;
let value = pair.next()?;
if key == search_key {
return Some(value);
}
}
None
}
// This allows treating the framework as a standalone example,
// thus avoiding listing the example names in `Cargo.toml`.
#[allow(dead_code)]
fn main() {}

lib.rs

@@ -1 +1,414 @@
-pub mod framework;
+#[cfg(not(target_arch = "wasm32"))]
use std::time::Instant;
use winit::{
event::{self, WindowEvent},
event_loop::{ControlFlow, EventLoop},
};
use logga;
#[allow(dead_code)]
pub fn cast_slice<T>(data: &[T]) -> &[u8] {
use std::{mem::size_of, slice::from_raw_parts};
unsafe { from_raw_parts(data.as_ptr() as *const u8, data.len() * size_of::<T>()) }
}
#[allow(dead_code)]
pub enum ShaderStage {
Vertex,
Fragment,
Compute,
}
pub trait Example: 'static + Sized {
fn optional_features() -> wgpu::Features {
wgpu::Features::empty()
}
fn required_features() -> wgpu::Features {
wgpu::Features::empty()
}
fn required_downlevel_capabilities() -> wgpu::DownlevelCapabilities {
wgpu::DownlevelCapabilities {
flags: wgpu::DownlevelFlags::empty(),
shader_model: wgpu::ShaderModel::Sm5,
..wgpu::DownlevelCapabilities::default()
}
}
fn required_limits() -> wgpu::Limits {
wgpu::Limits::downlevel_webgl2_defaults() // These downlevel limits will allow the code to run on all possible hardware
}
fn init(
config: &wgpu::SurfaceConfiguration,
adapter: &wgpu::Adapter,
device: &wgpu::Device,
queue: &wgpu::Queue,
) -> Self;
fn resize(
&mut self,
config: &wgpu::SurfaceConfiguration,
device: &wgpu::Device,
queue: &wgpu::Queue,
);
fn update(&mut self, event: WindowEvent);
fn move_mouse(&mut self, delta: (f64,f64));
fn render(
&mut self,
view: &wgpu::TextureView,
device: &wgpu::Device,
queue: &wgpu::Queue,
);
}
struct Setup {
window: winit::window::Window,
event_loop: EventLoop<()>,
instance: wgpu::Instance,
size: winit::dpi::PhysicalSize<u32>,
surface: wgpu::Surface,
adapter: wgpu::Adapter,
device: wgpu::Device,
queue: wgpu::Queue,
#[cfg(target_arch = "wasm32")]
offscreen_canvas_setup: Option<OffscreenCanvasSetup>,
}
#[cfg(target_arch = "wasm32")]
struct OffscreenCanvasSetup {
offscreen_canvas: OffscreenCanvas,
bitmap_renderer: ImageBitmapRenderingContext,
}
async fn setup<E: Example>(title: &str) -> Setup {
//logga::init();
let event_loop = EventLoop::new();
let mut builder = winit::window::WindowBuilder::new();
builder = builder.with_title(title);
#[cfg(windows_OFF)] // TODO
{
use winit::platform::windows::WindowBuilderExtWindows;
builder = builder.with_no_redirection_bitmap(true);
}
let window = builder.build(&event_loop).unwrap();
#[cfg(target_arch = "wasm32")]
{
use winit::platform::web::WindowExtWebSys;
let query_string = web_sys::window().unwrap().location().search().unwrap();
let level: log::Level = parse_url_query_string(&query_string, "RUST_LOG")
.and_then(|x| x.parse().ok())
.unwrap_or(log::Level::Error);
console_log::init_with_level(level).expect("could not initialize logger");
std::panic::set_hook(Box::new(console_error_panic_hook::hook));
// On wasm, append the canvas to the document body
web_sys::window()
.and_then(|win| win.document())
.and_then(|doc| doc.body())
.and_then(|body| {
body.append_child(&web_sys::Element::from(window.canvas()))
.ok()
})
.expect("couldn't append canvas to document body");
}
#[cfg(target_arch = "wasm32")]
let mut offscreen_canvas_setup: Option<OffscreenCanvasSetup> = None;
#[cfg(target_arch = "wasm32")]
{
use wasm_bindgen::JsCast;
use winit::platform::web::WindowExtWebSys;
let query_string = web_sys::window().unwrap().location().search().unwrap();
if let Some(offscreen_canvas_param) =
parse_url_query_string(&query_string, "offscreen_canvas")
{
if FromStr::from_str(offscreen_canvas_param) == Ok(true) {
logga::info!("Creating OffscreenCanvasSetup");
let offscreen_canvas =
OffscreenCanvas::new(1024, 768).expect("couldn't create OffscreenCanvas");
let bitmap_renderer = window
.canvas()
.get_context("bitmaprenderer")
.expect("couldn't create ImageBitmapRenderingContext (Result)")
.expect("couldn't create ImageBitmapRenderingContext (Option)")
.dyn_into::<ImageBitmapRenderingContext>()
.expect("couldn't convert into ImageBitmapRenderingContext");
offscreen_canvas_setup = Some(OffscreenCanvasSetup {
offscreen_canvas,
bitmap_renderer,
})
}
}
};
logga::info!("Initializing the surface...");
let backends = wgpu::util::backend_bits_from_env().unwrap_or_else(wgpu::Backends::all);
let dx12_shader_compiler = wgpu::util::dx12_shader_compiler_from_env().unwrap_or_default();
let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
backends,
dx12_shader_compiler,
});
let (size, surface) = unsafe {
let size = window.inner_size();
#[cfg(any(not(target_arch = "wasm32"), target_os = "emscripten"))]
let surface = instance.create_surface(&window).unwrap();
#[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
let surface = {
if let Some(offscreen_canvas_setup) = &offscreen_canvas_setup {
logga::info!("Creating surface from OffscreenCanvas");
instance.create_surface_from_offscreen_canvas(
offscreen_canvas_setup.offscreen_canvas.clone(),
)
} else {
instance.create_surface(&window)
}
}
.unwrap();
(size, surface)
};
let adapter = wgpu::util::initialize_adapter_from_env_or_default(&instance, Some(&surface))
.await
.expect("No suitable GPU adapters found on the system!");
#[cfg(not(target_arch = "wasm32"))]
{
let adapter_info = adapter.get_info();
println!("Using {} ({:?})", adapter_info.name, adapter_info.backend);
}
let optional_features = E::optional_features();
let required_features = E::required_features();
let adapter_features = adapter.features();
assert!(
adapter_features.contains(required_features),
"Adapter does not support required features for this example: {:?}",
required_features - adapter_features
);
let required_downlevel_capabilities = E::required_downlevel_capabilities();
let downlevel_capabilities = adapter.get_downlevel_capabilities();
assert!(
downlevel_capabilities.shader_model >= required_downlevel_capabilities.shader_model,
"Adapter does not support the minimum shader model required to run this example: {:?}",
required_downlevel_capabilities.shader_model
);
assert!(
downlevel_capabilities
.flags
.contains(required_downlevel_capabilities.flags),
"Adapter does not support the downlevel capabilities required to run this example: {:?}",
required_downlevel_capabilities.flags - downlevel_capabilities.flags
);
// Make sure we use the texture resolution limits from the adapter, so we can support images the size of the surface.
let needed_limits = E::required_limits().using_resolution(adapter.limits());
let trace_dir = std::env::var("WGPU_TRACE");
let (device, queue) = adapter
.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: (optional_features & adapter_features) | required_features,
limits: needed_limits,
},
trace_dir.ok().as_ref().map(std::path::Path::new),
)
.await
.expect("Unable to find a suitable GPU adapter!");
Setup {
window,
event_loop,
instance,
size,
surface,
adapter,
device,
queue,
#[cfg(target_arch = "wasm32")]
offscreen_canvas_setup,
}
}
fn start<E: Example>(
#[cfg(not(target_arch = "wasm32"))] Setup {
window,
event_loop,
instance,
size,
surface,
adapter,
device,
queue,
}: Setup,
#[cfg(target_arch = "wasm32")] Setup {
window,
event_loop,
instance,
size,
surface,
adapter,
device,
queue,
offscreen_canvas_setup,
}: Setup,
) {
let mut config = surface
.get_default_config(&adapter, size.width, size.height)
.expect("Surface isn't supported by the adapter.");
let surface_view_format = config.format.add_srgb_suffix();
config.view_formats.push(surface_view_format);
surface.configure(&device, &config);
logga::info!("Initializing the example...");
let mut example = E::init(&config, &adapter, &device, &queue);
#[cfg(not(target_arch = "wasm32"))]
let mut last_frame_inst = Instant::now();
#[cfg(not(target_arch = "wasm32"))]
let (mut frame_count, mut accum_time) = (0, 0.0);
logga::info!("Entering render loop...");
event_loop.run(move |event, _, control_flow| {
let _ = (&instance, &adapter); // force ownership by the closure
*control_flow = if cfg!(feature = "metal-auto-capture") {
ControlFlow::Exit
} else {
ControlFlow::Poll
};
match event {
event::Event::RedrawEventsCleared => {
#[cfg(not(target_arch = "wasm32"))]
window.request_redraw();
}
event::Event::WindowEvent {
event:
WindowEvent::Resized(size)
| WindowEvent::ScaleFactorChanged {
new_inner_size: &mut size,
..
},
..
} => {
// Once winit is fixed, the detection conditions here can be removed.
// https://github.com/rust-windowing/winit/issues/2876
let max_dimension = adapter.limits().max_texture_dimension_2d;
if size.width > max_dimension || size.height > max_dimension {
logga::warn!(
"The resizing size {:?} exceeds the limit of {}.",
size,
max_dimension
);
} else {
logga::info!("Resizing to {:?}", size);
config.width = size.width.max(1);
config.height = size.height.max(1);
example.resize(&config, &device, &queue);
surface.configure(&device, &config);
}
}
event::Event::WindowEvent { event, .. } => match event {
WindowEvent::KeyboardInput {
input:
event::KeyboardInput {
virtual_keycode: Some(event::VirtualKeyCode::Escape),
state: event::ElementState::Pressed,
..
},
..
}
| WindowEvent::CloseRequested => {
*control_flow = ControlFlow::Exit;
}
#[cfg(not(target_arch = "wasm32"))]
WindowEvent::KeyboardInput {
input:
event::KeyboardInput {
virtual_keycode: Some(event::VirtualKeyCode::R),
state: event::ElementState::Pressed,
..
},
..
} => {
println!("{:#?}", instance.generate_report());
}
_ => {
example.update(event);
}
},
event::Event::DeviceEvent {
event:
winit::event::DeviceEvent::MouseMotion {
delta,
},
..
} => {
example.move_mouse(delta);
},
event::Event::RedrawRequested(_) => {
#[cfg(not(target_arch = "wasm32"))]
{
accum_time += last_frame_inst.elapsed().as_secs_f32();
last_frame_inst = Instant::now();
frame_count += 1;
if frame_count == 100 {
println!(
"Avg frame time {}ms",
accum_time * 1000.0 / frame_count as f32
);
accum_time = 0.0;
frame_count = 0;
}
}
let frame = match surface.get_current_texture() {
Ok(frame) => frame,
Err(_) => {
surface.configure(&device, &config);
surface
.get_current_texture()
.expect("Failed to acquire next surface texture!")
}
};
let view = frame.texture.create_view(&wgpu::TextureViewDescriptor {
format: Some(surface_view_format),
..wgpu::TextureViewDescriptor::default()
});
example.render(&view, &device, &queue);
frame.present();
#[cfg(target_arch = "wasm32")]
{
if let Some(offscreen_canvas_setup) = &offscreen_canvas_setup {
let image_bitmap = offscreen_canvas_setup
.offscreen_canvas
.transfer_to_image_bitmap()
.expect("couldn't transfer offscreen canvas to image bitmap.");
offscreen_canvas_setup
.bitmap_renderer
.transfer_from_image_bitmap(&image_bitmap);
logga::info!("Transferring OffscreenCanvas to ImageBitmapRenderer");
}
}
}
_ => {}
}
});
}
#[cfg(not(target_arch = "wasm32"))]
pub fn run<E: Example>(title: &str) {
let setup = pollster::block_on(setup::<E>(title));
start::<E>(setup);
}
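
For orientation, below is a minimal sketch of how a downstream binary could drive the framework through the public surface this file now exposes (the Example trait plus run). MyApp, its empty method bodies, and the window title are hypothetical placeholders for illustration, not code from this PR:

use winit::event::WindowEvent;

struct MyApp; // hypothetical implementor, for illustration only

impl strafe_client::Example for MyApp {
    fn init(
        _config: &wgpu::SurfaceConfiguration,
        _adapter: &wgpu::Adapter,
        _device: &wgpu::Device,
        _queue: &wgpu::Queue,
    ) -> Self {
        MyApp
    }
    fn resize(
        &mut self,
        _config: &wgpu::SurfaceConfiguration,
        _device: &wgpu::Device,
        _queue: &wgpu::Queue,
    ) {}
    fn update(&mut self, _event: WindowEvent) {}
    fn move_mouse(&mut self, _delta: (f64, f64)) {}
    fn render(
        &mut self,
        _view: &wgpu::TextureView,
        _device: &wgpu::Device,
        _queue: &wgpu::Queue,
    ) {
        // Encode and submit a frame here; note the render signature no
        // longer takes a Spawner after this PR.
    }
}

fn main() {
    // run() blocks on adapter/device setup, then enters the winit event loop.
    strafe_client::run::<MyApp>("My App");
}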

main.rs

@@ -1,3 +1,163 @@
//#![no_std]
#![no_main]
// I'm trying to replace the stdlib functions so don't delete this yet
/*
extern crate std;
use std::*;
pub struct Vec<T> {
data: [T; 16],
len: usize,
}
impl<T> Vec<T> {
pub fn new() -> Self {
Vec {
data: Default::default(),
len: 0,
}
}
pub fn push(&mut self, value: T) -> Result<(), &'static str> {
if self.len < self.data.len() {
self.data[self.len] = value;
self.len += 1;
Ok(())
} else {
Err("Vec is full")
}
}
pub fn pop(&mut self) -> Option<T> {
if self.len > 0 {
self.len -= 1;
Some(std::mem::replace(&mut self.data[self.len], Default::default()))
} else {
None
}
}
pub fn len(&self) -> usize {
self.len
}
pub fn capacity(&self) -> usize {
self.data.len()
}
pub fn is_empty(&self) -> bool {
self.len == 0
}
}
pub struct Iter<'a, T> {
vec: &'a Vec<T>,
index: usize,
}
impl<'a, T> Vec<T> {
// ... previous methods ...
pub fn iter(&'a self) -> Iter<'a, T> {
Iter {
vec: self,
index: 0,
}
}
}
impl<'a, T> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
if self.index < self.vec.len {
let item = &self.vec.data[self.index];
self.index += 1;
Some(item)
} else {
None
}
}
}
impl<T> Vec<T> {
// (Previous methods omitted for brevity)
pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<T> {
Drain {
vec: self,
range,
}
}
}
pub struct Drain<'a, T> {
vec: &'a mut Vec<T>,
range: std::ops::Range<usize>,
}
impl<'a, T> Iterator for Drain<'a, T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
if let Some(index) = self.range.next_back() {
if index < self.vec.len {
// Swap and remove the element from the Vec
let last_index = self.vec.len - 1;
self.vec.len -= 1;
Some(std::mem::replace(&mut self.vec.data[index], Default::default()))
} else {
None
}
} else {
None
}
}
}
*/
//extern crate logga;
//extern crate libc;
/*
#[panic_handler]
fn my_panic(_info: &core::panic::PanicInfo) -> ! {
loop {}
}
*/
//use logga;
use bytemuck::{Pod, Zeroable};
use std::{borrow::Cow, time::Instant};
use wgpu::{util::DeviceExt, AstcBlock, AstcChannel};
@@ -215,7 +375,7 @@ fn add_obj(device:&wgpu::Device,modeldatas:& mut Vec<ModelData>,source:&[u8]){
}
}
-impl strafe_client::framework::Example for Skybox {
+impl strafe_client::Example for Skybox {
fn optional_features() -> wgpu::Features {
wgpu::Features::TEXTURE_COMPRESSION_ASTC
| wgpu::Features::TEXTURE_COMPRESSION_ETC2
@@ -419,19 +579,19 @@ impl strafe_client::framework::Example for Skybox {
let device_features = device.features();
let skybox_format = if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ASTC) {
-log::info!("Using ASTC");
+logga::info!("Using ASTC");
wgpu::TextureFormat::Astc {
block: AstcBlock::B4x4,
channel: AstcChannel::UnormSrgb,
}
} else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ETC2) {
-log::info!("Using ETC2");
+logga::info!("Using ETC2");
wgpu::TextureFormat::Etc2Rgb8UnormSrgb
} else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_BC) {
-log::info!("Using BC");
+logga::info!("Using BC");
wgpu::TextureFormat::Bc1RgbaUnormSrgb
} else {
-log::info!("Using plain");
+logga::info!("Using plain");
wgpu::TextureFormat::Bgra8UnormSrgb
};
@@ -447,7 +607,7 @@ impl strafe_client::framework::Example for Skybox {
};
let max_mips = layer_size.max_mips(wgpu::TextureDimension::D2);
-log::debug!(
+logga::debug!(
"Copying {:?} skybox images of size {}, {}, 6 with {} mips to gpu",
skybox_format,
IMAGE_SIZE,
@@ -623,7 +783,6 @@ impl strafe_client::framework::Example for Skybox {
view: &wgpu::TextureView,
device: &wgpu::Device,
queue: &wgpu::Queue,
-_spawner: &strafe_client::framework::Spawner,
) {
let time = Instant::now();
@@ -741,8 +900,10 @@ impl strafe_client::framework::Example for Skybox {
}
}
-fn main() {
-strafe_client::framework::run::<Skybox>(
+#[no_mangle]
+fn main(_argc: isize, _argv: *const *const u8) {
+strafe_client::run::<Skybox>(
format!("Strafe Client v{}",
env!("CARGO_PKG_VERSION")
).as_str()
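
The #![no_main] attribute plus an exported #[no_mangle] main symbol is the "standard main function" removal the title refers to: rustc emits no lang_start wrapper, and the C runtime calls the exported symbol directly. A standalone sketch of the same pattern follows; the extern "C" ABI and the explicit exit code are conventional additions of mine for clarity, not what this PR uses:

#![no_main]

// With #![no_main], no Rust startup shim is generated; crt0 resolves this
// exported symbol and calls it directly, so the function supplies the
// process exit code itself.
#[no_mangle]
pub extern "C" fn main(_argc: core::ffi::c_int, _argv: *const *const u8) -> core::ffi::c_int {
    println!("hello without the Rust startup shim");
    0
}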

std/lib.rs Normal file

@@ -0,0 +1,8 @@
#[export_macro]
macro_rules! insssfo {
($($arg:tt)*) => {
};
}
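
Note that #[export_macro] is not a Rust attribute; the one that exports a macro_rules! macro from a crate is #[macro_export]. A sketch of a working no-op logging stub in the same spirit, assuming that is the intent; the macro name info and the demo function are chosen here for illustration and are not taken from the PR:

// Expands to nothing at every call site, so log-style calls compile away.
#[macro_export]
macro_rules! info {
    ($($arg:tt)*) => {};
}

fn demo() {
    info!("this call compiles but produces no code: {}", 42);
}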