Refactoring | Not finished

Florian RICHER 2022-06-21 21:58:26 +02:00
parent 9e24702770
commit 7f2a43700a
5 changed files with 41 additions and 390 deletions
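
The diff below pulls the monolithic State struct out of the crate root: window and GPU ownership move into render::Renderer, and scene-specific logic is meant to live behind the reworked render::State trait (implemented by DefaultState), whose methods now borrow the renderer instead of owning it. Below is a minimal sketch of the wiring the new run() sets up, with the winit plumbing omitted and the rest taken from the changed files; it is a reading of the diff, not code added by this commit.

use std::{ops::Deref, sync::Arc};
use crate::render::{DefaultState, Renderer};

// Sketch only: Renderer::new, DefaultState::new and set_state are the items
// added or re-exported by this commit; the surrounding setup is simplified.
async fn setup(window: &winit::window::Window) -> Arc<Renderer> {
    let mut renderer = Arc::from(Renderer::new(window).await);
    // The scene state is now built from a borrowed Renderer...
    let default_state = Box::from(DefaultState::new(renderer.deref()));
    // ...and then handed back to the Renderer, which drives it from the event loop.
    Arc::get_mut(&mut renderer).unwrap().set_state(Some(default_state));
    renderer
}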

View file

@@ -1,16 +1,16 @@
use std::{sync::Arc, ops::Deref};
use cgmath::prelude::*;
use rayon::prelude::*;
use std::iter;
use wgpu::util::DeviceExt;
use winit::{
event::*,
event_loop::{ControlFlow, EventLoop},
window::Window,
};
#[cfg(target_arch = "wasm32")]
use wasm_bindgen::prelude::*;
use crate::render::{Renderer, DefaultState, State};
mod camera;
mod model;
mod resources;
@@ -18,8 +18,6 @@ mod texture;
mod render;
use model::{DrawLight, DrawModel};
const NUM_INSTANCES_PER_ROW: u32 = 10;
#[repr(C)]
@@ -131,355 +129,6 @@ struct LightUniform {
_padding2: u32,
}
struct State {
surface: wgpu::Surface,
device: wgpu::Device,
queue: wgpu::Queue,
config: wgpu::SurfaceConfiguration,
obj_model: model::Model,
camera: camera::Camera,
projection: camera::Projection,
camera_controller: camera::CameraController,
camera_uniform: CameraUniform,
camera_buffer: wgpu::Buffer,
camera_bind_group: wgpu::BindGroup,
instances: Vec<Instance>,
#[allow(dead_code)]
instance_buffer: wgpu::Buffer,
depth_texture: texture::Texture,
size: winit::dpi::PhysicalSize<u32>,
light_uniform: LightUniform,
light_buffer: wgpu::Buffer,
light_bind_group: wgpu::BindGroup,
#[allow(dead_code)]
debug_material: model::Material,
mouse_pressed: bool,
pipelines: render::Pipelines
}
impl State {
async fn new(window: &Window) -> Self {
let size = window.inner_size();
// The instance is a handle to our GPU
// BackendBit::PRIMARY => Vulkan + Metal + DX12 + Browser WebGPU
let instance = wgpu::Instance::new(wgpu::Backends::all());
let surface = unsafe { instance.create_surface(window) };
let adapter = instance
.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::default(),
compatible_surface: Some(&surface),
force_fallback_adapter: false,
})
.await
.unwrap();
let (device, queue) = adapter
.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: wgpu::Features::empty(),
// WebGL doesn't support all of wgpu's features, so if
// we're building for the web we'll have to disable some.
limits: if cfg!(target_arch = "wasm32") {
wgpu::Limits::downlevel_webgl2_defaults()
} else {
wgpu::Limits::default()
},
},
None, // Trace path
)
.await
.unwrap();
let config = wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
format: surface.get_preferred_format(&adapter).unwrap(),
width: size.width,
height: size.height,
present_mode: wgpu::PresentMode::Fifo,
};
surface.configure(&device, &config);
let global_bind_layout = render::GlobalBindLayout::new(&device);
let pipelines = render::Pipelines::new(&global_bind_layout, &device, &config);
// UPDATED!
let camera = camera::Camera::new((0.0, 5.0, 10.0), cgmath::Deg(-90.0), cgmath::Deg(-20.0));
let projection =
camera::Projection::new(config.width, config.height, cgmath::Deg(45.0), 0.1, 100.0);
let camera_controller = camera::CameraController::new(4.0, 0.4);
let mut camera_uniform = CameraUniform::new();
camera_uniform.update_view_proj(&camera, &projection);
let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Camera Buffer"),
contents: bytemuck::cast_slice(&[camera_uniform]),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
const SPACE_BETWEEN: f32 = 3.0;
let iter = {
cfg_if::cfg_if! {
if #[cfg(target_arch = "wasm32")] {
(0..NUM_INSTANCES_PER_ROW)
.into_iter()
} else {
(0..NUM_INSTANCES_PER_ROW)
.into_par_iter()
}
}
};
let instances = iter
.clone()
.flat_map(|z| {
// UPDATED!
iter.clone().map(move |x| {
let x = SPACE_BETWEEN * (x as f32 - NUM_INSTANCES_PER_ROW as f32 / 2.0);
let z = SPACE_BETWEEN * (z as f32 - NUM_INSTANCES_PER_ROW as f32 / 2.0);
let position = cgmath::Vector3 { x, y: 0.0, z };
let rotation = if position.is_zero() {
cgmath::Quaternion::from_axis_angle(
cgmath::Vector3::unit_z(),
cgmath::Deg(0.0),
)
} else {
cgmath::Quaternion::from_axis_angle(position.normalize(), cgmath::Deg(45.0))
};
Instance { position, rotation }
})
})
.collect::<Vec<_>>();
let instance_data = instances.iter().map(Instance::to_raw).collect::<Vec<_>>();
let instance_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Instance Buffer"),
contents: bytemuck::cast_slice(&instance_data),
usage: wgpu::BufferUsages::VERTEX,
});
let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: global_bind_layout.get_camera_bind_layout(),
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: camera_buffer.as_entire_binding(),
}],
label: Some("camera_bind_group"),
});
let obj_model =
resources::load_model("cube.obj", &device, &queue, global_bind_layout.get_texture_bind_layout())
.await
.unwrap();
let light_uniform = LightUniform {
position: [2.0, 2.0, 2.0],
_padding: 0,
color: [1.0, 1.0, 1.0],
_padding2: 0,
};
let light_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Light VB"),
contents: bytemuck::cast_slice(&[light_uniform]),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let light_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: global_bind_layout.get_light_bind_layout(),
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: light_buffer.as_entire_binding(),
}],
label: None,
});
let depth_texture =
texture::Texture::create_depth_texture(&device, &config, "depth_texture");
let debug_material = {
let diffuse_bytes = include_bytes!("../res/cobble-diffuse.png");
let normal_bytes = include_bytes!("../res/cobble-normal.png");
let diffuse_texture = texture::Texture::from_bytes(
&device,
&queue,
diffuse_bytes,
"res/alt-diffuse.png",
false,
)
.unwrap();
let normal_texture = texture::Texture::from_bytes(
&device,
&queue,
normal_bytes,
"res/alt-normal.png",
true,
)
.unwrap();
model::Material::new(
&device,
"alt-material",
diffuse_texture,
normal_texture,
global_bind_layout.get_texture_bind_layout(),
)
};
Self {
surface,
device,
queue,
config,
obj_model,
camera,
projection,
camera_controller,
camera_buffer,
camera_bind_group,
camera_uniform,
instances,
instance_buffer,
depth_texture,
size,
light_uniform,
light_buffer,
light_bind_group,
#[allow(dead_code)]
debug_material,
mouse_pressed: false,
pipelines
}
}
fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
if new_size.width > 0 && new_size.height > 0 {
self.projection.resize(new_size.width, new_size.height);
self.size = new_size;
self.config.width = new_size.width;
self.config.height = new_size.height;
self.surface.configure(&self.device, &self.config);
self.depth_texture =
texture::Texture::create_depth_texture(&self.device, &self.config, "depth_texture");
}
}
fn input(&mut self, event: &WindowEvent) -> bool {
match event {
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(key),
state,
..
},
..
} => self.camera_controller.process_keyboard(*key, *state),
WindowEvent::MouseWheel { delta, .. } => {
self.camera_controller.process_scroll(delta);
true
}
WindowEvent::MouseInput {
button: MouseButton::Left,
state,
..
} => {
self.mouse_pressed = *state == ElementState::Pressed;
true
}
_ => false,
}
}
fn update(&mut self, dt: instant::Duration) {
self.camera_controller.update_camera(&mut self.camera, dt);
self.camera_uniform
.update_view_proj(&self.camera, &self.projection);
self.queue.write_buffer(
&self.camera_buffer,
0,
bytemuck::cast_slice(&[self.camera_uniform]),
);
// Update the light
let old_position: cgmath::Vector3<_> = self.light_uniform.position.into();
self.light_uniform.position =
(cgmath::Quaternion::from_axis_angle((0.0, 1.0, 0.0).into(), cgmath::Deg(1.0))
* old_position)
.into();
self.queue.write_buffer(
&self.light_buffer,
0,
bytemuck::cast_slice(&[self.light_uniform]),
);
}
fn render(&mut self) -> Result<(), wgpu::SurfaceError> {
let output = self.surface.get_current_texture()?;
let view = output
.texture
.create_view(&wgpu::TextureViewDescriptor::default());
let mut encoder = self
.device
.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("Render Encoder"),
});
{
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("Render Pass"),
color_attachments: &[wgpu::RenderPassColorAttachment {
view: &view,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color {
r: 0.1,
g: 0.2,
b: 0.3,
a: 1.0,
}),
store: true,
},
}],
depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment {
view: &self.depth_texture.view,
depth_ops: Some(wgpu::Operations {
load: wgpu::LoadOp::Clear(1.0),
store: true,
}),
stencil_ops: None,
}),
});
render_pass.set_vertex_buffer(1, self.instance_buffer.slice(..));
render_pass.set_pipeline(self.pipelines.get_light_pipeline());
render_pass.draw_light_model(
&self.obj_model,
&self.camera_bind_group,
&self.light_bind_group,
);
render_pass.set_pipeline(self.pipelines.get_render_pipeline());
render_pass.draw_model_instanced(
&self.obj_model,
0..self.instances.len() as u32,
&self.camera_bind_group,
&self.light_bind_group,
);
}
self.queue.submit(iter::once(encoder.finish()));
output.present();
Ok(())
}
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen(start))]
pub async fn run() {
cfg_if::cfg_if! {
@@ -517,24 +166,25 @@ pub async fn run() {
.expect("Couldn't append canvas to document body.");
}
let mut state = State::new(&window).await; // NEW!
let mut renderer = Arc::from(Renderer::new(&window).await);
let default_state = Box::from(DefaultState::new(renderer.deref()));
Arc::get_mut(&mut renderer).unwrap().set_state(Some(default_state));
let mut last_render_time = instant::Instant::now();
event_loop.run(move |event, _, control_flow| {
*control_flow = ControlFlow::Poll;
let renderer = Arc::get_mut(&mut renderer).unwrap();
match event {
Event::MainEventsCleared => window.request_redraw(),
// NEW!
Event::DeviceEvent {
event: DeviceEvent::MouseMotion{ delta, },
.. // We're not using device_id currently
} => if state.mouse_pressed {
state.camera_controller.process_mouse(delta.0, delta.1)
}
// UPDATED!
// Event::DeviceEvent {
// event: DeviceEvent::MouseMotion{ delta, },
// .. // We're not using device_id currently
// } => if state.mouse_pressed {
// state.camera_controller.process_mouse(delta.0, delta.1)
// }
Event::WindowEvent {
ref event,
window_id,
} if window_id == window.id() && !state.input(event) => {
} if window_id == window.id() && !renderer.input(event) => {
match event {
#[cfg(not(target_arch="wasm32"))]
WindowEvent::CloseRequested
@@ -548,10 +198,10 @@ pub async fn run() {
..
} => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(physical_size) => {
state.resize(*physical_size);
renderer.resize(*physical_size);
}
WindowEvent::ScaleFactorChanged { new_inner_size, .. } => {
state.resize(**new_inner_size);
renderer.resize(**new_inner_size);
}
_ => {}
}
@@ -560,11 +210,11 @@ pub async fn run() {
let now = instant::Instant::now();
let dt = now - last_render_time;
last_render_time = now;
state.update(dt);
match state.render() {
renderer.update(dt);
match renderer.render() {
Ok(_) => {}
// Reconfigure the surface if it's lost or outdated
Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => state.resize(state.size),
Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => renderer.resize(renderer.size),
// The system is out of memory, we should probably quit
Err(wgpu::SurfaceError::OutOfMemory) => *control_flow = ControlFlow::Exit,
// We're ignoring timeouts

View file

@@ -3,4 +3,5 @@ mod pipelines;
pub use pipelines::utils::create_render_pipeline;
pub use pipelines::{GlobalBindLayout, Pipelines};
mod renderer;
mod renderer;
pub use renderer::{Renderer, DefaultState, State};

View file

@@ -12,7 +12,6 @@ use crate::{
use super::Renderer;
pub struct DefaultState {
renderer: Box<Renderer>,
obj_model: model::Model,
camera: camera::Camera,
projection: camera::Projection,
@@ -34,7 +33,7 @@ pub struct DefaultState {
}
impl super::State for DefaultState {
fn new(renderer: Box<Renderer>) -> Self
fn new(renderer: &Renderer) -> Self
where
Self: Sized,
{
@@ -206,20 +205,19 @@ impl super::State for DefaultState {
debug_material,
mouse_pressed: false,
pipelines,
renderer,
}
}
fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
fn resize(&mut self, renderer: &Renderer, new_size: winit::dpi::PhysicalSize<u32>) {
self.projection.resize(new_size.width, new_size.height);
self.depth_texture = texture::Texture::create_depth_texture(
&self.renderer.device,
&self.renderer.config,
&renderer.device,
&renderer.config,
"depth_texture",
);
}
fn input(&mut self, event: &winit::event::WindowEvent) -> bool {
fn input(&mut self, renderer: &Renderer, event: &winit::event::WindowEvent) -> bool {
match event {
WindowEvent::KeyboardInput {
input:
@@ -246,11 +244,11 @@ impl super::State for DefaultState {
}
}
fn update(&mut self, dt: instant::Duration) {
fn update(&mut self, renderer: &Renderer, dt: instant::Duration) {
self.camera_controller.update_camera(&mut self.camera, dt);
self.camera_uniform
.update_view_proj(&self.camera, &self.projection);
self.renderer.queue.write_buffer(
renderer.queue.write_buffer(
&self.camera_buffer,
0,
bytemuck::cast_slice(&[self.camera_uniform]),
@@ -262,7 +260,7 @@ impl super::State for DefaultState {
(cgmath::Quaternion::from_axis_angle((0.0, 1.0, 0.0).into(), cgmath::Deg(1.0))
* old_position)
.into();
self.renderer.queue.write_buffer(
renderer.queue.write_buffer(
&self.light_buffer,
0,
bytemuck::cast_slice(&[self.light_uniform]),
@@ -271,6 +269,7 @@ impl super::State for DefaultState {
fn render(
&mut self,
renderer: &Renderer,
view: &wgpu::TextureView,
encoder: &mut wgpu::CommandEncoder,
) -> Result<(), wgpu::SurfaceError> {

View file

@@ -1,4 +1,5 @@
mod renderer;
pub use renderer::Renderer;
mod default_state;
@@ -8,9 +9,9 @@ use wgpu::{TextureView, CommandEncoder};
use winit::event::WindowEvent;
pub trait State {
fn new(renderer: Box<Renderer>) -> Self where Self: Sized;
fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>);
fn input(&mut self, event: &WindowEvent) -> bool;
fn update(&mut self, dt: instant::Duration);
fn render(&mut self, view: &TextureView, encoder: &mut CommandEncoder) -> Result<(), wgpu::SurfaceError>;
fn new(renderer: &Renderer) -> Self where Self: Sized;
fn resize(&mut self, renderer: &Renderer, new_size: winit::dpi::PhysicalSize<u32>);
fn input(&mut self, renderer: &Renderer, event: &WindowEvent) -> bool;
fn update(&mut self, renderer: &Renderer, dt: instant::Duration);
fn render(&mut self, renderer: &Renderer, view: &TextureView, encoder: &mut CommandEncoder) -> Result<(), wgpu::SurfaceError>;
}
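
For reference, this is what an implementor of the reworked trait looks like under the new signatures. NullState is a hypothetical stand-in used only to make the signature change visible; it is not part of this commit.

// Hypothetical no-op implementor: every method now receives &Renderer as a
// parameter instead of the state owning a Box<Renderer>.
struct NullState;

impl State for NullState {
    fn new(_renderer: &Renderer) -> Self {
        NullState
    }
    fn resize(&mut self, _renderer: &Renderer, _new_size: winit::dpi::PhysicalSize<u32>) {}
    fn input(&mut self, _renderer: &Renderer, _event: &WindowEvent) -> bool {
        false
    }
    fn update(&mut self, _renderer: &Renderer, _dt: instant::Duration) {}
    fn render(
        &mut self,
        _renderer: &Renderer,
        _view: &TextureView,
        _encoder: &mut CommandEncoder,
    ) -> Result<(), wgpu::SurfaceError> {
        Ok(())
    }
}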

View file

@@ -75,21 +75,21 @@ impl Renderer {
self.surface.configure(&self.device, &self.config);
if let Some(state) = self.state.as_mut() {
state.resize(new_size);
// state.resize(new_size);
}
}
}
pub fn input(&mut self, event: &WindowEvent) -> bool {
if let Some(state) = self.state.as_mut() {
return state.input(event);
// return state.input(event);
}
false
}
pub fn update(&mut self, dt: instant::Duration) {
if let Some(state) = self.state.as_mut() {
state.update(dt);
// state.update(dt);
}
}
@@ -106,7 +106,7 @@ impl Renderer {
});
if let Some(state) = self.state.as_mut() {
state.render(&view, &mut encoder)?;
// state.render(&view, &mut encoder)?;
}
self.queue.submit(iter::once(encoder.finish()));
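
With the trait methods now taking &Renderer, the Renderer can no longer forward to its state with a plain self.state.as_mut() call: that would borrow self mutably (for the state) and immutably (as the &Renderer argument) at the same time, which is presumably why the delegation is commented out and the commit is flagged "Not finished". One possible way to restore it, assuming state is stored as an Option<Box<dyn State>> (consistent with set_state(Some(...)) and the self.state.as_mut() checks above), is to move the state out for the duration of the call:

// Sketch of a possible follow-up, not part of this commit: temporarily take
// the boxed state out of the Renderer so self can be re-borrowed as &Renderer.
// While the call runs, self.state is None, which the state must not rely on.
pub fn update(&mut self, dt: instant::Duration) {
    if let Some(mut state) = self.state.take() {
        state.update(self, dt);
        self.state = Some(state);
    }
}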