
[CAMERA] Begin refactor

Florian RICHER 2022-06-16 14:06:21 +02:00
parent 8bd4ae11e9
commit 67cd53b91e
5 changed files with 147 additions and 124 deletions

src/input/mod.rs Normal file

@@ -0,0 +1,5 @@
use winit::event::WindowEvent;
pub trait Controllable {
fn process_events(&mut self, event: &WindowEvent) -> bool;
}


@@ -6,6 +6,7 @@ pub use state::State;
pub mod render;
pub mod meshs;
pub mod input;
use simplelog::{TermLogger, LevelFilter, Config, TerminalMode, ColorChoice};


@@ -1,4 +1,9 @@
use winit::event::{WindowEvent, KeyboardInput, ElementState, VirtualKeyCode};
use wgpu::util::DeviceExt;
use winit::event::{ElementState, KeyboardInput, VirtualKeyCode, WindowEvent};
use crate::input::Controllable;
use super::Renderable;
#[rustfmt::skip]
pub const OPENGL_TO_WGPU_MATRIX: cgmath::Matrix4<f32> = cgmath::Matrix4::new(
@@ -16,41 +21,129 @@ pub struct Camera {
pub fovy: f32,
pub znear: f32,
pub zfar: f32,
controller: CameraController,
uniform: Option<CameraUniform>,
bind_group: Option<wgpu::BindGroup>,
bind_group_layout: Option<wgpu::BindGroupLayout>,
buffer: Option<wgpu::Buffer>,
}
impl Camera {
pub fn new(width: f32, height: f32, speed: f32) -> Self {
Self {
eye: (0.0, 1.0, 2.0).into(),
target: (0.0, 0.0, 0.0).into(),
up: cgmath::Vector3::unit_y(),
aspect: width / height,
fovy: 45.0,
znear: 0.1,
zfar: 100.0,
controller: CameraController::new(speed),
bind_group: None,
bind_group_layout: None,
uniform: None,
buffer: None,
}
}
fn build_view_projection_matrix(&self) -> cgmath::Matrix4<f32> {
let view = cgmath::Matrix4::look_at_rh(self.eye, self.target, self.up);
let proj = cgmath::perspective(cgmath::Deg(self.fovy), self.aspect, self.znear, self.zfar);
return OPENGL_TO_WGPU_MATRIX * proj * view;
}
}
// We need this for Rust to store our data correctly for the shaders
#[repr(C)]
// This is so we can store this in a buffer
#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
pub struct CameraUniform {
// We can't use cgmath with bytemuck directly so we'll have
// to convert the Matrix4 into a 4x4 f32 array
view_proj: [[f32; 4]; 4],
}
pub fn get_bind_group_layout(&self) -> &wgpu::BindGroupLayout {
&self.bind_group_layout.as_ref().unwrap()
}
impl CameraUniform {
pub fn new() -> Self {
use cgmath::SquareMatrix;
Self {
view_proj: cgmath::Matrix4::identity().into(),
pub fn update_camera(&mut self) {
use cgmath::InnerSpace;
let forward = self.target - self.eye;
let forward_norm = forward.normalize();
let forward_mag = forward.magnitude();
if self.controller.is_forward_pressed && forward_mag > self.controller.speed {
self.eye += forward_norm * self.controller.speed;
}
if self.controller.is_backward_pressed {
self.eye -= forward_norm * self.controller.speed;
}
let right = forward_norm.cross(self.up);
let forward = self.target - self.eye;
let forward_mag = forward.magnitude();
if self.controller.is_right_pressed {
self.eye = self.target - (forward + right * self.controller.speed).normalize() * forward_mag;
}
if self.controller.is_left_pressed {
self.eye = self.target - (forward - right * self.controller.speed).normalize() * forward_mag;
}
}
}
pub fn update_view_proj(&mut self, camera: &Camera) {
self.view_proj = camera.build_view_projection_matrix().into();
impl Renderable for Camera {
fn prepare(&mut self, device: &wgpu::Device) {
self.uniform = Some(CameraUniform::from(self));
self.buffer = Some(
device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Camera Buffer"),
contents: bytemuck::cast_slice(&[self.uniform.unwrap()]),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
}),
);
self.bind_group_layout = Some(device.create_bind_group_layout(
&wgpu::BindGroupLayoutDescriptor {
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}],
label: Some("camera_bind_group_layout"),
},
));
self.bind_group = Some(device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &self.bind_group_layout.as_ref().unwrap(),
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: self.buffer.as_ref().unwrap().as_entire_binding(),
}],
label: Some("camera_bind_group"),
}));
}
fn update_instances(&mut self, queue: &wgpu::Queue) {
self.update_camera();
self.uniform.unwrap().update_view_proj(self);
queue.write_buffer(
&self.buffer.as_ref().unwrap(),
0,
bytemuck::cast_slice(&[self.uniform.unwrap()]),
);
}
fn render<'a>(&'a self, render_pass: &mut wgpu::RenderPass<'a>) {
render_pass.set_bind_group(1, &self.bind_group.as_ref().unwrap(), &[]);
}
}
pub struct CameraController {
impl Controllable for Camera {
fn process_events(&mut self, event: &WindowEvent) -> bool {
self.controller.process_events(event)
}
}
struct CameraController {
speed: f32,
is_forward_pressed: bool,
is_backward_pressed: bool,
@@ -104,36 +197,26 @@ impl CameraController {
_ => false,
}
}
}
pub fn update_camera(&self, camera: &mut Camera) {
use cgmath::InnerSpace;
let forward = camera.target - camera.eye;
let forward_norm = forward.normalize();
let forward_mag = forward.magnitude();
// We need this for Rust to store our data correctly for the shaders
#[repr(C)]
// This is so we can store this in a buffer
#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
struct CameraUniform {
// We can't use cgmath with bytemuck directly so we'll have
// to convert the Matrix4 into a 4x4 f32 array
view_proj: [[f32; 4]; 4],
}
// Prevents glitching when camera gets too close to the
// center of the scene.
if self.is_forward_pressed && forward_mag > self.speed {
camera.eye += forward_norm * self.speed;
}
if self.is_backward_pressed {
camera.eye -= forward_norm * self.speed;
}
let right = forward_norm.cross(camera.up);
// Redo radius calc in case the fowrard/backward is pressed.
let forward = camera.target - camera.eye;
let forward_mag = forward.magnitude();
if self.is_right_pressed {
// Rescale the distance between the target and eye so
// that it doesn't change. The eye therefore still
// lies on the circle made by the target and eye.
camera.eye = camera.target - (forward + right * self.speed).normalize() * forward_mag;
}
if self.is_left_pressed {
camera.eye = camera.target - (forward - right * self.speed).normalize() * forward_mag;
impl CameraUniform {
pub fn from(camera: &Camera) -> Self {
Self {
view_proj: camera.build_view_projection_matrix().into(),
}
}
pub fn update_view_proj(&mut self, camera: &Camera) {
self.view_proj = camera.build_view_projection_matrix().into();
}
}


@@ -2,9 +2,7 @@ mod vertex;
pub use vertex::Vertex;
mod camera;
pub use camera::{
Camera, CameraUniform, CameraController
};
pub use camera::Camera;
mod texture;
pub use texture::{Texture, TextureManager};
@@ -20,6 +18,6 @@ pub use mesh::Mesh;
pub trait Renderable {
fn prepare(&mut self, device: &Device);
fn update_instances(&mut self, device: &Queue);
fn update_instances(&mut self, queue: &Queue);
fn render<'a>(&'a self, render_pass: &mut wgpu::RenderPass<'a>);
}


@@ -1,9 +1,8 @@
use crate::{meshs::DefaultMesh, render::{Renderable, TextureManager}};
use crate::{meshs::DefaultMesh, render::{Renderable, TextureManager}, input::Controllable};
use super::render::{
Vertex, Camera, CameraUniform, CameraController, Texture, InstanceRaw
Vertex, Camera, Texture, InstanceRaw
};
use wgpu::util::DeviceExt;
use winit::{
event::{KeyboardInput, VirtualKeyCode, WindowEvent, ElementState},
window::Window,
@@ -17,10 +16,6 @@ pub struct State {
pub size: winit::dpi::PhysicalSize<u32>,
render_pipeline: wgpu::RenderPipeline,
camera: Camera,
camera_uniform: CameraUniform,
camera_buffer: wgpu::Buffer,
camera_bind_group: wgpu::BindGroup,
camera_controller: CameraController,
depth_texture: Texture,
mesh: DefaultMesh,
#[allow(dead_code)]
@@ -88,54 +83,8 @@ impl State {
source: wgpu::ShaderSource::Wgsl(include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/res/shaders/main.wgsl")).into()),
});
let camera = Camera {
// position the camera one unit up and 2 units back
// +z is out of the screen
eye: (0.0, 1.0, 2.0).into(),
// have it look at the origin
target: (0.0, 0.0, 0.0).into(),
// which way is "up"
up: cgmath::Vector3::unit_y(),
aspect: config.width as f32 / config.height as f32,
fovy: 45.0,
znear: 0.1,
zfar: 100.0,
};
let mut camera_uniform = CameraUniform::new();
camera_uniform.update_view_proj(&camera);
let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Camera Buffer"),
contents: bytemuck::cast_slice(&[camera_uniform]),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let camera_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}],
label: Some("camera_bind_group_layout"),
});
let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &camera_bind_group_layout,
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: camera_buffer.as_entire_binding(),
}],
label: Some("camera_bind_group"),
});
let camera_controller = CameraController::new(0.2);
let mut camera = Camera::new(config.width as f32, config.height as f32, 0.2);
camera.prepare(&device);
let depth_texture =
Texture::create_depth_texture(&device, &config, "depth_texture");
@@ -143,7 +92,7 @@ impl State {
let render_pipeline_layout =
device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("Render Pipeline Layout"),
bind_group_layouts: &[&texture_manager.get_texture_bind_group_layout(), &camera_bind_group_layout],
bind_group_layouts: &[&texture_manager.get_texture_bind_group_layout(), camera.get_bind_group_layout()],
push_constant_ranges: &[],
});
@@ -216,10 +165,6 @@ impl State {
size,
render_pipeline,
camera,
camera_uniform,
camera_buffer,
camera_bind_group,
camera_controller,
depth_texture,
mesh,
texture_manager
@@ -257,22 +202,15 @@ impl State {
self.mesh.toggle(is_pressed);
true
}
_ => self.camera_controller.process_events(event),
_ => self.camera.process_events(event),
}
}
_ => self.camera_controller.process_events(event),
_ => self.camera.process_events(event),
}
}
pub fn update(&mut self) {
self.camera_controller.update_camera(&mut self.camera);
self.camera_uniform.update_view_proj(&self.camera);
self.queue.write_buffer(
&self.camera_buffer,
0,
bytemuck::cast_slice(&[self.camera_uniform]),
);
self.camera.update_instances(&self.queue);
self.mesh.update_instances(&self.queue);
}
@@ -317,9 +255,7 @@ impl State {
});
render_pass.set_pipeline(&self.render_pipeline);
render_pass.set_bind_group(1, &self.camera_bind_group, &[]);
self.camera.render(&mut render_pass);
self.mesh.render(&mut render_pass);
}