use std::sync::Arc;

use vulkano::device::Device;
use vulkano::image::ImageUsage;
use vulkano::image::view::ImageView;
use vulkano::pipeline::graphics::viewport::Viewport;
use vulkano::swapchain::{Surface, Swapchain, SwapchainCreateInfo};
use vulkano::sync;
use vulkano::sync::GpuFuture;
use winit::window::Window;

use crate::renderer::window_size_dependent_setup;

pub struct RenderContext {
    pub(super) window: Arc<Window>,
    pub(super) swapchain: Arc<Swapchain>,
    pub(super) attachment_image_views: Vec<Arc<ImageView>>,
    pub(super) viewport: Viewport,
    pub(super) recreate_swapchain: bool,
    pub(super) previous_frame_end: Option<Box<dyn GpuFuture>>,
}

impl RenderContext {
    pub fn new(window: Arc<Window>, surface: Arc<Surface>, device: &Arc<Device>) -> Self {
        let window_size = window.inner_size();

        // Before we can draw on the surface, we have to create what is called a swapchain.
        // Creating a swapchain allocates the color buffers that will contain the image that will
        // ultimately be visible on the screen. These images are returned alongside the swapchain.
        let (swapchain, images) = {
            // Querying the capabilities of the surface. When we create the swapchain we can only
            // pass values that are allowed by the capabilities.
            let surface_capabilities = device
                .physical_device()
                .surface_capabilities(&surface, Default::default())
                .unwrap();

            // Choosing the internal format that the images will have.
            let (image_format, _) = device
                .physical_device()
                .surface_formats(&surface, Default::default())
                .unwrap()[0];

            // Please take a look at the docs for the meaning of the parameters we didn't mention.
            Swapchain::new(
                device.clone(),
                surface,
                SwapchainCreateInfo {
                    // Some drivers report a `min_image_count` of 1, but fullscreen mode requires
                    // at least 2. Therefore we must ensure the count is at least 2, otherwise the
                    // program would crash when entering fullscreen mode on those drivers.
                    min_image_count: surface_capabilities.min_image_count.max(2),

                    image_format,

                    // The size of the window, only used to initially set up the swapchain.
                    //
                    // NOTE:
                    // On some drivers the swapchain extent is specified by
                    // `surface_capabilities.current_extent` and the swapchain size must use this
                    // extent. This extent is always the same as the window size.
                    //
                    // However, other drivers don't specify a value, i.e.
                    // `surface_capabilities.current_extent` is `None`. These drivers will allow
                    // anything, but the only sensible value is the window size.
                    //
                    // Both of these cases need the swapchain to use the window size, so we just
                    // use that.
                    image_extent: window_size.into(),

                    image_usage: ImageUsage::COLOR_ATTACHMENT,

                    // The alpha mode indicates how the alpha value of the final image will
                    // behave. For example, you can choose whether the window will be opaque or
                    // transparent.
                    composite_alpha: surface_capabilities
                        .supported_composite_alpha
                        .into_iter()
                        .next()
                        .unwrap(),

                    ..Default::default()
                },
            )
            .unwrap()
        };

        // When creating the swapchain, we only created plain images. To use them as an attachment
        // for rendering, we must wrap them in an image view.
        //
        // Since we need to draw to multiple images, we are going to create a different image view
        // for each image.
        let attachment_image_views = window_size_dependent_setup(&images);

        // Dynamic viewports allow us to recreate just the viewport when the window is resized.
        // Otherwise we would have to recreate the whole pipeline.
        let viewport = Viewport {
            offset: [0.0, 0.0],
            extent: window_size.into(),
            depth_range: 0.0..=1.0,
        };

        // In some situations, the swapchain will become invalid by itself.
        // This includes, for example, when the window is resized (as the images of the swapchain
        // will no longer match the window's) or, on Android, when the application goes to the
        // background and comes back to the foreground.
        //
        // In this situation, acquiring a swapchain image or presenting it will return an error.
        // Rendering to an image of that swapchain will not produce any error, but may or may not
        // work. To continue rendering, we need to recreate the swapchain. Here, we remember that
        // we need to do this for the next frame.
        let recreate_swapchain = false;

        // In the render loop we are going to submit commands to the GPU. Submitting a command
        // produces an object that implements the `GpuFuture` trait, which holds the resources for
        // as long as they are in use by the GPU.
        //
        // Destroying the `GpuFuture` blocks until the GPU is finished executing it. In order to
        // avoid that, we store the submission of the previous frame here.
        let previous_frame_end = Some(sync::now(device.clone()).boxed());

        Self {
            window,
            swapchain,
            attachment_image_views,
            viewport,
            recreate_swapchain,
            previous_frame_end,
        }
    }
}
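
// NOTE: `window_size_dependent_setup` is defined in `crate::renderer` and is not shown in this
// file. As a rough sketch (an assumption based on the comments above, not necessarily the actual
// implementation in this crate), such a helper usually just wraps every swapchain image in a
// default image view:
//
//     use vulkano::image::Image;
//
//     pub fn window_size_dependent_setup(images: &[Arc<Image>]) -> Vec<Arc<ImageView>> {
//         images
//             .iter()
//             .map(|image| ImageView::new_default(image.clone()).unwrap())
//             .collect()
//     }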
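
// NOTE: the `recreate_swapchain` flag and `previous_frame_end` future are meant to be driven by
// the render loop that owns this `RenderContext`. A rough sketch of how such a loop might react
// to the flag (the variable name `rcx` is illustrative, not part of this file):
//
//     if rcx.recreate_swapchain {
//         let window_size = rcx.window.inner_size();
//         let (new_swapchain, new_images) = rcx
//             .swapchain
//             .recreate(SwapchainCreateInfo {
//                 image_extent: window_size.into(),
//                 ..rcx.swapchain.create_info()
//             })
//             .expect("failed to recreate swapchain");
//
//         rcx.swapchain = new_swapchain;
//         rcx.attachment_image_views = window_size_dependent_setup(&new_images);
//         rcx.viewport.extent = window_size.into();
//         rcx.recreate_swapchain = false;
//     }
//
// Before submitting new work, the same loop would typically also call
// `rcx.previous_frame_end.as_mut().unwrap().cleanup_finished()` so that resources the GPU has
// finished with are released.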