// vulkan-tutorial/src/app/data.rs

#![allow(clippy::too_many_arguments)]
use super::*;
use crate::VALIDATION_ENABLED;
use ::anyhow::{anyhow, Result};
use ::log::*;
use ::std::collections::HashSet;
use ::std::ffi::CStr;
use ::std::fs::File;
use ::std::mem::size_of;
use ::std::os::raw::c_void;
use ::std::ptr::copy_nonoverlapping as memcpy;
use ::vulkanalia::prelude::v1_0::*;
use ::vulkanalia::vk::ExtDebugUtilsExtension;
use ::vulkanalia::vk::{KhrSurfaceExtension, KhrSwapchainExtension};
use ::vulkanalia::window as vk_window;
use ::winit::window::Window;
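/// Logs messages from the validation layers at a log level matching the
/// message severity. Always returns `vk::FALSE` so the triggering Vulkan
/// call is not aborted.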
extern "system" fn debug_callback(
severity: vk::DebugUtilsMessageSeverityFlagsEXT,
type_: vk::DebugUtilsMessageTypeFlagsEXT,
data: *const vk::DebugUtilsMessengerCallbackDataEXT,
_: *mut c_void,
) -> vk::Bool32 {
let data = unsafe { *data };
let message = unsafe { CStr::from_ptr(data.message) }.to_string_lossy();
if severity >= vk::DebugUtilsMessageSeverityFlagsEXT::ERROR {
error!("({:?}) {}", type_, message);
} else if severity >= vk::DebugUtilsMessageSeverityFlagsEXT::WARNING {
warn!("({:?}) {}", type_, message);
} else if severity >= vk::DebugUtilsMessageSeverityFlagsEXT::INFO {
debug!("({:?}) {}", type_, message);
} else {
trace!("({:?}) {}", type_, message);
}
vk::FALSE
}
/// The Vulkan handles and associated properties used by our Vulkan app.
#[derive(Clone, Debug, Default)]
pub(crate) struct AppData {
// Debug
messenger: vk::DebugUtilsMessengerEXT,
// Surface
pub(super) surface: vk::SurfaceKHR,
// Physical Device / Logical Device
physical_device: vk::PhysicalDevice,
pub(super) graphics_queue: vk::Queue,
pub(super) present_queue: vk::Queue,
// Swapchain
swapchain_format: vk::Format,
pub(super) swapchain_extent: vk::Extent2D,
pub(super) swapchain: vk::SwapchainKHR,
swapchain_images: Vec<vk::Image>,
swapchain_image_views: Vec<vk::ImageView>,
// Pipeline
render_pass: vk::RenderPass,
descriptor_set_layout: vk::DescriptorSetLayout,
pipeline_layout: vk::PipelineLayout,
pipeline: vk::Pipeline,
// Framebuffers
framebuffers: Vec<vk::Framebuffer>,
// Command Pool
command_pool: vk::CommandPool,
// Depth
depth_image: vk::Image,
depth_image_memory: vk::DeviceMemory,
depth_image_view: vk::ImageView,
// Texture
texture_image: vk::Image,
texture_image_memory: vk::DeviceMemory,
texture_image_view: vk::ImageView,
texture_sampler: vk::Sampler,
// Buffers
vertex_buffer: vk::Buffer,
vertex_buffer_memory: vk::DeviceMemory,
index_buffer: vk::Buffer,
index_buffer_memory: vk::DeviceMemory,
uniform_buffers: Vec<vk::Buffer>,
pub(super) uniform_buffers_memory: Vec<vk::DeviceMemory>,
// Descriptors
descriptor_pool: vk::DescriptorPool,
descriptor_sets: Vec<vk::DescriptorSet>,
// Command Buffers
pub(super) command_buffers: Vec<vk::CommandBuffer>,
// Sync Objects
pub(super) image_available_semaphores: Vec<vk::Semaphore>,
pub(super) render_finished_semaphores: Vec<vk::Semaphore>,
pub(super) in_flight_fences: Vec<vk::Fence>,
pub(super) images_in_flight: Vec<vk::Fence>,
}
impl AppData {
//================================================
// Create / Destroy Data
//================================================
pub(super) unsafe fn create(
&mut self,
window: &Window,
entry: &Entry,
) -> Result<(Instance, Device)> {
let instance = self.create_instance(window, entry)?;
self.surface = vk_window::create_surface(&instance, window)?;
self.pick_physical_device(&instance)?;
let device = self.create_logical_device(&instance)?;
self.create_swapchain(window, &instance, &device)?;
self.create_swapchain_image_views(&device)?;
self.create_render_pass(&instance, &device)?;
self.create_descriptor_set_layout(&device)?;
self.create_pipeline(&device)?;
self.create_command_pool(&instance, &device)?;
self.create_depth_objects(&instance, &device)?;
self.create_framebuffers(&device)?;
self.create_texture_image(&instance, &device)?;
self.create_texture_image_view(&device)?;
self.create_texture_sampler(&device)?;
self.create_vertex_buffer(&instance, &device)?;
self.create_index_buffer(&instance, &device)?;
self.create_uniform_buffers(&instance, &device)?;
self.create_descriptor_pool(&device)?;
self.create_descriptor_sets(&device)?;
self.create_command_buffers(&device)?;
self.create_sync_objects(&device)?;
Ok((instance, device))
}
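/// Creates the Vulkan instance with the extensions required by the window,
/// plus the validation layer, debug messenger and macOS portability
/// extensions when applicable.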
unsafe fn create_instance(&mut self, window: &Window, entry: &Entry) -> Result<Instance> {
// Application Info
let application_info = vk::ApplicationInfo::builder()
.application_name(b"Vulkan Tutorial\0")
.application_version(vk::make_version(1, 0, 0))
.engine_name(b"No Engine\0")
.engine_version(vk::make_version(1, 0, 0))
.api_version(vk::make_version(1, 0, 0));
// Layers
let available_layers = entry
.enumerate_instance_layer_properties()?
.iter()
.map(|l| l.layer_name)
.collect::<HashSet<_>>();
if VALIDATION_ENABLED && !available_layers.contains(&VALIDATION_LAYER) {
return Err(anyhow!("Validation layer requested but not supported."));
}
let layers = if VALIDATION_ENABLED {
vec![VALIDATION_LAYER.as_ptr()]
} else {
Vec::new()
};
// Extensions
let mut extensions = vk_window::get_required_instance_extensions(window)
.iter()
.map(|e| e.as_ptr())
.collect::<Vec<_>>();
if VALIDATION_ENABLED {
extensions.push(vk::EXT_DEBUG_UTILS_EXTENSION.name.as_ptr());
}
// Compatibility extensions for macOS / portability implementations (e.g. MoltenVK)
let flags = if (cfg!(target_os = "macos") && entry.version()? >= PORTABILITY_MACOS_VERSION)
|| entry
.enumerate_instance_extension_properties(None)?
.iter()
.any(|e| e.extension_name == vk::KHR_PORTABILITY_ENUMERATION_EXTENSION.name)
{
info!("Enabling extensions for macOS compatibility.");
extensions.push(vk::KHR_PORTABILITY_ENUMERATION_EXTENSION.name.as_ptr());
extensions.push(
vk::KHR_GET_PHYSICAL_DEVICE_PROPERTIES2_EXTENSION
.name
.as_ptr(),
);
vk::InstanceCreateFlags::ENUMERATE_PORTABILITY_KHR
} else {
vk::InstanceCreateFlags::empty()
};
// Create
let mut info = vk::InstanceCreateInfo::builder()
.application_info(&application_info)
.enabled_layer_names(&layers)
.enabled_extension_names(&extensions)
.flags(flags);
let mut debug_info = vk::DebugUtilsMessengerCreateInfoEXT::builder()
.message_severity(vk::DebugUtilsMessageSeverityFlagsEXT::all())
.message_type(vk::DebugUtilsMessageTypeFlagsEXT::all())
.user_callback(Some(debug_callback));
if VALIDATION_ENABLED {
info = info.push_next(&mut debug_info);
}
let instance = entry.create_instance(&info, None)?;
// Messenger
if VALIDATION_ENABLED {
self.messenger = instance.create_debug_utils_messenger_ext(&debug_info, None)?;
}
Ok(instance)
}
/// Destroys the swapchain and the objects that depend on it
///
/// # Safety
/// Here be Dragons
unsafe fn destroy_swapchain(&mut self, device: &Device) {
device.destroy_descriptor_pool(self.descriptor_pool, None);
self.uniform_buffers
.iter()
.for_each(|b| device.destroy_buffer(*b, None));
self.uniform_buffers_memory
.iter()
.for_each(|m| device.free_memory(*m, None));
device.destroy_image_view(self.depth_image_view, None);
device.free_memory(self.depth_image_memory, None);
device.destroy_image(self.depth_image, None);
self.framebuffers
.iter()
.for_each(|f| device.destroy_framebuffer(*f, None));
device.free_command_buffers(self.command_pool, &self.command_buffers);
device.destroy_pipeline(self.pipeline, None);
device.destroy_pipeline_layout(self.pipeline_layout, None);
device.destroy_render_pass(self.render_pass, None);
self.swapchain_image_views
.iter()
.for_each(|v| device.destroy_image_view(*v, None));
device.destroy_swapchain_khr(self.swapchain, None);
}
/// Destroys our Vulkan app, in reverse order of creation
///
/// # Safety
/// Here be Dragons
pub unsafe fn destroy(&mut self, instance: &Instance, device: &Device) {
self.destroy_swapchain(device);
device.destroy_sampler(self.texture_sampler, None);
device.destroy_image_view(self.texture_image_view, None);
device.destroy_image(self.texture_image, None);
device.free_memory(self.texture_image_memory, None);
device.destroy_descriptor_set_layout(self.descriptor_set_layout, None);
device.destroy_buffer(self.index_buffer, None);
device.free_memory(self.index_buffer_memory, None);
device.destroy_buffer(self.vertex_buffer, None);
device.free_memory(self.vertex_buffer_memory, None);
self.in_flight_fences
.iter()
.for_each(|f| device.destroy_fence(*f, None));
self.render_finished_semaphores
.iter()
.for_each(|s| device.destroy_semaphore(*s, None));
self.image_available_semaphores
.iter()
.for_each(|s| device.destroy_semaphore(*s, None));
device.destroy_command_pool(self.command_pool, None);
device.destroy_device(None);
instance.destroy_surface_khr(self.surface, None);
if VALIDATION_ENABLED {
instance.destroy_debug_utils_messenger_ext(self.messenger, None);
}
instance.destroy_instance(None);
}
//================================================
// Physical Device
//================================================
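/// Picks the first physical device that passes `check_physical_device`,
/// logging and skipping any that do not.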
unsafe fn pick_physical_device(&mut self, instance: &Instance) -> Result<()> {
for physical_device in instance.enumerate_physical_devices()? {
let properties = instance.get_physical_device_properties(physical_device);
if let Err(error) = self.check_physical_device(instance, physical_device) {
warn!(
"Skipping physical device (`{}`): {}",
properties.device_name, error
);
} else {
info!("Selected physical device (`{}`).", properties.device_name);
self.physical_device = physical_device;
return Ok(());
}
}
Err(anyhow!("Failed to find suitable physical device."))
}
unsafe fn check_physical_device(
&self,
instance: &Instance,
physical_device: vk::PhysicalDevice,
) -> Result<()> {
QueueFamilyIndices::get(instance, self, physical_device)?;
check_physical_device_extensions(instance, physical_device)?;
let support = SwapchainSupport::get(instance, self, physical_device)?;
if support.formats.is_empty() || support.present_modes.is_empty() {
return Err(anyhow!(SuitabilityError("Insufficient swapchain support.")));
}
let features = instance.get_physical_device_features(physical_device);
if features.sampler_anisotropy != vk::TRUE {
return Err(anyhow!(SuitabilityError("No sampler anisotropy.")));
}
Ok(())
}
//================================================
// Logical Device
//================================================
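/// Creates the logical device with one queue per unique queue family
/// (graphics and present), sampler anisotropy enabled and the required
/// device extensions, then fetches the queue handles.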
unsafe fn create_logical_device(&mut self, instance: &Instance) -> Result<Device> {
// Queue Create Infos
let indices = QueueFamilyIndices::get(instance, self, self.physical_device)?;
let mut unique_indices = HashSet::new();
unique_indices.insert(indices.graphics);
unique_indices.insert(indices.present);
let queue_priorities = &[1.0];
let queue_infos = unique_indices
.iter()
.map(|i| {
vk::DeviceQueueCreateInfo::builder()
.queue_family_index(*i)
.queue_priorities(queue_priorities)
})
.collect::<Vec<_>>();
// Layers
let layers = if VALIDATION_ENABLED {
vec![VALIDATION_LAYER.as_ptr()]
} else {
vec![]
};
// Extensions
let mut extensions = DEVICE_EXTENSIONS
.iter()
.map(|n| n.as_ptr())
.collect::<Vec<_>>();
// Required by Vulkan portability implementations (e.g. MoltenVK on macOS)
if instance
.enumerate_device_extension_properties(self.physical_device, None)?
.iter()
.any(|e| e.extension_name == vk::KHR_PORTABILITY_SUBSET_EXTENSION.name)
{
extensions.push(vk::KHR_PORTABILITY_SUBSET_EXTENSION.name.as_ptr());
}
// Features
let features = vk::PhysicalDeviceFeatures::builder().sampler_anisotropy(true);
// Create
let info = vk::DeviceCreateInfo::builder()
.queue_create_infos(&queue_infos)
.enabled_layer_names(&layers)
.enabled_extension_names(&extensions)
.enabled_features(&features);
let device = instance.create_device(self.physical_device, &info, None)?;
// Queues
self.graphics_queue = device.get_device_queue(indices.graphics, 0);
self.present_queue = device.get_device_queue(indices.present, 0);
Ok(device)
}
//================================================
// Swapchain
//================================================
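/// Creates the swapchain from the supported surface format, present mode and
/// extent, then stores its images, format and extent for later use.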
unsafe fn create_swapchain(
&mut self,
window: &Window,
instance: &Instance,
device: &Device,
) -> Result<()> {
let indices = QueueFamilyIndices::get(instance, self, self.physical_device)?;
let support = SwapchainSupport::get(instance, self, self.physical_device)?;
let surface_format = get_swapchain_surface_format(&support.formats);
let present_mode = get_swapchain_present_mode(&support.present_modes);
let extent = get_swapchain_extent(window, support.capabilities);
let mut image_count = support.capabilities.min_image_count + 1;
if support.capabilities.max_image_count != 0
&& image_count > support.capabilities.max_image_count
{
image_count = support.capabilities.max_image_count;
}
let mut queue_family_indices = vec![];
let image_sharing_mode = if indices.graphics != indices.present {
queue_family_indices.push(indices.graphics);
queue_family_indices.push(indices.present);
vk::SharingMode::CONCURRENT
} else {
vk::SharingMode::EXCLUSIVE
};
let info = vk::SwapchainCreateInfoKHR::builder()
.surface(self.surface)
.min_image_count(image_count)
.image_format(surface_format.format)
.image_color_space(surface_format.color_space)
.image_extent(extent)
.image_array_layers(1)
.image_usage(vk::ImageUsageFlags::COLOR_ATTACHMENT)
.image_sharing_mode(image_sharing_mode)
.queue_family_indices(&queue_family_indices)
.pre_transform(support.capabilities.current_transform)
.composite_alpha(vk::CompositeAlphaFlagsKHR::OPAQUE)
.present_mode(present_mode)
.clipped(true)
.old_swapchain(vk::SwapchainKHR::null());
self.swapchain = device.create_swapchain_khr(&info, None)?;
self.swapchain_images = device.get_swapchain_images_khr(self.swapchain)?;
self.swapchain_format = surface_format.format;
self.swapchain_extent = extent;
Ok(())
}
/// Recreates the swapchain
///
/// # Safety
/// Here be Dragons
pub(super) unsafe fn recreate_swapchain(
&mut self,
window: &Window,
instance: &Instance,
device: &Device,
) -> Result<()> {
device.device_wait_idle()?;
self.destroy_swapchain(device);
self.create_swapchain(window, instance, device)?;
self.create_swapchain_image_views(device)?;
self.create_render_pass(instance, device)?;
self.create_pipeline(device)?;
self.create_depth_objects(instance, device)?;
self.create_framebuffers(device)?;
self.create_uniform_buffers(instance, device)?;
self.create_descriptor_pool(device)?;
self.create_descriptor_sets(device)?;
self.create_command_buffers(device)?;
self.images_in_flight
.resize(self.swapchain_images.len(), vk::Fence::null());
Ok(())
}
unsafe fn create_swapchain_image_views(&mut self, device: &Device) -> Result<()> {
self.swapchain_image_views = self
.swapchain_images
.iter()
.map(|i| {
create_image_view(
device,
*i,
self.swapchain_format,
vk::ImageAspectFlags::COLOR,
)
})
.collect::<Result<Vec<_>, _>>()?;
Ok(())
}
//================================================
// Pipeline
//================================================
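/// Creates a render pass with a color attachment that is presented and a
/// depth attachment, bound to a single subpass with one external dependency.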
unsafe fn create_render_pass(&mut self, instance: &Instance, device: &Device) -> Result<()> {
let color_attachment = vk::AttachmentDescription::builder()
.format(self.swapchain_format)
.samples(vk::SampleCountFlags::_1)
.load_op(vk::AttachmentLoadOp::CLEAR)
.store_op(vk::AttachmentStoreOp::STORE)
.stencil_load_op(vk::AttachmentLoadOp::DONT_CARE)
.stencil_store_op(vk::AttachmentStoreOp::DONT_CARE)
.initial_layout(vk::ImageLayout::UNDEFINED)
.final_layout(vk::ImageLayout::PRESENT_SRC_KHR);
let depth_stencil_attachment = vk::AttachmentDescription::builder()
.format(self.get_depth_format(instance)?)
.samples(vk::SampleCountFlags::_1)
.load_op(vk::AttachmentLoadOp::CLEAR)
.store_op(vk::AttachmentStoreOp::DONT_CARE)
.stencil_load_op(vk::AttachmentLoadOp::DONT_CARE)
.stencil_store_op(vk::AttachmentStoreOp::DONT_CARE)
.initial_layout(vk::ImageLayout::UNDEFINED)
.final_layout(vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
// Subpasses
let color_attachment_ref = vk::AttachmentReference::builder()
.attachment(0)
.layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL);
let depth_stencil_attachment_ref = vk::AttachmentReference::builder()
.attachment(1)
.layout(vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
let color_attachments = &[color_attachment_ref];
let subpass = vk::SubpassDescription::builder()
.pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
.color_attachments(color_attachments)
.depth_stencil_attachment(&depth_stencil_attachment_ref);
let dependency = vk::SubpassDependency::builder()
.src_subpass(vk::SUBPASS_EXTERNAL)
.dst_subpass(0)
.src_stage_mask(
vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT
| vk::PipelineStageFlags::EARLY_FRAGMENT_TESTS,
)
.src_access_mask(vk::AccessFlags::empty())
.dst_stage_mask(
vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT
| vk::PipelineStageFlags::EARLY_FRAGMENT_TESTS,
)
.dst_access_mask(
vk::AccessFlags::COLOR_ATTACHMENT_WRITE
| vk::AccessFlags::DEPTH_STENCIL_ATTACHMENT_WRITE,
);
let attachments = &[color_attachment, depth_stencil_attachment];
let subpasses = &[subpass];
let dependencies = &[dependency];
let info = vk::RenderPassCreateInfo::builder()
.attachments(attachments)
.subpasses(subpasses)
.dependencies(dependencies);
self.render_pass = device.create_render_pass(&info, None)?;
Ok(())
}
unsafe fn create_descriptor_set_layout(&mut self, device: &Device) -> Result<()> {
let ubo_binding = vk::DescriptorSetLayoutBinding::builder()
.binding(0)
.descriptor_type(vk::DescriptorType::UNIFORM_BUFFER)
.descriptor_count(1)
.stage_flags(vk::ShaderStageFlags::VERTEX);
let sampler_binding = vk::DescriptorSetLayoutBinding::builder()
.binding(1)
.descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
.descriptor_count(1)
.stage_flags(vk::ShaderStageFlags::FRAGMENT);
let bindings = &[ubo_binding, sampler_binding];
let info = vk::DescriptorSetLayoutCreateInfo::builder().bindings(bindings);
self.descriptor_set_layout = device.create_descriptor_set_layout(&info, None)?;
Ok(())
}
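/// Builds the graphics pipeline: vertex and fragment shader stages, fixed
/// function state sized to the swapchain extent, depth testing on and color
/// blending off.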
unsafe fn create_pipeline(&mut self, device: &Device) -> Result<()> {
let vert = include_bytes!("../../shaders/vert.spv");
let frag = include_bytes!("../../shaders/frag.spv");
let vert_shader_module = create_shader_module(device, &vert[..])?;
let frag_shader_module = create_shader_module(device, &frag[..])?;
let vert_stage = vk::PipelineShaderStageCreateInfo::builder()
.stage(vk::ShaderStageFlags::VERTEX)
.module(vert_shader_module)
.name(b"main\0");
let frag_stage = vk::PipelineShaderStageCreateInfo::builder()
.stage(vk::ShaderStageFlags::FRAGMENT)
.module(frag_shader_module)
.name(b"main\0");
let binding_descriptions = &[Vertex::binding_description()];
let attribute_descriptions = Vertex::attribute_descriptions();
let vertex_input_state = vk::PipelineVertexInputStateCreateInfo::builder()
.vertex_binding_descriptions(binding_descriptions)
.vertex_attribute_descriptions(&attribute_descriptions);
let input_assembly_state = vk::PipelineInputAssemblyStateCreateInfo::builder()
.topology(vk::PrimitiveTopology::TRIANGLE_LIST)
.primitive_restart_enable(false);
let viewport = vk::Viewport::builder()
.x(0.0)
.y(0.0)
.width(self.swapchain_extent.width as f32)
.height(self.swapchain_extent.height as f32)
.min_depth(0.0)
.max_depth(1.0);
let scissor = vk::Rect2D::builder()
.offset(vk::Offset2D { x: 0, y: 0 })
.extent(self.swapchain_extent);
let viewports = &[viewport];
let scissors = &[scissor];
let viewport_state = vk::PipelineViewportStateCreateInfo::builder()
.viewports(viewports)
.scissors(scissors);
let rasterization_state = vk::PipelineRasterizationStateCreateInfo::builder()
.depth_clamp_enable(false)
.rasterizer_discard_enable(false)
.polygon_mode(vk::PolygonMode::FILL)
.line_width(1.0)
.cull_mode(vk::CullModeFlags::BACK)
.front_face(vk::FrontFace::COUNTER_CLOCKWISE)
.depth_bias_enable(false);
let multisample_state = vk::PipelineMultisampleStateCreateInfo::builder()
.sample_shading_enable(false)
.rasterization_samples(vk::SampleCountFlags::_1);
let depth_stencil_state = vk::PipelineDepthStencilStateCreateInfo::builder()
.depth_test_enable(true)
.depth_write_enable(true)
.depth_compare_op(vk::CompareOp::LESS)
.depth_bounds_test_enable(false)
.stencil_test_enable(false);
let attachment = vk::PipelineColorBlendAttachmentState::builder()
.color_write_mask(vk::ColorComponentFlags::all())
.blend_enable(false)
.src_color_blend_factor(vk::BlendFactor::ONE)
.dst_color_blend_factor(vk::BlendFactor::ZERO)
.color_blend_op(vk::BlendOp::ADD)
.src_alpha_blend_factor(vk::BlendFactor::ONE)
.dst_alpha_blend_factor(vk::BlendFactor::ZERO)
.alpha_blend_op(vk::BlendOp::ADD);
let attachments = &[attachment];
let color_blend_state = vk::PipelineColorBlendStateCreateInfo::builder()
.logic_op_enable(false)
.logic_op(vk::LogicOp::COPY)
.attachments(attachments)
.blend_constants([0.0, 0.0, 0.0, 0.0]);
let set_layouts = &[self.descriptor_set_layout];
let layout_info = vk::PipelineLayoutCreateInfo::builder().set_layouts(set_layouts);
self.pipeline_layout = device.create_pipeline_layout(&layout_info, None)?;
let stages = &[vert_stage, frag_stage];
let info = vk::GraphicsPipelineCreateInfo::builder()
.stages(stages)
.vertex_input_state(&vertex_input_state)
.input_assembly_state(&input_assembly_state)
.viewport_state(&viewport_state)
.rasterization_state(&rasterization_state)
.multisample_state(&multisample_state)
.depth_stencil_state(&depth_stencil_state)
.color_blend_state(&color_blend_state)
.layout(self.pipeline_layout)
.render_pass(self.render_pass)
.subpass(0);
self.pipeline = device
.create_graphics_pipelines(vk::PipelineCache::null(), &[info], None)?
.0[0];
// Cleanup
device.destroy_shader_module(vert_shader_module, None);
device.destroy_shader_module(frag_shader_module, None);
Ok(())
}
//================================================
// Framebuffers
//================================================
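/// Creates one framebuffer per swapchain image view, each also attaching the
/// shared depth image view.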
unsafe fn create_framebuffers(&mut self, device: &Device) -> Result<()> {
self.framebuffers = self
.swapchain_image_views
.iter()
.map(|i| {
let attachments = &[*i, self.depth_image_view];
let create_info = vk::FramebufferCreateInfo::builder()
.render_pass(self.render_pass)
.attachments(attachments)
.width(self.swapchain_extent.width)
.height(self.swapchain_extent.height)
.layers(1);
device.create_framebuffer(&create_info, None)
})
.collect::<Result<Vec<_>, _>>()?;
Ok(())
}
//================================================
// Command Pool
//================================================
unsafe fn create_command_pool(&mut self, instance: &Instance, device: &Device) -> Result<()> {
let indices = QueueFamilyIndices::get(instance, self, self.physical_device)?;
let info = vk::CommandPoolCreateInfo::builder()
.flags(vk::CommandPoolCreateFlags::empty())
.queue_family_index(indices.graphics);
self.command_pool = device.create_command_pool(&info, None)?;
Ok(())
}
//================================================
// Depth Objects
//================================================
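/// Creates the depth image, its device memory and its image view using the
/// first supported depth format.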
unsafe fn create_depth_objects(&mut self, instance: &Instance, device: &Device) -> Result<()> {
let format = self.get_depth_format(instance)?;
let (depth_image, depth_image_memory) = self.create_image(
instance,
device,
self.swapchain_extent.width,
self.swapchain_extent.height,
format,
vk::ImageTiling::OPTIMAL,
vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT,
vk::MemoryPropertyFlags::DEVICE_LOCAL,
)?;
self.depth_image = depth_image;
self.depth_image_memory = depth_image_memory;
self.depth_image_view = create_image_view(
device,
self.depth_image,
format,
vk::ImageAspectFlags::DEPTH,
)?;
Ok(())
}
unsafe fn get_depth_format(&self, instance: &Instance) -> Result<vk::Format> {
let candidates = &[
vk::Format::D32_SFLOAT,
vk::Format::D32_SFLOAT_S8_UINT,
vk::Format::D24_UNORM_S8_UINT,
];
self.get_supported_format(
instance,
candidates,
vk::ImageTiling::OPTIMAL,
vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT,
)
}
unsafe fn get_supported_format(
&self,
instance: &Instance,
candidates: &[vk::Format],
tiling: vk::ImageTiling,
features: vk::FormatFeatureFlags,
) -> Result<vk::Format> {
candidates
.iter()
.cloned()
.find(|f| {
let properties =
instance.get_physical_device_format_properties(self.physical_device, *f);
match tiling {
vk::ImageTiling::LINEAR => properties.linear_tiling_features.contains(features),
vk::ImageTiling::OPTIMAL => {
properties.optimal_tiling_features.contains(features)
}
_ => false,
}
})
.ok_or_else(|| anyhow!("Failed to find supported depth image format."))
}
//================================================
// Texture
//================================================
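/// Loads `resources/texture.png`, copies its pixels into the texture image
/// through a staging buffer and transitions the image for shader sampling.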
fn create_texture_image(&mut self, instance: &Instance, device: &Device) -> Result<()> {
// Load the image
let image = File::open("resources/texture.png")?;
let decoder = png::Decoder::new(image);
let mut reader = decoder.read_info()?;
let mut pixels = vec![0; reader.info().raw_bytes()];
reader.next_frame(&mut pixels)?;
let size = reader.info().raw_bytes() as u64;
let (width, height) = reader.info().size();
// Create the staging buffer
let (staging_buffer, staging_buffer_memory) = unsafe {
self.create_buffer(
instance,
device,
size,
vk::BufferUsageFlags::TRANSFER_SRC,
vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
)?
};
let memory = unsafe {
device.map_memory(staging_buffer_memory, 0, size, vk::MemoryMapFlags::empty())?
};
unsafe {
memcpy(pixels.as_ptr(), memory.cast(), pixels.len());
device.unmap_memory(staging_buffer_memory);
let (texture_image, texture_image_memory) = self.create_image(
instance,
device,
width,
height,
vk::Format::R8G8B8A8_SRGB,
vk::ImageTiling::OPTIMAL,
vk::ImageUsageFlags::SAMPLED | vk::ImageUsageFlags::TRANSFER_DST,
vk::MemoryPropertyFlags::DEVICE_LOCAL,
)?;
self.texture_image = texture_image;
self.texture_image_memory = texture_image_memory;
self.transition_image_layout(
device,
self.texture_image,
vk::Format::R8G8B8A8_SRGB,
vk::ImageLayout::UNDEFINED,
vk::ImageLayout::TRANSFER_DST_OPTIMAL,
)?;
self.copy_buffer_to_image(device, staging_buffer, self.texture_image, width, height)?;
self.transition_image_layout(
device,
self.texture_image,
vk::Format::R8G8B8A8_SRGB,
vk::ImageLayout::TRANSFER_DST_OPTIMAL,
vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
)?;
device.destroy_buffer(staging_buffer, None);
device.free_memory(staging_buffer_memory, None);
}
Ok(())
}
unsafe fn create_texture_image_view(&mut self, device: &Device) -> Result<()> {
self.texture_image_view = create_image_view(
device,
self.texture_image,
vk::Format::R8G8B8A8_SRGB,
vk::ImageAspectFlags::COLOR,
)?;
Ok(())
}
unsafe fn create_texture_sampler(&mut self, device: &Device) -> Result<()> {
let info = vk::SamplerCreateInfo::builder()
.mag_filter(vk::Filter::LINEAR)
.min_filter(vk::Filter::LINEAR)
.address_mode_u(vk::SamplerAddressMode::REPEAT)
.address_mode_v(vk::SamplerAddressMode::REPEAT)
.address_mode_w(vk::SamplerAddressMode::REPEAT)
.anisotropy_enable(true)
.max_anisotropy(16.0)
.border_color(vk::BorderColor::INT_OPAQUE_BLACK)
.unnormalized_coordinates(false)
.compare_enable(false)
.compare_op(vk::CompareOp::ALWAYS)
.mipmap_mode(vk::SamplerMipmapMode::LINEAR)
.mip_lod_bias(0.0)
.min_lod(0.0)
.max_lod(0.0);
self.texture_sampler = device.create_sampler(&info, None)?;
Ok(())
}
//================================================
// Buffers
//================================================
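/// Uploads `VERTICES` into a device-local vertex buffer via a staging buffer.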
unsafe fn create_vertex_buffer(&mut self, instance: &Instance, device: &Device) -> Result<()> {
// Create staging buffer
let size = (size_of::<Vertex>() * VERTICES.len()) as u64;
let (staging_buffer, staging_buffer_memory) = self.create_buffer(
instance,
device,
size,
vk::BufferUsageFlags::TRANSFER_SRC,
vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
)?;
let memory =
device.map_memory(staging_buffer_memory, 0, size, vk::MemoryMapFlags::empty())?;
memcpy(VERTICES.as_ptr(), memory.cast(), VERTICES.len());
device.unmap_memory(staging_buffer_memory);
// Create vertex buffer
let (vertex_buffer, vertex_buffer_memory) = self.create_buffer(
instance,
device,
size,
vk::BufferUsageFlags::TRANSFER_DST | vk::BufferUsageFlags::VERTEX_BUFFER,
vk::MemoryPropertyFlags::DEVICE_LOCAL,
)?;
self.vertex_buffer = vertex_buffer;
self.vertex_buffer_memory = vertex_buffer_memory;
// Copy to the vertex buffer
self.copy_buffer(device, staging_buffer, vertex_buffer, size)?;
// Cleanup
device.destroy_buffer(staging_buffer, None);
device.free_memory(staging_buffer_memory, None);
Ok(())
}
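/// Uploads `INDICES` into a device-local index buffer via a staging buffer.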
unsafe fn create_index_buffer(&mut self, instance: &Instance, device: &Device) -> Result<()> {
let size = (size_of::<u16>() * INDICES.len()) as u64;
let (staging_buffer, staging_buffer_memory) = self.create_buffer(
instance,
device,
size,
vk::BufferUsageFlags::TRANSFER_SRC,
vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
)?;
let memory =
device.map_memory(staging_buffer_memory, 0, size, vk::MemoryMapFlags::empty())?;
memcpy(INDICES.as_ptr(), memory.cast(), INDICES.len());
device.unmap_memory(staging_buffer_memory);
let (index_buffer, index_buffer_memory) = self.create_buffer(
instance,
device,
size,
vk::BufferUsageFlags::TRANSFER_DST | vk::BufferUsageFlags::INDEX_BUFFER,
vk::MemoryPropertyFlags::DEVICE_LOCAL,
)?;
self.index_buffer = index_buffer;
self.index_buffer_memory = index_buffer_memory;
self.copy_buffer(device, staging_buffer, index_buffer, size)?;
device.destroy_buffer(staging_buffer, None);
device.free_memory(staging_buffer_memory, None);
Ok(())
}
unsafe fn create_uniform_buffers(
&mut self,
instance: &Instance,
device: &Device,
) -> Result<()> {
self.uniform_buffers.clear();
self.uniform_buffers_memory.clear();
for _ in 0..self.swapchain_images.len() {
let (uniform_buffer, uniform_buffer_memory) = self.create_buffer(
instance,
device,
size_of::<UniformBufferObject>() as u64,
vk::BufferUsageFlags::UNIFORM_BUFFER,
vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
)?;
self.uniform_buffers.push(uniform_buffer);
self.uniform_buffers_memory.push(uniform_buffer_memory);
}
Ok(())
}
//================================================
// Descriptors
//================================================
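/// Creates a descriptor pool sized for one uniform buffer and one combined
/// image sampler descriptor per swapchain image.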
unsafe fn create_descriptor_pool(&mut self, device: &Device) -> Result<()> {
let ubo_size = vk::DescriptorPoolSize::builder()
.type_(vk::DescriptorType::UNIFORM_BUFFER)
.descriptor_count(self.swapchain_images.len() as u32);
let sampler_size = vk::DescriptorPoolSize::builder()
.type_(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
.descriptor_count(self.swapchain_images.len() as u32);
let pool_sizes = &[ubo_size, sampler_size];
let info = vk::DescriptorPoolCreateInfo::builder()
.pool_sizes(pool_sizes)
.max_sets(self.swapchain_images.len() as u32);
self.descriptor_pool = device.create_descriptor_pool(&info, None)?;
Ok(())
}
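/// Allocates one descriptor set per swapchain image and writes the matching
/// uniform buffer and the texture sampler into each.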
unsafe fn create_descriptor_sets(&mut self, device: &Device) -> Result<()> {
let layouts = vec![self.descriptor_set_layout; self.swapchain_images.len()];
let info = vk::DescriptorSetAllocateInfo::builder()
.descriptor_pool(self.descriptor_pool)
.set_layouts(&layouts);
self.descriptor_sets = device.allocate_descriptor_sets(&info)?;
for i in 0..self.swapchain_images.len() {
let info = vk::DescriptorBufferInfo::builder()
.buffer(self.uniform_buffers[i])
.offset(0)
.range(size_of::<UniformBufferObject>() as u64);
let buffer_info = &[info];
let ubo_write = vk::WriteDescriptorSet::builder()
.dst_set(self.descriptor_sets[i])
.dst_binding(0)
.dst_array_element(0)
.descriptor_type(vk::DescriptorType::UNIFORM_BUFFER)
.buffer_info(buffer_info);
let info = vk::DescriptorImageInfo::builder()
.image_layout(vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL)
.image_view(self.texture_image_view)
.sampler(self.texture_sampler);
let image_info = &[info];
let sampler_write = vk::WriteDescriptorSet::builder()
.dst_set(self.descriptor_sets[i])
.dst_binding(1)
.dst_array_element(0)
.descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
.image_info(image_info);
device.update_descriptor_sets(
&[ubo_write, sampler_write],
&[] as &[vk::CopyDescriptorSet],
);
}
Ok(())
}
//================================================
// Command Buffers
//================================================
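/// Allocates one primary command buffer per framebuffer and records the full
/// frame: begin render pass, bind pipeline, buffers and descriptor set, draw
/// indexed, end render pass.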
unsafe fn create_command_buffers(&mut self, device: &Device) -> Result<()> {
// Allocate the command buffers
let allocate_info = vk::CommandBufferAllocateInfo::builder()
.command_pool(self.command_pool)
.level(vk::CommandBufferLevel::PRIMARY)
.command_buffer_count(self.framebuffers.len() as u32);
self.command_buffers = device.allocate_command_buffers(&allocate_info)?;
// Record the commands
for (i, command_buffer) in self.command_buffers.iter().enumerate() {
let inheritance = vk::CommandBufferInheritanceInfo::builder();
let info = vk::CommandBufferBeginInfo::builder()
.flags(vk::CommandBufferUsageFlags::empty())
.inheritance_info(&inheritance);
device.begin_command_buffer(*command_buffer, &info)?;
let render_area = vk::Rect2D::builder()
.offset(vk::Offset2D::default())
.extent(self.swapchain_extent);
let color_clear_value = vk::ClearValue {
color: vk::ClearColorValue {
float32: [0.0, 0.0, 0.0, 1.0],
},
};
let depth_clear_value = vk::ClearValue {
depth_stencil: vk::ClearDepthStencilValue {
depth: 1.0,
stencil: 0,
},
};
let clear_values = &[color_clear_value, depth_clear_value];
let info = vk::RenderPassBeginInfo::builder()
.render_pass(self.render_pass)
.framebuffer(self.framebuffers[i])
.render_area(render_area)
.clear_values(clear_values);
device.cmd_begin_render_pass(*command_buffer, &info, vk::SubpassContents::INLINE);
device.cmd_bind_pipeline(
*command_buffer,
vk::PipelineBindPoint::GRAPHICS,
self.pipeline,
);
device.cmd_bind_vertex_buffers(*command_buffer, 0, &[self.vertex_buffer], &[0]);
device.cmd_bind_index_buffer(
*command_buffer,
self.index_buffer,
0,
vk::IndexType::UINT16,
);
device.cmd_bind_descriptor_sets(
*command_buffer,
vk::PipelineBindPoint::GRAPHICS,
self.pipeline_layout,
0,
&[self.descriptor_sets[i]],
&[],
);
device.cmd_draw_indexed(*command_buffer, INDICES.len() as u32, 1, 0, 0, 0);
device.cmd_end_render_pass(*command_buffer);
device.end_command_buffer(*command_buffer)?;
}
Ok(())
}
//================================================
// Sync Objects
//================================================
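/// Creates the per-frame semaphores and fences (created signaled) and the
/// per-image `images_in_flight` slots.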
unsafe fn create_sync_objects(&mut self, device: &Device) -> Result<()> {
let semaphore_info = vk::SemaphoreCreateInfo::builder();
let fence_info = vk::FenceCreateInfo::builder().flags(vk::FenceCreateFlags::SIGNALED);
for _ in 0..MAX_FRAMES_IN_FLIGHT {
self.image_available_semaphores
.push(device.create_semaphore(&semaphore_info, None)?);
self.render_finished_semaphores
.push(device.create_semaphore(&semaphore_info, None)?);
self.in_flight_fences
.push(device.create_fence(&fence_info, None)?);
}
self.images_in_flight = self
.swapchain_images
.iter()
.map(|_| vk::Fence::null())
.collect();
Ok(())
}
//================================================
// Shared (Buffers)
//================================================
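/// Creates a buffer, allocates memory with the requested properties and binds
/// it to the buffer.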
unsafe fn create_buffer(
&self,
instance: &Instance,
device: &Device,
size: vk::DeviceSize,
usage: vk::BufferUsageFlags,
properties: vk::MemoryPropertyFlags,
) -> Result<(vk::Buffer, vk::DeviceMemory)> {
let buffer_info = vk::BufferCreateInfo::builder()
.size(size)
.usage(usage)
.sharing_mode(vk::SharingMode::EXCLUSIVE);
let buffer = device.create_buffer(&buffer_info, None)?;
let requirements = device.get_buffer_memory_requirements(buffer);
let memory_info = vk::MemoryAllocateInfo::builder()
.allocation_size(requirements.size)
.memory_type_index(self.get_memory_type_index(instance, properties, requirements)?);
let buffer_memory = device.allocate_memory(&memory_info, None)?;
device.bind_buffer_memory(buffer, buffer_memory, 0)?;
Ok((buffer, buffer_memory))
}
unsafe fn copy_buffer(
&self,
device: &Device,
source: vk::Buffer,
destination: vk::Buffer,
size: vk::DeviceSize,
) -> Result<()> {
let command_buffer = self.begin_single_time_commands(device)?;
let regions = vk::BufferCopy::builder().size(size);
device.cmd_copy_buffer(command_buffer, source, destination, &[regions]);
self.end_single_time_commands(device, command_buffer)?;
Ok(())
}
//================================================
// Shared (Images)
//================================================
unsafe fn create_image(
&self,
instance: &Instance,
device: &Device,
width: u32,
height: u32,
format: vk::Format,
tiling: vk::ImageTiling,
usage: vk::ImageUsageFlags,
properties: vk::MemoryPropertyFlags,
) -> Result<(vk::Image, vk::DeviceMemory)> {
let info = vk::ImageCreateInfo::builder()
.image_type(vk::ImageType::_2D)
.extent(vk::Extent3D {
width,
height,
depth: 1,
})
.mip_levels(1)
.array_layers(1)
.format(format)
.tiling(tiling)
.initial_layout(vk::ImageLayout::UNDEFINED)
.usage(usage)
.samples(vk::SampleCountFlags::_1)
.sharing_mode(vk::SharingMode::EXCLUSIVE);
let image = device.create_image(&info, None)?;
let requirements = device.get_image_memory_requirements(image);
let info = vk::MemoryAllocateInfo::builder()
.allocation_size(requirements.size)
.memory_type_index(self.get_memory_type_index(instance, properties, requirements)?);
let image_memory = device.allocate_memory(&info, None)?;
device.bind_image_memory(image, image_memory, 0)?;
Ok((image, image_memory))
}
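/// Records a pipeline barrier transitioning `image` between the supported
/// layout pairs (undefined -> transfer dst, transfer dst -> shader read only).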
unsafe fn transition_image_layout(
&self,
device: &Device,
image: vk::Image,
_format: vk::Format,
old_layout: vk::ImageLayout,
new_layout: vk::ImageLayout,
) -> Result<()> {
let (src_access_mask, dst_access_mask, src_stage_mask, dst_stage_mask) =
match (old_layout, new_layout) {
(vk::ImageLayout::UNDEFINED, vk::ImageLayout::TRANSFER_DST_OPTIMAL) => (
vk::AccessFlags::empty(),
vk::AccessFlags::TRANSFER_WRITE,
vk::PipelineStageFlags::TOP_OF_PIPE,
vk::PipelineStageFlags::TRANSFER,
),
(
vk::ImageLayout::TRANSFER_DST_OPTIMAL,
vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
) => (
vk::AccessFlags::TRANSFER_WRITE,
vk::AccessFlags::SHADER_READ,
vk::PipelineStageFlags::TRANSFER,
vk::PipelineStageFlags::FRAGMENT_SHADER,
),
_ => return Err(anyhow!("Unsupported image layout transition!")),
};
let command_buffer = self.begin_single_time_commands(device)?;
let subresource = vk::ImageSubresourceRange::builder()
.aspect_mask(vk::ImageAspectFlags::COLOR)
.base_mip_level(0)
.level_count(1)
.base_array_layer(0)
.layer_count(1);
let barrier = vk::ImageMemoryBarrier::builder()
.old_layout(old_layout)
.new_layout(new_layout)
.src_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
.dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
.image(image)
.subresource_range(subresource)
.src_access_mask(src_access_mask)
.dst_access_mask(dst_access_mask);
device.cmd_pipeline_barrier(
command_buffer,
src_stage_mask,
dst_stage_mask,
vk::DependencyFlags::empty(),
&[] as &[vk::MemoryBarrier],
&[] as &[vk::BufferMemoryBarrier],
&[barrier],
);
self.end_single_time_commands(device, command_buffer)?;
Ok(())
}
unsafe fn copy_buffer_to_image(
&self,
device: &Device,
buffer: vk::Buffer,
image: vk::Image,
width: u32,
height: u32,
) -> Result<()> {
let command_buffer = self.begin_single_time_commands(device)?;
let subresource = vk::ImageSubresourceLayers::builder()
.aspect_mask(vk::ImageAspectFlags::COLOR)
.mip_level(0)
.base_array_layer(0)
.layer_count(1);
let region = vk::BufferImageCopy::builder()
.buffer_offset(0)
.buffer_row_length(0)
.buffer_image_height(0)
.image_subresource(subresource)
.image_offset(vk::Offset3D { x: 0, y: 0, z: 0 })
.image_extent(vk::Extent3D {
width,
height,
depth: 1,
});
device.cmd_copy_buffer_to_image(
command_buffer,
buffer,
image,
vk::ImageLayout::TRANSFER_DST_OPTIMAL,
&[region],
);
self.end_single_time_commands(device, command_buffer)?;
Ok(())
}
//================================================
// Shared (Other)
//================================================
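/// Finds a memory type index permitted by `requirements` that has the
/// requested property flags.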
unsafe fn get_memory_type_index(
&self,
instance: &Instance,
properties: vk::MemoryPropertyFlags,
requirements: vk::MemoryRequirements,
) -> Result<u32> {
let memory = instance.get_physical_device_memory_properties(self.physical_device);
(0..memory.memory_type_count)
.find(|i| {
let suitable = (requirements.memory_type_bits & (1 << i)) != 0;
let memory_type = memory.memory_types[*i as usize];
suitable && memory_type.property_flags.contains(properties)
})
.ok_or_else(|| anyhow!("Failed to find suitable memory type."))
}
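/// Allocates and begins a one-time-submit command buffer from the command
/// pool; pair with `end_single_time_commands`.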
unsafe fn begin_single_time_commands(&self, device: &Device) -> Result<vk::CommandBuffer> {
let info = vk::CommandBufferAllocateInfo::builder()
.level(vk::CommandBufferLevel::PRIMARY)
.command_pool(self.command_pool)
.command_buffer_count(1);
let command_buffer = device.allocate_command_buffers(&info)?[0];
let info = vk::CommandBufferBeginInfo::builder()
.flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
device.begin_command_buffer(command_buffer, &info)?;
Ok(command_buffer)
}
unsafe fn end_single_time_commands(
&self,
device: &Device,
command_buffer: vk::CommandBuffer,
) -> Result<()> {
device.end_command_buffer(command_buffer)?;
let command_buffers = &[command_buffer];
let info = vk::SubmitInfo::builder().command_buffers(command_buffers);
device.queue_submit(self.graphics_queue, &[info], vk::Fence::null())?;
device.queue_wait_idle(self.graphics_queue)?;
device.free_command_buffers(self.command_pool, command_buffers);
Ok(())
}
}