use std::mem::size_of;
use std::ptr::copy_nonoverlapping as memcpy;

use anyhow::{anyhow, Result};
use vulkanalia::prelude::v1_0::*;

use crate::app_data;
use crate::command_buffer;
use crate::vertex::VertexContainer;

pub type Mat4 = cgmath::Matrix4<f32>;

/// Creates a buffer of the given size and usage, allocates memory with the
/// requested properties, and binds that memory to the buffer.
pub unsafe fn create_buffer(
    instance: &Instance,
    device: &Device,
    data: &app_data::AppData,
    size: vk::DeviceSize,
    usage: vk::BufferUsageFlags,
    properties: vk::MemoryPropertyFlags,
) -> Result<(vk::Buffer, vk::DeviceMemory)> {
    let buffer_info = vk::BufferCreateInfo::builder()
        .size(size)
        .usage(usage)
        .sharing_mode(vk::SharingMode::EXCLUSIVE);

    let buffer = device.create_buffer(&buffer_info, None)?;

    let requirements = device.get_buffer_memory_requirements(buffer);

    let memory_info = vk::MemoryAllocateInfo::builder()
        .allocation_size(requirements.size)
        .memory_type_index(get_memory_type_index(
            instance,
            data,
            properties,
            requirements,
        )?);

    let buffer_memory = device.allocate_memory(&memory_info, None)?;

    device.bind_buffer_memory(buffer, buffer_memory, 0)?;

    Ok((buffer, buffer_memory))
}

/// Uploads `vertices` into a device-local vertex buffer via a host-visible
/// staging buffer.
pub unsafe fn create_vertex_buffer<const COUNT: usize, T: VertexContainer<COUNT>>(
    instance: &Instance,
    device: &Device,
    data: &app_data::AppData,
    vertices: &Vec<T>,
) -> Result<(vk::Buffer, vk::DeviceMemory)> {
    let size = (size_of::<T>() * vertices.len()) as u64;

    // Staging buffer the host can write into directly.
    let (staging_buffer, staging_buffer_memory) = create_buffer(
        instance,
        device,
        data,
        size,
        vk::BufferUsageFlags::TRANSFER_SRC,
        vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
    )?;

    let memory = device.map_memory(
        staging_buffer_memory,
        0,
        size,
        vk::MemoryMapFlags::empty(),
    )?;

    memcpy(vertices.as_ptr(), memory.cast(), vertices.len());

    device.unmap_memory(staging_buffer_memory);

    // Device-local buffer the GPU reads from during rendering.
    let (vertex_buffer, vertex_buffer_memory) = create_buffer(
        instance,
        device,
        data,
        size,
        vk::BufferUsageFlags::TRANSFER_DST | vk::BufferUsageFlags::VERTEX_BUFFER,
        vk::MemoryPropertyFlags::DEVICE_LOCAL,
    )?;

    copy_buffer(device, data, staging_buffer, vertex_buffer, size)?;

    device.destroy_buffer(staging_buffer, None);
    device.free_memory(staging_buffer_memory, None);

    Ok((vertex_buffer, vertex_buffer_memory))
}

/// Finds a memory type index that satisfies both the buffer's requirements and
/// the requested property flags.
pub unsafe fn get_memory_type_index(
    instance: &Instance,
    data: &app_data::AppData,
    properties: vk::MemoryPropertyFlags,
    requirements: vk::MemoryRequirements,
) -> Result<u32> {
    let memory = instance.get_physical_device_memory_properties(data.physical_device);
    (0..memory.memory_type_count)
        .find(|i| {
            let suitable = (requirements.memory_type_bits & (1 << i)) != 0;
            let memory_type = memory.memory_types[*i as usize];
            suitable && memory_type.property_flags.contains(properties)
        })
        .ok_or_else(|| anyhow!("Failed to find suitable memory type."))
}

/// Records and submits a one-off transfer command that copies `size` bytes
/// from `source` to `destination`.
pub unsafe fn copy_buffer(
    device: &Device,
    data: &app_data::AppData,
    source: vk::Buffer,
    destination: vk::Buffer,
    size: vk::DeviceSize,
) -> Result<()> {
    let command_buffer = command_buffer::begin_single_time_commands(device, data)?;

    let regions = vk::BufferCopy::builder().size(size);
    device.cmd_copy_buffer(command_buffer, source, destination, &[regions]);

    command_buffer::end_single_time_commands(device, data, command_buffer)?;

    Ok(())
}

/// Uploads `indices` into a device-local index buffer via a host-visible
/// staging buffer.
pub unsafe fn create_index_buffer(
    instance: &Instance,
    device: &Device,
    data: &app_data::AppData,
    indices: &Vec<u32>,
) -> Result<(vk::Buffer, vk::DeviceMemory)> {
    let size = (size_of::<u32>() * indices.len()) as u64;

    let (staging_buffer, staging_buffer_memory) = create_buffer(
        instance,
        device,
        data,
        size,
        vk::BufferUsageFlags::TRANSFER_SRC,
        vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
    )?;

    let memory = device.map_memory(
        staging_buffer_memory,
        0,
        size,
        vk::MemoryMapFlags::empty(),
    )?;

    memcpy(indices.as_ptr(), memory.cast(), indices.len());

    device.unmap_memory(staging_buffer_memory);

    let (index_buffer, index_buffer_memory) = create_buffer(
        instance,
        device,
        data,
        size,
        vk::BufferUsageFlags::TRANSFER_DST | vk::BufferUsageFlags::INDEX_BUFFER,
        vk::MemoryPropertyFlags::DEVICE_LOCAL,
    )?;

    copy_buffer(device, data, staging_buffer, index_buffer, size)?;

    device.destroy_buffer(staging_buffer, None);
    device.free_memory(staging_buffer_memory, None);

    Ok((index_buffer, index_buffer_memory))
}

/// CPU-side representation of the per-frame uniform data.
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct UniformBufferObject {
    pub model: Mat4,
    pub geom_rot: Mat4,
    pub view: Mat4,
    pub proj: Mat4,
    pub use_geom_shader: [bool; 16],
}

/// Creates the descriptor set layout: a uniform buffer (binding 0), a combined
/// image sampler (binding 1), and a storage buffer (binding 2).
pub unsafe fn create_descriptor_set_layout(
    device: &Device,
    data: &mut app_data::AppData,
) -> Result<()> {
    let ubo_binding = vk::DescriptorSetLayoutBinding::builder()
        .binding(0)
        .descriptor_type(vk::DescriptorType::UNIFORM_BUFFER)
        .descriptor_count(1)
        .stage_flags(vk::ShaderStageFlags::VERTEX | vk::ShaderStageFlags::GEOMETRY);

    let sampler_binding = vk::DescriptorSetLayoutBinding::builder()
        .binding(1)
        .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
        .descriptor_count(1)
        .stage_flags(vk::ShaderStageFlags::FRAGMENT);

    let storage_binding = vk::DescriptorSetLayoutBinding::builder()
        .binding(2)
        .descriptor_type(vk::DescriptorType::STORAGE_BUFFER)
        .descriptor_count(1)
        .stage_flags(vk::ShaderStageFlags::FRAGMENT);

    let bindings = &[ubo_binding, sampler_binding, storage_binding];
    let info = vk::DescriptorSetLayoutCreateInfo::builder()
        .bindings(bindings);

    data.descriptor_set_layout = device.create_descriptor_set_layout(&info, None)?;

    Ok(())
}

/// Creates one host-visible uniform buffer per swapchain image.
pub unsafe fn create_uniform_buffers(
    instance: &Instance,
    device: &Device,
    data: &mut app_data::AppData,
) -> Result<()> {
    data.uniform_buffers.clear();
    data.uniform_buffers_memory.clear();

    for _ in 0..data.swapchain_images.len() {
        let (uniform_buffer, uniform_buffer_memory) = create_buffer(
            instance,
            device,
            data,
            size_of::<UniformBufferObject>() as u64,
            vk::BufferUsageFlags::UNIFORM_BUFFER,
            vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
        )?;

        data.uniform_buffers.push(uniform_buffer);
        data.uniform_buffers_memory.push(uniform_buffer_memory);
    }

    Ok(())
}

/// Creates one host-visible storage buffer per swapchain image, sized like the
/// uniform buffer object.
pub unsafe fn create_storage_buffers(
    instance: &Instance,
    device: &Device,
    data: &mut app_data::AppData,
) -> Result<()> {
    data.storage_buffers.clear();
    data.storage_buffers_memory.clear();

    for _ in 0..data.swapchain_images.len() {
        let (storage_buffer, storage_buffer_memory) = create_buffer(
            instance,
            device,
            data,
            size_of::<UniformBufferObject>() as u64,
            vk::BufferUsageFlags::STORAGE_BUFFER,
            vk::MemoryPropertyFlags::HOST_COHERENT | vk::MemoryPropertyFlags::HOST_VISIBLE,
        )?;

        data.storage_buffers.push(storage_buffer);
        data.storage_buffers_memory.push(storage_buffer_memory);
    }

    Ok(())
}

/// Creates a descriptor pool sized for one descriptor set per swapchain image.
pub unsafe fn create_descriptor_pool(device: &Device, data: &mut app_data::AppData) -> Result<()> {
    let ubo_size = vk::DescriptorPoolSize::builder()
        .type_(vk::DescriptorType::UNIFORM_BUFFER)
        .descriptor_count(data.swapchain_images.len() as u32);

    let sampler_size = vk::DescriptorPoolSize::builder()
        .type_(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
        .descriptor_count(data.swapchain_images.len() as u32);

    let storage_size = vk::DescriptorPoolSize::builder()
        .type_(vk::DescriptorType::STORAGE_BUFFER)
        .descriptor_count(data.swapchain_images.len() as u32);

    let pool_sizes = &[ubo_size, sampler_size, storage_size];
    let info = vk::DescriptorPoolCreateInfo::builder()
        .pool_sizes(pool_sizes)
        .max_sets(data.swapchain_images.len() as u32);

    data.descriptor_pool = device.create_descriptor_pool(&info, None)?;

    Ok(())
}

/// Allocates one descriptor set per swapchain image and writes the uniform
/// buffer, texture sampler, and storage buffer into each set.
pub unsafe fn create_descriptor_sets(device: &Device, data: &mut app_data::AppData) -> Result<()> {
    let layouts = vec![data.descriptor_set_layout; data.swapchain_images.len()];
    let info = vk::DescriptorSetAllocateInfo::builder()
        .descriptor_pool(data.descriptor_pool)
        .set_layouts(&layouts);

    data.descriptor_sets = device.allocate_descriptor_sets(&info)?;

    for i in 0..data.swapchain_images.len() {
        // Binding 0: uniform buffer.
        let info = vk::DescriptorBufferInfo::builder()
            .buffer(data.uniform_buffers[i])
            .offset(0)
            .range(size_of::<UniformBufferObject>() as u64);

        let buffer_info = &[info];
        let ubo_write = vk::WriteDescriptorSet::builder()
            .dst_set(data.descriptor_sets[i])
            .dst_binding(0)
            .dst_array_element(0)
            .descriptor_type(vk::DescriptorType::UNIFORM_BUFFER)
            .buffer_info(buffer_info);

        // Binding 1: combined image sampler.
        let info = vk::DescriptorImageInfo::builder()
            .image_layout(vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL)
            .image_view(data.texture_image_view)
            .sampler(data.texture_sampler);

        let image_info = &[info];
        let sampler_write = vk::WriteDescriptorSet::builder()
            .dst_set(data.descriptor_sets[i])
            .dst_binding(1)
            .dst_array_element(0)
            .descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
            .image_info(image_info);

        // Binding 2: storage buffer.
        let info = vk::DescriptorBufferInfo::builder()
            .buffer(data.storage_buffers[i])
            .offset(0)
            .range(size_of::<UniformBufferObject>() as u64);

        let storage_info = &[info];
        let storage_write = vk::WriteDescriptorSet::builder()
            .dst_set(data.descriptor_sets[i])
            .dst_binding(2)
            .dst_array_element(0)
            .descriptor_type(vk::DescriptorType::STORAGE_BUFFER)
            .buffer_info(storage_info);

        device.update_descriptor_sets(
            &[ubo_write, sampler_write, storage_write],
            &[] as &[vk::CopyDescriptorSet],
        );
    }

    Ok(())
}
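
// Sketch of a typical initialization order for the functions above. This is an
// illustrative assumption, not code from this crate: it presumes the instance,
// device, swapchain images, texture image view, and sampler in `AppData` have
// already been created elsewhere, and that `vertices`/`indices` hold the mesh
// data to upload. All of these are `unsafe fn`s, so the calls belong inside an
// `unsafe` block.
//
//     create_descriptor_set_layout(&device, &mut data)?;
//     let (vb, vb_mem) = create_vertex_buffer(&instance, &device, &data, &vertices)?;
//     let (ib, ib_mem) = create_index_buffer(&instance, &device, &data, &indices)?;
//     create_uniform_buffers(&instance, &device, &mut data)?;
//     create_storage_buffers(&instance, &device, &mut data)?;
//     create_descriptor_pool(&device, &mut data)?;
//     create_descriptor_sets(&device, &mut data)?;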