/*************************************************************************/
/*  rendering_device_vulkan.cpp                                          */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md).   */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/
#include "rendering_device_vulkan.h"

#include "core/config/project_settings.h"
#include "core/io/file_access.h"
#include "core/os/os.h"
#include "core/templates/hashfuncs.h"
#include "drivers/vulkan/vulkan_context.h"

#include "thirdparty/spirv-reflect/spirv_reflect.h"

//#define FORCE_FULL_BARRIER
// Get the Vulkan object information and possible stage access types (bitwise OR'd with incoming values)
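// Illustrative call pattern (a sketch, not code found elsewhere in this file):
//
//	VkPipelineStageFlags stages = 0;
//	VkAccessFlags access = 0;
//	Buffer *b = _get_buffer_from_owner(rid, stages, access, BARRIER_MASK_COMPUTE);
//	// For a storage buffer, stages now includes
//	// VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT and access includes
//	// VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT, ready for the
//	// dstStageMask/dstAccessMask side of a VkBufferMemoryBarrier.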
RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID p_buffer, VkPipelineStageFlags &r_stage_mask, VkAccessFlags &r_access_mask, uint32_t p_post_barrier) {
	Buffer *buffer = nullptr;
	if (vertex_buffer_owner.owns(p_buffer)) {
		buffer = vertex_buffer_owner.getornull(p_buffer);
		r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
		r_access_mask |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
		if (buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) {
			if (p_post_barrier & BARRIER_MASK_RASTER) {
				r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
				r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			}
			if (p_post_barrier & BARRIER_MASK_COMPUTE) {
				r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
				r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			}
		}
	} else if (index_buffer_owner.owns(p_buffer)) {
		r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
		r_access_mask |= VK_ACCESS_INDEX_READ_BIT;
		buffer = index_buffer_owner.getornull(p_buffer);
	} else if (uniform_buffer_owner.owns(p_buffer)) {
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		}
		r_access_mask |= VK_ACCESS_UNIFORM_READ_BIT;
		buffer = uniform_buffer_owner.getornull(p_buffer);
	} else if (texture_buffer_owner.owns(p_buffer)) {
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT;
		}

		buffer = &texture_buffer_owner.getornull(p_buffer)->buffer;
	} else if (storage_buffer_owner.owns(p_buffer)) {
		buffer = storage_buffer_owner.getornull(p_buffer);
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}

		if (buffer->usage & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT) {
			r_stage_mask |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
			r_access_mask |= VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
		}
	}
	return buffer;
}
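
// Widens the external (post-pass) subpass dependency so that whatever consumes
// a stored attachment afterwards is synchronized against the store: transfers
// are always included, sampled/storage use adds the shader stages, plain color
// use adds color attachment output, and depth additionally covers the
// early/late fragment test stages.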
static void update_external_dependency_for_store(VkSubpassDependency &dependency, bool is_sampled, bool is_storage, bool is_depth) {
	// Transitioning from write to read, protect the shaders that may use this next
	// Allow for copies/image layout transitions
	dependency.dstStageMask |= VK_PIPELINE_STAGE_TRANSFER_BIT;
	dependency.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;

	if (is_sampled) {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dependency.dstAccessMask |= VK_ACCESS_SHADER_READ_BIT;
	} else if (is_storage) {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dependency.dstAccessMask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	} else {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		dependency.dstAccessMask |= VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
	}

	if (is_depth) {
		// Depth resources have additional stages that may be interested in them
		dependency.dstStageMask |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
		dependency.dstAccessMask |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
	}
}
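
// RID dependency tracking (used by the two helpers below): dependency_map
// maps a resource to every object built on top of it (e.g. a texture to the
// framebuffers that use it), while reverse_dependency_map stores the opposite
// direction so both links can be removed when either object is freed.
// _free_dependencies() frees direct dependents recursively and then unlinks
// the freed object from everything it depended on.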
void RenderingDeviceVulkan::_add_dependency(RID p_id, RID p_depends_on) {
	if (!dependency_map.has(p_depends_on)) {
		dependency_map[p_depends_on] = Set<RID>();
	}

	dependency_map[p_depends_on].insert(p_id);

	if (!reverse_dependency_map.has(p_id)) {
		reverse_dependency_map[p_id] = Set<RID>();
	}

	reverse_dependency_map[p_id].insert(p_depends_on);
}

void RenderingDeviceVulkan::_free_dependencies(RID p_id) {
	//direct dependencies must be freed

	Map<RID, Set<RID>>::Element *E = dependency_map.find(p_id);
	if (E) {
		while (E->get().size()) {
			free(E->get().front()->get());
		}
		dependency_map.erase(E);
	}

	//reverse dependencies must be unreferenced
	E = reverse_dependency_map.find(p_id);

	if (E) {
		for (Set<RID>::Element *F = E->get().front(); F; F = F->next()) {
			Map<RID, Set<RID>>::Element *G = dependency_map.find(F->get());
			ERR_CONTINUE(!G);
			ERR_CONTINUE(!G->get().has(p_id));
			G->get().erase(p_id);
		}

		reverse_dependency_map.erase(E);
	}
}
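
// Indexed by RenderingDevice::DataFormat; entries must stay in the exact order
// of that enum, which is why the array is sized with DATA_FORMAT_MAX.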
const VkFormat RenderingDeviceVulkan::vulkan_formats[RenderingDevice::DATA_FORMAT_MAX] = {
	VK_FORMAT_R4G4_UNORM_PACK8,
	VK_FORMAT_R4G4B4A4_UNORM_PACK16,
	VK_FORMAT_B4G4R4A4_UNORM_PACK16,
	VK_FORMAT_R5G6B5_UNORM_PACK16,
	VK_FORMAT_B5G6R5_UNORM_PACK16,
	VK_FORMAT_R5G5B5A1_UNORM_PACK16,
	VK_FORMAT_B5G5R5A1_UNORM_PACK16,
	VK_FORMAT_A1R5G5B5_UNORM_PACK16,
	VK_FORMAT_R8_UNORM,
	VK_FORMAT_R8_SNORM,
	VK_FORMAT_R8_USCALED,
	VK_FORMAT_R8_SSCALED,
	VK_FORMAT_R8_UINT,
	VK_FORMAT_R8_SINT,
	VK_FORMAT_R8_SRGB,
	VK_FORMAT_R8G8_UNORM,
	VK_FORMAT_R8G8_SNORM,
	VK_FORMAT_R8G8_USCALED,
	VK_FORMAT_R8G8_SSCALED,
	VK_FORMAT_R8G8_UINT,
	VK_FORMAT_R8G8_SINT,
	VK_FORMAT_R8G8_SRGB,
	VK_FORMAT_R8G8B8_UNORM,
	VK_FORMAT_R8G8B8_SNORM,
	VK_FORMAT_R8G8B8_USCALED,
	VK_FORMAT_R8G8B8_SSCALED,
	VK_FORMAT_R8G8B8_UINT,
	VK_FORMAT_R8G8B8_SINT,
	VK_FORMAT_R8G8B8_SRGB,
	VK_FORMAT_B8G8R8_UNORM,
	VK_FORMAT_B8G8R8_SNORM,
	VK_FORMAT_B8G8R8_USCALED,
	VK_FORMAT_B8G8R8_SSCALED,
	VK_FORMAT_B8G8R8_UINT,
	VK_FORMAT_B8G8R8_SINT,
	VK_FORMAT_B8G8R8_SRGB,
	VK_FORMAT_R8G8B8A8_UNORM,
	VK_FORMAT_R8G8B8A8_SNORM,
	VK_FORMAT_R8G8B8A8_USCALED,
	VK_FORMAT_R8G8B8A8_SSCALED,
	VK_FORMAT_R8G8B8A8_UINT,
	VK_FORMAT_R8G8B8A8_SINT,
	VK_FORMAT_R8G8B8A8_SRGB,
	VK_FORMAT_B8G8R8A8_UNORM,
	VK_FORMAT_B8G8R8A8_SNORM,
	VK_FORMAT_B8G8R8A8_USCALED,
	VK_FORMAT_B8G8R8A8_SSCALED,
	VK_FORMAT_B8G8R8A8_UINT,
	VK_FORMAT_B8G8R8A8_SINT,
	VK_FORMAT_B8G8R8A8_SRGB,
	VK_FORMAT_A8B8G8R8_UNORM_PACK32,
	VK_FORMAT_A8B8G8R8_SNORM_PACK32,
	VK_FORMAT_A8B8G8R8_USCALED_PACK32,
	VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
	VK_FORMAT_A8B8G8R8_UINT_PACK32,
	VK_FORMAT_A8B8G8R8_SINT_PACK32,
	VK_FORMAT_A8B8G8R8_SRGB_PACK32,
	VK_FORMAT_A2R10G10B10_UNORM_PACK32,
	VK_FORMAT_A2R10G10B10_SNORM_PACK32,
	VK_FORMAT_A2R10G10B10_USCALED_PACK32,
	VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
	VK_FORMAT_A2R10G10B10_UINT_PACK32,
	VK_FORMAT_A2R10G10B10_SINT_PACK32,
	VK_FORMAT_A2B10G10R10_UNORM_PACK32,
	VK_FORMAT_A2B10G10R10_SNORM_PACK32,
	VK_FORMAT_A2B10G10R10_USCALED_PACK32,
	VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
	VK_FORMAT_A2B10G10R10_UINT_PACK32,
	VK_FORMAT_A2B10G10R10_SINT_PACK32,
	VK_FORMAT_R16_UNORM,
	VK_FORMAT_R16_SNORM,
	VK_FORMAT_R16_USCALED,
	VK_FORMAT_R16_SSCALED,
	VK_FORMAT_R16_UINT,
	VK_FORMAT_R16_SINT,
	VK_FORMAT_R16_SFLOAT,
	VK_FORMAT_R16G16_UNORM,
	VK_FORMAT_R16G16_SNORM,
	VK_FORMAT_R16G16_USCALED,
	VK_FORMAT_R16G16_SSCALED,
	VK_FORMAT_R16G16_UINT,
	VK_FORMAT_R16G16_SINT,
	VK_FORMAT_R16G16_SFLOAT,
	VK_FORMAT_R16G16B16_UNORM,
	VK_FORMAT_R16G16B16_SNORM,
	VK_FORMAT_R16G16B16_USCALED,
	VK_FORMAT_R16G16B16_SSCALED,
	VK_FORMAT_R16G16B16_UINT,
	VK_FORMAT_R16G16B16_SINT,
	VK_FORMAT_R16G16B16_SFLOAT,
	VK_FORMAT_R16G16B16A16_UNORM,
	VK_FORMAT_R16G16B16A16_SNORM,
	VK_FORMAT_R16G16B16A16_USCALED,
	VK_FORMAT_R16G16B16A16_SSCALED,
	VK_FORMAT_R16G16B16A16_UINT,
	VK_FORMAT_R16G16B16A16_SINT,
	VK_FORMAT_R16G16B16A16_SFLOAT,
	VK_FORMAT_R32_UINT,
	VK_FORMAT_R32_SINT,
	VK_FORMAT_R32_SFLOAT,
	VK_FORMAT_R32G32_UINT,
	VK_FORMAT_R32G32_SINT,
	VK_FORMAT_R32G32_SFLOAT,
	VK_FORMAT_R32G32B32_UINT,
	VK_FORMAT_R32G32B32_SINT,
	VK_FORMAT_R32G32B32_SFLOAT,
	VK_FORMAT_R32G32B32A32_UINT,
	VK_FORMAT_R32G32B32A32_SINT,
	VK_FORMAT_R32G32B32A32_SFLOAT,
	VK_FORMAT_R64_UINT,
	VK_FORMAT_R64_SINT,
	VK_FORMAT_R64_SFLOAT,
	VK_FORMAT_R64G64_UINT,
	VK_FORMAT_R64G64_SINT,
	VK_FORMAT_R64G64_SFLOAT,
	VK_FORMAT_R64G64B64_UINT,
	VK_FORMAT_R64G64B64_SINT,
	VK_FORMAT_R64G64B64_SFLOAT,
	VK_FORMAT_R64G64B64A64_UINT,
	VK_FORMAT_R64G64B64A64_SINT,
	VK_FORMAT_R64G64B64A64_SFLOAT,
	VK_FORMAT_B10G11R11_UFLOAT_PACK32,
	VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
	VK_FORMAT_D16_UNORM,
	VK_FORMAT_X8_D24_UNORM_PACK32,
	VK_FORMAT_D32_SFLOAT,
	VK_FORMAT_S8_UINT,
	VK_FORMAT_D16_UNORM_S8_UINT,
	VK_FORMAT_D24_UNORM_S8_UINT,
	VK_FORMAT_D32_SFLOAT_S8_UINT,
	VK_FORMAT_BC1_RGB_UNORM_BLOCK,
	VK_FORMAT_BC1_RGB_SRGB_BLOCK,
	VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
	VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
	VK_FORMAT_BC2_UNORM_BLOCK,
	VK_FORMAT_BC2_SRGB_BLOCK,
	VK_FORMAT_BC3_UNORM_BLOCK,
	VK_FORMAT_BC3_SRGB_BLOCK,
	VK_FORMAT_BC4_UNORM_BLOCK,
	VK_FORMAT_BC4_SNORM_BLOCK,
	VK_FORMAT_BC5_UNORM_BLOCK,
	VK_FORMAT_BC5_SNORM_BLOCK,
	VK_FORMAT_BC6H_UFLOAT_BLOCK,
	VK_FORMAT_BC6H_SFLOAT_BLOCK,
	VK_FORMAT_BC7_UNORM_BLOCK,
	VK_FORMAT_BC7_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
	VK_FORMAT_EAC_R11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11_SNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
	VK_FORMAT_G8B8G8R8_422_UNORM,
	VK_FORMAT_B8G8R8G8_422_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
	VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
	VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
	VK_FORMAT_R10X6_UNORM_PACK16,
	VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
	VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
	VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
	VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
	VK_FORMAT_R12X4_UNORM_PACK16,
	VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
	VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
	VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
	VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
	VK_FORMAT_G16B16G16R16_422_UNORM,
	VK_FORMAT_B16G16R16G16_422_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
	VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
	VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
	VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
};
const char *RenderingDeviceVulkan::named_formats[RenderingDevice::DATA_FORMAT_MAX] = {
	"R4G4_Unorm_Pack8",
	"R4G4B4A4_Unorm_Pack16",
	"B4G4R4A4_Unorm_Pack16",
	"R5G6B5_Unorm_Pack16",
	"B5G6R5_Unorm_Pack16",
	"R5G5B5A1_Unorm_Pack16",
	"B5G5R5A1_Unorm_Pack16",
	"A1R5G5B5_Unorm_Pack16",
	"R8_Unorm",
	"R8_Snorm",
	"R8_Uscaled",
	"R8_Sscaled",
	"R8_Uint",
	"R8_Sint",
	"R8_Srgb",
	"R8G8_Unorm",
	"R8G8_Snorm",
	"R8G8_Uscaled",
	"R8G8_Sscaled",
	"R8G8_Uint",
	"R8G8_Sint",
	"R8G8_Srgb",
	"R8G8B8_Unorm",
	"R8G8B8_Snorm",
	"R8G8B8_Uscaled",
	"R8G8B8_Sscaled",
	"R8G8B8_Uint",
	"R8G8B8_Sint",
	"R8G8B8_Srgb",
	"B8G8R8_Unorm",
	"B8G8R8_Snorm",
	"B8G8R8_Uscaled",
	"B8G8R8_Sscaled",
	"B8G8R8_Uint",
	"B8G8R8_Sint",
	"B8G8R8_Srgb",
	"R8G8B8A8_Unorm",
	"R8G8B8A8_Snorm",
	"R8G8B8A8_Uscaled",
	"R8G8B8A8_Sscaled",
	"R8G8B8A8_Uint",
	"R8G8B8A8_Sint",
	"R8G8B8A8_Srgb",
	"B8G8R8A8_Unorm",
	"B8G8R8A8_Snorm",
	"B8G8R8A8_Uscaled",
	"B8G8R8A8_Sscaled",
	"B8G8R8A8_Uint",
	"B8G8R8A8_Sint",
	"B8G8R8A8_Srgb",
	"A8B8G8R8_Unorm_Pack32",
	"A8B8G8R8_Snorm_Pack32",
	"A8B8G8R8_Uscaled_Pack32",
	"A8B8G8R8_Sscaled_Pack32",
	"A8B8G8R8_Uint_Pack32",
	"A8B8G8R8_Sint_Pack32",
	"A8B8G8R8_Srgb_Pack32",
	"A2R10G10B10_Unorm_Pack32",
	"A2R10G10B10_Snorm_Pack32",
	"A2R10G10B10_Uscaled_Pack32",
	"A2R10G10B10_Sscaled_Pack32",
	"A2R10G10B10_Uint_Pack32",
	"A2R10G10B10_Sint_Pack32",
	"A2B10G10R10_Unorm_Pack32",
	"A2B10G10R10_Snorm_Pack32",
	"A2B10G10R10_Uscaled_Pack32",
	"A2B10G10R10_Sscaled_Pack32",
	"A2B10G10R10_Uint_Pack32",
	"A2B10G10R10_Sint_Pack32",
	"R16_Unorm",
	"R16_Snorm",
	"R16_Uscaled",
	"R16_Sscaled",
	"R16_Uint",
	"R16_Sint",
	"R16_Sfloat",
	"R16G16_Unorm",
	"R16G16_Snorm",
	"R16G16_Uscaled",
	"R16G16_Sscaled",
	"R16G16_Uint",
	"R16G16_Sint",
	"R16G16_Sfloat",
	"R16G16B16_Unorm",
	"R16G16B16_Snorm",
	"R16G16B16_Uscaled",
	"R16G16B16_Sscaled",
	"R16G16B16_Uint",
	"R16G16B16_Sint",
	"R16G16B16_Sfloat",
	"R16G16B16A16_Unorm",
	"R16G16B16A16_Snorm",
	"R16G16B16A16_Uscaled",
	"R16G16B16A16_Sscaled",
	"R16G16B16A16_Uint",
	"R16G16B16A16_Sint",
	"R16G16B16A16_Sfloat",
	"R32_Uint",
	"R32_Sint",
	"R32_Sfloat",
	"R32G32_Uint",
	"R32G32_Sint",
	"R32G32_Sfloat",
	"R32G32B32_Uint",
	"R32G32B32_Sint",
	"R32G32B32_Sfloat",
	"R32G32B32A32_Uint",
	"R32G32B32A32_Sint",
	"R32G32B32A32_Sfloat",
	"R64_Uint",
	"R64_Sint",
	"R64_Sfloat",
	"R64G64_Uint",
	"R64G64_Sint",
	"R64G64_Sfloat",
	"R64G64B64_Uint",
	"R64G64B64_Sint",
	"R64G64B64_Sfloat",
	"R64G64B64A64_Uint",
	"R64G64B64A64_Sint",
	"R64G64B64A64_Sfloat",
	"B10G11R11_Ufloat_Pack32",
	"E5B9G9R9_Ufloat_Pack32",
	"D16_Unorm",
	"X8_D24_Unorm_Pack32",
	"D32_Sfloat",
	"S8_Uint",
	"D16_Unorm_S8_Uint",
	"D24_Unorm_S8_Uint",
	"D32_Sfloat_S8_Uint",
	"Bc1_Rgb_Unorm_Block",
	"Bc1_Rgb_Srgb_Block",
	"Bc1_Rgba_Unorm_Block",
	"Bc1_Rgba_Srgb_Block",
	"Bc2_Unorm_Block",
	"Bc2_Srgb_Block",
	"Bc3_Unorm_Block",
	"Bc3_Srgb_Block",
	"Bc4_Unorm_Block",
	"Bc4_Snorm_Block",
	"Bc5_Unorm_Block",
	"Bc5_Snorm_Block",
	"Bc6H_Ufloat_Block",
	"Bc6H_Sfloat_Block",
	"Bc7_Unorm_Block",
	"Bc7_Srgb_Block",
	"Etc2_R8G8B8_Unorm_Block",
	"Etc2_R8G8B8_Srgb_Block",
	"Etc2_R8G8B8A1_Unorm_Block",
	"Etc2_R8G8B8A1_Srgb_Block",
	"Etc2_R8G8B8A8_Unorm_Block",
	"Etc2_R8G8B8A8_Srgb_Block",
	"Eac_R11_Unorm_Block",
	"Eac_R11_Snorm_Block",
	"Eac_R11G11_Unorm_Block",
	"Eac_R11G11_Snorm_Block",
	"Astc_4X4_Unorm_Block",
	"Astc_4X4_Srgb_Block",
	"Astc_5X4_Unorm_Block",
	"Astc_5X4_Srgb_Block",
	"Astc_5X5_Unorm_Block",
	"Astc_5X5_Srgb_Block",
	"Astc_6X5_Unorm_Block",
	"Astc_6X5_Srgb_Block",
	"Astc_6X6_Unorm_Block",
	"Astc_6X6_Srgb_Block",
	"Astc_8X5_Unorm_Block",
	"Astc_8X5_Srgb_Block",
	"Astc_8X6_Unorm_Block",
	"Astc_8X6_Srgb_Block",
	"Astc_8X8_Unorm_Block",
	"Astc_8X8_Srgb_Block",
	"Astc_10X5_Unorm_Block",
	"Astc_10X5_Srgb_Block",
	"Astc_10X6_Unorm_Block",
	"Astc_10X6_Srgb_Block",
	"Astc_10X8_Unorm_Block",
	"Astc_10X8_Srgb_Block",
	"Astc_10X10_Unorm_Block",
	"Astc_10X10_Srgb_Block",
	"Astc_12X10_Unorm_Block",
	"Astc_12X10_Srgb_Block",
	"Astc_12X12_Unorm_Block",
	"Astc_12X12_Srgb_Block",
	"G8B8G8R8_422_Unorm",
	"B8G8R8G8_422_Unorm",
	"G8_B8_R8_3Plane_420_Unorm",
	"G8_B8R8_2Plane_420_Unorm",
	"G8_B8_R8_3Plane_422_Unorm",
	"G8_B8R8_2Plane_422_Unorm",
	"G8_B8_R8_3Plane_444_Unorm",
	"R10X6_Unorm_Pack16",
	"R10X6G10X6_Unorm_2Pack16",
	"R10X6G10X6B10X6A10X6_Unorm_4Pack16",
	"G10X6B10X6G10X6R10X6_422_Unorm_4Pack16",
	"B10X6G10X6R10X6G10X6_422_Unorm_4Pack16",
	"G10X6_B10X6_R10X6_3Plane_420_Unorm_3Pack16",
	"G10X6_B10X6R10X6_2Plane_420_Unorm_3Pack16",
	"G10X6_B10X6_R10X6_3Plane_422_Unorm_3Pack16",
	"G10X6_B10X6R10X6_2Plane_422_Unorm_3Pack16",
	"G10X6_B10X6_R10X6_3Plane_444_Unorm_3Pack16",
	"R12X4_Unorm_Pack16",
	"R12X4G12X4_Unorm_2Pack16",
	"R12X4G12X4B12X4A12X4_Unorm_4Pack16",
	"G12X4B12X4G12X4R12X4_422_Unorm_4Pack16",
	"B12X4G12X4R12X4G12X4_422_Unorm_4Pack16",
	"G12X4_B12X4_R12X4_3Plane_420_Unorm_3Pack16",
	"G12X4_B12X4R12X4_2Plane_420_Unorm_3Pack16",
	"G12X4_B12X4_R12X4_3Plane_422_Unorm_3Pack16",
	"G12X4_B12X4R12X4_2Plane_422_Unorm_3Pack16",
	"G12X4_B12X4_R12X4_3Plane_444_Unorm_3Pack16",
	"G16B16G16R16_422_Unorm",
	"B16G16R16G16_422_Unorm",
	"G16_B16_R16_3Plane_420_Unorm",
	"G16_B16R16_2Plane_420_Unorm",
	"G16_B16_R16_3Plane_422_Unorm",
	"G16_B16R16_2Plane_422_Unorm",
	"G16_B16_R16_3Plane_444_Unorm",
	"Pvrtc1_2Bpp_Unorm_Block_Img",
	"Pvrtc1_4Bpp_Unorm_Block_Img",
	"Pvrtc2_2Bpp_Unorm_Block_Img",
	"Pvrtc2_4Bpp_Unorm_Block_Img",
	"Pvrtc1_2Bpp_Srgb_Block_Img",
	"Pvrtc1_4Bpp_Srgb_Block_Img",
	"Pvrtc2_2Bpp_Srgb_Block_Img",
	"Pvrtc2_4Bpp_Srgb_Block_Img"
};
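
// Vertex attribute size lookup for the formats above. Sizes are rounded up to
// a 4-byte multiple as expected for vertex fetch (e.g. R16_SFLOAT reports 4,
// and R16G16B16_SFLOAT, 6 bytes of data, reports 8); a return value of 0 means
// the format is not supported as a vertex attribute.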
int RenderingDeviceVulkan::get_format_vertex_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_R8_UNORM:
		case DATA_FORMAT_R8_SNORM:
		case DATA_FORMAT_R8_UINT:
		case DATA_FORMAT_R8_SINT:
		case DATA_FORMAT_R8G8_UNORM:
		case DATA_FORMAT_R8G8_SNORM:
		case DATA_FORMAT_R8G8_UINT:
		case DATA_FORMAT_R8G8_SINT:
		case DATA_FORMAT_R8G8B8_UNORM:
		case DATA_FORMAT_R8G8B8_SNORM:
		case DATA_FORMAT_R8G8B8_UINT:
		case DATA_FORMAT_R8G8B8_SINT:
		case DATA_FORMAT_B8G8R8_UNORM:
		case DATA_FORMAT_B8G8R8_SNORM:
		case DATA_FORMAT_B8G8R8_UINT:
		case DATA_FORMAT_B8G8R8_SINT:
		case DATA_FORMAT_R8G8B8A8_UNORM:
		case DATA_FORMAT_R8G8B8A8_SNORM:
		case DATA_FORMAT_R8G8B8A8_UINT:
		case DATA_FORMAT_R8G8B8A8_SINT:
		case DATA_FORMAT_B8G8R8A8_UNORM:
		case DATA_FORMAT_B8G8R8A8_SNORM:
		case DATA_FORMAT_B8G8R8A8_UINT:
		case DATA_FORMAT_B8G8R8A8_SINT:
		case DATA_FORMAT_A2B10G10R10_UNORM_PACK32:
			return 4;
		case DATA_FORMAT_R16_UNORM:
		case DATA_FORMAT_R16_SNORM:
		case DATA_FORMAT_R16_UINT:
		case DATA_FORMAT_R16_SINT:
		case DATA_FORMAT_R16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16_UNORM:
		case DATA_FORMAT_R16G16_SNORM:
		case DATA_FORMAT_R16G16_UINT:
		case DATA_FORMAT_R16G16_SINT:
		case DATA_FORMAT_R16G16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16B16_UNORM:
		case DATA_FORMAT_R16G16B16_SNORM:
		case DATA_FORMAT_R16G16B16_UINT:
		case DATA_FORMAT_R16G16B16_SINT:
		case DATA_FORMAT_R16G16B16_SFLOAT:
			return 8;
		case DATA_FORMAT_R16G16B16A16_UNORM:
		case DATA_FORMAT_R16G16B16A16_SNORM:
		case DATA_FORMAT_R16G16B16A16_UINT:
		case DATA_FORMAT_R16G16B16A16_SINT:
		case DATA_FORMAT_R16G16B16A16_SFLOAT:
			return 8;
		case DATA_FORMAT_R32_UINT:
		case DATA_FORMAT_R32_SINT:
		case DATA_FORMAT_R32_SFLOAT:
			return 4;
		case DATA_FORMAT_R32G32_UINT:
		case DATA_FORMAT_R32G32_SINT:
		case DATA_FORMAT_R32G32_SFLOAT:
			return 8;
		case DATA_FORMAT_R32G32B32_UINT:
		case DATA_FORMAT_R32G32B32_SINT:
		case DATA_FORMAT_R32G32B32_SFLOAT:
			return 12;
		case DATA_FORMAT_R32G32B32A32_UINT:
		case DATA_FORMAT_R32G32B32A32_SINT:
		case DATA_FORMAT_R32G32B32A32_SFLOAT:
			return 16;
		case DATA_FORMAT_R64_UINT:
		case DATA_FORMAT_R64_SINT:
		case DATA_FORMAT_R64_SFLOAT:
			return 8;
		case DATA_FORMAT_R64G64_UINT:
		case DATA_FORMAT_R64G64_SINT:
		case DATA_FORMAT_R64G64_SFLOAT:
			return 16;
		case DATA_FORMAT_R64G64B64_UINT:
		case DATA_FORMAT_R64G64B64_SINT:
		case DATA_FORMAT_R64G64B64_SFLOAT:
			return 24;
		case DATA_FORMAT_R64G64B64A64_UINT:
		case DATA_FORMAT_R64G64B64A64_SINT:
		case DATA_FORMAT_R64G64B64A64_SFLOAT:
			return 32;
		default:
			return 0;
	}
}
uint32_t RenderingDeviceVulkan::get_image_format_pixel_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_R4G4_UNORM_PACK8:
			return 1;
		case DATA_FORMAT_R4G4B4A4_UNORM_PACK16:
		case DATA_FORMAT_B4G4R4A4_UNORM_PACK16:
		case DATA_FORMAT_R5G6B5_UNORM_PACK16:
		case DATA_FORMAT_B5G6R5_UNORM_PACK16:
		case DATA_FORMAT_R5G5B5A1_UNORM_PACK16:
		case DATA_FORMAT_B5G5R5A1_UNORM_PACK16:
		case DATA_FORMAT_A1R5G5B5_UNORM_PACK16:
			return 2;
		case DATA_FORMAT_R8_UNORM:
		case DATA_FORMAT_R8_SNORM:
		case DATA_FORMAT_R8_USCALED:
		case DATA_FORMAT_R8_SSCALED:
		case DATA_FORMAT_R8_UINT:
		case DATA_FORMAT_R8_SINT:
		case DATA_FORMAT_R8_SRGB:
			return 1;
		case DATA_FORMAT_R8G8_UNORM:
		case DATA_FORMAT_R8G8_SNORM:
		case DATA_FORMAT_R8G8_USCALED:
		case DATA_FORMAT_R8G8_SSCALED:
		case DATA_FORMAT_R8G8_UINT:
		case DATA_FORMAT_R8G8_SINT:
		case DATA_FORMAT_R8G8_SRGB:
			return 2;
		case DATA_FORMAT_R8G8B8_UNORM:
		case DATA_FORMAT_R8G8B8_SNORM:
		case DATA_FORMAT_R8G8B8_USCALED:
		case DATA_FORMAT_R8G8B8_SSCALED:
		case DATA_FORMAT_R8G8B8_UINT:
		case DATA_FORMAT_R8G8B8_SINT:
		case DATA_FORMAT_R8G8B8_SRGB:
		case DATA_FORMAT_B8G8R8_UNORM:
		case DATA_FORMAT_B8G8R8_SNORM:
		case DATA_FORMAT_B8G8R8_USCALED:
		case DATA_FORMAT_B8G8R8_SSCALED:
		case DATA_FORMAT_B8G8R8_UINT:
		case DATA_FORMAT_B8G8R8_SINT:
		case DATA_FORMAT_B8G8R8_SRGB:
			return 3;
		case DATA_FORMAT_R8G8B8A8_UNORM:
		case DATA_FORMAT_R8G8B8A8_SNORM:
		case DATA_FORMAT_R8G8B8A8_USCALED:
		case DATA_FORMAT_R8G8B8A8_SSCALED:
		case DATA_FORMAT_R8G8B8A8_UINT:
		case DATA_FORMAT_R8G8B8A8_SINT:
		case DATA_FORMAT_R8G8B8A8_SRGB:
		case DATA_FORMAT_B8G8R8A8_UNORM:
		case DATA_FORMAT_B8G8R8A8_SNORM:
		case DATA_FORMAT_B8G8R8A8_USCALED:
		case DATA_FORMAT_B8G8R8A8_SSCALED:
		case DATA_FORMAT_B8G8R8A8_UINT:
		case DATA_FORMAT_B8G8R8A8_SINT:
		case DATA_FORMAT_B8G8R8A8_SRGB:
			return 4;
		case DATA_FORMAT_A8B8G8R8_UNORM_PACK32:
		case DATA_FORMAT_A8B8G8R8_SNORM_PACK32:
		case DATA_FORMAT_A8B8G8R8_USCALED_PACK32:
		case DATA_FORMAT_A8B8G8R8_SSCALED_PACK32:
		case DATA_FORMAT_A8B8G8R8_UINT_PACK32:
		case DATA_FORMAT_A8B8G8R8_SINT_PACK32:
		case DATA_FORMAT_A8B8G8R8_SRGB_PACK32:
		case DATA_FORMAT_A2R10G10B10_UNORM_PACK32:
		case DATA_FORMAT_A2R10G10B10_SNORM_PACK32:
		case DATA_FORMAT_A2R10G10B10_USCALED_PACK32:
		case DATA_FORMAT_A2R10G10B10_SSCALED_PACK32:
		case DATA_FORMAT_A2R10G10B10_UINT_PACK32:
		case DATA_FORMAT_A2R10G10B10_SINT_PACK32:
		case DATA_FORMAT_A2B10G10R10_UNORM_PACK32:
		case DATA_FORMAT_A2B10G10R10_SNORM_PACK32:
		case DATA_FORMAT_A2B10G10R10_USCALED_PACK32:
		case DATA_FORMAT_A2B10G10R10_SSCALED_PACK32:
		case DATA_FORMAT_A2B10G10R10_UINT_PACK32:
		case DATA_FORMAT_A2B10G10R10_SINT_PACK32:
			return 4;
		case DATA_FORMAT_R16_UNORM:
		case DATA_FORMAT_R16_SNORM:
		case DATA_FORMAT_R16_USCALED:
		case DATA_FORMAT_R16_SSCALED:
		case DATA_FORMAT_R16_UINT:
		case DATA_FORMAT_R16_SINT:
		case DATA_FORMAT_R16_SFLOAT:
			return 2;
		case DATA_FORMAT_R16G16_UNORM:
		case DATA_FORMAT_R16G16_SNORM:
		case DATA_FORMAT_R16G16_USCALED:
		case DATA_FORMAT_R16G16_SSCALED:
		case DATA_FORMAT_R16G16_UINT:
		case DATA_FORMAT_R16G16_SINT:
		case DATA_FORMAT_R16G16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16B16_UNORM:
		case DATA_FORMAT_R16G16B16_SNORM:
		case DATA_FORMAT_R16G16B16_USCALED:
		case DATA_FORMAT_R16G16B16_SSCALED:
		case DATA_FORMAT_R16G16B16_UINT:
		case DATA_FORMAT_R16G16B16_SINT:
		case DATA_FORMAT_R16G16B16_SFLOAT:
			return 6;
		case DATA_FORMAT_R16G16B16A16_UNORM:
		case DATA_FORMAT_R16G16B16A16_SNORM:
		case DATA_FORMAT_R16G16B16A16_USCALED:
		case DATA_FORMAT_R16G16B16A16_SSCALED:
		case DATA_FORMAT_R16G16B16A16_UINT:
		case DATA_FORMAT_R16G16B16A16_SINT:
		case DATA_FORMAT_R16G16B16A16_SFLOAT:
			return 8;
		case DATA_FORMAT_R32_UINT:
		case DATA_FORMAT_R32_SINT:
		case DATA_FORMAT_R32_SFLOAT:
			return 4;
		case DATA_FORMAT_R32G32_UINT:
		case DATA_FORMAT_R32G32_SINT:
		case DATA_FORMAT_R32G32_SFLOAT:
			return 8;
		case DATA_FORMAT_R32G32B32_UINT:
		case DATA_FORMAT_R32G32B32_SINT:
		case DATA_FORMAT_R32G32B32_SFLOAT:
			return 12;
		case DATA_FORMAT_R32G32B32A32_UINT:
		case DATA_FORMAT_R32G32B32A32_SINT:
		case DATA_FORMAT_R32G32B32A32_SFLOAT:
			return 16;
		case DATA_FORMAT_R64_UINT:
		case DATA_FORMAT_R64_SINT:
		case DATA_FORMAT_R64_SFLOAT:
			return 8;
		case DATA_FORMAT_R64G64_UINT:
		case DATA_FORMAT_R64G64_SINT:
		case DATA_FORMAT_R64G64_SFLOAT:
			return 16;
		case DATA_FORMAT_R64G64B64_UINT:
		case DATA_FORMAT_R64G64B64_SINT:
		case DATA_FORMAT_R64G64B64_SFLOAT:
			return 24;
		case DATA_FORMAT_R64G64B64A64_UINT:
		case DATA_FORMAT_R64G64B64A64_SINT:
		case DATA_FORMAT_R64G64B64A64_SFLOAT:
			return 32;
		case DATA_FORMAT_B10G11R11_UFLOAT_PACK32:
		case DATA_FORMAT_E5B9G9R9_UFLOAT_PACK32:
			return 4;
		case DATA_FORMAT_D16_UNORM:
			return 2;
		case DATA_FORMAT_X8_D24_UNORM_PACK32:
			return 4;
		case DATA_FORMAT_D32_SFLOAT:
			return 4;
		case DATA_FORMAT_S8_UINT:
			return 1;
		case DATA_FORMAT_D16_UNORM_S8_UINT:
			return 4;
		case DATA_FORMAT_D24_UNORM_S8_UINT:
			return 4;
		case DATA_FORMAT_D32_SFLOAT_S8_UINT:
			return 5; //?
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
		case DATA_FORMAT_BC2_UNORM_BLOCK:
		case DATA_FORMAT_BC2_SRGB_BLOCK:
		case DATA_FORMAT_BC3_UNORM_BLOCK:
		case DATA_FORMAT_BC3_SRGB_BLOCK:
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
		case DATA_FORMAT_BC5_UNORM_BLOCK:
		case DATA_FORMAT_BC5_SNORM_BLOCK:
		case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
		case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
		case DATA_FORMAT_BC7_UNORM_BLOCK:
		case DATA_FORMAT_BC7_SRGB_BLOCK:
			return 1;
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
			return 1;
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
			return 1;
		case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
			return 1;
		case DATA_FORMAT_G8B8G8R8_422_UNORM:
		case DATA_FORMAT_B8G8R8G8_422_UNORM:
			return 4;
		case DATA_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
		case DATA_FORMAT_G8_B8R8_2PLANE_420_UNORM:
		case DATA_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
		case DATA_FORMAT_G8_B8R8_2PLANE_422_UNORM:
		case DATA_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
			return 4;
		case DATA_FORMAT_R10X6_UNORM_PACK16:
		case DATA_FORMAT_R10X6G10X6_UNORM_2PACK16:
		case DATA_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
		case DATA_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
		case DATA_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
		case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
		case DATA_FORMAT_R12X4_UNORM_PACK16:
		case DATA_FORMAT_R12X4G12X4_UNORM_2PACK16:
		case DATA_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16:
		case DATA_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
		case DATA_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
		case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
			return 2;
		case DATA_FORMAT_G16B16G16R16_422_UNORM:
		case DATA_FORMAT_B16G16R16G16_422_UNORM:
		case DATA_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
		case DATA_FORMAT_G16_B16R16_2PLANE_420_UNORM:
		case DATA_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
		case DATA_FORMAT_G16_B16R16_2PLANE_422_UNORM:
		case DATA_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
			return 8;
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
			return 1;
		default: {
			ERR_PRINT("Format not handled, bug");
		}
	}

	return 1;
}
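
// Note: for block-compressed formats the function above returns 1 (a
// placeholder per-pixel size); the real storage cost of compressed data comes
// from the block helpers below combined with
// get_compressed_image_format_pixel_rshift().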
// https://www.khronos.org/registry/DataFormat/specs/1.1/dataformat.1.1.pdf

void RenderingDeviceVulkan::get_compressed_image_format_block_dimensions(DataFormat p_format, uint32_t &r_w, uint32_t &r_h) {
	switch (p_format) {
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
		case DATA_FORMAT_BC2_UNORM_BLOCK:
		case DATA_FORMAT_BC2_SRGB_BLOCK:
		case DATA_FORMAT_BC3_UNORM_BLOCK:
		case DATA_FORMAT_BC3_SRGB_BLOCK:
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
		case DATA_FORMAT_BC5_UNORM_BLOCK:
		case DATA_FORMAT_BC5_SNORM_BLOCK:
		case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
		case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
		case DATA_FORMAT_BC7_UNORM_BLOCK:
		case DATA_FORMAT_BC7_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
		case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK: //again, not sure about astc
		case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
			r_w = 4;
			r_h = 4;
			return;
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
			r_w = 4;
			r_h = 4;
			return;
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
			r_w = 8;
			r_h = 4;
			return;
		default: {
			r_w = 1;
			r_h = 1;
		}
	}
}
uint32_t RenderingDeviceVulkan::get_compressed_image_format_block_byte_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
			return 8;
		case DATA_FORMAT_BC2_UNORM_BLOCK:
		case DATA_FORMAT_BC2_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_BC3_UNORM_BLOCK:
		case DATA_FORMAT_BC3_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
			return 8;
		case DATA_FORMAT_BC5_UNORM_BLOCK:
		case DATA_FORMAT_BC5_SNORM_BLOCK:
			return 16;
		case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
		case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
			return 16;
		case DATA_FORMAT_BC7_UNORM_BLOCK:
		case DATA_FORMAT_BC7_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
			return 8;
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
			return 8;
		case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
			return 8;
		case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
			return 16;
		case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK: //again, not sure about astc
		case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
			return 8; //wrong (ASTC blocks are 128 bits = 16 bytes regardless of footprint)
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
			return 8; //what varies is the block footprint
		default: {
		}
	}
	return 1;
}
uint32_t RenderingDeviceVulkan::get_compressed_image_format_pixel_rshift(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK: //these formats are half a byte per pixel, so rshift is 1
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
			return 1;
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: //these formats are a quarter byte per pixel, so rshift is 2
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
			return 2;
		default: {
		}
	}

	return 0;
}
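
// Worked example of how the compression helpers combine, using BC1 (DXT1):
// get_image_format_pixel_size() returns 1 and the rshift above is 1, i.e.
// 0.5 bytes per pixel; block dimensions are 4x4 and the block byte size is 8,
// which agrees: 4 * 4 pixels * 0.5 bytes = 8 bytes per block.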
bool RenderingDeviceVulkan::format_has_stencil(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_S8_UINT:
		case DATA_FORMAT_D16_UNORM_S8_UINT:
		case DATA_FORMAT_D24_UNORM_S8_UINT:
		case DATA_FORMAT_D32_SFLOAT_S8_UINT: {
			return true;
		}
		default: {
		}
	}
	return false;
}
uint32_t RenderingDeviceVulkan::get_image_format_required_size(DataFormat p_format, uint32_t p_width, uint32_t p_height, uint32_t p_depth, uint32_t p_mipmaps, uint32_t *r_blockw, uint32_t *r_blockh, uint32_t *r_depth) {
	ERR_FAIL_COND_V(p_mipmaps == 0, 0);
	uint32_t w = p_width;
	uint32_t h = p_height;
	uint32_t d = p_depth;

	uint32_t size = 0;

	uint32_t pixel_size = get_image_format_pixel_size(p_format);
	uint32_t pixel_rshift = get_compressed_image_format_pixel_rshift(p_format);
	uint32_t blockw, blockh;
	get_compressed_image_format_block_dimensions(p_format, blockw, blockh);

	for (uint32_t i = 0; i < p_mipmaps; i++) {
		uint32_t bw = w % blockw != 0 ? w + (blockw - w % blockw) : w;
		uint32_t bh = h % blockh != 0 ? h + (blockh - h % blockh) : h;

		uint32_t s = bw * bh;

		s *= pixel_size;
		s >>= pixel_rshift;
		size += s * d;
		if (r_blockw) {
			*r_blockw = bw;
		}
		if (r_blockh) {
			*r_blockh = bh;
		}
		if (r_depth) {
			*r_depth = d;
		}
		w = MAX(blockw, w >> 1);
		h = MAX(blockh, h >> 1);
		d = MAX(1, d >> 1);
	}

	return size;
}
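
// Worked example: a 64x64 BC1 texture with 2 mipmaps. Mip 0 already aligns to
// the 4x4 block grid: 64 * 64 * 1 >> 1 = 2048 bytes. Mip 1 (32x32) adds 512
// bytes, so the function returns 2560.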
uint32_t RenderingDeviceVulkan::get_image_required_mipmaps(uint32_t p_width, uint32_t p_height, uint32_t p_depth) {
	//formats and block size don't really matter here, since they can all go down to 1px (even if block is larger)
	int w = p_width;
	int h = p_height;
	int d = p_depth;

	int mipmaps = 1;

	while (true) {
		if (w == 1 && h == 1 && d == 1) {
			break;
		}

		w = MAX(1, w >> 1);
		h = MAX(1, h >> 1);
		d = MAX(1, d >> 1);

		mipmaps++;
	}

	return mipmaps;
}
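
// Example: a 16x8x1 texture requires 5 mipmap levels (16x8, 8x4, 4x2, 2x1, 1x1).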
///////////////////////

const VkCompareOp RenderingDeviceVulkan::compare_operators[RenderingDevice::COMPARE_OP_MAX] = {
	VK_COMPARE_OP_NEVER,
	VK_COMPARE_OP_LESS,
	VK_COMPARE_OP_EQUAL,
	VK_COMPARE_OP_LESS_OR_EQUAL,
	VK_COMPARE_OP_GREATER,
	VK_COMPARE_OP_NOT_EQUAL,
	VK_COMPARE_OP_GREATER_OR_EQUAL,
	VK_COMPARE_OP_ALWAYS
};

const VkStencilOp RenderingDeviceVulkan::stencil_operations[RenderingDevice::STENCIL_OP_MAX] = {
	VK_STENCIL_OP_KEEP,
	VK_STENCIL_OP_ZERO,
	VK_STENCIL_OP_REPLACE,
	VK_STENCIL_OP_INCREMENT_AND_CLAMP,
	VK_STENCIL_OP_DECREMENT_AND_CLAMP,
	VK_STENCIL_OP_INVERT,
	VK_STENCIL_OP_INCREMENT_AND_WRAP,
	VK_STENCIL_OP_DECREMENT_AND_WRAP
};

const VkSampleCountFlagBits RenderingDeviceVulkan::rasterization_sample_count[RenderingDevice::TEXTURE_SAMPLES_MAX] = {
	VK_SAMPLE_COUNT_1_BIT,
	VK_SAMPLE_COUNT_2_BIT,
	VK_SAMPLE_COUNT_4_BIT,
	VK_SAMPLE_COUNT_8_BIT,
	VK_SAMPLE_COUNT_16_BIT,
	VK_SAMPLE_COUNT_32_BIT,
	VK_SAMPLE_COUNT_64_BIT,
};

const VkLogicOp RenderingDeviceVulkan::logic_operations[RenderingDevice::LOGIC_OP_MAX] = {
	VK_LOGIC_OP_CLEAR,
	VK_LOGIC_OP_AND,
	VK_LOGIC_OP_AND_REVERSE,
	VK_LOGIC_OP_COPY,
	VK_LOGIC_OP_AND_INVERTED,
	VK_LOGIC_OP_NO_OP,
	VK_LOGIC_OP_XOR,
	VK_LOGIC_OP_OR,
	VK_LOGIC_OP_NOR,
	VK_LOGIC_OP_EQUIVALENT,
	VK_LOGIC_OP_INVERT,
	VK_LOGIC_OP_OR_REVERSE,
	VK_LOGIC_OP_COPY_INVERTED,
	VK_LOGIC_OP_OR_INVERTED,
	VK_LOGIC_OP_NAND,
	VK_LOGIC_OP_SET
};

const VkBlendFactor RenderingDeviceVulkan::blend_factors[RenderingDevice::BLEND_FACTOR_MAX] = {
	VK_BLEND_FACTOR_ZERO,
	VK_BLEND_FACTOR_ONE,
	VK_BLEND_FACTOR_SRC_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
	VK_BLEND_FACTOR_DST_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
	VK_BLEND_FACTOR_SRC_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
	VK_BLEND_FACTOR_DST_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
	VK_BLEND_FACTOR_CONSTANT_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
	VK_BLEND_FACTOR_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
	VK_BLEND_FACTOR_SRC1_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
	VK_BLEND_FACTOR_SRC1_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
};

const VkBlendOp RenderingDeviceVulkan::blend_operations[RenderingDevice::BLEND_OP_MAX] = {
	VK_BLEND_OP_ADD,
	VK_BLEND_OP_SUBTRACT,
	VK_BLEND_OP_REVERSE_SUBTRACT,
	VK_BLEND_OP_MIN,
	VK_BLEND_OP_MAX
};

const VkSamplerAddressMode RenderingDeviceVulkan::address_modes[RenderingDevice::SAMPLER_REPEAT_MODE_MAX] = {
	VK_SAMPLER_ADDRESS_MODE_REPEAT,
	VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
	VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
};

const VkBorderColor RenderingDeviceVulkan::sampler_border_colors[RenderingDevice::SAMPLER_BORDER_COLOR_MAX] = {
	VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
	VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
	VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
	VK_BORDER_COLOR_INT_OPAQUE_BLACK,
	VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
	VK_BORDER_COLOR_INT_OPAQUE_WHITE
};

const VkImageType RenderingDeviceVulkan::vulkan_image_type[RenderingDevice::TEXTURE_TYPE_MAX] = {
	VK_IMAGE_TYPE_1D,
	VK_IMAGE_TYPE_2D,
	VK_IMAGE_TYPE_3D,
	VK_IMAGE_TYPE_2D,
	VK_IMAGE_TYPE_1D,
	VK_IMAGE_TYPE_2D,
	VK_IMAGE_TYPE_2D
};
/***************************/
/**** BUFFER MANAGEMENT ****/
/***************************/

Error RenderingDeviceVulkan::_buffer_allocate(Buffer *p_buffer, uint32_t p_size, uint32_t p_usage, VmaMemoryUsage p_mapping) {
	VkBufferCreateInfo bufferInfo;
	bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	bufferInfo.pNext = nullptr;
	bufferInfo.flags = 0;
	bufferInfo.size = p_size;
	bufferInfo.usage = p_usage;
	bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	bufferInfo.queueFamilyIndexCount = 0;
	bufferInfo.pQueueFamilyIndices = nullptr;

	VmaAllocationCreateInfo allocInfo;
	allocInfo.flags = 0;
	allocInfo.usage = p_mapping;
	allocInfo.requiredFlags = 0;
	allocInfo.preferredFlags = 0;
	allocInfo.memoryTypeBits = 0;
	allocInfo.pool = nullptr;
	allocInfo.pUserData = nullptr;

	VkResult err = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &p_buffer->buffer, &p_buffer->allocation, nullptr);
	ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "Can't create buffer of size: " + itos(p_size) + ", error " + itos(err) + ".");
	p_buffer->size = p_size;
	p_buffer->buffer_info.buffer = p_buffer->buffer;
	p_buffer->buffer_info.offset = 0;
	p_buffer->buffer_info.range = p_size;
	p_buffer->usage = p_usage;

	buffer_memory += p_size;

	return OK;
}

Error RenderingDeviceVulkan::_buffer_free(Buffer *p_buffer) {
	ERR_FAIL_COND_V(p_buffer->size == 0, ERR_INVALID_PARAMETER);

	buffer_memory -= p_buffer->size;

	vmaDestroyBuffer(allocator, p_buffer->buffer, p_buffer->allocation);
	p_buffer->buffer = VK_NULL_HANDLE;
	p_buffer->allocation = nullptr;
	p_buffer->size = 0;

	return OK;
}
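
// Illustrative lifecycle of the helpers above (a sketch, assuming the default
// trailing arguments of _buffer_update() declared in the header):
//
//	Buffer tmp;
//	if (_buffer_allocate(&tmp, 256, VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY) == OK) {
//		_buffer_update(&tmp, 0, data, 256); //staged upload, see below
//		//...bind tmp.buffer_info in a descriptor set...
//		_buffer_free(&tmp);
//	}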
Error RenderingDeviceVulkan::_insert_staging_block() {
	VkBufferCreateInfo bufferInfo;
	bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	bufferInfo.pNext = nullptr;
	bufferInfo.flags = 0;
	bufferInfo.size = staging_buffer_block_size;
	bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
	bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	bufferInfo.queueFamilyIndexCount = 0;
	bufferInfo.pQueueFamilyIndices = nullptr;

	VmaAllocationCreateInfo allocInfo;
	allocInfo.flags = 0;
	allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
	allocInfo.requiredFlags = 0;
	allocInfo.preferredFlags = 0;
	allocInfo.memoryTypeBits = 0;
	allocInfo.pool = nullptr;
	allocInfo.pUserData = nullptr;

	StagingBufferBlock block;

	VkResult err = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &block.buffer, &block.allocation, nullptr);
	ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vmaCreateBuffer failed with error " + itos(err) + ".");

	block.frame_used = 0;
	block.fill_amount = 0;

	staging_buffer_blocks.insert(staging_buffer_current, block);
	return OK;
}
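
// Staging strategy: staging_buffer_blocks acts as a ring of CPU-visible blocks
// of staging_buffer_block_size bytes each. _staging_buffer_allocate() below
// walks the ring looking for room in the block used this frame, reuses blocks
// whose frame_used is old enough to be GPU-complete, grows the ring while the
// total stays under staging_buffer_max_size, and as a last resort flushes the
// command buffers and resets every block.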
Error RenderingDeviceVulkan::_staging_buffer_allocate(uint32_t p_amount, uint32_t p_required_align, uint32_t &r_alloc_offset, uint32_t &r_alloc_size, bool p_can_segment, bool p_on_draw_command_buffer) {
	//determine a block to use

	r_alloc_size = p_amount;

	while (true) {
		r_alloc_offset = 0;

		//see if we can use the current block
		if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
			//we used this block this frame, let's see if there is still room

			uint32_t write_from = staging_buffer_blocks[staging_buffer_current].fill_amount;

			{
				uint32_t align_remainder = write_from % p_required_align;
				if (align_remainder != 0) {
					write_from += p_required_align - align_remainder;
				}
			}

			int32_t available_bytes = int32_t(staging_buffer_block_size) - int32_t(write_from);

			if ((int32_t)p_amount < available_bytes) {
				//all is good, everything fits
				r_alloc_offset = write_from;
			} else if (p_can_segment && available_bytes >= (int32_t)p_required_align) {
				//not everything fits, but at least a chunk does
				//update what needs to be written to
				r_alloc_offset = write_from;
				r_alloc_size = available_bytes - (available_bytes % p_required_align);
			} else {
				//can't fit it into this buffer.
				//will need to try the next buffer

				staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();

				// before doing anything, though, let's check that we didn't manage to fill all blocks
				// possible in a single frame
				if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
					//guess we did.. ok, let's see if we can insert a new block..
					if ((uint64_t)staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
						//we can, so we are safe
						Error err = _insert_staging_block();
						if (err) {
							return err;
						}
						//claim for this frame
						staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
					} else {
						// Ok, worst case scenario, all the staging buffers belong to this frame
						// and this frame is not even done.
						// If this is the main thread, it means the user is likely loading a lot of resources at once,
						// otherwise, the thread should just be blocked until the next frame (currently unimplemented).

						if (false) { //separate thread from render
							//block_until_next_frame()
							continue;
						} else {
							//flush EVERYTHING, including setup commands. If not immediate, the draw commands also need to be flushed.
							_flush(true);

							//clear the whole staging buffer
							for (int i = 0; i < staging_buffer_blocks.size(); i++) {
								staging_buffer_blocks.write[i].frame_used = 0;
								staging_buffer_blocks.write[i].fill_amount = 0;
							}

							//claim current
							staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
						}
					}
				} else {
					//not from the current frame, so continue and try again
					continue;
				}
			}

		} else if (staging_buffer_blocks[staging_buffer_current].frame_used <= frames_drawn - frame_count) {
			//this is an old block, which was already processed, let's reuse it
			staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
			staging_buffer_blocks.write[staging_buffer_current].fill_amount = 0;
		} else if (staging_buffer_blocks[staging_buffer_current].frame_used > frames_drawn - frame_count) {
			//this block may still be in use, let's not touch it unless we have to, so.. can we create a new one?
			if ((uint64_t)staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
				//we are still allowed to create a new block, so let's do that and insert it for the current position
				Error err = _insert_staging_block();
				if (err) {
					return err;
				}
				//claim for this frame
				staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
			} else {
				// Oops, we are out of room and we can't create more.
				// Let's flush older frames.
				// The logic here is that if a game is loading a lot of data from the main thread, it will need to be stalled anyway.
				// If loading from a separate thread, we can block that thread until the next frame when more room is made (not currently implemented, though).

				if (false) {
					//separate thread from render
					//block_until_next_frame()
					continue; //and try again
				} else {
					_flush(false);

					for (int i = 0; i < staging_buffer_blocks.size(); i++) {
						//clear all blocks but the ones from this frame
						int block_idx = (i + staging_buffer_current) % staging_buffer_blocks.size();
						if (staging_buffer_blocks[block_idx].frame_used == frames_drawn) {
							break; //ok, we reached something from this frame, abort
						}

						staging_buffer_blocks.write[block_idx].frame_used = 0;
						staging_buffer_blocks.write[block_idx].fill_amount = 0;
					}

					//claim for the current frame
					staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
				}
			}
		}

		//all was good, break
		break;
	}

	staging_buffer_used = true;

	return OK;
}
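
// A minimal sketch of the caller contract (illustrative only, not engine code):
// when p_can_segment is true, r_alloc_size may come back smaller than the
// requested amount, so callers are expected to loop until everything has been
// written, as _buffer_update() below does:
//
//	uint32_t offset, size;
//	while (bytes_left > 0) {
//		if (_staging_buffer_allocate(MIN(bytes_left, staging_buffer_block_size), align, offset, size, true, true) != OK) {
//			break;
//		}
//		// write 'size' bytes at 'offset' into staging_buffer_blocks[staging_buffer_current]
//		bytes_left -= size;
//	}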
Error RenderingDeviceVulkan::_buffer_update(Buffer *p_buffer, size_t p_offset, const uint8_t *p_data, size_t p_data_size, bool p_use_draw_command_buffer, uint32_t p_required_align) {
	//submitting may get chunked for various reasons, so convert this to a task
	size_t to_submit = p_data_size;
	size_t submit_from = 0;

	while (to_submit > 0) {
		uint32_t block_write_offset;
		uint32_t block_write_amount;

		Error err = _staging_buffer_allocate(MIN(to_submit, staging_buffer_block_size), p_required_align, block_write_offset, block_write_amount, p_use_draw_command_buffer);
		if (err) {
			return err;
		}

		//map the staging buffer (it's CPU-visible and coherent)
		void *data_ptr = nullptr;
		{
			VkResult vkerr = vmaMapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation, &data_ptr);
			ERR_FAIL_COND_V_MSG(vkerr, ERR_CANT_CREATE, "vmaMapMemory failed with error " + itos(vkerr) + ".");
		}

		//copy to staging buffer
		memcpy(((uint8_t *)data_ptr) + block_write_offset, p_data + submit_from, block_write_amount);

		//unmap
		vmaUnmapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation);

		//insert a command to copy this chunk
		VkBufferCopy region;
		region.srcOffset = block_write_offset;
		region.dstOffset = submit_from + p_offset;
		region.size = block_write_amount;

		vkCmdCopyBuffer(p_use_draw_command_buffer ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, staging_buffer_blocks[staging_buffer_current].buffer, p_buffer->buffer, 1, &region);

		staging_buffer_blocks.write[staging_buffer_current].fill_amount = block_write_offset + block_write_amount;

		to_submit -= block_write_amount;
		submit_from += block_write_amount;
	}

	return OK;
}
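
// Worked example (illustrative numbers, not engine constants): with a 256 KiB
// staging block size, uploading 600 KiB through _buffer_update() issues three
// vkCmdCopyBuffer() commands of 256 KiB, 256 KiB and 88 KiB, each sourced from
// whichever staging block _staging_buffer_allocate() handed back, with
// dstOffset advancing by the amount written each iteration.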
void RenderingDeviceVulkan::_memory_barrier(VkPipelineStageFlags p_src_stage_mask, VkPipelineStageFlags p_dst_stage_mask, VkAccessFlags p_src_access, VkAccessFlags p_dst_access, bool p_sync_with_draw) {
	if (p_src_stage_mask == 0 || p_dst_stage_mask == 0) {
		return; //no barrier, since this is invalid
	}

	VkMemoryBarrier mem_barrier;
	mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
	mem_barrier.pNext = nullptr;
	mem_barrier.srcAccessMask = p_src_access;
	mem_barrier.dstAccessMask = p_dst_access;

	vkCmdPipelineBarrier(p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, p_src_stage_mask, p_dst_stage_mask, 0, 1, &mem_barrier, 0, nullptr, 0, nullptr);
}
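
// A minimal sketch of a typical call (values are illustrative): making transfer
// writes visible to subsequent compute shader reads, recorded on the draw
// command buffer, would look like:
//
//	_memory_barrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
//			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, true);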
void RenderingDeviceVulkan::_full_barrier(bool p_sync_with_draw) {
	//used for debug
	_memory_barrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
			VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
					VK_ACCESS_INDEX_READ_BIT |
					VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
					VK_ACCESS_UNIFORM_READ_BIT |
					VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
					VK_ACCESS_SHADER_READ_BIT |
					VK_ACCESS_SHADER_WRITE_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_TRANSFER_READ_BIT |
					VK_ACCESS_TRANSFER_WRITE_BIT |
					VK_ACCESS_HOST_READ_BIT |
					VK_ACCESS_HOST_WRITE_BIT,
			VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
					VK_ACCESS_INDEX_READ_BIT |
					VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
					VK_ACCESS_UNIFORM_READ_BIT |
					VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
					VK_ACCESS_SHADER_READ_BIT |
					VK_ACCESS_SHADER_WRITE_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_TRANSFER_READ_BIT |
					VK_ACCESS_TRANSFER_WRITE_BIT |
					VK_ACCESS_HOST_READ_BIT |
					VK_ACCESS_HOST_WRITE_BIT,
			p_sync_with_draw);
}
void RenderingDeviceVulkan::_buffer_memory_barrier(VkBuffer buffer, uint64_t p_from, uint64_t p_size, VkPipelineStageFlags p_src_stage_mask, VkPipelineStageFlags p_dst_stage_mask, VkAccessFlags p_src_access, VkAccessFlags p_dst_access, bool p_sync_with_draw) {
	VkBufferMemoryBarrier buffer_mem_barrier;
	buffer_mem_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
	buffer_mem_barrier.pNext = nullptr;
	buffer_mem_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	buffer_mem_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	buffer_mem_barrier.srcAccessMask = p_src_access;
	buffer_mem_barrier.dstAccessMask = p_dst_access;
	buffer_mem_barrier.buffer = buffer;
	buffer_mem_barrier.offset = p_from;
	buffer_mem_barrier.size = p_size;

	vkCmdPipelineBarrier(p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, p_src_stage_mask, p_dst_stage_mask, 0, 0, nullptr, 1, &buffer_mem_barrier, 0, nullptr);
}
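
// A minimal sketch (illustrative): guarding the first 1024 bytes of a buffer
// that was just written by a transfer, before the vertex input stage reads it:
//
//	_buffer_memory_barrier(some_buffer->buffer, 0, 1024,
//			VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
//			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, true);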
/*****************/
/**** TEXTURE ****/
/*****************/
RID RenderingDeviceVulkan::texture_create(const TextureFormat &p_format, const TextureView &p_view, const Vector<Vector<uint8_t>> &p_data) {
	_THREAD_SAFE_METHOD_

	VkImageCreateInfo image_create_info;
	image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
	image_create_info.pNext = nullptr;
	image_create_info.flags = 0;

#ifndef _MSC_VER
#warning TODO check for support via RenderingDevice to enable on mobile when possible
#endif

#ifndef ANDROID_ENABLED
	// vkCreateImage fails with a format list on Android (VK_ERROR_OUT_OF_HOST_MEMORY)
	VkImageFormatListCreateInfoKHR format_list_create_info; //keep out of the if, needed for creation
	Vector<VkFormat> allowed_formats; //keep out of the if, needed for creation
#endif
	if (p_format.shareable_formats.size()) {
		image_create_info.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;

#ifndef ANDROID_ENABLED
		for (int i = 0; i < p_format.shareable_formats.size(); i++) {
			allowed_formats.push_back(vulkan_formats[p_format.shareable_formats[i]]);
		}

		format_list_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
		format_list_create_info.pNext = nullptr;
		format_list_create_info.viewFormatCount = allowed_formats.size();
		format_list_create_info.pViewFormats = allowed_formats.ptr();
		image_create_info.pNext = &format_list_create_info;

		ERR_FAIL_COND_V_MSG(p_format.shareable_formats.find(p_format.format) == -1, RID(),
				"When supplying a list of shareable formats, the current format must be present in the list.");
		ERR_FAIL_COND_V_MSG(p_view.format_override != DATA_FORMAT_MAX && p_format.shareable_formats.find(p_view.format_override) == -1, RID(),
				"When supplying a list of shareable formats, the current view format override must be present in the list.");
#endif
	}
	if (p_format.texture_type == TEXTURE_TYPE_CUBE || p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY) {
		image_create_info.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
	}
	/*if (p_format.type == TEXTURE_TYPE_2D || p_format.type == TEXTURE_TYPE_2D_ARRAY) {
		image_create_info.flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
	}*/

	ERR_FAIL_INDEX_V(p_format.texture_type, TEXTURE_TYPE_MAX, RID());

	image_create_info.imageType = vulkan_image_type[p_format.texture_type];

	ERR_FAIL_COND_V_MSG(p_format.width < 1, RID(), "Width must be equal or greater than 1 for all textures.");

	image_create_info.format = vulkan_formats[p_format.format];

	image_create_info.extent.width = p_format.width;
	if (image_create_info.imageType == VK_IMAGE_TYPE_3D || image_create_info.imageType == VK_IMAGE_TYPE_2D) {
		ERR_FAIL_COND_V_MSG(p_format.height < 1, RID(), "Height must be equal or greater than 1 for 2D and 3D textures.");
		image_create_info.extent.height = p_format.height;
	} else {
		image_create_info.extent.height = 1;
	}

	if (image_create_info.imageType == VK_IMAGE_TYPE_3D) {
		ERR_FAIL_COND_V_MSG(p_format.depth < 1, RID(), "Depth must be equal or greater than 1 for 3D textures.");
		image_create_info.extent.depth = p_format.depth;
	} else {
		image_create_info.extent.depth = 1;
	}

	ERR_FAIL_COND_V(p_format.mipmaps < 1, RID());

	image_create_info.mipLevels = p_format.mipmaps;

	if (p_format.texture_type == TEXTURE_TYPE_1D_ARRAY || p_format.texture_type == TEXTURE_TYPE_2D_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE) {
		ERR_FAIL_COND_V_MSG(p_format.array_layers < 1, RID(),
				"Amount of layers must be equal or greater than 1 for arrays and cubemaps.");
		ERR_FAIL_COND_V_MSG((p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE) && (p_format.array_layers % 6) != 0, RID(),
				"Cubemap and cubemap array textures must provide a layer count that is a multiple of 6.");
		image_create_info.arrayLayers = p_format.array_layers;
	} else {
		image_create_info.arrayLayers = 1;
	}

	ERR_FAIL_INDEX_V(p_format.samples, TEXTURE_SAMPLES_MAX, RID());

	image_create_info.samples = rasterization_sample_count[p_format.samples];
	image_create_info.tiling = (p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT) ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;

	//usage
	image_create_info.usage = 0;

	if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT) {
		image_create_info.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT) {
		image_create_info.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
		image_create_info.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		image_create_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_CAN_UPDATE_BIT) {
		image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_CAN_COPY_FROM_BIT) {
		image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_CAN_COPY_TO_BIT) {
		image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
	}

	image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	image_create_info.queueFamilyIndexCount = 0;
	image_create_info.pQueueFamilyIndices = nullptr;
	image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

	uint32_t required_mipmaps = get_image_required_mipmaps(image_create_info.extent.width, image_create_info.extent.height, image_create_info.extent.depth);

	ERR_FAIL_COND_V_MSG(required_mipmaps < image_create_info.mipLevels, RID(),
			"Too many mipmaps requested for texture format and dimensions (" + itos(image_create_info.mipLevels) + "), maximum allowed: (" + itos(required_mipmaps) + ").");

	if (p_data.size()) {
		ERR_FAIL_COND_V_MSG(!(p_format.usage_bits & TEXTURE_USAGE_CAN_UPDATE_BIT), RID(),
				"Texture needs the TEXTURE_USAGE_CAN_UPDATE_BIT usage flag in order to be updated at initialization or later.");

		int expected_images = image_create_info.arrayLayers;

		ERR_FAIL_COND_V_MSG(p_data.size() != expected_images, RID(),
				"Default supplied data for image format is of invalid length (" + itos(p_data.size()) + "), should be (" + itos(expected_images) + ").");

		for (uint32_t i = 0; i < image_create_info.arrayLayers; i++) {
			uint32_t required_size = get_image_format_required_size(p_format.format, image_create_info.extent.width, image_create_info.extent.height, image_create_info.extent.depth, image_create_info.mipLevels);
			ERR_FAIL_COND_V_MSG((uint32_t)p_data[i].size() != required_size, RID(),
					"Data for slice index " + itos(i) + " (mapped to layer " + itos(i) + ") differs in size (supplied: " + itos(p_data[i].size()) + ") from what is required by the format (" + itos(required_size) + ").");
		}
	}
	{
		//validate that this image is supported for the intended use
		VkFormatProperties properties;
		vkGetPhysicalDeviceFormatProperties(context->get_physical_device(), image_create_info.format, &properties);
		VkFormatFeatureFlags flags;

		String format_text = "'" + String(named_formats[p_format.format]) + "'";

		if (p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT) {
			flags = properties.linearTilingFeatures;
			format_text += " (with CPU read bit)";
		} else {
			flags = properties.optimalTilingFeatures;
		}

		if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
			ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as sampling texture.");
		}

		if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
			ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as color attachment.");
		}

		if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
			printf("vkformat: %x\n", image_create_info.format);
			ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as depth-stencil attachment.");
		}

		if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
			ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as storage image.");
		}

		if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_ATOMIC_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) {
			ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as atomic storage image.");
		}
	}

	//some view validation

	if (p_view.format_override != DATA_FORMAT_MAX) {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());
	}
	ERR_FAIL_INDEX_V(p_view.swizzle_r, TEXTURE_SWIZZLE_MAX, RID());
	ERR_FAIL_INDEX_V(p_view.swizzle_g, TEXTURE_SWIZZLE_MAX, RID());
	ERR_FAIL_INDEX_V(p_view.swizzle_b, TEXTURE_SWIZZLE_MAX, RID());
	ERR_FAIL_INDEX_V(p_view.swizzle_a, TEXTURE_SWIZZLE_MAX, RID());
	//allocate memory

	VmaAllocationCreateInfo allocInfo;
	allocInfo.flags = 0;
	allocInfo.usage = p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT ? VMA_MEMORY_USAGE_CPU_ONLY : VMA_MEMORY_USAGE_GPU_ONLY;
	allocInfo.requiredFlags = 0;
	allocInfo.preferredFlags = 0;
	allocInfo.memoryTypeBits = 0;
	allocInfo.pool = nullptr;
	allocInfo.pUserData = nullptr;

	Texture texture;

	VkResult err = vmaCreateImage(allocator, &image_create_info, &allocInfo, &texture.image, &texture.allocation, &texture.allocation_info);
	ERR_FAIL_COND_V_MSG(err, RID(), "vmaCreateImage failed with error " + itos(err) + ".");
	image_memory += texture.allocation_info.size;
	texture.type = p_format.texture_type;
	texture.format = p_format.format;
	texture.width = image_create_info.extent.width;
	texture.height = image_create_info.extent.height;
	texture.depth = image_create_info.extent.depth;
	texture.layers = image_create_info.arrayLayers;
	texture.mipmaps = image_create_info.mipLevels;
	texture.base_mipmap = 0;
	texture.base_layer = 0;
	texture.usage_flags = p_format.usage_bits;
	texture.samples = p_format.samples;
	texture.allowed_shared_formats = p_format.shareable_formats;

	//set base layout based on usage priority

	if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT) {
		//first priority, readable
		texture.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
	} else if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT) {
		//second priority, storage
		texture.layout = VK_IMAGE_LAYOUT_GENERAL;
	} else if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
		//third priority, color or depth
		texture.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	} else if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		texture.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	} else {
		texture.layout = VK_IMAGE_LAYOUT_GENERAL;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		texture.read_aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
		texture.barrier_aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;

		if (format_has_stencil(p_format.format)) {
			texture.barrier_aspect_mask |= VK_IMAGE_ASPECT_STENCIL_BIT;
		}
	} else {
		texture.read_aspect_mask = VK_IMAGE_ASPECT_COLOR_BIT;
		texture.barrier_aspect_mask = VK_IMAGE_ASPECT_COLOR_BIT;
	}
	texture.bound = false;

	//create view

	VkImageViewCreateInfo image_view_create_info;
	image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	image_view_create_info.pNext = nullptr;
	image_view_create_info.flags = 0;
	image_view_create_info.image = texture.image;

	static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_3D,
		VK_IMAGE_VIEW_TYPE_CUBE,
		VK_IMAGE_VIEW_TYPE_1D_ARRAY,
		VK_IMAGE_VIEW_TYPE_2D_ARRAY,
		VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
	};

	image_view_create_info.viewType = view_types[p_format.texture_type];
	if (p_view.format_override == DATA_FORMAT_MAX) {
		image_view_create_info.format = image_create_info.format;
	} else {
		image_view_create_info.format = vulkan_formats[p_view.format_override];
	}

	static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
		VK_COMPONENT_SWIZZLE_IDENTITY,
		VK_COMPONENT_SWIZZLE_ZERO,
		VK_COMPONENT_SWIZZLE_ONE,
		VK_COMPONENT_SWIZZLE_R,
		VK_COMPONENT_SWIZZLE_G,
		VK_COMPONENT_SWIZZLE_B,
		VK_COMPONENT_SWIZZLE_A
	};

	image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
	image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
	image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
	image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];

	image_view_create_info.subresourceRange.baseMipLevel = 0;
	image_view_create_info.subresourceRange.levelCount = image_create_info.mipLevels;
	image_view_create_info.subresourceRange.baseArrayLayer = 0;
	image_view_create_info.subresourceRange.layerCount = image_create_info.arrayLayers;
	if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
	} else {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	}

	err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);

	if (err) {
		vmaDestroyImage(allocator, texture.image, texture.allocation);
		ERR_FAIL_V_MSG(RID(), "vkCreateImageView failed with error " + itos(err) + ".");
	}

	//barrier to set layout
	{
		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = 0;
		image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
		image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		image_memory_barrier.newLayout = texture.layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = texture.image;
		image_memory_barrier.subresourceRange.aspectMask = texture.barrier_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = 0;
		image_memory_barrier.subresourceRange.levelCount = image_create_info.mipLevels;
		image_memory_barrier.subresourceRange.baseArrayLayer = 0;
		image_memory_barrier.subresourceRange.layerCount = image_create_info.arrayLayers;

		vkCmdPipelineBarrier(frames[frame].setup_command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}

	RID id = texture_owner.make_rid(texture);

	if (p_data.size()) {
		for (uint32_t i = 0; i < image_create_info.arrayLayers; i++) {
			_texture_update(id, i, p_data[i], RD::BARRIER_MASK_ALL, true);
		}
	}
	return id;
}
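
// A minimal usage sketch (field values are illustrative, not a recommendation):
// creating a 2D RGBA8 texture that can be sampled and updated later, from code
// that holds a RenderingDevice:
//
//	RD::TextureFormat tf;
//	tf.texture_type = RD::TEXTURE_TYPE_2D;
//	tf.format = RD::DATA_FORMAT_R8G8B8A8_UNORM;
//	tf.width = 256;
//	tf.height = 256;
//	tf.usage_bits = RD::TEXTURE_USAGE_SAMPLING_BIT | RD::TEXTURE_USAGE_CAN_UPDATE_BIT;
//	RID tex = rd->texture_create(tf, RD::TextureView());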
RID RenderingDeviceVulkan::texture_create_shared(const TextureView &p_view, RID p_with_texture) {
	_THREAD_SAFE_METHOD_

	Texture *src_texture = texture_owner.getornull(p_with_texture);
	ERR_FAIL_COND_V(!src_texture, RID());

	if (src_texture->owner.is_valid()) { //the source is itself a shared view, use the original texture instead
		p_with_texture = src_texture->owner;
		src_texture = texture_owner.getornull(src_texture->owner);
		ERR_FAIL_COND_V(!src_texture, RID()); //this is a bug
	}

	//create view

	Texture texture = *src_texture;

	VkImageViewCreateInfo image_view_create_info;
	image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	image_view_create_info.pNext = nullptr;
	image_view_create_info.flags = 0;
	image_view_create_info.image = texture.image;

	static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_3D,
		VK_IMAGE_VIEW_TYPE_CUBE,
		VK_IMAGE_VIEW_TYPE_1D_ARRAY,
		VK_IMAGE_VIEW_TYPE_2D_ARRAY,
		VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
	};

	image_view_create_info.viewType = view_types[texture.type];
	if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
		image_view_create_info.format = vulkan_formats[texture.format];
	} else {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());

		ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
				"Format override is not in the list of allowed shareable formats for the original texture.");
		image_view_create_info.format = vulkan_formats[p_view.format_override];
	}

	static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
		VK_COMPONENT_SWIZZLE_IDENTITY,
		VK_COMPONENT_SWIZZLE_ZERO,
		VK_COMPONENT_SWIZZLE_ONE,
		VK_COMPONENT_SWIZZLE_R,
		VK_COMPONENT_SWIZZLE_G,
		VK_COMPONENT_SWIZZLE_B,
		VK_COMPONENT_SWIZZLE_A
	};

	image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
	image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
	image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
	image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];

	image_view_create_info.subresourceRange.baseMipLevel = 0;
	image_view_create_info.subresourceRange.levelCount = texture.mipmaps;
	image_view_create_info.subresourceRange.layerCount = texture.layers;
	image_view_create_info.subresourceRange.baseArrayLayer = 0;

	if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
	} else {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	}

	VkImageViewUsageCreateInfo usage_info;
	usage_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO;
	usage_info.pNext = nullptr;
	if (p_view.format_override != DATA_FORMAT_MAX) {
		//need to validate usage with vulkan
		usage_info.usage = 0;

		if (texture.usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
		}

		if (texture.usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
			if (texture_is_format_supported_for_usage(p_view.format_override, TEXTURE_USAGE_STORAGE_BIT)) {
				usage_info.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
			}
		}

		if (texture.usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
			if (texture_is_format_supported_for_usage(p_view.format_override, TEXTURE_USAGE_COLOR_ATTACHMENT_BIT)) {
				usage_info.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
			}
		}

		if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
		}

		if (texture.usage_flags & TEXTURE_USAGE_CAN_UPDATE_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
		}
		if (texture.usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
		}

		if (texture.usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
		}

		image_view_create_info.pNext = &usage_info;
	}

	VkResult err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateImageView failed with error " + itos(err) + ".");

	texture.owner = p_with_texture;
	RID id = texture_owner.make_rid(texture);
	_add_dependency(id, p_with_texture);

	return id;
}
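
// A minimal sketch (illustrative): reinterpreting an existing texture through a
// swizzled view that shares the same image memory:
//
//	RD::TextureView view;
//	view.swizzle_r = RD::TEXTURE_SWIZZLE_B;
//	view.swizzle_b = RD::TEXTURE_SWIZZLE_R;
//	RID swizzled_view = rd->texture_create_shared(view, tex);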
RID RenderingDeviceVulkan::texture_create_shared_from_slice(const TextureView &p_view, RID p_with_texture, uint32_t p_layer, uint32_t p_mipmap, TextureSliceType p_slice_type) {
	_THREAD_SAFE_METHOD_

	Texture *src_texture = texture_owner.getornull(p_with_texture);
	ERR_FAIL_COND_V(!src_texture, RID());

	if (src_texture->owner.is_valid()) { //the source is itself a shared view, use the original texture instead
		p_with_texture = src_texture->owner;
		src_texture = texture_owner.getornull(src_texture->owner);
		ERR_FAIL_COND_V(!src_texture, RID()); //this is a bug
	}

	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_CUBEMAP && (src_texture->type != TEXTURE_TYPE_CUBE && src_texture->type != TEXTURE_TYPE_CUBE_ARRAY), RID(),
			"Can only create a cubemap slice from a cubemap or cubemap array mipmap.");

	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_3D && src_texture->type != TEXTURE_TYPE_3D, RID(),
			"Can only create a 3D slice from a 3D texture.");

	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_2D_ARRAY && (src_texture->type != TEXTURE_TYPE_2D_ARRAY), RID(),
			"Can only create an array slice from a 2D array mipmap.");

	//create view

	ERR_FAIL_UNSIGNED_INDEX_V(p_mipmap, src_texture->mipmaps, RID());
	ERR_FAIL_UNSIGNED_INDEX_V(p_layer, src_texture->layers, RID());

	int slice_layers = 1;
	if (p_slice_type == TEXTURE_SLICE_2D_ARRAY) {
		ERR_FAIL_COND_V_MSG(p_layer != 0, RID(), "Layer must be 0 when obtaining a 2D array mipmap slice.");
		slice_layers = src_texture->layers;
	} else if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		slice_layers = 6;
	}

	Texture texture = *src_texture;
	get_image_format_required_size(texture.format, texture.width, texture.height, texture.depth, p_mipmap + 1, &texture.width, &texture.height);
	texture.mipmaps = 1;
	texture.layers = slice_layers;
	texture.base_mipmap = p_mipmap;
	texture.base_layer = p_layer;

	VkImageViewCreateInfo image_view_create_info;
	image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	image_view_create_info.pNext = nullptr;
	image_view_create_info.flags = 0;
	image_view_create_info.image = texture.image;

	static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_2D,
	};

	image_view_create_info.viewType = view_types[texture.type];

	if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
	} else if (p_slice_type == TEXTURE_SLICE_3D) {
		image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_3D;
	} else if (p_slice_type == TEXTURE_SLICE_2D_ARRAY) {
		image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
	}

	if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
		image_view_create_info.format = vulkan_formats[texture.format];
	} else {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());

		ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
				"Format override is not in the list of allowed shareable formats for the original texture.");
		image_view_create_info.format = vulkan_formats[p_view.format_override];
	}

	static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
		VK_COMPONENT_SWIZZLE_IDENTITY,
		VK_COMPONENT_SWIZZLE_ZERO,
		VK_COMPONENT_SWIZZLE_ONE,
		VK_COMPONENT_SWIZZLE_R,
		VK_COMPONENT_SWIZZLE_G,
		VK_COMPONENT_SWIZZLE_B,
		VK_COMPONENT_SWIZZLE_A
	};

	image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
	image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
	image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
	image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];

	if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		ERR_FAIL_COND_V_MSG(p_layer >= src_texture->layers, RID(),
				"Specified layer is invalid for cubemap.");
		ERR_FAIL_COND_V_MSG((p_layer % 6) != 0, RID(),
				"Specified layer must be a multiple of 6.");
	}
	image_view_create_info.subresourceRange.baseMipLevel = p_mipmap;
	image_view_create_info.subresourceRange.levelCount = 1;
	image_view_create_info.subresourceRange.layerCount = slice_layers;
	image_view_create_info.subresourceRange.baseArrayLayer = p_layer;

	if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
	} else {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	}

	VkResult err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateImageView failed with error " + itos(err) + ".");

	texture.owner = p_with_texture;
	RID id = texture_owner.make_rid(texture);
	_add_dependency(id, p_with_texture);

	return id;
}
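
// A minimal sketch (illustrative, assuming the default 2D slice type applies):
// exposing mip 2 of layer 0 of an existing texture as its own single-mip
// texture, e.g. to render into that mip:
//
//	RID mip_slice = rd->texture_create_shared_from_slice(RD::TextureView(), tex, 0, 2);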
Error RenderingDeviceVulkan::texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier) {
	return _texture_update(p_texture, p_layer, p_data, p_post_barrier, false);
}
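
// A minimal sketch (illustrative): uploading raw RGBA8 pixels to layer 0 of the
// 256x256 texture created earlier, using the default post-barrier mask:
//
//	Vector<uint8_t> pixels;
//	pixels.resize(256 * 256 * 4); //RGBA8, one byte per channel
//	// ...fill 'pixels'...
//	rd->texture_update(tex, 0, pixels);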
Error RenderingDeviceVulkan::_texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier, bool p_use_setup_queue) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG((draw_list || compute_list) && !p_use_setup_queue, ERR_INVALID_PARAMETER,
			"Updating textures is forbidden during creation of a draw or compute list.");

	Texture *texture = texture_owner.getornull(p_texture);
	ERR_FAIL_COND_V(!texture, ERR_INVALID_PARAMETER);

	if (texture->owner != RID()) {
		p_texture = texture->owner;
		texture = texture_owner.getornull(texture->owner);
		ERR_FAIL_COND_V(!texture, ERR_BUG); //this is a bug
	}

	ERR_FAIL_COND_V_MSG(texture->bound, ERR_CANT_ACQUIRE_RESOURCE,
			"Texture can't be updated while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");

	ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_CAN_UPDATE_BIT), ERR_INVALID_PARAMETER,
			"Texture requires the TEXTURE_USAGE_CAN_UPDATE_BIT in order to be updatable.");

	uint32_t layer_count = texture->layers;
	if (texture->type == TEXTURE_TYPE_CUBE || texture->type == TEXTURE_TYPE_CUBE_ARRAY) {
		layer_count *= 6;
	}
	ERR_FAIL_COND_V(p_layer >= layer_count, ERR_INVALID_PARAMETER);

	uint32_t width, height;
	uint32_t image_size = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, texture->mipmaps, &width, &height);
	uint32_t required_size = image_size;
	uint32_t required_align = get_compressed_image_format_block_byte_size(texture->format);
	if (required_align == 1) {
		required_align = get_image_format_pixel_size(texture->format);
	}
	if ((required_align % 4) != 0) { //alignment rules are really strange
		required_align *= 4;
	}

	ERR_FAIL_COND_V_MSG(required_size != (uint32_t)p_data.size(), ERR_INVALID_PARAMETER,
			"Required size for texture update (" + itos(required_size) + ") does not match data supplied size (" + itos(p_data.size()) + ").");

	uint32_t region_size = texture_upload_region_size_px;

	const uint8_t *r = p_data.ptr();

	VkCommandBuffer command_buffer = p_use_setup_queue ? frames[frame].setup_command_buffer : frames[frame].draw_command_buffer;

	//barrier to transfer
	{
		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = 0;
		image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.oldLayout = texture->layout;
		image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = texture->image;
		image_memory_barrier.subresourceRange.aspectMask = texture->barrier_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = 0;
		image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
		image_memory_barrier.subresourceRange.layerCount = 1;

		vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}

	uint32_t mipmap_offset = 0;

	uint32_t logic_width = texture->width;
	uint32_t logic_height = texture->height;
	for (uint32_t mm_i = 0; mm_i < texture->mipmaps; mm_i++) {
		uint32_t depth;
		uint32_t image_total = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, mm_i + 1, &width, &height, &depth);

		const uint8_t *read_ptr_mipmap = r + mipmap_offset;
		image_size = image_total - mipmap_offset;

		for (uint32_t z = 0; z < depth; z++) { //for 3D textures, depth may be > 1

			const uint8_t *read_ptr = read_ptr_mipmap + image_size * z / depth;

			for (uint32_t x = 0; x < width; x += region_size) {
				for (uint32_t y = 0; y < height; y += region_size) {
					uint32_t region_w = MIN(region_size, width - x);
					uint32_t region_h = MIN(region_size, height - y);

					uint32_t region_logic_w = MIN(region_size, logic_width - x);
					uint32_t region_logic_h = MIN(region_size, logic_height - y);

					uint32_t pixel_size = get_image_format_pixel_size(texture->format);
					uint32_t to_allocate = region_w * region_h * pixel_size;
					to_allocate >>= get_compressed_image_format_pixel_rshift(texture->format);

					uint32_t alloc_offset, alloc_size;
					Error err = _staging_buffer_allocate(to_allocate, required_align, alloc_offset, alloc_size, false, !p_use_setup_queue);
					ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

					uint8_t *write_ptr;

					{ //map
						void *data_ptr = nullptr;
						VkResult vkerr = vmaMapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation, &data_ptr);
						ERR_FAIL_COND_V_MSG(vkerr, ERR_CANT_CREATE, "vmaMapMemory failed with error " + itos(vkerr) + ".");
						write_ptr = (uint8_t *)data_ptr;
						write_ptr += alloc_offset;
					}

					uint32_t block_w, block_h;
					get_compressed_image_format_block_dimensions(texture->format, block_w, block_h);

					ERR_FAIL_COND_V(region_w % block_w, ERR_BUG);
					ERR_FAIL_COND_V(region_h % block_h, ERR_BUG);

					if (block_w != 1 || block_h != 1) {
						//compressed image (blocks)
						//must copy a block region

						uint32_t block_size = get_compressed_image_format_block_byte_size(texture->format);
						//re-create current variables in blocky format
						uint32_t xb = x / block_w;
						uint32_t yb = y / block_h;
						uint32_t wb = width / block_w;
						//uint32_t hb = height / block_h;
						uint32_t region_wb = region_w / block_w;
						uint32_t region_hb = region_h / block_h;
						for (uint32_t xr = 0; xr < region_wb; xr++) {
							for (uint32_t yr = 0; yr < region_hb; yr++) {
								uint32_t src_offset = ((yr + yb) * wb + xr + xb) * block_size;
								uint32_t dst_offset = (yr * region_wb + xr) * block_size;
								//copy block
								for (uint32_t i = 0; i < block_size; i++) {
									write_ptr[dst_offset + i] = read_ptr[src_offset + i];
								}
							}
						}

					} else {
						//regular image (pixels)
						//must copy a pixel region

						for (uint32_t xr = 0; xr < region_w; xr++) {
							for (uint32_t yr = 0; yr < region_h; yr++) {
								uint32_t src_offset = ((yr + y) * width + xr + x) * pixel_size;
								uint32_t dst_offset = (yr * region_w + xr) * pixel_size;
								//copy pixel
								for (uint32_t i = 0; i < pixel_size; i++) {
									write_ptr[dst_offset + i] = read_ptr[src_offset + i];
								}
							}
						}
					}

					{ //unmap
						vmaUnmapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation);
					}

					VkBufferImageCopy buffer_image_copy;
					buffer_image_copy.bufferOffset = alloc_offset;
					buffer_image_copy.bufferRowLength = 0; //tightly packed
					buffer_image_copy.bufferImageHeight = 0; //tightly packed

					buffer_image_copy.imageSubresource.aspectMask = texture->read_aspect_mask;
					buffer_image_copy.imageSubresource.mipLevel = mm_i;
					buffer_image_copy.imageSubresource.baseArrayLayer = p_layer;
					buffer_image_copy.imageSubresource.layerCount = 1;

					buffer_image_copy.imageOffset.x = x;
					buffer_image_copy.imageOffset.y = y;
					buffer_image_copy.imageOffset.z = z;

					buffer_image_copy.imageExtent.width = region_logic_w;
					buffer_image_copy.imageExtent.height = region_logic_h;
					buffer_image_copy.imageExtent.depth = 1;

					vkCmdCopyBufferToImage(command_buffer, staging_buffer_blocks[staging_buffer_current].buffer, texture->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &buffer_image_copy);

					staging_buffer_blocks.write[staging_buffer_current].fill_amount += alloc_size;
				}
			}
		}

		mipmap_offset = image_total;
		logic_width = MAX(1, logic_width >> 1);
		logic_height = MAX(1, logic_height >> 1);
	}
	//barrier to restore layout
	{
		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}

		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.dstAccessMask = access_flags;
		image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
		image_memory_barrier.newLayout = texture->layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = texture->image;
		image_memory_barrier.subresourceRange.aspectMask = texture->barrier_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = 0;
		image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
		image_memory_barrier.subresourceRange.layerCount = 1;

		vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}

	if (texture->used_in_frame != frames_drawn) {
		texture->used_in_raster = false;
		texture->used_in_compute = false;
		texture->used_in_frame = frames_drawn;
	}
	texture->used_in_transfer = true;

	return OK;
}
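
// Worked example of the size math above (illustrative numbers): for a 16x16
// BC1-compressed texture (4x4 blocks, 8 bytes per block) with 2 mipmaps,
// get_image_format_required_size() yields 4*4*8 = 128 bytes for mip 0 plus
// 2*2*8 = 32 bytes for mip 1, so p_data for each layer must be exactly 160
// bytes. required_align starts from the 8-byte block size, and since 8 is
// already a multiple of 4 it stays 8.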
Vector<uint8_t> RenderingDeviceVulkan::_texture_get_data_from_image(Texture *tex, VkImage p_image, VmaAllocation p_allocation, uint32_t p_layer, bool p_2d) {
	uint32_t width, height, depth;
	uint32_t image_size = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, tex->mipmaps, &width, &height, &depth);

	Vector<uint8_t> image_data;
	image_data.resize(image_size);

	void *img_mem;
	vmaMapMemory(allocator, p_allocation, &img_mem);

	uint32_t blockw, blockh;
	get_compressed_image_format_block_dimensions(tex->format, blockw, blockh);
	uint32_t block_size = get_compressed_image_format_block_byte_size(tex->format);
	uint32_t pixel_size = get_image_format_pixel_size(tex->format);

	{
		uint8_t *w = image_data.ptrw();

		uint32_t mipmap_offset = 0;
		for (uint32_t mm_i = 0; mm_i < tex->mipmaps; mm_i++) {
			uint32_t image_total = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, mm_i + 1, &width, &height, &depth);

			uint8_t *write_ptr_mipmap = w + mipmap_offset;
			image_size = image_total - mipmap_offset;

			VkImageSubresource image_sub_resource;
			image_sub_resource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			image_sub_resource.arrayLayer = p_layer;
			image_sub_resource.mipLevel = mm_i;
			VkSubresourceLayout layout;
			vkGetImageSubresourceLayout(device, p_image, &image_sub_resource, &layout);

			for (uint32_t z = 0; z < depth; z++) {
				uint8_t *write_ptr = write_ptr_mipmap + z * image_size / depth;
				const uint8_t *slice_read_ptr = ((uint8_t *)img_mem) + layout.offset + z * layout.depthPitch;

				if (block_size > 1) {
					//compressed
					uint32_t line_width = (block_size * (width / blockw));
					for (uint32_t y = 0; y < height / blockh; y++) {
						const uint8_t *rptr = slice_read_ptr + y * layout.rowPitch;
						uint8_t *wptr = write_ptr + y * line_width;
						memcpy(wptr, rptr, line_width);
					}
				} else {
					//uncompressed
					for (uint32_t y = 0; y < height; y++) {
						const uint8_t *rptr = slice_read_ptr + y * layout.rowPitch;
						uint8_t *wptr = write_ptr + y * pixel_size * width;
						memcpy(wptr, rptr, (uint64_t)pixel_size * width);
					}
				}
			}

			mipmap_offset = image_total;
		}
	}

	vmaUnmapMemory(allocator, p_allocation);

	return image_data;
}
2020-02-17 21:06:54 +00:00
Vector < uint8_t > RenderingDeviceVulkan : : texture_get_data ( RID p_texture , uint32_t p_layer ) {
2019-10-05 13:27:43 +00:00
_THREAD_SAFE_METHOD_
2019-06-25 22:49:52 +00:00
Texture * tex = texture_owner . getornull ( p_texture ) ;
2020-02-17 21:06:54 +00:00
ERR_FAIL_COND_V ( ! tex , Vector < uint8_t > ( ) ) ;
2019-06-25 22:49:52 +00:00
2020-02-17 21:06:54 +00:00
ERR_FAIL_COND_V_MSG ( tex - > bound , Vector < uint8_t > ( ) ,
2019-06-25 22:49:52 +00:00
" Texture can't be retrieved while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture. " ) ;
2020-02-17 21:06:54 +00:00
ERR_FAIL_COND_V_MSG ( ! ( tex - > usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT ) , Vector < uint8_t > ( ) ,
2019-07-27 13:23:24 +00:00
" Texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be retrieved. " ) ;
2019-06-25 22:49:52 +00:00
uint32_t layer_count = tex - > layers ;
if ( tex - > type = = TEXTURE_TYPE_CUBE | | tex - > type = = TEXTURE_TYPE_CUBE_ARRAY ) {
layer_count * = 6 ;
}
2020-02-17 21:06:54 +00:00
ERR_FAIL_COND_V ( p_layer > = layer_count , Vector < uint8_t > ( ) ) ;
2019-06-25 22:49:52 +00:00
if ( tex - > usage_flags & TEXTURE_USAGE_CPU_READ_BIT ) {
//does not need anything fancy, map and read.
return _texture_get_data_from_image ( tex , tex - > image , tex - > allocation , p_layer ) ;
} else {
2019-10-25 14:22:19 +00:00
//compute total image size
uint32_t width , height , depth ;
uint32_t buffer_size = get_image_format_required_size ( tex - > format , tex - > width , tex - > height , tex - > depth , tex - > mipmaps , & width , & height , & depth ) ;
//allocate buffer
2020-05-01 12:34:23 +00:00
VkCommandBuffer command_buffer = frames [ frame ] . draw_command_buffer ; //makes more sense to retrieve
2019-10-25 14:22:19 +00:00
Buffer tmp_buffer ;
_buffer_allocate ( & tmp_buffer , buffer_size , VK_BUFFER_USAGE_TRANSFER_DST_BIT , VMA_MEMORY_USAGE_CPU_ONLY ) ;
2019-06-25 22:49:52 +00:00
2019-10-25 14:22:19 +00:00
{ //Source image barrier
2019-06-25 22:49:52 +00:00
VkImageMemoryBarrier image_memory_barrier ;
image_memory_barrier . sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER ;
2020-04-01 23:20:12 +00:00
image_memory_barrier . pNext = nullptr ;
2019-06-25 22:49:52 +00:00
image_memory_barrier . srcAccessMask = 0 ;
image_memory_barrier . dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT ;
2019-09-25 19:44:44 +00:00
image_memory_barrier . oldLayout = tex - > layout ;
2019-06-25 22:49:52 +00:00
image_memory_barrier . newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL ;
image_memory_barrier . srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED ;
image_memory_barrier . dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED ;
image_memory_barrier . image = tex - > image ;
2019-08-20 20:54:03 +00:00
image_memory_barrier . subresourceRange . aspectMask = tex - > barrier_aspect_mask ;
2019-06-25 22:49:52 +00:00
image_memory_barrier . subresourceRange . baseMipLevel = 0 ;
image_memory_barrier . subresourceRange . levelCount = tex - > mipmaps ;
image_memory_barrier . subresourceRange . baseArrayLayer = p_layer ;
image_memory_barrier . subresourceRange . layerCount = 1 ;
2020-04-01 23:20:12 +00:00
vkCmdPipelineBarrier ( command_buffer , VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT , VK_PIPELINE_STAGE_TRANSFER_BIT , 0 , 0 , nullptr , 0 , nullptr , 1 , & image_memory_barrier ) ;
2019-06-25 22:49:52 +00:00
}
		uint32_t computed_w = tex->width;
		uint32_t computed_h = tex->height;
		uint32_t computed_d = tex->depth;

		uint32_t prev_size = 0;
		uint32_t offset = 0;
		for (uint32_t i = 0; i < tex->mipmaps; i++) {
			VkBufferImageCopy buffer_image_copy;

			uint32_t image_size = get_image_format_required_size(tex->format, tex->width, tex->height, tex->depth, i + 1);
			uint32_t size = image_size - prev_size;
			prev_size = image_size;

			buffer_image_copy.bufferOffset = offset;
			buffer_image_copy.bufferImageHeight = 0;
			buffer_image_copy.bufferRowLength = 0;
			buffer_image_copy.imageSubresource.aspectMask = tex->read_aspect_mask;
			buffer_image_copy.imageSubresource.baseArrayLayer = p_layer;
			buffer_image_copy.imageSubresource.layerCount = 1;
			buffer_image_copy.imageSubresource.mipLevel = i;
			buffer_image_copy.imageOffset.x = 0;
			buffer_image_copy.imageOffset.y = 0;
			buffer_image_copy.imageOffset.z = 0;
			buffer_image_copy.imageExtent.width = computed_w;
			buffer_image_copy.imageExtent.height = computed_h;
			buffer_image_copy.imageExtent.depth = computed_d;

			vkCmdCopyImageToBuffer(command_buffer, tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, tmp_buffer.buffer, 1, &buffer_image_copy);

			computed_w = MAX(1, computed_w >> 1);
			computed_h = MAX(1, computed_h >> 1);
			computed_d = MAX(1, computed_d >> 1);
			offset += size;
		}
		{ //restore src
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
			if (tex->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
				image_memory_barrier.dstAccessMask |= VK_ACCESS_SHADER_WRITE_BIT;
			}
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.newLayout = tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = tex->image;
			image_memory_barrier.subresourceRange.aspectMask = tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = 0;
			image_memory_barrier.subresourceRange.levelCount = tex->mipmaps;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
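		// NOTE: _flush(true) submits the pending command buffer and waits for it
		// to finish, so this readback is fully synchronous; convenient for
		// tooling, but too expensive to do per frame.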
		_flush(true);

		void *buffer_mem;
		VkResult vkerr = vmaMapMemory(allocator, tmp_buffer.allocation, &buffer_mem);
		ERR_FAIL_COND_V_MSG(vkerr, Vector<uint8_t>(), "vmaMapMemory failed with error " + itos(vkerr) + ".");

		Vector<uint8_t> buffer_data;
		{
			buffer_data.resize(buffer_size);
			uint8_t *w = buffer_data.ptrw();
			memcpy(w, buffer_mem, buffer_size);
		}

		vmaUnmapMemory(allocator, tmp_buffer.allocation);

		_buffer_free(&tmp_buffer);

		return buffer_data;
	}
}

bool RenderingDeviceVulkan::texture_is_shared(RID p_texture) {
	_THREAD_SAFE_METHOD_

	Texture *tex = texture_owner.getornull(p_texture);
	ERR_FAIL_COND_V(!tex, false);
	return tex->owner.is_valid();
}

bool RenderingDeviceVulkan::texture_is_valid(RID p_texture) {
	return texture_owner.owns(p_texture);
}

Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.getornull(p_from_texture);
	ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be retrieved.");

	uint32_t src_layer_count = src_tex->layers;
	uint32_t src_width, src_height, src_depth;
	get_image_format_required_size(src_tex->format, src_tex->width, src_tex->height, src_tex->depth, p_src_mipmap + 1, &src_width, &src_height, &src_depth);
	if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		src_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_from.x < 0 || p_from.x + p_size.x > src_width, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_from.y < 0 || p_from.y + p_size.y > src_height, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_from.z < 0 || p_from.z + p_size.z > src_depth, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_src_mipmap >= src_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_src_layer >= src_layer_count, ERR_INVALID_PARAMETER);

	Texture *dst_tex = texture_owner.getornull(p_to_texture);
	ERR_FAIL_COND_V(!dst_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(dst_tex->bound, ERR_INVALID_PARAMETER,
			"Destination texture can't be copied to while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Destination texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be copied to.");

	uint32_t dst_layer_count = dst_tex->layers;
	uint32_t dst_width, dst_height, dst_depth;
	get_image_format_required_size(dst_tex->format, dst_tex->width, dst_tex->height, dst_tex->depth, p_dst_mipmap + 1, &dst_width, &dst_height, &dst_depth);
	if (dst_tex->type == TEXTURE_TYPE_CUBE || dst_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		dst_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_to.x < 0 || p_to.x + p_size.x > dst_width, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_to.y < 0 || p_to.y + p_size.y > dst_height, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_to.z < 0 || p_to.z + p_size.z > dst_depth, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_dst_mipmap >= dst_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_dst_layer >= dst_layer_count, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->read_aspect_mask != dst_tex->read_aspect_mask, ERR_INVALID_PARAMETER,
			"Source and destination texture must be of the same type (color or depth).");

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;

	{
		//PRE Copy the image

		{ //Source
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.oldLayout = src_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_src_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_src_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
		{ //Dest
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.oldLayout = dst_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_dst_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_dst_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		//COPY
		{
			VkImageCopy image_copy_region;
			image_copy_region.srcSubresource.aspectMask = src_tex->read_aspect_mask;
			image_copy_region.srcSubresource.baseArrayLayer = p_src_layer;
			image_copy_region.srcSubresource.layerCount = 1;
			image_copy_region.srcSubresource.mipLevel = p_src_mipmap;
			image_copy_region.srcOffset.x = p_from.x;
			image_copy_region.srcOffset.y = p_from.y;
			image_copy_region.srcOffset.z = p_from.z;
			image_copy_region.dstSubresource.aspectMask = dst_tex->read_aspect_mask;
			image_copy_region.dstSubresource.baseArrayLayer = p_dst_layer;
			image_copy_region.dstSubresource.layerCount = 1;
			image_copy_region.dstSubresource.mipLevel = p_dst_mipmap;
			image_copy_region.dstOffset.x = p_to.x;
			image_copy_region.dstOffset.y = p_to.y;
			image_copy_region.dstOffset.z = p_to.z;
			image_copy_region.extent.width = p_size.x;
			image_copy_region.extent.height = p_size.y;
			image_copy_region.extent.depth = p_size.z;

			vkCmdCopyImage(command_buffer, src_tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_tex->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy_region);
		}

		// RESTORE LAYOUT for SRC and DST
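		// Translate the caller-provided post-barrier mask into the stage and
		// access masks that the layout-restore barriers below will signal; if
		// the caller requested no barrier, BOTTOM_OF_PIPE means no later stage
		// waits on this copy.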
		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}

		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		{ //restore src
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.newLayout = src_tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_src_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_src_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
		{ //make dst readable
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			image_memory_barrier.newLayout = dst_tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = dst_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_dst_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_dst_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
	}

	return OK;
}

Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID p_to_texture, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.getornull(p_from_texture);
	ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be retrieved.");
	ERR_FAIL_COND_V_MSG(src_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Source texture must be 2D (or a slice of a 3D/Cube texture).");
	ERR_FAIL_COND_V_MSG(src_tex->samples == TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Source texture must be multisampled.");

	Texture *dst_tex = texture_owner.getornull(p_to_texture);
	ERR_FAIL_COND_V(!dst_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(dst_tex->bound, ERR_INVALID_PARAMETER,
			"Destination texture can't be copied to while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Destination texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be copied to.");
	ERR_FAIL_COND_V_MSG(dst_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Destination texture must be 2D (or a slice of a 3D/Cube texture).");
	ERR_FAIL_COND_V_MSG(dst_tex->samples != TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Destination texture must not be multisampled.");

	ERR_FAIL_COND_V_MSG(src_tex->format != dst_tex->format, ERR_INVALID_PARAMETER, "Source and destination textures must be the same format.");
	ERR_FAIL_COND_V_MSG(src_tex->width != dst_tex->width || src_tex->height != dst_tex->height || src_tex->depth != dst_tex->depth, ERR_INVALID_PARAMETER, "Source and destination textures must have the same dimensions.");

	ERR_FAIL_COND_V_MSG(src_tex->read_aspect_mask != dst_tex->read_aspect_mask, ERR_INVALID_PARAMETER,
			"Source and destination texture must be of the same type (color or depth).");

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;
	{
		//PRE Copy the image

		{ //Source
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.oldLayout = src_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
		{ //Dest
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.oldLayout = dst_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = dst_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = dst_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		//COPY
		{
			VkImageResolve image_copy_region;
			image_copy_region.srcSubresource.aspectMask = src_tex->read_aspect_mask;
			image_copy_region.srcSubresource.baseArrayLayer = src_tex->base_layer;
			image_copy_region.srcSubresource.layerCount = 1;
			image_copy_region.srcSubresource.mipLevel = src_tex->base_mipmap;
			image_copy_region.srcOffset.x = 0;
			image_copy_region.srcOffset.y = 0;
			image_copy_region.srcOffset.z = 0;
			image_copy_region.dstSubresource.aspectMask = dst_tex->read_aspect_mask;
			image_copy_region.dstSubresource.baseArrayLayer = dst_tex->base_layer;
			image_copy_region.dstSubresource.layerCount = 1;
			image_copy_region.dstSubresource.mipLevel = dst_tex->base_mipmap;
			image_copy_region.dstOffset.x = 0;
			image_copy_region.dstOffset.y = 0;
			image_copy_region.dstOffset.z = 0;
			image_copy_region.extent.width = src_tex->width;
			image_copy_region.extent.height = src_tex->height;
			image_copy_region.extent.depth = src_tex->depth;

			vkCmdResolveImage(command_buffer, src_tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_tex->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy_region);
		}
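		// NOTE: vkCmdResolveImage requires a multisampled source and a
		// single-sample destination of the same format, which the checks above
		// enforce; the offsets stay zero because the resolve always covers the
		// whole texture.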
		// RESTORE LAYOUT for SRC and DST

		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}

		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		{ //restore src
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.newLayout = src_tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
		{ //make dst readable
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			image_memory_barrier.newLayout = dst_tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			image_memory_barrier.subresourceRange.baseMipLevel = dst_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = dst_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
	}

	return OK;
}

Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.getornull(p_texture);
	ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be cleared while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");

	ERR_FAIL_COND_V(p_layers == 0, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_mipmaps == 0, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be cleared.");

	uint32_t src_layer_count = src_tex->layers;
	if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		src_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_base_mipmap + p_mipmaps > src_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_base_layer + p_layers > src_layer_count, ERR_INVALID_PARAMETER);

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;

	VkImageLayout clear_layout = (src_tex->layout == VK_IMAGE_LAYOUT_GENERAL) ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
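	// If the texture already lives in VK_IMAGE_LAYOUT_GENERAL (as storage
	// textures do), clear it in place; vkCmdClearColorImage can clear in either
	// GENERAL or TRANSFER_DST_OPTIMAL, so this avoids a pair of transitions.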
	// NOTE: Perhaps the valid stages/accesses for a given owner should be a property of the owner. (Here and in places like _get_buffer_from_owner.)
	const VkPipelineStageFlags valid_texture_stages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
	constexpr VkAccessFlags read_access = VK_ACCESS_SHADER_READ_BIT;
	constexpr VkAccessFlags read_write_access = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	const VkAccessFlags valid_texture_access = (src_tex->usage_flags & TEXTURE_USAGE_STORAGE_BIT) ? read_write_access : read_access;

	{ // Barrier from previous access with optional layout change (see clear_layout logic above)
		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = valid_texture_access;
		image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.oldLayout = src_tex->layout;
		image_memory_barrier.newLayout = clear_layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = src_tex->image;
		image_memory_barrier.subresourceRange.aspectMask = src_tex->read_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
		image_memory_barrier.subresourceRange.levelCount = p_mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer + p_base_layer;
		image_memory_barrier.subresourceRange.layerCount = p_layers;

		vkCmdPipelineBarrier(command_buffer, valid_texture_stages, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}
	VkClearColorValue clear_color;
	clear_color.float32[0] = p_color.r;
	clear_color.float32[1] = p_color.g;
	clear_color.float32[2] = p_color.b;
	clear_color.float32[3] = p_color.a;

	VkImageSubresourceRange range;
	range.aspectMask = src_tex->read_aspect_mask;
	range.baseArrayLayer = src_tex->base_layer + p_base_layer;
	range.layerCount = p_layers;
	range.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
	range.levelCount = p_mipmaps;

	vkCmdClearColorImage(command_buffer, src_tex->image, clear_layout, &clear_color, 1, &range);
	{ // Barrier to post clear accesses (changing back the layout if needed)
		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}

		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.dstAccessMask = access_flags;
		image_memory_barrier.oldLayout = clear_layout;
		image_memory_barrier.newLayout = src_tex->layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = src_tex->image;
		image_memory_barrier.subresourceRange.aspectMask = src_tex->read_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
		image_memory_barrier.subresourceRange.levelCount = p_mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer + p_base_layer;
		image_memory_barrier.subresourceRange.layerCount = p_layers;

		vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}
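	// Reset the per-frame usage flags on the first touch of this frame, then
	// record that the texture was written by a transfer operation; later
	// barrier decisions presumably consult these to know what to synchronize.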
	if (src_tex->used_in_frame != frames_drawn) {
		src_tex->used_in_raster = false;
		src_tex->used_in_compute = false;
		src_tex->used_in_frame = frames_drawn;
	}
	src_tex->used_in_transfer = true;

	return OK;
}

bool RenderingDeviceVulkan::texture_is_format_supported_for_usage(DataFormat p_format, uint32_t p_usage) const {
	ERR_FAIL_INDEX_V(p_format, DATA_FORMAT_MAX, false);

	_THREAD_SAFE_METHOD_

	//validate that this image is supported for the intended use
	VkFormatProperties properties;
	vkGetPhysicalDeviceFormatProperties(context->get_physical_device(), vulkan_formats[p_format], &properties);
	VkFormatFeatureFlags flags;

	if (p_usage & TEXTURE_USAGE_CPU_READ_BIT) {
		flags = properties.linearTilingFeatures;
	} else {
		flags = properties.optimalTilingFeatures;
	}
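	// CPU-readable textures are created with linear tiling (so they can be
	// mapped and read directly), hence the different feature set above.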
	if (p_usage & TEXTURE_USAGE_SAMPLING_BIT && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_STORAGE_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_STORAGE_ATOMIC_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) {
		return false;
	}

	return true;
}
/********************/
/**** ATTACHMENT ****/
/********************/

VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentFormat> &p_attachments, const Vector<FramebufferPass> &p_passes, InitialAction p_initial_action, FinalAction p_final_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, uint32_t p_view_count, Vector<TextureSamples> *r_samples) {
	// Set up dependencies from/to external equivalent to the default (implicit) ones, and then amend them.
	const VkAccessFlags default_access_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
			VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
			VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
			VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; // From Section 7.1 of Vulkan API Spec v1.1.148.
	VkPipelineStageFlags reading_stages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT;
	VkSubpassDependency dependencies[2] = {
		{ VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, default_access_mask, 0 },
		{ 0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, default_access_mask, 0, 0 }
	};
	VkSubpassDependency &dependency_from_external = dependencies[0];
	VkSubpassDependency &dependency_to_external = dependencies[1];
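	// VkSubpassDependency aggregate field order, for reference: srcSubpass,
	// dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask,
	// dependencyFlags.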
	LocalVector<int32_t> attachment_last_pass;
	attachment_last_pass.resize(p_attachments.size());

	Vector<VkAttachmentDescription> attachments;

	for (int i = 0; i < p_attachments.size(); i++) {
		ERR_FAIL_INDEX_V(p_attachments[i].format, DATA_FORMAT_MAX, VK_NULL_HANDLE);
		ERR_FAIL_INDEX_V(p_attachments[i].samples, TEXTURE_SAMPLES_MAX, VK_NULL_HANDLE);
		ERR_FAIL_COND_V_MSG(!(p_attachments[i].usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)),
				VK_NULL_HANDLE, "Texture format for index (" + itos(i) + ") requires an attachment (color, depth-stencil or resolve) bit set.");

		VkAttachmentDescription description = {};
		description.flags = 0;
		description.format = vulkan_formats[p_attachments[i].format];
		description.samples = rasterization_sample_count[p_attachments[i].samples];

		bool is_sampled = p_attachments[i].usage_flags & TEXTURE_USAGE_SAMPLING_BIT;
		bool is_storage = p_attachments[i].usage_flags & TEXTURE_USAGE_STORAGE_BIT;
		bool is_depth = p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;

		// For each UNDEFINED, assume the prior use was a *read*, as we'd be discarding the output of a write.
		// Also, each UNDEFINED will do an immediate layout transition (write), s.t. we must ensure execution synchronization vs
		// the read. If this is a performance issue, one could track the actual last accessor of each resource, adding only that
		// stage.
		switch (is_depth ? p_initial_depth_action : p_initial_action) {
			case INITIAL_ACTION_CLEAR_REGION:
			case INITIAL_ACTION_CLEAR: {
				description.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
				description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
				description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
				dependency_from_external.srcStageMask |= reading_stages;
			} break;
			case INITIAL_ACTION_KEEP: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					dependency_from_external.srcStageMask |= reading_stages;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
					dependency_from_external.srcStageMask |= reading_stages;
				}
			} break;
			case INITIAL_ACTION_DROP: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					dependency_from_external.srcStageMask |= reading_stages;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
					dependency_from_external.srcStageMask |= reading_stages;
				}
			} break;
			case INITIAL_ACTION_CLEAR_REGION_CONTINUE:
			case INITIAL_ACTION_CONTINUE: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
					dependency_from_external.srcStageMask |= reading_stages;
				}
			} break;
			default: {
				ERR_FAIL_V(VK_NULL_HANDLE); //should never reach here
			}
		}
		switch (is_depth ? p_final_depth_action : p_final_action) {
			case FINAL_ACTION_READ: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
					update_external_dependency_for_store(dependency_to_external, is_sampled, is_storage, false);
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
					update_external_dependency_for_store(dependency_to_external, is_sampled, is_storage, true);
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
					// TODO: What does this mean about the next usage (and thus appropriate dependency masks)?
				}
			} break;
			case FINAL_ACTION_DISCARD: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
				}
			} break;
			case FINAL_ACTION_CONTINUE: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
				}
			} break;
			default: {
				ERR_FAIL_V(VK_NULL_HANDLE); //should never reach here
			}
		}

		attachment_last_pass[i] = -1;

		attachments.push_back(description);
	}
	LocalVector<VkSubpassDescription> subpasses;
	LocalVector<LocalVector<VkAttachmentReference>> color_reference_array;
	LocalVector<LocalVector<VkAttachmentReference>> input_reference_array;
	LocalVector<LocalVector<VkAttachmentReference>> resolve_reference_array;
	LocalVector<LocalVector<uint32_t>> preserve_reference_array;
	LocalVector<VkAttachmentReference> depth_reference_array;

	subpasses.resize(p_passes.size());
	color_reference_array.resize(p_passes.size());
	input_reference_array.resize(p_passes.size());
	resolve_reference_array.resize(p_passes.size());
	preserve_reference_array.resize(p_passes.size());
	depth_reference_array.resize(p_passes.size());
	LocalVector<VkSubpassDependency> subpass_dependencies;

	for (int i = 0; i < p_passes.size(); i++) {
		const FramebufferPass *pass = &p_passes[i];

		LocalVector<VkAttachmentReference> &color_references = color_reference_array[i];

		TextureSamples texture_samples = TEXTURE_SAMPLES_1;
		bool is_multisample_first = true;

		for (int j = 0; j < pass->color_attachments.size(); j++) {
			int32_t attachment = pass->color_attachments[j];
			VkAttachmentReference reference;
			if (attachment == FramebufferPass::ATTACHMENT_UNUSED) {
				reference.attachment = VK_ATTACHMENT_UNUSED;
				reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), color attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's not usable as a color attachment.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
				if (is_multisample_first) {
					texture_samples = p_attachments[attachment].samples;
					is_multisample_first = false;
				} else {
					ERR_FAIL_COND_V_MSG(texture_samples != p_attachments[attachment].samples, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "); if one attachment is marked as multisample, all of them should be multisample and use the same number of samples.");
				}
				reference.attachment = attachment;
				reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			color_references.push_back(reference);
		}
		LocalVector<VkAttachmentReference> &input_references = input_reference_array[i];

		for (int j = 0; j < pass->input_attachments.size(); j++) {
			int32_t attachment = pass->input_attachments[j];
			VkAttachmentReference reference;
			if (attachment == FramebufferPass::ATTACHMENT_UNUSED) {
				reference.attachment = VK_ATTACHMENT_UNUSED;
				reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), input attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's not usable as an input attachment.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
				reference.attachment = attachment;
				reference.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			input_references.push_back(reference);
		}
		LocalVector<VkAttachmentReference> &resolve_references = resolve_reference_array[i];

		if (pass->resolve_attachments.size() > 0) {
			ERR_FAIL_COND_V_MSG(pass->resolve_attachments.size() != pass->color_attachments.size(), VK_NULL_HANDLE, "The amount of resolve attachments (" + itos(pass->resolve_attachments.size()) + ") must match the number of color attachments (" + itos(pass->color_attachments.size()) + ").");
			ERR_FAIL_COND_V_MSG(texture_samples == TEXTURE_SAMPLES_1, VK_NULL_HANDLE, "Resolve attachments specified, but color attachments are not multisample.");
		}

		for (int j = 0; j < pass->resolve_attachments.size(); j++) {
			int32_t attachment = pass->resolve_attachments[j];
			VkAttachmentReference reference;
			if (attachment == FramebufferPass::ATTACHMENT_UNUSED) {
				reference.attachment = VK_ATTACHMENT_UNUSED;
				reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(pass->color_attachments[j] == FramebufferPass::ATTACHMENT_UNUSED, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachment (" + itos(j) + "), the respective color attachment is marked as unused.");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's not usable as a resolve attachment.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
				bool multisample = p_attachments[attachment].samples > TEXTURE_SAMPLES_1;
				ERR_FAIL_COND_V_MSG(multisample, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachments can't be multisample.");
				reference.attachment = attachment;
				reference.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			resolve_references.push_back(reference);
		}
		LocalVector<uint32_t> &preserve_references = preserve_reference_array[i];

		for (int j = 0; j < pass->preserve_attachments.size(); j++) {
			int32_t attachment = pass->preserve_attachments[j];
			ERR_FAIL_COND_V_MSG(attachment == FramebufferPass::ATTACHMENT_UNUSED, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), preserve attachment (" + itos(j) + "). Preserve attachments can't be unused.");
			ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), preserve attachment (" + itos(j) + ").");
			ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
			attachment_last_pass[attachment] = i;
			preserve_references.push_back(attachment);
		}

		VkAttachmentReference &depth_stencil_reference = depth_reference_array[i];

		if (pass->depth_attachment != FramebufferPass::ATTACHMENT_UNUSED) {
			int32_t attachment = pass->depth_attachment;
			ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), depth attachment.");
			ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as depth, but it's not a depth attachment.");
			ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
			depth_stencil_reference.attachment = attachment;
			depth_stencil_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
			attachment_last_pass[attachment] = i;

			if (is_multisample_first) {
				texture_samples = p_attachments[attachment].samples;
				is_multisample_first = false;
			} else {
				ERR_FAIL_COND_V_MSG(texture_samples != p_attachments[attachment].samples, VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "); if one attachment is marked as multisample, all of them (including depth) should be multisample and use the same number of samples.");
			}
		} else {
			depth_stencil_reference.attachment = VK_ATTACHMENT_UNUSED;
			depth_stencil_reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
		}
		VkSubpassDescription &subpass = subpasses[i];
		subpass.flags = 0;
		subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
		subpass.inputAttachmentCount = input_references.size();
		if (input_references.size()) {
			subpass.pInputAttachments = input_references.ptr();
		} else {
			subpass.pInputAttachments = nullptr;
		}
		subpass.colorAttachmentCount = color_references.size();
		if (color_references.size()) {
			subpass.pColorAttachments = color_references.ptr();
		} else {
			subpass.pColorAttachments = nullptr;
		}
		if (depth_stencil_reference.attachment != VK_ATTACHMENT_UNUSED) {
			subpass.pDepthStencilAttachment = &depth_stencil_reference;
		} else {
			subpass.pDepthStencilAttachment = nullptr;
		}
		if (resolve_references.size()) {
			subpass.pResolveAttachments = resolve_references.ptr();
		} else {
			subpass.pResolveAttachments = nullptr;
		}
		subpass.preserveAttachmentCount = preserve_references.size();
		if (preserve_references.size()) {
			subpass.pPreserveAttachments = preserve_references.ptr();
		} else {
			subpass.pPreserveAttachments = nullptr;
		}

		if (r_samples) {
			r_samples->push_back(texture_samples);
		}

		if (i > 0) {
			VkSubpassDependency dependency;
			dependency.srcSubpass = i - 1;
			dependency.dstSubpass = i;
			dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
			dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			dependency.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
			dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
			dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
			subpass_dependencies.push_back(dependency);
		}
/*
// NOTE: Big Mallet Approach -- any layout transition causes a full barrier
if ( reference . layout ! = description . initialLayout ) {
// NOTE: this should be smarter based on the texture's knowledge of its previous role
dependency_from_external . srcStageMask | = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT ;
dependency_from_external . srcAccessMask | = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT ;
}
if ( reference . layout ! = description . finalLayout ) {
// NOTE: this should be smarter based on the texture's knowledge of its subsequent role
dependency_to_external . dstStageMask | = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT ;
dependency_to_external . dstAccessMask | = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT ;
}
*/
}
VkRenderPassCreateInfo render_pass_create_info ;
render_pass_create_info . sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO ;
render_pass_create_info . pNext = nullptr ;
render_pass_create_info . flags = 0 ;
render_pass_create_info . attachmentCount = attachments . size ( ) ;
render_pass_create_info . pAttachments = attachments . ptr ( ) ;
render_pass_create_info . subpassCount = subpasses . size ( ) ;
render_pass_create_info . pSubpasses = subpasses . ptr ( ) ;
// Commenting this out because it seems to just prevent raster and compute from working at the same time.
// The other barriers seem to protect the render pass fine.
// render_pass_create_info.dependencyCount = 2;
// render_pass_create_info.pDependencies = dependencies;
render_pass_create_info . dependencyCount = subpass_dependencies . size ( ) ;
if ( subpass_dependencies . size ( ) ) {
render_pass_create_info . pDependencies = subpass_dependencies . ptr ( ) ;
} else {
render_pass_create_info . pDependencies = nullptr ;
}
const uint32_t view_mask = ( 1 < < p_view_count ) - 1 ;
const uint32_t correlation_mask = ( 1 < < p_view_count ) - 1 ;
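// One bit per rendered view in both masks, e.g. p_view_count == 2 (stereo) yields 0b11.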
VkRenderPassMultiviewCreateInfo render_pass_multiview_create_info ;
if ( p_view_count > 1 ) {
const VulkanContext : : MultiviewCapabilities capabilities = context - > get_multiview_capabilities ( ) ;
// For now this only works with multiview!
ERR_FAIL_COND_V_MSG ( ! capabilities . is_supported , VK_NULL_HANDLE , " Multiview not supported " ) ;
// Make sure we limit this to the number of views we support.
ERR_FAIL_COND_V_MSG ( p_view_count > capabilities . max_view_count , VK_NULL_HANDLE , " Hardware does not support requested number of views for Multiview render pass " ) ;
render_pass_multiview_create_info . sType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO ;
render_pass_multiview_create_info . pNext = nullptr ;
render_pass_multiview_create_info . subpassCount = 1 ;
render_pass_multiview_create_info . pViewMasks = & view_mask ;
render_pass_multiview_create_info . dependencyCount = 0 ;
render_pass_multiview_create_info . pViewOffsets = nullptr ;
render_pass_multiview_create_info . correlationMaskCount = 1 ;
render_pass_multiview_create_info . pCorrelationMasks = & correlation_mask ;
render_pass_create_info . pNext = & render_pass_multiview_create_info ;
}
VkRenderPass render_pass ;
VkResult res = vkCreateRenderPass ( device , & render_pass_create_info , nullptr , & render_pass ) ;
ERR_FAIL_COND_V_MSG ( res , VK_NULL_HANDLE , " vkCreateRenderPass failed with error " + itos ( res ) + " . " ) ;
return render_pass ;
}
RenderingDevice : : FramebufferFormatID RenderingDeviceVulkan : : framebuffer_format_create ( const Vector < AttachmentFormat > & p_format , uint32_t p_view_count ) {
FramebufferPass pass ;
for ( int i = 0 ; i < p_format . size ( ) ; i + + ) {
if ( p_format [ i ] . usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT ) {
pass . depth_attachment = i ;
} else {
pass . color_attachments . push_back ( i ) ;
}
}
Vector < FramebufferPass > passes ;
passes . push_back ( pass ) ;
return framebuffer_format_create_multipass ( p_format , passes , p_view_count ) ;
}
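// Illustrative usage sketch (not engine code; enum values assumed from the
// RenderingDevice API): describing a simple one-pass color + depth format.
//
//   AttachmentFormat color, depth;
//   color.format = DATA_FORMAT_R8G8B8A8_UNORM;
//   color.samples = TEXTURE_SAMPLES_1;
//   color.usage_flags = TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
//   depth.format = DATA_FORMAT_D32_SFLOAT;
//   depth.samples = TEXTURE_SAMPLES_1;
//   depth.usage_flags = TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
//   Vector<AttachmentFormat> attachments;
//   attachments.push_back(color);
//   attachments.push_back(depth);
//   FramebufferFormatID fmt = framebuffer_format_create(attachments, 1);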
RenderingDevice : : FramebufferFormatID RenderingDeviceVulkan : : framebuffer_format_create_multipass ( const Vector < AttachmentFormat > & p_attachments , Vector < FramebufferPass > & p_passes , uint32_t p_view_count ) {
_THREAD_SAFE_METHOD_
FramebufferFormatKey key ;
key . attachments = p_attachments ;
key . passes = p_passes ;
key . view_count = p_view_count ;
const Map < FramebufferFormatKey , FramebufferFormatID > : : Element * E = framebuffer_format_cache . find ( key ) ;
if ( E ) {
//exists, return
return E - > get ( ) ;
}
Vector < TextureSamples > samples ;
VkRenderPass render_pass = _render_pass_create ( p_attachments , p_passes , INITIAL_ACTION_CLEAR , FINAL_ACTION_READ , INITIAL_ACTION_CLEAR , FINAL_ACTION_READ , p_view_count , & samples ) ; //actions don't matter for this use case
if ( render_pass = = VK_NULL_HANDLE ) { //was likely invalid
return INVALID_ID ;
}
FramebufferFormatID id = FramebufferFormatID ( framebuffer_format_cache . size ( ) ) | ( FramebufferFormatID ( ID_TYPE_FRAMEBUFFER_FORMAT ) < < FramebufferFormatID ( ID_BASE_SHIFT ) ) ;
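// The returned ID packs the cache index into the low bits and an
// ID_TYPE_FRAMEBUFFER_FORMAT tag into the high bits, so framebuffer format
// IDs can be told apart from other ID kinds.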
E = framebuffer_format_cache . insert ( key , id ) ;
FramebufferFormat fb_format ;
fb_format . E = E ;
fb_format . render_pass = render_pass ;
fb_format . pass_samples = samples ;
fb_format . view_count = p_view_count ;
framebuffer_formats [ id ] = fb_format ;
return id ;
}
RenderingDevice : : FramebufferFormatID RenderingDeviceVulkan : : framebuffer_format_create_empty ( TextureSamples p_samples ) {
FramebufferFormatKey key ;
key . passes . push_back ( FramebufferPass ( ) ) ;
const Map < FramebufferFormatKey , FramebufferFormatID > : : Element * E = framebuffer_format_cache . find ( key ) ;
if ( E ) {
//exists, return
return E - > get ( ) ;
}
VkSubpassDescription subpass ;
subpass . flags = 0 ;
subpass . pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS ;
subpass . inputAttachmentCount = 0 ; //unsupported for now
subpass . pInputAttachments = nullptr ;
subpass . colorAttachmentCount = 0 ;
subpass . pColorAttachments = nullptr ;
subpass . pDepthStencilAttachment = nullptr ;
subpass . pResolveAttachments = nullptr ;
subpass . preserveAttachmentCount = 0 ;
subpass . pPreserveAttachments = nullptr ;
VkRenderPassCreateInfo render_pass_create_info ;
render_pass_create_info . sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO ;
render_pass_create_info . pNext = nullptr ;
render_pass_create_info . flags = 0 ;
render_pass_create_info . attachmentCount = 0 ;
render_pass_create_info . pAttachments = nullptr ;
render_pass_create_info . subpassCount = 1 ;
render_pass_create_info . pSubpasses = & subpass ;
render_pass_create_info . dependencyCount = 0 ;
render_pass_create_info . pDependencies = nullptr ;
VkRenderPass render_pass ;
VkResult res = vkCreateRenderPass ( device , & render_pass_create_info , nullptr , & render_pass ) ;
ERR_FAIL_COND_V_MSG ( res , VK_NULL_HANDLE , " vkCreateRenderPass for empty fb failed with error " + itos ( res ) + " . " ) ;
if ( render_pass = = VK_NULL_HANDLE ) { //was likely invalid
return INVALID_ID ;
}
FramebufferFormatID id = FramebufferFormatID ( framebuffer_format_cache . size ( ) ) | ( FramebufferFormatID ( ID_TYPE_FRAMEBUFFER_FORMAT ) < < FramebufferFormatID ( ID_BASE_SHIFT ) ) ;
E = framebuffer_format_cache . insert ( key , id ) ;
FramebufferFormat fb_format ;
fb_format . E = E ;
fb_format . render_pass = render_pass ;
fb_format . pass_samples . push_back ( p_samples ) ;
framebuffer_formats [ id ] = fb_format ;
return id ;
}
RenderingDevice : : TextureSamples RenderingDeviceVulkan : : framebuffer_format_get_texture_samples ( FramebufferFormatID p_format , uint32_t p_pass ) {
Map < FramebufferFormatID , FramebufferFormat > : : Element * E = framebuffer_formats . find ( p_format ) ;
ERR_FAIL_COND_V ( ! E , TEXTURE_SAMPLES_1 ) ;
ERR_FAIL_COND_V ( p_pass > = uint32_t ( E - > get ( ) . pass_samples . size ( ) ) , TEXTURE_SAMPLES_1 ) ;
return E - > get ( ) . pass_samples [ p_pass ] ;
}
/***********************/
/**** RENDER TARGET ****/
/***********************/
RID RenderingDeviceVulkan : : framebuffer_create_empty ( const Size2i & p_size , TextureSamples p_samples , FramebufferFormatID p_format_check ) {
_THREAD_SAFE_METHOD_
Framebuffer framebuffer ;
framebuffer . format_id = framebuffer_format_create_empty ( p_samples ) ;
ERR_FAIL_COND_V ( p_format_check ! = INVALID_FORMAT_ID & & framebuffer . format_id ! = p_format_check , RID ( ) ) ;
framebuffer . size = p_size ;
framebuffer . view_count = 1 ;
return framebuffer_owner . make_rid ( framebuffer ) ;
}
RID RenderingDeviceVulkan : : framebuffer_create ( const Vector < RID > & p_texture_attachments , FramebufferFormatID p_format_check , uint32_t p_view_count ) {
_THREAD_SAFE_METHOD_
FramebufferPass pass ;
for ( int i = 0 ; i < p_texture_attachments . size ( ) ; i + + ) {
Texture * texture = texture_owner . getornull ( p_texture_attachments [ i ] ) ;
ERR_FAIL_COND_V_MSG ( ! texture , RID ( ) , " Texture index supplied for framebuffer ( " + itos ( i ) + " ) is not a valid texture. " ) ;
ERR_FAIL_COND_V_MSG(texture->layers != p_view_count, RID(), "Layer count of the supplied texture does not match the view count for this framebuffer.");
if ( texture - > usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT ) {
pass . depth_attachment = i ;
} else {
pass . color_attachments . push_back ( i ) ;
}
}
Vector < FramebufferPass > passes ;
passes . push_back ( pass ) ;
return framebuffer_create_multipass ( p_texture_attachments , passes , p_format_check , p_view_count ) ;
}
RID RenderingDeviceVulkan : : framebuffer_create_multipass ( const Vector < RID > & p_texture_attachments , Vector < FramebufferPass > & p_passes , FramebufferFormatID p_format_check , uint32_t p_view_count ) {
_THREAD_SAFE_METHOD_
Vector < AttachmentFormat > attachments ;
Size2i size ;
for ( int i = 0 ; i < p_texture_attachments . size ( ) ; i + + ) {
Texture * texture = texture_owner . getornull ( p_texture_attachments [ i ] ) ;
ERR_FAIL_COND_V_MSG ( ! texture , RID ( ) , " Texture index supplied for framebuffer ( " + itos ( i ) + " ) is not a valid texture. " ) ;
ERR_FAIL_COND_V_MSG(texture->layers != p_view_count, RID(), "Layer count of the supplied texture does not match the view count for this framebuffer.");
if ( i = = 0 ) {
size . width = texture - > width ;
size . height = texture - > height ;
} else {
ERR_FAIL_COND_V_MSG ( ( uint32_t ) size . width ! = texture - > width | | ( uint32_t ) size . height ! = texture - > height , RID ( ) ,
" All textures in a framebuffer should be the same size. " ) ;
}
AttachmentFormat af ;
af . format = texture - > format ;
af . samples = texture - > samples ;
af . usage_flags = texture - > usage_flags ;
attachments . push_back ( af ) ;
}
FramebufferFormatID format_id = framebuffer_format_create_multipass ( attachments , p_passes , p_view_count ) ;
if ( format_id = = INVALID_ID ) {
return RID ( ) ;
}
ERR_FAIL_COND_V_MSG ( p_format_check ! = INVALID_ID & & format_id ! = p_format_check , RID ( ) ,
" The format used to check this framebuffer differs from the intended framebuffer format. " ) ;
Framebuffer framebuffer ;
framebuffer . format_id = format_id ;
framebuffer . texture_ids = p_texture_attachments ;
framebuffer . size = size ;
framebuffer . view_count = p_view_count ;
RID id = framebuffer_owner . make_rid ( framebuffer ) ;
for ( int i = 0 ; i < p_texture_attachments . size ( ) ; i + + ) {
_add_dependency ( id , p_texture_attachments [ i ] ) ;
}
return id ;
}
RenderingDevice : : FramebufferFormatID RenderingDeviceVulkan : : framebuffer_get_format ( RID p_framebuffer ) {
_THREAD_SAFE_METHOD_
Framebuffer * framebuffer = framebuffer_owner . getornull ( p_framebuffer ) ;
ERR_FAIL_COND_V ( ! framebuffer , INVALID_ID ) ;
return framebuffer - > format_id ;
}
/*****************/
/**** SAMPLER ****/
/*****************/
RID RenderingDeviceVulkan : : sampler_create ( const SamplerState & p_state ) {
_THREAD_SAFE_METHOD_
VkSamplerCreateInfo sampler_create_info ;
sampler_create_info . sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO ;
sampler_create_info . pNext = nullptr ;
sampler_create_info . flags = 0 ;
sampler_create_info . magFilter = p_state . mag_filter = = SAMPLER_FILTER_LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST ;
sampler_create_info . minFilter = p_state . min_filter = = SAMPLER_FILTER_LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST ;
sampler_create_info . mipmapMode = p_state . mip_filter = = SAMPLER_FILTER_LINEAR ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST ;
ERR_FAIL_INDEX_V ( p_state . repeat_u , SAMPLER_REPEAT_MODE_MAX , RID ( ) ) ;
sampler_create_info . addressModeU = address_modes [ p_state . repeat_u ] ;
ERR_FAIL_INDEX_V ( p_state . repeat_v , SAMPLER_REPEAT_MODE_MAX , RID ( ) ) ;
sampler_create_info . addressModeV = address_modes [ p_state . repeat_v ] ;
ERR_FAIL_INDEX_V ( p_state . repeat_w , SAMPLER_REPEAT_MODE_MAX , RID ( ) ) ;
sampler_create_info . addressModeW = address_modes [ p_state . repeat_w ] ;
sampler_create_info . mipLodBias = p_state . lod_bias ;
sampler_create_info . anisotropyEnable = p_state . use_anisotropy ;
sampler_create_info . maxAnisotropy = p_state . anisotropy_max ;
sampler_create_info . compareEnable = p_state . enable_compare ;
ERR_FAIL_INDEX_V ( p_state . compare_op , COMPARE_OP_MAX , RID ( ) ) ;
sampler_create_info . compareOp = compare_operators [ p_state . compare_op ] ;
sampler_create_info . minLod = p_state . min_lod ;
sampler_create_info . maxLod = p_state . max_lod ;
ERR_FAIL_INDEX_V ( p_state . border_color , SAMPLER_BORDER_COLOR_MAX , RID ( ) ) ;
sampler_create_info . borderColor = sampler_border_colors [ p_state . border_color ] ;
sampler_create_info . unnormalizedCoordinates = p_state . unnormalized_uvw ;
VkSampler sampler ;
VkResult res = vkCreateSampler ( device , & sampler_create_info , nullptr , & sampler ) ;
ERR_FAIL_COND_V_MSG ( res , RID ( ) , " vkCreateSampler failed with error " + itos ( res ) + " . " ) ;
return sampler_owner . make_rid ( sampler ) ;
}
/**********************/
/**** VERTEX ARRAY ****/
/**********************/
RID RenderingDeviceVulkan : : vertex_buffer_create ( uint32_t p_size_bytes , const Vector < uint8_t > & p_data , bool p_use_as_storage ) {
_THREAD_SAFE_METHOD_
ERR_FAIL_COND_V ( p_data . size ( ) & & ( uint32_t ) p_data . size ( ) ! = p_size_bytes , RID ( ) ) ;
ERR_FAIL_COND_V_MSG ( draw_list ! = nullptr & & p_data . size ( ) , RID ( ) ,
" Creating buffers with data is forbidden during creation of a draw list " ) ;
ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
		"Creating buffers with data is forbidden during creation of a compute list");
uint32_t usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT ;
if ( p_use_as_storage ) {
usage | = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT ;
}
Buffer buffer ;
_buffer_allocate ( & buffer , p_size_bytes , usage , VMA_MEMORY_USAGE_GPU_ONLY ) ;
if ( p_data . size ( ) ) {
uint64_t data_size = p_data . size ( ) ;
const uint8_t * r = p_data . ptr ( ) ;
_buffer_update ( & buffer , 0 , r , data_size ) ;
_buffer_memory_barrier ( buffer . buffer , 0 , data_size , VK_PIPELINE_STAGE_TRANSFER_BIT , VK_PIPELINE_STAGE_VERTEX_INPUT_BIT , VK_ACCESS_TRANSFER_WRITE_BIT , VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT , false ) ;
}
return vertex_buffer_owner . make_rid ( buffer ) ;
}
// Internally reference counted, this ID is guaranteed to be unique for the same description, but it must be freed as many times as it was allocated.
RenderingDevice : : VertexFormatID RenderingDeviceVulkan : : vertex_format_create ( const Vector < VertexAttribute > & p_vertex_formats ) {
_THREAD_SAFE_METHOD_
VertexDescriptionKey key ;
key . vertex_formats = p_vertex_formats ;
VertexFormatID * idptr = vertex_format_cache . getptr ( key ) ;
if ( idptr ) {
return * idptr ;
2019-06-07 16:07:57 +00:00
}
//does not exist, create one and cache it
VertexDescriptionCache vdcache ;
vdcache . bindings = memnew_arr ( VkVertexInputBindingDescription , p_vertex_formats . size ( ) ) ;
vdcache . attributes = memnew_arr ( VkVertexInputAttributeDescription , p_vertex_formats . size ( ) ) ;
Set < int > used_locations ;
for ( int i = 0 ; i < p_vertex_formats . size ( ) ; i + + ) {
ERR_CONTINUE ( p_vertex_formats [ i ] . format > = DATA_FORMAT_MAX ) ;
ERR_FAIL_COND_V ( used_locations . has ( p_vertex_formats [ i ] . location ) , INVALID_ID ) ;
ERR_FAIL_COND_V_MSG ( get_format_vertex_size ( p_vertex_formats [ i ] . format ) = = 0 , INVALID_ID ,
" Data format for attachment ( " + itos ( i ) + " ), ' " + named_formats [ p_vertex_formats [ i ] . format ] + " ', is not valid for a vertex array. " ) ;
vdcache . bindings [ i ] . binding = i ;
vdcache . bindings [ i ] . stride = p_vertex_formats [ i ] . stride ;
vdcache . bindings [ i ] . inputRate = p_vertex_formats [ i ] . frequency = = VERTEX_FREQUENCY_INSTANCE ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX ;
vdcache . attributes [ i ] . binding = i ;
vdcache . attributes [ i ] . location = p_vertex_formats [ i ] . location ;
vdcache . attributes [ i ] . format = vulkan_formats [ p_vertex_formats [ i ] . format ] ;
vdcache . attributes [ i ] . offset = p_vertex_formats [ i ] . offset ;
used_locations . insert ( p_vertex_formats [ i ] . location ) ;
}
vdcache . create_info . sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO ;
vdcache . create_info . pNext = nullptr ;
vdcache . create_info . flags = 0 ;
vdcache . create_info . vertexAttributeDescriptionCount = p_vertex_formats . size ( ) ;
vdcache . create_info . pVertexAttributeDescriptions = vdcache . attributes ;
vdcache . create_info . vertexBindingDescriptionCount = p_vertex_formats . size ( ) ;
vdcache . create_info . pVertexBindingDescriptions = vdcache . bindings ;
vdcache . vertex_formats = p_vertex_formats ;
VertexFormatID id = VertexFormatID ( vertex_format_cache . size ( ) ) | ( VertexFormatID ( ID_TYPE_VERTEX_FORMAT ) < < ID_BASE_SHIFT ) ;
vertex_format_cache [ key ] = id ;
vertex_formats [ id ] = vdcache ;
return id ;
}
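// Illustrative usage sketch (not engine code; format values assumed from the
// RenderingDevice API): an interleaved position (vec3) + UV (vec2) layout with
// a 20-byte stride. Each attribute gets its own binding here, so the same
// vertex buffer RID would be passed once per attribute to vertex_array_create().
//
//   VertexAttribute pos, uv;
//   pos.location = 0;
//   pos.format = DATA_FORMAT_R32G32B32_SFLOAT;
//   pos.offset = 0;
//   pos.stride = 20;
//   uv.location = 1;
//   uv.format = DATA_FORMAT_R32G32_SFLOAT;
//   uv.offset = 12;
//   uv.stride = 20;
//   Vector<VertexAttribute> attrs;
//   attrs.push_back(pos);
//   attrs.push_back(uv);
//   VertexFormatID vf = vertex_format_create(attrs);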
RID RenderingDeviceVulkan : : vertex_array_create ( uint32_t p_vertex_count , VertexFormatID p_vertex_format , const Vector < RID > & p_src_buffers ) {
_THREAD_SAFE_METHOD_
ERR_FAIL_COND_V ( ! vertex_formats . has ( p_vertex_format ) , RID ( ) ) ;
const VertexDescriptionCache & vd = vertex_formats [ p_vertex_format ] ;
ERR_FAIL_COND_V ( vd . vertex_formats . size ( ) ! = p_src_buffers . size ( ) , RID ( ) ) ;
for ( int i = 0 ; i < p_src_buffers . size ( ) ; i + + ) {
ERR_FAIL_COND_V ( ! vertex_buffer_owner . owns ( p_src_buffers [ i ] ) , RID ( ) ) ;
}
VertexArray vertex_array ;
vertex_array . vertex_count = p_vertex_count ;
vertex_array . description = p_vertex_format ;
vertex_array . max_instances_allowed = 0xFFFFFFFF ; //by default as many as you want
for ( int i = 0 ; i < p_src_buffers . size ( ) ; i + + ) {
Buffer * buffer = vertex_buffer_owner . getornull ( p_src_buffers [ i ] ) ;
//validate with buffer
{
const VertexAttribute & atf = vd . vertex_formats [ i ] ;
uint32_t element_size = get_format_vertex_size ( atf . format ) ;
ERR_FAIL_COND_V(element_size == 0, RID()); //should never happen since this was prevalidated
if ( atf . frequency = = VERTEX_FREQUENCY_VERTEX ) {
//validate size for regular drawing
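// The last vertex starts at stride * (p_vertex_count - 1) + offset and spans
// element_size bytes; anything beyond buffer->size would be an out-of-bounds read.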
uint64_t total_size = uint64_t ( atf . stride ) * ( p_vertex_count - 1 ) + atf . offset + element_size ;
ERR_FAIL_COND_V_MSG ( total_size > buffer - > size , RID ( ) ,
" Attachment ( " + itos ( i ) + " ) will read past the end of the buffer. " ) ;
} else {
//validate size for instances drawing
uint64_t available = buffer - > size - atf . offset ;
ERR_FAIL_COND_V_MSG(available < element_size, RID(),
		"Attachment (" + itos(i) + ") uses instancing, but the buffer is too small.");
uint32_t instances_allowed = available / atf . stride ;
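// Each per-instance buffer can supply (size - offset) / stride instances; the
// array's limit is the smallest such count across all instanced attributes.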
vertex_array . max_instances_allowed = MIN ( instances_allowed , vertex_array . max_instances_allowed ) ;
}
}
vertex_array . buffers . push_back ( buffer - > buffer ) ;
vertex_array . offsets . push_back ( 0 ) ; //offset unused, but passing anyway
}
RID id = vertex_array_owner . make_rid ( vertex_array ) ;
for ( int i = 0 ; i < p_src_buffers . size ( ) ; i + + ) {
_add_dependency ( id , p_src_buffers [ i ] ) ;
}
return id ;
}
RID RenderingDeviceVulkan : : index_buffer_create ( uint32_t p_index_count , IndexBufferFormat p_format , const Vector < uint8_t > & p_data , bool p_use_restart_indices ) {
_THREAD_SAFE_METHOD_
ERR_FAIL_COND_V_MSG ( draw_list ! = nullptr & & p_data . size ( ) , RID ( ) ,
" Creating buffers with data is forbidden during creation of a draw list " ) ;
ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
		"Creating buffers with data is forbidden during creation of a compute list");
ERR_FAIL_COND_V ( p_index_count = = 0 , RID ( ) ) ;
IndexBuffer index_buffer ;
index_buffer . index_type = ( p_format = = INDEX_BUFFER_FORMAT_UINT16 ) ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32 ;
index_buffer . supports_restart_indices = p_use_restart_indices ;
index_buffer . index_count = p_index_count ;
uint32_t size_bytes = p_index_count * ( ( p_format = = INDEX_BUFFER_FORMAT_UINT16 ) ? 2 : 4 ) ;
# ifdef DEBUG_ENABLED
if ( p_data . size ( ) ) {
index_buffer . max_index = 0 ;
ERR_FAIL_COND_V_MSG ( ( uint32_t ) p_data . size ( ) ! = size_bytes , RID ( ) ,
" Default index buffer initializer array size ( " + itos ( p_data . size ( ) ) + " ) does not match format required size ( " + itos ( size_bytes ) + " ). " ) ;
const uint8_t * r = p_data . ptr ( ) ;
if ( p_format = = INDEX_BUFFER_FORMAT_UINT16 ) {
const uint16_t * index16 = ( const uint16_t * ) r ;
for ( uint32_t i = 0 ; i < p_index_count ; i + + ) {
if ( p_use_restart_indices & & index16 [ i ] = = 0xFFFF ) {
continue ; //restart index, ignore
}
index_buffer . max_index = MAX ( index16 [ i ] , index_buffer . max_index ) ;
}
} else {
const uint32_t * index32 = ( const uint32_t * ) r ;
for ( uint32_t i = 0 ; i < p_index_count ; i + + ) {
if ( p_use_restart_indices & & index32 [ i ] = = 0xFFFFFFFF ) {
continue ; //restart index, ignore
}
index_buffer . max_index = MAX ( index32 [ i ] , index_buffer . max_index ) ;
}
}
} else {
index_buffer . max_index = 0xFFFFFFFF ;
}
# else
index_buffer . max_index = 0xFFFFFFFF ;
# endif
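// In debug builds max_index records the highest index seen, so draws using
// this buffer can later be sanity-checked against the bound vertex arrays;
// without data (or in release builds) it stays permissive at 0xFFFFFFFF.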
_buffer_allocate ( & index_buffer , size_bytes , VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT , VMA_MEMORY_USAGE_GPU_ONLY ) ;
if ( p_data . size ( ) ) {
uint64_t data_size = p_data . size ( ) ;
const uint8_t * r = p_data . ptr ( ) ;
_buffer_update ( & index_buffer , 0 , r , data_size ) ;
_buffer_memory_barrier ( index_buffer . buffer , 0 , data_size , VK_PIPELINE_STAGE_TRANSFER_BIT , VK_PIPELINE_STAGE_VERTEX_INPUT_BIT , VK_ACCESS_TRANSFER_WRITE_BIT , VK_ACCESS_INDEX_READ_BIT , false ) ;
}
return index_buffer_owner . make_rid ( index_buffer ) ;
}
RID RenderingDeviceVulkan : : index_array_create ( RID p_index_buffer , uint32_t p_index_offset , uint32_t p_index_count ) {
_THREAD_SAFE_METHOD_
ERR_FAIL_COND_V ( ! index_buffer_owner . owns ( p_index_buffer ) , RID ( ) ) ;
IndexBuffer * index_buffer = index_buffer_owner . getornull ( p_index_buffer ) ;
ERR_FAIL_COND_V ( p_index_count = = 0 , RID ( ) ) ;
ERR_FAIL_COND_V ( p_index_offset + p_index_count > index_buffer - > index_count , RID ( ) ) ;
IndexArray index_array ;
index_array . max_index = index_buffer - > max_index ;
index_array . buffer = index_buffer - > buffer ;
index_array . offset = p_index_offset ;
index_array . indices = p_index_count ;
index_array . index_type = index_buffer - > index_type ;
index_array . supports_restart_indices = index_buffer - > supports_restart_indices ;
RID id = index_array_owner . make_rid ( index_array ) ;
_add_dependency ( id , p_index_buffer ) ;
return id ;
}
/****************/
/**** SHADER ****/
/****************/
static const char *shader_stage_names[RenderingDevice::SHADER_STAGE_MAX] = {
	"Vertex",
	"Fragment",
	"TessellationControl",
	"TessellationEvaluation",
	"Compute"
};
static const char * shader_uniform_names [ RenderingDevice : : UNIFORM_TYPE_MAX ] = {
" Sampler " , " CombinedSampler " , " Texture " , " Image " , " TextureBuffer " , " SamplerTextureBuffer " , " ImageBuffer " , " UniformBuffer " , " StorageBuffer " , " InputAttachment "
} ;
static VkShaderStageFlagBits shader_stage_masks [ RenderingDevice : : SHADER_STAGE_MAX ] = {
VK_SHADER_STAGE_VERTEX_BIT ,
VK_SHADER_STAGE_FRAGMENT_BIT ,
VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ,
VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ,
VK_SHADER_STAGE_COMPUTE_BIT ,
} ;
String RenderingDeviceVulkan : : _shader_uniform_debug ( RID p_shader , int p_set ) {
String ret ;
const Shader * shader = shader_owner . getornull ( p_shader ) ;
ERR_FAIL_COND_V ( ! shader , String ( ) ) ;
for ( int i = 0 ; i < shader - > sets . size ( ) ; i + + ) {
if ( p_set > = 0 & & i ! = p_set ) {
continue ;
}
for ( int j = 0 ; j < shader - > sets [ i ] . uniform_info . size ( ) ; j + + ) {
const UniformInfo & ui = shader - > sets [ i ] . uniform_info [ j ] ;
if ( ret ! = String ( ) ) {
ret + = " \n " ;
}
ret + = " Set: " + itos ( i ) + " Binding: " + itos ( ui . binding ) + " Type: " + shader_uniform_names [ ui . type ] + " Length: " + itos ( ui . length ) ;
}
}
return ret ;
}
#if 0
bool RenderingDeviceVulkan : : _uniform_add_binding ( Vector < Vector < VkDescriptorSetLayoutBinding > > & bindings , Vector < Vector < UniformInfo > > & uniform_infos , const glslang : : TObjectReflection & reflection , RenderingDevice : : ShaderStage p_stage , Shader : : PushConstant & push_constant , String * r_error ) {
VkDescriptorSetLayoutBinding layout_binding ;
UniformInfo info ;
switch ( reflection . getType ( ) - > getBasicType ( ) ) {
case glslang : : EbtSampler : {
//print_line("DEBUG: IsSampler");
if ( reflection . getType ( ) - > getSampler ( ) . dim = = glslang : : EsdBuffer ) {
//texture buffers
if ( reflection . getType ( ) - > getSampler ( ) . isCombined ( ) ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ;
info . type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER ;
//print_line("DEBUG: SAMPLER: texel combined");
} else if ( reflection . getType ( ) - > getSampler ( ) . isTexture ( ) ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ;
info . type = UNIFORM_TYPE_TEXTURE_BUFFER ;
//print_line("DEBUG: SAMPLER: texel alone");
} else if ( reflection . getType ( ) - > getSampler ( ) . isImage ( ) ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER ;
info . type = UNIFORM_TYPE_IMAGE_BUFFER ;
//print_line("DEBUG: SAMPLER: texel buffer");
} else {
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' is of unsupported buffer type. " ;
}
return false ;
}
} else if ( reflection . getType ( ) - > getSampler ( ) . isCombined ( ) ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ;
info . type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE ;
//print_line("DEBUG: SAMPLER: combined");
} else if ( reflection . getType ( ) - > getSampler ( ) . isPureSampler ( ) ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER ;
info . type = UNIFORM_TYPE_SAMPLER ;
//print_line("DEBUG: SAMPLER: sampler");
} else if ( reflection . getType ( ) - > getSampler ( ) . isTexture ( ) ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ;
info . type = UNIFORM_TYPE_TEXTURE ;
//print_line("DEBUG: SAMPLER: image");
} else if ( reflection . getType ( ) - > getSampler ( ) . isImage ( ) ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ;
info . type = UNIFORM_TYPE_IMAGE ;
//print_line("DEBUG: SAMPLER: storage image");
} else {
//print_line("DEBUG: sampler unknown");
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' is of unsupported sampler type. " ;
}
return false ;
}
if ( reflection . getType ( ) - > isArray ( ) ) {
layout_binding . descriptorCount = reflection . getType ( ) - > getArraySizes ( ) - > getCumulativeSize ( ) ;
//print_line("DEBUG: array of size: " + itos(layout_binding.descriptorCount));
} else {
layout_binding . descriptorCount = 1 ;
}
info . length = layout_binding . descriptorCount ;
} break ;
/*case glslang::EbtStruct: {
print_line ( " DEBUG: Struct " ) ;
} break ; */
case glslang : : EbtBlock : {
//print_line("DEBUG: Block");
if ( reflection . getType ( ) - > getQualifier ( ) . storage = = glslang : : EvqUniform ) {
if ( reflection . getType ( ) - > getQualifier ( ) . layoutPushConstant ) {
uint32_t len = reflection . size ;
if ( push_constant . push_constant_size ! = 0 & & push_constant . push_constant_size ! = len ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' push constants for different stages should all be the same size. " ;
return false ;
}
push_constant . push_constant_size = len ;
push_constant . push_constants_vk_stage | = shader_stage_masks [ p_stage ] ;
return true ;
}
//print_line("DEBUG: Uniform buffer");
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ;
info . type = UNIFORM_TYPE_UNIFORM_BUFFER ;
} else if ( reflection . getType ( ) - > getQualifier ( ) . storage = = glslang : : EvqBuffer ) {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ;
info . type = UNIFORM_TYPE_STORAGE_BUFFER ;
//print_line("DEBUG: Storage buffer");
} else {
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' is of unsupported block type: ( " + itos ( reflection . getType ( ) - > getQualifier ( ) . storage ) + " ). " ;
}
return false ;
}
if ( reflection . getType ( ) - > isArray ( ) ) {
layout_binding . descriptorCount = reflection . getType ( ) - > getArraySizes ( ) - > getCumulativeSize ( ) ;
//print_line("DEBUG: array of size: " + itos(layout_binding.descriptorCount));
} else {
layout_binding . descriptorCount = 1 ;
}
info . length = reflection . size ;
} break ;
/*case glslang::EbtReference: {
} break ; */
/*case glslang::EbtAtomicUint: {
} break ; */
default : {
if ( reflection . getType ( ) - > getQualifier ( ) . hasOffset ( ) | | reflection . name . find ( " . " ) ! = std : : string : : npos ) {
//member of uniform block?
return true ;
}
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' unsupported uniform type. " ;
}
return false ;
}
}
if ( ! reflection . getType ( ) - > getQualifier ( ) . hasBinding ( ) ) {
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' lacks a binding number. " ;
}
return false ;
}
uint32_t set = reflection . getType ( ) - > getQualifier ( ) . hasSet ( ) ? reflection . getType ( ) - > getQualifier ( ) . layoutSet : 0 ;
if ( set > = MAX_UNIFORM_SETS ) {
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' uses a set ( " + itos ( set ) + " ) index larger than what is supported ( " + itos ( MAX_UNIFORM_SETS ) + " ). " ;
}
return false ;
}
if ( set > = limits . maxBoundDescriptorSets ) {
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' uses a set ( " + itos ( set ) + " ) index larger than what is supported by the hardware ( " + itos ( limits . maxBoundDescriptorSets ) + " ). " ;
}
return false ;
}
uint32_t binding = reflection . getType ( ) - > getQualifier ( ) . layoutBinding ;
if ( set < ( uint32_t ) bindings . size ( ) ) {
//check if this already exists
for ( int i = 0 ; i < bindings [ set ] . size ( ) ; i + + ) {
if ( bindings [ set ] [ i ] . binding = = binding ) {
//already exists, verify that it's the same type
if ( bindings [ set ] [ i ] . descriptorType ! = layout_binding . descriptorType ) {
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' trying to re-use location for set= " + itos ( set ) + " , binding= " + itos ( binding ) + " with different uniform type. " ;
}
return false ;
}
//also, verify that it's the same size
if ( bindings [ set ] [ i ] . descriptorCount ! = layout_binding . descriptorCount | | uniform_infos [ set ] [ i ] . length ! = info . length ) {
if ( r_error ) {
* r_error = " On shader stage ' " + String ( shader_stage_names [ p_stage ] ) + " ', uniform ' " + reflection . name + " ' trying to re-use location for set= " + itos ( set ) + " , binding= " + itos ( binding ) + " with different uniform size. " ;
}
return false ;
}
//just append stage mask and return
bindings . write [ set ] . write [ i ] . stageFlags | = shader_stage_masks [ p_stage ] ;
uniform_infos . write [ set ] . write [ i ] . stages | = 1 < < p_stage ;
return true ;
}
}
}
layout_binding . binding = binding ;
layout_binding . stageFlags = shader_stage_masks [ p_stage ] ;
layout_binding . pImmutableSamplers = nullptr ; //no support for this yet
info . stages = 1 < < p_stage ;
info . binding = binding ;
if ( set > = ( uint32_t ) bindings . size ( ) ) {
bindings . resize ( set + 1 ) ;
uniform_infos . resize ( set + 1 ) ;
}
#if 0
print_line ( " stage: " + String ( shader_stage_names [ p_stage ] ) + " set: " + itos ( set ) + " binding: " + itos ( info . binding ) + " type: " + shader_uniform_names [ info . type ] + " length: " + itos ( info . length ) ) ;
# endif
bindings . write [ set ] . push_back ( layout_binding ) ;
uniform_infos . write [ set ] . push_back ( info ) ;
return true ;
}
# endif
RID RenderingDeviceVulkan : : shader_create ( const Vector < ShaderStageData > & p_stages ) {
//descriptor layouts
Vector < Vector < VkDescriptorSetLayoutBinding > > set_bindings ;
Vector < Vector < UniformInfo > > uniform_info ;
Shader : : PushConstant push_constant ;
push_constant . push_constant_size = 0 ;
push_constant . push_constants_vk_stage = 0 ;
uint32_t vertex_input_mask = 0 ;
uint32_t fragment_outputs = 0 ;
uint32_t stages_processed = 0 ;
Vector < Shader : : SpecializationConstant > specialization_constants ;
bool is_compute = false ;
uint32_t compute_local_size [ 3 ] = { 0 , 0 , 0 } ;
for ( int i = 0 ; i < p_stages . size ( ) ; i + + ) {
if ( p_stages [ i ] . shader_stage = = SHADER_STAGE_COMPUTE ) {
is_compute = true ;
ERR_FAIL_COND_V_MSG ( p_stages . size ( ) ! = 1 , RID ( ) ,
" Compute shaders can only receive one stage, dedicated to compute. " ) ;
}
ERR_FAIL_COND_V_MSG ( stages_processed & ( 1 < < p_stages [ i ] . shader_stage ) , RID ( ) ,
" Stage " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " submitted more than once. " ) ;
{
SpvReflectShaderModule module ;
const uint8_t * spirv = p_stages [ i ] . spir_v . ptr ( ) ;
SpvReflectResult result = spvReflectCreateShaderModule ( p_stages [ i ] . spir_v . size ( ) , spirv , & module ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed parsing shader. " ) ;
if ( is_compute ) {
compute_local_size [ 0 ] = module . entry_points - > local_size . x ;
compute_local_size [ 1 ] = module . entry_points - > local_size . y ;
compute_local_size [ 2 ] = module . entry_points - > local_size . z ;
}
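// e.g. a compute kernel declared with layout(local_size_x = 8, local_size_y = 8) in;
// is reflected here as a local size of {8, 8, 1}.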
uint32_t binding_count = 0 ;
result = spvReflectEnumerateDescriptorBindings ( & module , & binding_count , nullptr ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed enumerating descriptor bindings. " ) ;
uint32_t stage = p_stages [ i ] . shader_stage ;
if ( binding_count > 0 ) {
//Parse bindings
Vector < SpvReflectDescriptorBinding * > bindings ;
bindings . resize ( binding_count ) ;
result = spvReflectEnumerateDescriptorBindings ( & module , & binding_count , bindings . ptrw ( ) ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed getting descriptor bindings. " ) ;
for ( uint32_t j = 0 ; j < binding_count ; j + + ) {
const SpvReflectDescriptorBinding & binding = * bindings [ j ] ;
VkDescriptorSetLayoutBinding layout_binding ;
UniformInfo info ;
bool need_array_dimensions = false ;
bool need_block_size = false ;
switch ( binding . descriptor_type ) {
case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLER : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER ;
info . type = UNIFORM_TYPE_SAMPLER ;
need_array_dimensions = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ;
info . type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE ;
need_array_dimensions = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLED_IMAGE : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ;
info . type = UNIFORM_TYPE_TEXTURE ;
need_array_dimensions = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ;
info . type = UNIFORM_TYPE_IMAGE ;
need_array_dimensions = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ;
info . type = UNIFORM_TYPE_TEXTURE_BUFFER ;
need_array_dimensions = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER ;
info . type = UNIFORM_TYPE_IMAGE_BUFFER ;
need_array_dimensions = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ;
info . type = UNIFORM_TYPE_UNIFORM_BUFFER ;
need_block_size = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ;
info . type = UNIFORM_TYPE_STORAGE_BUFFER ;
need_block_size = true ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC : {
ERR_PRINT ( " Dynamic uniform buffer not supported. " ) ;
continue ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC : {
ERR_PRINT ( " Dynamic storage buffer not supported. " ) ;
continue ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_INPUT_ATTACHMENT : {
layout_binding . descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ;
info . type = UNIFORM_TYPE_INPUT_ATTACHMENT ;
} break ;
case SPV_REFLECT_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR : {
ERR_PRINT ( " Acceleration structure not supported. " ) ;
continue ;
} break ;
}
if ( need_array_dimensions ) {
if ( binding . array . dims_count = = 0 ) {
info . length = 1 ;
} else {
for ( uint32_t k = 0 ; k < binding . array . dims_count ; k + + ) {
if ( k = = 0 ) {
info . length = binding . array . dims [ 0 ] ;
} else {
info . length * = binding . array . dims [ k ] ;
}
}
}
layout_binding . descriptorCount = info . length ;
} else if ( need_block_size ) {
info . length = binding . block . size ;
layout_binding . descriptorCount = 1 ;
} else {
info . length = 0 ;
layout_binding . descriptorCount = 1 ;
}
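// At this point info.length holds the flattened array size for image/sampler
// style uniforms (all array dimensions multiplied), the block size in bytes
// for uniform/storage blocks, and zero for everything else.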
info . binding = binding . binding ;
uint32_t set = binding . set ;
//print_line("Stage: " + String(shader_stage_names[stage]) + " set=" + itos(set) + " binding=" + itos(info.binding) + " type=" + shader_uniform_names[info.type] + " length=" + itos(info.length));
ERR_FAIL_COND_V_MSG ( set > = MAX_UNIFORM_SETS , RID ( ) ,
" On shader stage ' " + String ( shader_stage_names [ stage ] ) + " ', uniform ' " + binding . name + " ' uses a set ( " + itos ( set ) + " ) index larger than what is supported ( " + itos ( MAX_UNIFORM_SETS ) + " ). " ) ;
ERR_FAIL_COND_V_MSG ( set > = limits . maxBoundDescriptorSets , RID ( ) ,
" On shader stage ' " + String ( shader_stage_names [ stage ] ) + " ', uniform ' " + binding . name + " ' uses a set ( " + itos ( set ) + " ) index larger than what is supported by the hardware ( " + itos ( limits . maxBoundDescriptorSets ) + " ). " ) ;
if ( set < ( uint32_t ) set_bindings . size ( ) ) {
//check if this already exists
bool exists = false ;
for ( int k = 0 ; k < set_bindings [ set ] . size ( ) ; k + + ) {
if ( set_bindings [ set ] [ k ] . binding = = ( uint32_t ) info . binding ) {
//already exists, verify that it's the same type
ERR_FAIL_COND_V_MSG ( set_bindings [ set ] [ k ] . descriptorType ! = layout_binding . descriptorType , RID ( ) ,
" On shader stage ' " + String ( shader_stage_names [ stage ] ) + " ', uniform ' " + binding . name + " ' trying to re-use location for set= " + itos ( set ) + " , binding= " + itos ( info . binding ) + " with different uniform type. " ) ;
//also, verify that it's the same size
ERR_FAIL_COND_V_MSG ( set_bindings [ set ] [ k ] . descriptorCount ! = layout_binding . descriptorCount | | uniform_info [ set ] [ k ] . length ! = info . length , RID ( ) ,
" On shader stage ' " + String ( shader_stage_names [ stage ] ) + " ', uniform ' " + binding . name + " ' trying to re-use location for set= " + itos ( set ) + " , binding= " + itos ( info . binding ) + " with different uniform size. " ) ;
//just append stage mask and return
set_bindings . write [ set ] . write [ k ] . stageFlags | = shader_stage_masks [ stage ] ;
uniform_info . write [ set ] . write [ k ] . stages | = 1 < < stage ;
exists = true ;
}
}
if ( exists ) {
continue ; //merged
}
}
layout_binding . binding = info . binding ;
layout_binding . stageFlags = shader_stage_masks [ stage ] ;
layout_binding . pImmutableSamplers = nullptr ; //no support for this yet
info.stages = 1 << stage;
if ( set > = ( uint32_t ) set_bindings . size ( ) ) {
set_bindings . resize ( set + 1 ) ;
uniform_info . resize ( set + 1 ) ;
}
set_bindings . write [ set ] . push_back ( layout_binding ) ;
uniform_info . write [ set ] . push_back ( info ) ;
}
}
{
//specialization constants
uint32_t sc_count = 0 ;
result = spvReflectEnumerateSpecializationConstants ( & module , & sc_count , nullptr ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed enumerating specialization constants. " ) ;
if ( sc_count ) {
Vector < SpvReflectSpecializationConstant * > spec_constants ;
spec_constants . resize ( sc_count ) ;
result = spvReflectEnumerateSpecializationConstants ( & module , & sc_count , spec_constants . ptrw ( ) ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed obtaining specialization constants. " ) ;
for ( uint32_t j = 0 ; j < sc_count ; j + + ) {
int32_t existing = - 1 ;
Shader : : SpecializationConstant sconst ;
sconst . constant . constant_id = spec_constants [ j ] - > constant_id ;
switch ( spec_constants [ j ] - > constant_type ) {
case SPV_REFLECT_SPECIALIZATION_CONSTANT_BOOL : {
sconst . constant . type = PIPELINE_SPECIALIZATION_CONSTANT_TYPE_BOOL ;
sconst . constant . bool_value = spec_constants [ j ] - > default_value . int_bool_value ! = 0 ;
} break ;
case SPV_REFLECT_SPECIALIZATION_CONSTANT_INT : {
sconst . constant . type = PIPELINE_SPECIALIZATION_CONSTANT_TYPE_INT ;
sconst . constant . int_value = spec_constants [ j ] - > default_value . int_bool_value ;
} break ;
case SPV_REFLECT_SPECIALIZATION_CONSTANT_FLOAT : {
sconst . constant . type = PIPELINE_SPECIALIZATION_CONSTANT_TYPE_FLOAT ;
sconst . constant . float_value = spec_constants [ j ] - > default_value . float_value ;
} break ;
}
sconst . stage_flags = 1 < < p_stages [ i ] . shader_stage ;
print_line ( " spec constant " + itos ( i ) + " : " + String ( spec_constants [ j ] - > name ) + " type " + itos ( spec_constants [ j ] - > constant_type ) + " id " + itos ( spec_constants [ j ] - > constant_id ) ) ;
for ( int k = 0 ; k < specialization_constants . size ( ) ; k + + ) {
if ( specialization_constants [ k ] . constant . constant_id = = sconst . constant . constant_id ) {
ERR_FAIL_COND_V_MSG ( specialization_constants [ k ] . constant . type ! = sconst . constant . type , RID ( ) , " More than one specialization constant used for id ( " + itos ( sconst . constant . constant_id ) + " ), but their types differ. " ) ;
ERR_FAIL_COND_V_MSG ( specialization_constants [ k ] . constant . int_value ! = sconst . constant . int_value , RID ( ) , " More than one specialization constant used for id ( " + itos ( sconst . constant . constant_id ) + " ), but their default values differ. " ) ;
existing = k ;
break ;
}
}
if (existing >= 0) {
specialization_constants . write [ existing ] . stage_flags | = sconst . stage_flags ;
} else {
specialization_constants . push_back ( sconst ) ;
}
}
}
}
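// e.g. layout(constant_id = 3) const bool use_fog = true; arrives here with
// constant_id 3, BOOL type and a default of true. A constant that appears in
// several stages is merged above, provided its type and default value match.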
if ( stage = = SHADER_STAGE_VERTEX ) {
uint32_t iv_count = 0 ;
result = spvReflectEnumerateInputVariables ( & module , & iv_count , nullptr ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed enumerating input variables. " ) ;
if ( iv_count ) {
Vector < SpvReflectInterfaceVariable * > input_vars ;
input_vars . resize ( iv_count ) ;
result = spvReflectEnumerateInputVariables ( & module , & iv_count , input_vars . ptrw ( ) ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed obtaining input variables. " ) ;
for ( uint32_t j = 0 ; j < iv_count ; j + + ) {
if ( input_vars [ j ] & & input_vars [ j ] - > decoration_flags = = 0 ) { //regular input
2019-08-18 22:40:52 +00:00
vertex_input_mask | = ( 1 < < uint32_t ( input_vars [ j ] - > location ) ) ;
}
}
}
}
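// e.g. layout(location = 2) in vec2 uv; in the vertex stage sets bit 2 of
// vertex_input_mask above; built-in inputs carry decoration flags and are skipped.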
if ( stage = = SHADER_STAGE_FRAGMENT ) {
uint32_t ov_count = 0 ;
result = spvReflectEnumerateOutputVariables ( & module , & ov_count , nullptr ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed enumerating output variables. " ) ;
if ( ov_count ) {
Vector < SpvReflectInterfaceVariable * > output_vars ;
output_vars . resize ( ov_count ) ;
result = spvReflectEnumerateOutputVariables ( & module , & ov_count , output_vars . ptrw ( ) ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed obtaining output variables. " ) ;
for ( uint32_t j = 0 ; j < ov_count ; j + + ) {
const SpvReflectInterfaceVariable * refvar = output_vars [ j ] ;
if ( refvar ! = nullptr & & refvar - > built_in ! = SpvBuiltInFragDepth ) {
fragment_outputs | = 1 < < refvar - > location ;
2019-07-28 16:42:15 +00:00
}
}
}
}
uint32_t pc_count = 0 ;
result = spvReflectEnumeratePushConstantBlocks ( & module , & pc_count , nullptr ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed enumerating push constants. " ) ;
if ( pc_count ) {
ERR_FAIL_COND_V_MSG ( pc_count > 1 , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ': Only one push constant is supported, which should be the same across shader stages. " ) ;
Vector < SpvReflectBlockVariable * > pconstants ;
pconstants . resize ( pc_count ) ;
result = spvReflectEnumeratePushConstantBlocks ( & module , & pc_count , pconstants . ptrw ( ) ) ;
ERR_FAIL_COND_V_MSG ( result ! = SPV_REFLECT_RESULT_SUCCESS , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ' failed obtaining push constants. " ) ;
#if 0
if ( pconstants [ 0 ] = = nullptr ) {
FileAccess * f = FileAccess : : open ( " res://popo.spv " , FileAccess : : WRITE ) ;
f - > store_buffer ( ( const uint8_t * ) & SpirV [ 0 ] , SpirV . size ( ) * sizeof ( uint32_t ) ) ;
memdelete ( f ) ;
}
# endif
ERR_FAIL_COND_V_MSG ( push_constant . push_constant_size & & push_constant . push_constant_size ! = pconstants [ 0 ] - > size , RID ( ) ,
" Reflection of SPIR-V shader stage ' " + String ( shader_stage_names [ p_stages [ i ] . shader_stage ] ) + " ': Push constant block must be the same across shader stages. " ) ;
push_constant . push_constant_size = pconstants [ 0 ] - > size ;
push_constant . push_constants_vk_stage | = shader_stage_masks [ stage ] ;
//print_line("Stage: " + String(shader_stage_names[stage]) + " push constant of size=" + itos(push_constant.push_constant_size));
}
// Destroy the reflection data when no longer required.
spvReflectDestroyShaderModule ( & module ) ;
}
2019-06-07 16:07:57 +00:00
stages_processed | = ( 1 < < p_stages [ i ] . shader_stage ) ;
}

	//all good, let's create modules

	_THREAD_SAFE_METHOD_

	Shader shader;

	shader.vertex_input_mask = vertex_input_mask;
	shader.fragment_output_mask = fragment_outputs;
	shader.push_constant = push_constant;
	shader.is_compute = is_compute;
	shader.compute_local_size[0] = compute_local_size[0];
	shader.compute_local_size[1] = compute_local_size[1];
	shader.compute_local_size[2] = compute_local_size[2];
	shader.specialization_constants = specialization_constants;

	String error_text;

	bool success = true;
	for (int i = 0; i < p_stages.size(); i++) {
		VkShaderModuleCreateInfo shader_module_create_info;
		shader_module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
		shader_module_create_info.pNext = nullptr;
		shader_module_create_info.flags = 0;
		shader_module_create_info.codeSize = p_stages[i].spir_v.size();
		const uint8_t *r = p_stages[i].spir_v.ptr();

		shader_module_create_info.pCode = (const uint32_t *)r;

		VkShaderModule module;
		VkResult res = vkCreateShaderModule(device, &shader_module_create_info, nullptr, &module);
		if (res) {
			success = false;
			error_text = "Error (" + itos(res) + ") creating shader module for stage: " + String(shader_stage_names[p_stages[i].shader_stage]);
			break;
		}

		const VkShaderStageFlagBits shader_stage_bits[SHADER_STAGE_MAX] = {
			VK_SHADER_STAGE_VERTEX_BIT,
			VK_SHADER_STAGE_FRAGMENT_BIT,
			VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
			VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
			VK_SHADER_STAGE_COMPUTE_BIT,
		};

		VkPipelineShaderStageCreateInfo shader_stage;
		shader_stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
		shader_stage.pNext = nullptr;
		shader_stage.flags = 0;
		shader_stage.stage = shader_stage_bits[p_stages[i].shader_stage];
		shader_stage.module = module;
		shader_stage.pName = "main";
		shader_stage.pSpecializationInfo = nullptr;

		shader.pipeline_stages.push_back(shader_stage);
	}
	//proceed to create descriptor sets
	if (success) {
		for (int i = 0; i < set_bindings.size(); i++) {
			//empty ones are fine if they were not used according to spec (binding count will be 0)
			VkDescriptorSetLayoutCreateInfo layout_create_info;
			layout_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
			layout_create_info.pNext = nullptr;
			layout_create_info.flags = 0;
			layout_create_info.bindingCount = set_bindings[i].size();
			layout_create_info.pBindings = set_bindings[i].ptr();

			VkDescriptorSetLayout layout;
			VkResult res = vkCreateDescriptorSetLayout(device, &layout_create_info, nullptr, &layout);
			if (res) {
				error_text = "Error (" + itos(res) + ") creating descriptor set layout for set " + itos(i);
				success = false;
				break;
			}

			Shader::Set set;
			set.descriptor_set_layout = layout;
			set.uniform_info = uniform_info[i];
			//sort and hash
			set.uniform_info.sort();

			uint32_t format = 0; //no format, default
			if (set.uniform_info.size()) {
				//has data, needs an actual format
				UniformSetFormat usformat;
				usformat.uniform_info = set.uniform_info;
				Map<UniformSetFormat, uint32_t>::Element *E = uniform_set_format_cache.find(usformat);
				if (E) {
					format = E->get();
				} else {
					format = uniform_set_format_cache.size() + 1;
					uniform_set_format_cache.insert(usformat, format);
				}
			}

			shader.sets.push_back(set);
			shader.set_formats.push_back(format);
		}
	}

	if (success) {
		//create pipeline layout
		VkPipelineLayoutCreateInfo pipeline_layout_create_info;
		pipeline_layout_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
		pipeline_layout_create_info.pNext = nullptr;
		pipeline_layout_create_info.flags = 0;
		pipeline_layout_create_info.setLayoutCount = shader.sets.size();

		Vector<VkDescriptorSetLayout> layouts;
		layouts.resize(shader.sets.size());

		for (int i = 0; i < layouts.size(); i++) {
			layouts.write[i] = shader.sets[i].descriptor_set_layout;
		}

		pipeline_layout_create_info.pSetLayouts = layouts.ptr();
		// Needs to be declared in this outer scope, otherwise it may not outlive its assignment
		// to pipeline_layout_create_info.
		VkPushConstantRange push_constant_range;
		if (push_constant.push_constant_size) {
			push_constant_range.stageFlags = push_constant.push_constants_vk_stage;
			push_constant_range.offset = 0;
			push_constant_range.size = push_constant.push_constant_size;

			pipeline_layout_create_info.pushConstantRangeCount = 1;
			pipeline_layout_create_info.pPushConstantRanges = &push_constant_range;
		} else {
			pipeline_layout_create_info.pushConstantRangeCount = 0;
			pipeline_layout_create_info.pPushConstantRanges = nullptr;
		}

		VkResult err = vkCreatePipelineLayout(device, &pipeline_layout_create_info, nullptr, &shader.pipeline_layout);

		if (err) {
			error_text = "Error (" + itos(err) + ") creating pipeline layout.";
			success = false;
		}
	}

	if (!success) {
		//clean up if failed
		for (int i = 0; i < shader.pipeline_stages.size(); i++) {
			vkDestroyShaderModule(device, shader.pipeline_stages[i].module, nullptr);
		}

		for (int i = 0; i < shader.sets.size(); i++) {
			vkDestroyDescriptorSetLayout(device, shader.sets[i].descriptor_set_layout, nullptr);
		}

		ERR_FAIL_V_MSG(RID(), error_text);
	}

	return shader_owner.make_rid(shader);
}
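
// A minimal usage sketch for the creation path above (illustrative only; it
// assumes this function is the RenderingDevice shader-creation entry point and
// that `rd` is a valid RenderingDeviceVulkan; the SPIR-V byte vectors are
// hypothetical, pre-compiled blobs):
//
//     Vector<ShaderStageData> stages;
//     ShaderStageData vertex_stage;
//     vertex_stage.shader_stage = SHADER_STAGE_VERTEX;
//     vertex_stage.spir_v = compiled_vertex_spirv; // size must be a multiple of 4
//     stages.push_back(vertex_stage);
//     RID shader = rd->shader_create(stages);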

uint32_t RenderingDeviceVulkan::shader_get_vertex_input_attribute_mask(RID p_shader) {
	_THREAD_SAFE_METHOD_

	const Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, 0);
	return shader->vertex_input_mask;
}
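
// Illustrative: the returned mask has bit N set when the vertex shader consumes
// input location N (see the reflection loop above). A sketch of decoding it,
// assuming `rd` and `shader` are valid:
//
//     uint32_t mask = rd->shader_get_vertex_input_attribute_mask(shader);
//     for (uint32_t location = 0; location < 32; location++) {
//         if (mask & (1u << location)) {
//             // the pipeline's vertex format must supply this location
//         }
//     }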

/******************/
/**** UNIFORMS ****/
/******************/

RID RenderingDeviceVulkan::uniform_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());

	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list");

	Buffer buffer;
	Error err = _buffer_allocate(&buffer, p_size_bytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
	ERR_FAIL_COND_V(err != OK, RID());

	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&buffer, 0, r, data_size);
		_buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_UNIFORM_READ_BIT, false);
	}
	return uniform_buffer_owner.make_rid(buffer);
}
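
// Usage sketch (illustrative; `rd` is assumed to be a valid device):
//
//     Vector<uint8_t> initial_data;
//     initial_data.resize(16 * sizeof(float)); // e.g. a 4x4 matrix
//     RID ubo = rd->uniform_buffer_create(initial_data.size(), initial_data);
//
// Passing data at creation is only allowed outside draw/compute list recording,
// as enforced above.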

RID RenderingDeviceVulkan::storage_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data, uint32_t p_usage) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list");

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());

	Buffer buffer;
	buffer.usage = p_usage;
	uint32_t flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
	if (p_usage & STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT) {
		flags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
	}
	Error err = _buffer_allocate(&buffer, p_size_bytes, flags, VMA_MEMORY_USAGE_GPU_ONLY);
	ERR_FAIL_COND_V(err != OK, RID());

	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&buffer, 0, r, data_size);
		_buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT, false);
	}
	return storage_buffer_owner.make_rid(buffer);
}
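
// Usage sketch (illustrative): a storage buffer that can also feed
// vkCmdDispatchIndirect, via the usage flag handled above. `rd` is assumed.
//
//     Vector<uint8_t> args;
//     args.resize(3 * sizeof(uint32_t)); // x, y, z dispatch counts
//     RID ssbo = rd->storage_buffer_create(args.size(), args, STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT);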

RID RenderingDeviceVulkan::texture_buffer_create(uint32_t p_size_elements, DataFormat p_format, const Vector<uint8_t> &p_data) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list");

	uint32_t element_size = get_format_vertex_size(p_format);
	ERR_FAIL_COND_V_MSG(element_size == 0, RID(), "Format requested is not supported for texture buffers.");
	uint64_t size_bytes = uint64_t(element_size) * p_size_elements;

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != size_bytes, RID());

	TextureBuffer texture_buffer;
	Error err = _buffer_allocate(&texture_buffer.buffer, size_bytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
	ERR_FAIL_COND_V(err != OK, RID());

	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&texture_buffer.buffer, 0, r, data_size);
		_buffer_memory_barrier(texture_buffer.buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, false);
	}

	VkBufferViewCreateInfo view_create_info;
	view_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
	view_create_info.pNext = nullptr;
	view_create_info.flags = 0;
	view_create_info.buffer = texture_buffer.buffer.buffer;
	view_create_info.format = vulkan_formats[p_format];
	view_create_info.offset = 0;
	view_create_info.range = size_bytes;

	texture_buffer.view = VK_NULL_HANDLE;

	VkResult res = vkCreateBufferView(device, &view_create_info, nullptr, &texture_buffer.view);
	if (res) {
		_buffer_free(&texture_buffer.buffer);
		ERR_FAIL_V_MSG(RID(), "Unable to create buffer view, error " + itos(res) + ".");
	}

	//allocate the view
	return texture_buffer_owner.make_rid(texture_buffer);
}
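
// Usage sketch (illustrative; the format must have a defined per-element size,
// as validated above -- R8G8B8A8_UNORM is 4 bytes per texel):
//
//     Vector<uint8_t> texels;
//     texels.resize(1024 * 4); // 1024 RGBA8 texels
//     RID tb = rd->texture_buffer_create(1024, DATA_FORMAT_R8G8B8A8_UNORM, texels);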

RenderingDeviceVulkan::DescriptorPool *RenderingDeviceVulkan::_descriptor_pool_allocate(const DescriptorPoolKey &p_key) {
	if (!descriptor_pools.has(p_key)) {
		descriptor_pools[p_key] = Set<DescriptorPool *>();
	}

	DescriptorPool *pool = nullptr;

	for (Set<DescriptorPool *>::Element *E = descriptor_pools[p_key].front(); E; E = E->next()) {
		if (E->get()->usage < max_descriptors_per_pool) {
			pool = E->get();
			break;
		}
	}

	if (!pool) {
		//create a new one
		pool = memnew(DescriptorPool);
		pool->usage = 0;

		VkDescriptorPoolCreateInfo descriptor_pool_create_info;
		descriptor_pool_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
		descriptor_pool_create_info.pNext = nullptr;
		descriptor_pool_create_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; // can't think how somebody may NOT need this flag..
		descriptor_pool_create_info.maxSets = max_descriptors_per_pool;
		Vector<VkDescriptorPoolSize> sizes;
		//here comes more vulkan API strangeness

		if (p_key.uniform_type[UNIFORM_TYPE_SAMPLER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_SAMPLER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_SAMPLER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_TEXTURE]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_TEXTURE] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_IMAGE]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_IMAGE] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_TEXTURE_BUFFER] || p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
			s.descriptorCount = (p_key.uniform_type[UNIFORM_TYPE_TEXTURE_BUFFER] + p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER]) * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_IMAGE_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_IMAGE_BUFFER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_UNIFORM_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_UNIFORM_BUFFER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_STORAGE_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_STORAGE_BUFFER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_INPUT_ATTACHMENT]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_INPUT_ATTACHMENT] * max_descriptors_per_pool;
			sizes.push_back(s);
		}

		descriptor_pool_create_info.poolSizeCount = sizes.size();
		descriptor_pool_create_info.pPoolSizes = sizes.ptr();
		VkResult res = vkCreateDescriptorPool(device, &descriptor_pool_create_info, nullptr, &pool->pool);
		if (res) {
			memdelete(pool);
			ERR_FAIL_COND_V_MSG(res, nullptr, "vkCreateDescriptorPool failed with error " + itos(res) + ".");
		}
		descriptor_pools[p_key].insert(pool);
	}

	pool->usage++;

	return pool;
}
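
// Illustrative sizing example for the code above: if a DescriptorPoolKey
// requests 2 uniform buffers and 1 sampler per set, the new pool is created
// with maxSets = max_descriptors_per_pool and per-type descriptorCounts of
// 2 * max_descriptors_per_pool (UNIFORM_BUFFER) and 1 * max_descriptors_per_pool
// (SAMPLER), so every one of the maxSets sets can be allocated at full size.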

void RenderingDeviceVulkan::_descriptor_pool_free(const DescriptorPoolKey &p_key, DescriptorPool *p_pool) {
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND(!descriptor_pools[p_key].has(p_pool));
#endif
	ERR_FAIL_COND(p_pool->usage == 0);
	p_pool->usage--;
	if (p_pool->usage == 0) {
		vkDestroyDescriptorPool(device, p_pool->pool, nullptr);
		descriptor_pools[p_key].erase(p_pool);
		memdelete(p_pool);
		if (descriptor_pools[p_key].is_empty()) {
			descriptor_pools.erase(p_key);
		}
	}
}

RID RenderingDeviceVulkan::uniform_set_create(const Vector<Uniform> &p_uniforms, RID p_shader, uint32_t p_shader_set) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_uniforms.size() == 0, RID());

	Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, RID());

	ERR_FAIL_COND_V_MSG(p_shader_set >= (uint32_t)shader->sets.size() || shader->sets[p_shader_set].uniform_info.size() == 0, RID(),
			"Desired set (" + itos(p_shader_set) + ") not used by shader.");
	//see that all sets in shader are satisfied

	const Shader::Set &set = shader->sets[p_shader_set];

	uint32_t uniform_count = p_uniforms.size();
	const Uniform *uniforms = p_uniforms.ptr();

	uint32_t set_uniform_count = set.uniform_info.size();
	const UniformInfo *set_uniforms = set.uniform_info.ptr();

	Vector<VkWriteDescriptorSet> writes;
	DescriptorPoolKey pool_key;

	//to keep them alive until update call
	List<Vector<VkDescriptorBufferInfo>> buffer_infos;
	List<Vector<VkBufferView>> buffer_views;
	List<Vector<VkDescriptorImageInfo>> image_infos;
	//used for verification to make sure a uniform set does not use a framebuffer bound texture
	LocalVector<UniformSet::AttachableTexture> attachable_textures;
	Vector<Texture *> mutable_sampled_textures;
	Vector<Texture *> mutable_storage_textures;

	for (uint32_t i = 0; i < set_uniform_count; i++) {
		const UniformInfo &set_uniform = set_uniforms[i];
		int uniform_idx = -1;
		for (int j = 0; j < (int)uniform_count; j++) {
			if (uniforms[j].binding == set_uniform.binding) {
				uniform_idx = j;
			}
		}
		ERR_FAIL_COND_V_MSG(uniform_idx == -1, RID(),
				"All the shader bindings for the given set must be covered by the uniforms provided. Binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + ") was not provided.");

		const Uniform &uniform = uniforms[uniform_idx];

		ERR_FAIL_COND_V_MSG(uniform.uniform_type != set_uniform.type, RID(),
				"Mismatched uniform type for binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + "). Expected '" + shader_uniform_names[set_uniform.type] + "', supplied: '" + shader_uniform_names[uniform.uniform_type] + "'.");

		VkWriteDescriptorSet write; //common header
		write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		write.pNext = nullptr;
		write.dstSet = VK_NULL_HANDLE; //will assign afterwards when everything is valid
		write.dstBinding = set_uniform.binding;
		write.dstArrayElement = 0;
		write.descriptorCount = 0;
		write.descriptorType = VK_DESCRIPTOR_TYPE_MAX_ENUM; //invalid value
		write.pImageInfo = nullptr;
		write.pBufferInfo = nullptr;
		write.pTexelBufferView = nullptr;

		uint32_t type_size = 1;

		switch (uniform.uniform_type) {
			case UNIFORM_TYPE_SAMPLER: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler elements, so it should be provided an equal number of sampler IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") should provide one ID referencing a sampler (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					VkSampler *sampler = sampler_owner.getornull(uniform.ids[j]);
					ERR_FAIL_COND_V_MSG(!sampler, RID(), "Sampler (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = *sampler;
					img_info.imageView = VK_NULL_HANDLE;
					img_info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE: {
				if (uniform.ids.size() != set_uniform.length * 2) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler&texture elements, so it should be provided twice the amount of IDs (sampler,texture pairs) to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j += 2) {
					VkSampler *sampler = sampler_owner.getornull(uniform.ids[j + 0]);
					ERR_FAIL_COND_V_MSG(!sampler, RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ", index " + itos(j + 0) + ") is not a valid sampler.");

					Texture *texture = texture_owner.getornull(uniform.ids[j + 1]);
					ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = *sampler;
					img_info.imageView = texture->view;

					if (texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)) {
						UniformSet::AttachableTexture attachable_texture;
						attachable_texture.bind = set_uniform.binding;
						attachable_texture.texture = texture->owner.is_valid() ? texture->owner : uniform.ids[j + 1];
						attachable_textures.push_back(attachable_texture);
					}

					if (texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
						//can also be used as storage, add to mutable sampled
						mutable_sampled_textures.push_back(texture);
					}

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size() / 2;
				write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size() / 2;
			} break;
			case UNIFORM_TYPE_TEXTURE: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					Texture *texture = texture_owner.getornull(uniform.ids[j]);
					ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = VK_NULL_HANDLE;
					img_info.imageView = texture->view;

					if (texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)) {
						UniformSet::AttachableTexture attachable_texture;
						attachable_texture.bind = set_uniform.binding;
						attachable_texture.texture = texture->owner.is_valid() ? texture->owner : uniform.ids[j];
						attachable_textures.push_back(attachable_texture);
					}

					if (texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
						//can also be used as storage, add to mutable sampled
						mutable_sampled_textures.push_back(texture);
					}

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_IMAGE: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					Texture *texture = texture_owner.getornull(uniform.ids[j]);

					ERR_FAIL_COND_V_MSG(!texture, RID(),
							"Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT), RID(),
							"Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_STORAGE_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = VK_NULL_HANDLE;
					img_info.imageView = texture->view;

					if (texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
						//can also be used as sampled, add to mutable storage
						mutable_storage_textures.push_back(texture);
					}

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_TEXTURE_BUFFER: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") texture buffer elements, so it should be provided an equal number of texture buffer IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture buffer (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorBufferInfo> buffer_info;
				Vector<VkBufferView> buffer_view;

				for (int j = 0; j < uniform.ids.size(); j++) {
					TextureBuffer *buffer = texture_buffer_owner.getornull(uniform.ids[j]);
					ERR_FAIL_COND_V_MSG(!buffer, RID(), "Texture Buffer (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture buffer.");

					buffer_info.push_back(buffer->buffer.buffer_info);
					buffer_view.push_back(buffer->view);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
				write.pImageInfo = nullptr;
				write.pBufferInfo = buffer_infos.push_back(buffer_info)->get().ptr();
				write.pTexelBufferView = buffer_views.push_back(buffer_view)->get().ptr();

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER: {
				if (uniform.ids.size() != set_uniform.length * 2) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler buffer elements, so it should be provided twice the amount of IDs (sampler,buffer pairs) to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture buffer (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;
				Vector<VkDescriptorBufferInfo> buffer_info;
				Vector<VkBufferView> buffer_view;

				for (int j = 0; j < uniform.ids.size(); j += 2) {
					VkSampler *sampler = sampler_owner.getornull(uniform.ids[j + 0]);
					ERR_FAIL_COND_V_MSG(!sampler, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j + 0) + ") is not a valid sampler.");

					TextureBuffer *buffer = texture_buffer_owner.getornull(uniform.ids[j + 1]);

					VkDescriptorImageInfo img_info;
					img_info.sampler = *sampler;
					img_info.imageView = VK_NULL_HANDLE;
					img_info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;

					image_info.push_back(img_info);

					ERR_FAIL_COND_V_MSG(!buffer, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture buffer.");

					buffer_info.push_back(buffer->buffer.buffer_info);
					buffer_view.push_back(buffer->view);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size() / 2;
				write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = buffer_infos.push_back(buffer_info)->get().ptr();
				write.pTexelBufferView = buffer_views.push_back(buffer_view)->get().ptr();

				type_size = uniform.ids.size() / 2;
			} break;
			case UNIFORM_TYPE_IMAGE_BUFFER: {
				//todo
			} break;
			case UNIFORM_TYPE_UNIFORM_BUFFER: {
				ERR_FAIL_COND_V_MSG(uniform.ids.size() != 1, RID(),
						"Uniform buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.ids.size()) + " provided).");

				Buffer *buffer = uniform_buffer_owner.getornull(uniform.ids[0]);
				ERR_FAIL_COND_V_MSG(!buffer, RID(), "Uniform buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");

				ERR_FAIL_COND_V_MSG(buffer->size != (uint32_t)set_uniform.length, RID(),
						"Uniform buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + ") does not match size of shader uniform: (" + itos(set_uniform.length) + ").");

				write.dstArrayElement = 0;
				write.descriptorCount = 1;
				write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
				write.pImageInfo = nullptr;
				write.pBufferInfo = &buffer->buffer_info;
				write.pTexelBufferView = nullptr;
			} break;
			case UNIFORM_TYPE_STORAGE_BUFFER: {
				ERR_FAIL_COND_V_MSG(uniform.ids.size() != 1, RID(),
						"Storage buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.ids.size()) + " provided).");

				Buffer *buffer = nullptr;

				if (storage_buffer_owner.owns(uniform.ids[0])) {
					buffer = storage_buffer_owner.getornull(uniform.ids[0]);
				} else if (vertex_buffer_owner.owns(uniform.ids[0])) {
					buffer = vertex_buffer_owner.getornull(uniform.ids[0]);
					ERR_FAIL_COND_V_MSG(!(buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), RID(), "Vertex buffer supplied (binding: " + itos(uniform.binding) + ") was not created with storage flag.");
				}
				ERR_FAIL_COND_V_MSG(!buffer, RID(), "Storage buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");

				//if 0, then it's sized at link time
				ERR_FAIL_COND_V_MSG(set_uniform.length > 0 && buffer->size != (uint32_t)set_uniform.length, RID(),
						"Storage buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + ") does not match size of shader uniform: (" + itos(set_uniform.length) + ").");

				write.dstArrayElement = 0;
				write.descriptorCount = 1;
				write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
				write.pImageInfo = nullptr;
				write.pBufferInfo = &buffer->buffer_info;
				write.pTexelBufferView = nullptr;
			} break;
			case UNIFORM_TYPE_INPUT_ATTACHMENT: {
				ERR_FAIL_COND_V_MSG(shader->is_compute, RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") supplied for compute shader (this is not allowed).");

				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					Texture *texture = texture_owner.getornull(uniform.ids[j]);

					ERR_FAIL_COND_V_MSG(!texture, RID(),
							"InputAttachment (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"InputAttachment (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = VK_NULL_HANDLE;
					img_info.imageView = texture->view;

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			default: {
			}
		}

		writes.push_back(write);

		ERR_FAIL_COND_V_MSG(pool_key.uniform_type[set_uniform.type] == MAX_DESCRIPTOR_POOL_ELEMENT, RID(),
				"Uniform set reached the limit of bindings for the same type (" + itos(MAX_DESCRIPTOR_POOL_ELEMENT) + ").");
		pool_key.uniform_type[set_uniform.type] += type_size;
	}

	//need a descriptor pool
	DescriptorPool *pool = _descriptor_pool_allocate(pool_key);
	ERR_FAIL_COND_V(!pool, RID());

	VkDescriptorSetAllocateInfo descriptor_set_allocate_info;

	descriptor_set_allocate_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
	descriptor_set_allocate_info.pNext = nullptr;
	descriptor_set_allocate_info.descriptorPool = pool->pool;
	descriptor_set_allocate_info.descriptorSetCount = 1;
	descriptor_set_allocate_info.pSetLayouts = &shader->sets[p_shader_set].descriptor_set_layout;

	VkDescriptorSet descriptor_set;

	VkResult res = vkAllocateDescriptorSets(device, &descriptor_set_allocate_info, &descriptor_set);
	if (res) {
		_descriptor_pool_free(pool_key, pool); // meh
		ERR_FAIL_V_MSG(RID(), "Cannot allocate descriptor sets, error " + itos(res) + ".");
	}

	UniformSet uniform_set;
	uniform_set.pool = pool;
	uniform_set.pool_key = pool_key;
	uniform_set.descriptor_set = descriptor_set;
	uniform_set.format = shader->set_formats[p_shader_set];
	uniform_set.attachable_textures = attachable_textures;
	uniform_set.mutable_sampled_textures = mutable_sampled_textures;
	uniform_set.mutable_storage_textures = mutable_storage_textures;
	uniform_set.shader_set = p_shader_set;
	uniform_set.shader_id = p_shader;

	RID id = uniform_set_owner.make_rid(uniform_set);
	//add dependencies
	_add_dependency(id, p_shader);
	for (uint32_t i = 0; i < uniform_count; i++) {
		const Uniform &uniform = uniforms[i];
		int id_count = uniform.ids.size();
		const RID *ids = uniform.ids.ptr();
		for (int j = 0; j < id_count; j++) {
			_add_dependency(id, ids[j]);
		}
	}

	//write the contents
	if (writes.size()) {
		for (int i = 0; i < writes.size(); i++) {
			writes.write[i].dstSet = descriptor_set;
		}
		vkUpdateDescriptorSets(device, writes.size(), writes.ptr(), 0, nullptr);
	}

	return id;
}
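
// Usage sketch (illustrative; binding numbers must match the shader's set
// layout, and `rd`, `ubo`, `shader` are assumed valid handles):
//
//     Vector<Uniform> uniforms;
//     Uniform u;
//     u.uniform_type = UNIFORM_TYPE_UNIFORM_BUFFER;
//     u.binding = 0;
//     u.ids.push_back(ubo);
//     uniforms.push_back(u);
//     RID uniform_set = rd->uniform_set_create(uniforms, shader, 0 /*set*/);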

bool RenderingDeviceVulkan::uniform_set_is_valid(RID p_uniform_set) {
	return uniform_set_owner.owns(p_uniform_set);
}

void RenderingDeviceVulkan::uniform_set_set_invalidation_callback(RID p_uniform_set, UniformSetInvalidatedCallback p_callback, void *p_userdata) {
	UniformSet *us = uniform_set_owner.getornull(p_uniform_set);
	ERR_FAIL_COND(!us);
	us->invalidated_callback = p_callback;
	us->invalidated_callback_userdata = p_userdata;
}
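
// Usage sketch (illustrative; it assumes the UniformSetInvalidatedCallback
// typedef matches this shape -- check rendering_device.h -- and that the
// callback fires when the set is invalidated, e.g. a dependency was freed):
//
//     static void _on_set_invalidated(const RID &p_set, void *p_userdata) {
//         // drop any cached references to p_set here
//     }
//     rd->uniform_set_set_invalidation_callback(uniform_set, _on_set_invalidated, this);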

Error RenderingDeviceVulkan::buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a compute list");

	VkPipelineStageFlags dst_stage_mask = 0;
	VkAccessFlags dst_access = 0;
	if (p_post_barrier & BARRIER_MASK_TRANSFER) {
		// Protect subsequent updates...
		dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
		dst_access = VK_ACCESS_TRANSFER_WRITE_BIT;
	}
	Buffer *buffer = _get_buffer_from_owner(p_buffer, dst_stage_mask, dst_access, p_post_barrier);
	if (!buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Buffer argument is not a valid buffer of any type.");
	}

	ERR_FAIL_COND_V_MSG(p_offset + p_size > buffer->size, ERR_INVALID_PARAMETER,
			"Attempted to write buffer (" + itos((p_offset + p_size) - buffer->size) + " bytes) past the end.");

	// no barrier should be needed here
	// _buffer_memory_barrier(buffer->buffer, p_offset, p_size, dst_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_access, VK_ACCESS_TRANSFER_WRITE_BIT, true);

	Error err = _buffer_update(buffer, p_offset, (uint8_t *)p_data, p_size, p_post_barrier);
	if (err) {
		return err;
	}

#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	if (dst_stage_mask == 0) {
		dst_stage_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	if (p_post_barrier != RD::BARRIER_MASK_NO_BARRIER) {
		_buffer_memory_barrier(buffer->buffer, p_offset, p_size, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_stage_mask, VK_ACCESS_TRANSFER_WRITE_BIT, dst_access, true);
	}
#endif
	return err;
}
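
// Usage sketch (illustrative): update the first 64 bytes of a buffer and only
// raise a barrier toward compute work. `rd` and `ubo` are assumed handles.
//
//     float data[16] = {};
//     rd->buffer_update(ubo, 0, sizeof(data), data, BARRIER_MASK_COMPUTE);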

Error RenderingDeviceVulkan::buffer_clear(RID p_buffer, uint32_t p_offset, uint32_t p_size, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG((p_size % 4) != 0, ERR_INVALID_PARAMETER,
			"Size must be a multiple of four");
	ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a compute list");

	VkPipelineStageFlags dst_stage_mask = 0;
	VkAccessFlags dst_access = 0;
	if (p_post_barrier & BARRIER_MASK_TRANSFER) {
		// Protect subsequent updates...
		dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
		dst_access = VK_ACCESS_TRANSFER_WRITE_BIT;
	}

	Buffer *buffer = _get_buffer_from_owner(p_buffer, dst_stage_mask, dst_access, p_post_barrier);
	if (!buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Buffer argument is not a valid buffer of any type.");
	}

	ERR_FAIL_COND_V_MSG(p_offset + p_size > buffer->size, ERR_INVALID_PARAMETER,
			"Attempted to write buffer (" + itos((p_offset + p_size) - buffer->size) + " bytes) past the end.");

	// should not be needed
	// _buffer_memory_barrier(buffer->buffer, p_offset, p_size, dst_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_access, VK_ACCESS_TRANSFER_WRITE_BIT, p_post_barrier);

	vkCmdFillBuffer(frames[frame].draw_command_buffer, buffer->buffer, p_offset, p_size, 0);

#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	if (dst_stage_mask == 0) {
		dst_stage_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	_buffer_memory_barrier(buffer->buffer, p_offset, p_size, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_stage_mask, VK_ACCESS_TRANSFER_WRITE_BIT, dst_access, true); //the last parameter is a bool; passing a stage mask here was unintended
#endif
	return OK;
}
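
// Usage sketch (illustrative): zero a whole 256-byte buffer; the size must be
// a multiple of four, as validated above. `rd` and `ssbo` are assumed handles.
//
//     rd->buffer_clear(ssbo, 0, 256, BARRIER_MASK_ALL);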

Vector<uint8_t> RenderingDeviceVulkan::buffer_get_data(RID p_buffer) {
	_THREAD_SAFE_METHOD_

	// It could be this buffer was just created
	VkPipelineStageFlags src_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
	VkAccessFlags src_access_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
	// Get the vulkan buffer and the potential stage/access possible
	Buffer *buffer = _get_buffer_from_owner(p_buffer, src_stage_mask, src_access_mask, BARRIER_MASK_ALL);
	if (!buffer) {
		ERR_FAIL_V_MSG(Vector<uint8_t>(), "Buffer is either invalid or this type of buffer can't be retrieved. Only Index and Vertex buffers allow retrieving.");
	}

	// Make sure no one is using the buffer -- the "false" gets us to the same command buffer as below.
	// Argument order follows _buffer_memory_barrier(buffer, offset, size, src_stage, dst_stage, src_access, dst_access, sync_with_draw).
	_buffer_memory_barrier(buffer->buffer, 0, buffer->size, src_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, src_access_mask, VK_ACCESS_TRANSFER_READ_BIT, false);

	VkCommandBuffer command_buffer = frames[frame].setup_command_buffer;

	Buffer tmp_buffer;
	_buffer_allocate(&tmp_buffer, buffer->size, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VMA_MEMORY_USAGE_CPU_ONLY);
	VkBufferCopy region;
	region.srcOffset = 0;
	region.dstOffset = 0;
	region.size = buffer->size;
	vkCmdCopyBuffer(command_buffer, buffer->buffer, tmp_buffer.buffer, 1, &region); //dst buffer is in CPU, but I wonder if src buffer needs a barrier for this..
	//flush everything so memory can be safely mapped
	_flush(true);

	void *buffer_mem;
	VkResult vkerr = vmaMapMemory(allocator, tmp_buffer.allocation, &buffer_mem);
	ERR_FAIL_COND_V_MSG(vkerr, Vector<uint8_t>(), "vmaMapMemory failed with error " + itos(vkerr) + ".");

	Vector<uint8_t> buffer_data;
	{
		buffer_data.resize(buffer->size);
		uint8_t *w = buffer_data.ptrw();
		memcpy(w, buffer_mem, buffer->size);
	}

	vmaUnmapMemory(allocator, tmp_buffer.allocation);

	_buffer_free(&tmp_buffer);

	return buffer_data;
}
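
// Usage sketch (illustrative; this path stalls the GPU via _flush(true), so it
// is meant for tooling/debugging rather than per-frame use):
//
//     Vector<uint8_t> bytes = rd->buffer_get_data(ssbo);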
2019-06-07 16:07:57 +00:00
/*************************/
/**** RENDER PIPELINE ****/
/*************************/
2021-07-09 19:48:28 +00:00
RID RenderingDeviceVulkan : : render_pipeline_create ( RID p_shader , FramebufferFormatID p_framebuffer_format , VertexFormatID p_vertex_format , RenderPrimitive p_render_primitive , const PipelineRasterizationState & p_rasterization_state , const PipelineMultisampleState & p_multisample_state , const PipelineDepthStencilState & p_depth_stencil_state , const PipelineColorBlendState & p_blend_state , int p_dynamic_state_flags , uint32_t p_for_render_pass , const Vector < PipelineSpecializationConstant > & p_specialization_constants ) {
2019-06-07 16:07:57 +00:00
_THREAD_SAFE_METHOD_
//needs a shader
Shader * shader = shader_owner . getornull ( p_shader ) ;
2019-06-10 17:12:24 +00:00
ERR_FAIL_COND_V ( ! shader , RID ( ) ) ;
2019-06-07 16:07:57 +00:00
2019-09-25 19:44:44 +00:00
ERR_FAIL_COND_V_MSG ( shader - > is_compute , RID ( ) ,
" Compute shaders can't be used in render pipelines " ) ;
2019-06-07 16:07:57 +00:00
if ( p_framebuffer_format = = INVALID_ID ) {
//if nothing provided, use an empty one (no attachments)
p_framebuffer_format = framebuffer_format_create ( Vector < AttachmentFormat > ( ) ) ;
}
2019-06-10 17:12:24 +00:00
ERR_FAIL_COND_V ( ! framebuffer_formats . has ( p_framebuffer_format ) , RID ( ) ) ;
2019-06-07 16:07:57 +00:00
const FramebufferFormat & fb_format = framebuffer_formats [ p_framebuffer_format ] ;
{ //validate shader vs framebuffer
2021-06-24 13:58:36 +00:00
ERR_FAIL_COND_V_MSG ( p_for_render_pass > = uint32_t ( fb_format . E - > key ( ) . passes . size ( ) ) , RID ( ) , " Render pass requested for pipeline creation ( " + itos ( p_for_render_pass ) + " ) is out of bounds " ) ;
const FramebufferPass & pass = fb_format . E - > key ( ) . passes [ p_for_render_pass ] ;
uint32_t output_mask = 0 ;
for ( int i = 0 ; i < pass . color_attachments . size ( ) ; i + + ) {
if ( pass . color_attachments [ i ] ! = FramebufferPass : : ATTACHMENT_UNUSED ) {
output_mask | = 1 < < i ;
}
}
ERR_FAIL_COND_V_MSG ( shader - > fragment_output_mask ! = output_mask , RID ( ) ,
" Mismatch fragment shader output mask ( " + itos ( shader - > fragment_output_mask ) + " ) and framebuffer color output mask ( " + itos ( output_mask ) + " ) when binding both in render pipeline. " ) ;
2019-06-07 16:07:57 +00:00
}
	//vertex
	VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info;

	if (p_vertex_format != INVALID_ID) {
		//uses vertices, else it does not
		ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID());
		const VertexDescriptionCache &vd = vertex_formats[p_vertex_format];

		pipeline_vertex_input_state_create_info = vd.create_info;

		//validate with inputs
		for (uint32_t i = 0; i < 32; i++) {
			if (!(shader->vertex_input_mask & (1 << i))) {
				continue;
			}
			bool found = false;
			for (int j = 0; j < vd.vertex_formats.size(); j++) {
				if (vd.vertex_formats[j].location == i) {
					found = true;
				}
			}

			ERR_FAIL_COND_V_MSG(!found, RID(),
					"Shader vertex input location (" + itos(i) + ") not provided in vertex input description for pipeline creation.");
		}

	} else {
		//does not use vertices
		pipeline_vertex_input_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
		pipeline_vertex_input_state_create_info.pNext = nullptr;
		pipeline_vertex_input_state_create_info.flags = 0;
		pipeline_vertex_input_state_create_info.vertexBindingDescriptionCount = 0;
		pipeline_vertex_input_state_create_info.pVertexBindingDescriptions = nullptr;
		pipeline_vertex_input_state_create_info.vertexAttributeDescriptionCount = 0;
		pipeline_vertex_input_state_create_info.pVertexAttributeDescriptions = nullptr;

		ERR_FAIL_COND_V_MSG(shader->vertex_input_mask != 0, RID(),
				"Shader contains vertex inputs, but no vertex input description was provided for pipeline creation.");
	}
	//input assembly
	ERR_FAIL_INDEX_V(p_render_primitive, RENDER_PRIMITIVE_MAX, RID());

	VkPipelineInputAssemblyStateCreateInfo input_assembly_create_info;
	input_assembly_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
	input_assembly_create_info.pNext = nullptr;
	input_assembly_create_info.flags = 0;

	static const VkPrimitiveTopology topology_list[RENDER_PRIMITIVE_MAX] = {
		VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
		VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
		VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
		VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
		VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
	};

	input_assembly_create_info.topology = topology_list[p_render_primitive];
	input_assembly_create_info.primitiveRestartEnable = (p_render_primitive == RENDER_PRIMITIVE_TRIANGLE_STRIPS_WITH_RESTART_INDEX);
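	//Note: RENDER_PRIMITIVE_TRIANGLE_STRIPS_WITH_RESTART_INDEX maps to the same
	//VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP as plain strips; the restart behavior comes
	//solely from primitiveRestartEnable being set above.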
	//tessellation
	VkPipelineTessellationStateCreateInfo tessellation_create_info;
	tessellation_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
	tessellation_create_info.pNext = nullptr;
	tessellation_create_info.flags = 0;
	ERR_FAIL_COND_V(p_rasterization_state.patch_control_points < 1 || p_rasterization_state.patch_control_points > limits.maxTessellationPatchSize, RID());
	tessellation_create_info.patchControlPoints = p_rasterization_state.patch_control_points;
	VkPipelineViewportStateCreateInfo viewport_state_create_info;
	viewport_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
	viewport_state_create_info.pNext = nullptr;
	viewport_state_create_info.flags = 0;
	viewport_state_create_info.viewportCount = 1; //if VR extensions are supported at some point, this will have to be customizable in the framebuffer format
	viewport_state_create_info.pViewports = nullptr;
	viewport_state_create_info.scissorCount = 1;
	viewport_state_create_info.pScissors = nullptr;
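	//pViewports/pScissors can stay null because viewport and scissor are always submitted
	//as dynamic state (see dynamic_states below) and set when a draw list begins.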
	//rasterization
	VkPipelineRasterizationStateCreateInfo rasterization_state_create_info;
	rasterization_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
	rasterization_state_create_info.pNext = nullptr;
	rasterization_state_create_info.flags = 0;
	rasterization_state_create_info.depthClampEnable = p_rasterization_state.enable_depth_clamp;
	rasterization_state_create_info.rasterizerDiscardEnable = p_rasterization_state.discard_primitives;
	rasterization_state_create_info.polygonMode = (p_rasterization_state.wireframe ? VK_POLYGON_MODE_LINE : VK_POLYGON_MODE_FILL);
	static const VkCullModeFlags cull_mode[3] = {
		VK_CULL_MODE_NONE,
		VK_CULL_MODE_FRONT_BIT,
		VK_CULL_MODE_BACK_BIT
	};

	ERR_FAIL_INDEX_V(p_rasterization_state.cull_mode, 3, RID());
	rasterization_state_create_info.cullMode = cull_mode[p_rasterization_state.cull_mode];
	rasterization_state_create_info.frontFace = (p_rasterization_state.front_face == POLYGON_FRONT_FACE_CLOCKWISE ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE);
	rasterization_state_create_info.depthBiasEnable = p_rasterization_state.depth_bias_enable;
	rasterization_state_create_info.depthBiasConstantFactor = p_rasterization_state.depth_bias_constant_factor;
	rasterization_state_create_info.depthBiasClamp = p_rasterization_state.depth_bias_clamp;
	rasterization_state_create_info.depthBiasSlopeFactor = p_rasterization_state.depth_bias_slope_factor;
	rasterization_state_create_info.lineWidth = p_rasterization_state.line_width;
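	//Note: lineWidth values other than 1.0 require the wideLines device feature; the value
	//can also be overridden per draw list when DYNAMIC_STATE_LINE_WIDTH is requested below.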
	//multisample
	VkPipelineMultisampleStateCreateInfo multisample_state_create_info;
	multisample_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
	multisample_state_create_info.pNext = nullptr;
	multisample_state_create_info.flags = 0;

	multisample_state_create_info.rasterizationSamples = rasterization_sample_count[p_multisample_state.sample_count];
	multisample_state_create_info.sampleShadingEnable = p_multisample_state.enable_sample_shading;
	multisample_state_create_info.minSampleShading = p_multisample_state.min_sample_shading;
	Vector<VkSampleMask> sample_mask;
	if (p_multisample_state.sample_mask.size()) {
		//use sample mask
		int rasterization_sample_mask_expected_size[TEXTURE_SAMPLES_MAX] = {
			1, 2, 4, 8, 16, 32, 64
		};
		ERR_FAIL_COND_V(rasterization_sample_mask_expected_size[p_multisample_state.sample_count] != p_multisample_state.sample_mask.size(), RID());
		for (int i = 0; i < p_multisample_state.sample_mask.size(); i++) {
			//copy the supplied mask words (no pre-resize; that would leave zeroed
			//entries at the front and mask out every sample)
			VkSampleMask mask = p_multisample_state.sample_mask[i];
			sample_mask.push_back(mask);
		}
		multisample_state_create_info.pSampleMask = sample_mask.ptr();
	} else {
		multisample_state_create_info.pSampleMask = nullptr;
	}

	multisample_state_create_info.alphaToCoverageEnable = p_multisample_state.enable_alpha_to_coverage;
	multisample_state_create_info.alphaToOneEnable = p_multisample_state.enable_alpha_to_one;
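	//Note: Vulkan packs 32 samples into each VkSampleMask word; the expected-size table
	//above follows RenderingDevice's convention of one mask element per sample, which is
	//always at least the ceil(samples / 32) words Vulkan needs.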
	//depth stencil
	VkPipelineDepthStencilStateCreateInfo depth_stencil_state_create_info;
	depth_stencil_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
	depth_stencil_state_create_info.pNext = nullptr;
	depth_stencil_state_create_info.flags = 0;
	depth_stencil_state_create_info.depthTestEnable = p_depth_stencil_state.enable_depth_test;
	depth_stencil_state_create_info.depthWriteEnable = p_depth_stencil_state.enable_depth_write;
	ERR_FAIL_INDEX_V(p_depth_stencil_state.depth_compare_operator, COMPARE_OP_MAX, RID());
	depth_stencil_state_create_info.depthCompareOp = compare_operators[p_depth_stencil_state.depth_compare_operator];
	depth_stencil_state_create_info.depthBoundsTestEnable = p_depth_stencil_state.enable_depth_range;
	depth_stencil_state_create_info.stencilTestEnable = p_depth_stencil_state.enable_stencil;

	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.front.failOp = stencil_operations[p_depth_stencil_state.front_op.fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.pass, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.front.passOp = stencil_operations[p_depth_stencil_state.front_op.pass];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.depth_fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.front.depthFailOp = stencil_operations[p_depth_stencil_state.front_op.depth_fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.compare, COMPARE_OP_MAX, RID());
	depth_stencil_state_create_info.front.compareOp = compare_operators[p_depth_stencil_state.front_op.compare];
	depth_stencil_state_create_info.front.compareMask = p_depth_stencil_state.front_op.compare_mask;
	depth_stencil_state_create_info.front.writeMask = p_depth_stencil_state.front_op.write_mask;
	depth_stencil_state_create_info.front.reference = p_depth_stencil_state.front_op.reference;

	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.back.failOp = stencil_operations[p_depth_stencil_state.back_op.fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.pass, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.back.passOp = stencil_operations[p_depth_stencil_state.back_op.pass];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.depth_fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.back.depthFailOp = stencil_operations[p_depth_stencil_state.back_op.depth_fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.compare, COMPARE_OP_MAX, RID());
	depth_stencil_state_create_info.back.compareOp = compare_operators[p_depth_stencil_state.back_op.compare];
	depth_stencil_state_create_info.back.compareMask = p_depth_stencil_state.back_op.compare_mask;
	depth_stencil_state_create_info.back.writeMask = p_depth_stencil_state.back_op.write_mask;
	depth_stencil_state_create_info.back.reference = p_depth_stencil_state.back_op.reference;
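	//front and back stencil faces are configured independently above; they share the single
	//stencilTestEnable flag but each face gets its own ops, masks and reference value.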
	depth_stencil_state_create_info.minDepthBounds = p_depth_stencil_state.depth_range_min;
	depth_stencil_state_create_info.maxDepthBounds = p_depth_stencil_state.depth_range_max;

	//blend state
	VkPipelineColorBlendStateCreateInfo color_blend_state_create_info;
	color_blend_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
	color_blend_state_create_info.pNext = nullptr;
	color_blend_state_create_info.flags = 0;
	color_blend_state_create_info.logicOpEnable = p_blend_state.enable_logic_op;
	ERR_FAIL_INDEX_V(p_blend_state.logic_op, LOGIC_OP_MAX, RID());
	color_blend_state_create_info.logicOp = logic_operations[p_blend_state.logic_op];

	Vector<VkPipelineColorBlendAttachmentState> attachment_states;
	{
		const FramebufferPass &pass = fb_format.E->key().passes[p_for_render_pass];
		for (int i = 0; i < pass.color_attachments.size(); i++) {
			if (pass.color_attachments[i] != FramebufferPass::ATTACHMENT_UNUSED) {
				int idx = attachment_states.size();
				ERR_FAIL_INDEX_V(idx, p_blend_state.attachments.size(), RID());
				VkPipelineColorBlendAttachmentState state;
				state.blendEnable = p_blend_state.attachments[idx].enable_blend;

				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].src_color_blend_factor, BLEND_FACTOR_MAX, RID());
				state.srcColorBlendFactor = blend_factors[p_blend_state.attachments[idx].src_color_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].dst_color_blend_factor, BLEND_FACTOR_MAX, RID());
				state.dstColorBlendFactor = blend_factors[p_blend_state.attachments[idx].dst_color_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].color_blend_op, BLEND_OP_MAX, RID());
				state.colorBlendOp = blend_operations[p_blend_state.attachments[idx].color_blend_op];

				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].src_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
				state.srcAlphaBlendFactor = blend_factors[p_blend_state.attachments[idx].src_alpha_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].dst_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
				state.dstAlphaBlendFactor = blend_factors[p_blend_state.attachments[idx].dst_alpha_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].alpha_blend_op, BLEND_OP_MAX, RID());
				state.alphaBlendOp = blend_operations[p_blend_state.attachments[idx].alpha_blend_op];

				state.colorWriteMask = 0;
				if (p_blend_state.attachments[idx].write_r) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_R_BIT;
				}
				if (p_blend_state.attachments[idx].write_g) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_G_BIT;
				}
				if (p_blend_state.attachments[idx].write_b) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_B_BIT;
				}
				if (p_blend_state.attachments[idx].write_a) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_A_BIT;
				}

				attachment_states.push_back(state);
			}
		}

		ERR_FAIL_COND_V(attachment_states.size() != p_blend_state.attachments.size(), RID());
	}

	color_blend_state_create_info.attachmentCount = attachment_states.size();
	color_blend_state_create_info.pAttachments = attachment_states.ptr();

	color_blend_state_create_info.blendConstants[0] = p_blend_state.blend_constant.r;
	color_blend_state_create_info.blendConstants[1] = p_blend_state.blend_constant.g;
	color_blend_state_create_info.blendConstants[2] = p_blend_state.blend_constant.b;
	color_blend_state_create_info.blendConstants[3] = p_blend_state.blend_constant.a;
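	//blendConstants feed the BLEND_FACTOR_CONSTANT_* blend factors; they can be overridden
	//at draw time when DYNAMIC_STATE_BLEND_CONSTANTS is requested below.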
	//dynamic state
	VkPipelineDynamicStateCreateInfo dynamic_state_create_info;
	dynamic_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
	dynamic_state_create_info.pNext = nullptr;
	dynamic_state_create_info.flags = 0;
	Vector<VkDynamicState> dynamic_states; //vulkan is weird..

	dynamic_states.push_back(VK_DYNAMIC_STATE_VIEWPORT); //viewport and scissor are always dynamic
	dynamic_states.push_back(VK_DYNAMIC_STATE_SCISSOR);

	if (p_dynamic_state_flags & DYNAMIC_STATE_LINE_WIDTH) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_LINE_WIDTH);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_DEPTH_BIAS) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_DEPTH_BIAS);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_BLEND_CONSTANTS) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_DEPTH_BOUNDS) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_COMPARE_MASK) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_WRITE_MASK) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_REFERENCE) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
	}

	dynamic_state_create_info.dynamicStateCount = dynamic_states.size();
	dynamic_state_create_info.pDynamicStates = dynamic_states.ptr();
	//finally, pipeline create info
	VkGraphicsPipelineCreateInfo graphics_pipeline_create_info;
	graphics_pipeline_create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
	graphics_pipeline_create_info.pNext = nullptr;
	graphics_pipeline_create_info.flags = 0;

	Vector<VkPipelineShaderStageCreateInfo> pipeline_stages = shader->pipeline_stages;
	Vector<VkSpecializationInfo> specialization_info;
	Vector<Vector<VkSpecializationMapEntry>> specialization_map_entries;
	Vector<uint32_t> specialization_constant_data;

	if (shader->specialization_constants.size()) {
		specialization_constant_data.resize(shader->specialization_constants.size());
		uint32_t *data_ptr = specialization_constant_data.ptrw();
		specialization_info.resize(pipeline_stages.size());
		specialization_map_entries.resize(pipeline_stages.size());
		for (int i = 0; i < shader->specialization_constants.size(); i++) {
			//see if overridden
			const Shader::SpecializationConstant &sc = shader->specialization_constants[i];
			data_ptr[i] = sc.constant.int_value; //just copy the 32 bits
			for (int j = 0; j < p_specialization_constants.size(); j++) {
				const PipelineSpecializationConstant &psc = p_specialization_constants[j];
				if (psc.constant_id == sc.constant.constant_id) {
					ERR_FAIL_COND_V_MSG(psc.type != sc.constant.type, RID(), "Specialization constant provided for id (" + itos(sc.constant.constant_id) + ") is of the wrong type.");
					data_ptr[i] = psc.int_value; //override with the value supplied for this pipeline
					break;
				}
			}

			VkSpecializationMapEntry entry;
			entry.constantID = sc.constant.constant_id;
			entry.offset = i * sizeof(uint32_t);
			entry.size = sizeof(uint32_t);

			for (int j = 0; j < SHADER_STAGE_MAX; j++) {
				if (sc.stage_flags & (1 << j)) {
					VkShaderStageFlagBits stage = shader_stage_masks[j];
					for (int k = 0; k < pipeline_stages.size(); k++) {
						if (pipeline_stages[k].stage == stage) {
							specialization_map_entries.write[k].push_back(entry);
						}
					}
				}
			}
		}

		for (int k = 0; k < pipeline_stages.size(); k++) {
			if (specialization_map_entries[k].size()) {
				specialization_info.write[k].dataSize = specialization_constant_data.size() * sizeof(uint32_t);
				specialization_info.write[k].pData = data_ptr;
				specialization_info.write[k].mapEntryCount = specialization_map_entries[k].size();
				specialization_info.write[k].pMapEntries = specialization_map_entries[k].ptr();
				pipeline_stages.write[k].pSpecializationInfo = specialization_info.ptr() + k; //each stage points at its own info
			}
		}
	}
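	//The block above packs every specialization constant into a single uint32 array (one
	//32-bit slot per constant, at offset i * sizeof(uint32_t)) and builds per-stage map
	//entries, so a VkSpecializationInfo is only attached to stages that use a constant.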
	graphics_pipeline_create_info.stageCount = pipeline_stages.size();
	graphics_pipeline_create_info.pStages = pipeline_stages.ptr();

	graphics_pipeline_create_info.pVertexInputState = &pipeline_vertex_input_state_create_info;
	graphics_pipeline_create_info.pInputAssemblyState = &input_assembly_create_info;
	graphics_pipeline_create_info.pTessellationState = &tessellation_create_info;
	graphics_pipeline_create_info.pViewportState = &viewport_state_create_info;
	graphics_pipeline_create_info.pRasterizationState = &rasterization_state_create_info;
	graphics_pipeline_create_info.pMultisampleState = &multisample_state_create_info;
	graphics_pipeline_create_info.pDepthStencilState = &depth_stencil_state_create_info;
	graphics_pipeline_create_info.pColorBlendState = &color_blend_state_create_info;
	graphics_pipeline_create_info.pDynamicState = &dynamic_state_create_info;
	graphics_pipeline_create_info.layout = shader->pipeline_layout;
	graphics_pipeline_create_info.renderPass = fb_format.render_pass;
	graphics_pipeline_create_info.subpass = p_for_render_pass;
	graphics_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE;
	graphics_pipeline_create_info.basePipelineIndex = 0;

	RenderPipeline pipeline;
	VkResult err = vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline.pipeline);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateGraphicsPipelines failed with error " + itos(err) + ".");
	pipeline.set_formats = shader->set_formats;
	pipeline.push_constant_stages = shader->push_constant.push_constants_vk_stage;
	pipeline.pipeline_layout = shader->pipeline_layout;
	pipeline.shader = p_shader;
	pipeline.push_constant_size = shader->push_constant.push_constant_size;

#ifdef DEBUG_ENABLED
	pipeline.validation.dynamic_state = p_dynamic_state_flags;
	pipeline.validation.framebuffer_format = p_framebuffer_format;
	pipeline.validation.render_pass = p_for_render_pass;
	pipeline.validation.vertex_format = p_vertex_format;
	pipeline.validation.uses_restart_indices = input_assembly_create_info.primitiveRestartEnable;

	static const uint32_t primitive_divisor[RENDER_PRIMITIVE_MAX] = {
		1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1
	};
	pipeline.validation.primitive_divisor = primitive_divisor[p_render_primitive];
	static const uint32_t primitive_minimum[RENDER_PRIMITIVE_MAX] = {
		1,
		2,
		2,
		2,
		2,
		3,
		3,
		3,
		3,
		3,
		1,
	};
	pipeline.validation.primitive_minimum = primitive_minimum[p_render_primitive];
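	//primitive_divisor/primitive_minimum only feed the debug validation in draw calls:
	//the vertex/index count must be a multiple of the divisor and at least the minimum
	//for the chosen primitive (e.g. multiples of 3, minimum 3, for triangle lists).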
#endif
	//create ID to associate with this pipeline
	RID id = render_pipeline_owner.make_rid(pipeline);
	//now add all the dependencies
	_add_dependency(id, p_shader);
	return id;
}
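//Example usage (a sketch, not engine code; assumes `rd` is this device and the shader,
//framebuffer format and vertex format RIDs were created earlier, for one color attachment):
//	RID pipeline = rd->render_pipeline_create(shader, fb_format, vertex_format,
//			RENDER_PRIMITIVE_TRIANGLES, PipelineRasterizationState(), PipelineMultisampleState(),
//			PipelineDepthStencilState(), PipelineColorBlendState::create_disabled(1),
//			0, 0, Vector<PipelineSpecializationConstant>());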

bool RenderingDeviceVulkan::render_pipeline_is_valid(RID p_pipeline) {
	_THREAD_SAFE_METHOD_
	return render_pipeline_owner.owns(p_pipeline);
}

/**************************/
/**** COMPUTE PIPELINE ****/
/**************************/

RID RenderingDeviceVulkan::compute_pipeline_create(RID p_shader, const Vector<PipelineSpecializationConstant> &p_specialization_constants) {
	_THREAD_SAFE_METHOD_

	//needs a shader
	Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, RID());

	ERR_FAIL_COND_V_MSG(!shader->is_compute, RID(),
			"Non-compute shaders can't be used in compute pipelines");

	//finally, pipeline create info
	VkComputePipelineCreateInfo compute_pipeline_create_info;
	compute_pipeline_create_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
	compute_pipeline_create_info.pNext = nullptr;
	compute_pipeline_create_info.flags = 0;
	compute_pipeline_create_info.stage = shader->pipeline_stages[0];
	compute_pipeline_create_info.layout = shader->pipeline_layout;
	compute_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE;
	compute_pipeline_create_info.basePipelineIndex = 0;
	VkSpecializationInfo specialization_info;
	Vector<VkSpecializationMapEntry> specialization_map_entries;
	Vector<uint32_t> specialization_constant_data;

	if (shader->specialization_constants.size()) {
		specialization_constant_data.resize(shader->specialization_constants.size());
		uint32_t *data_ptr = specialization_constant_data.ptrw();
		for (int i = 0; i < shader->specialization_constants.size(); i++) {
			//see if overridden
			const Shader::SpecializationConstant &sc = shader->specialization_constants[i];
			data_ptr[i] = sc.constant.int_value; //just copy the 32 bits
			for (int j = 0; j < p_specialization_constants.size(); j++) {
				const PipelineSpecializationConstant &psc = p_specialization_constants[j];
				if (psc.constant_id == sc.constant.constant_id) {
					ERR_FAIL_COND_V_MSG(psc.type != sc.constant.type, RID(), "Specialization constant provided for id (" + itos(sc.constant.constant_id) + ") is of the wrong type.");
					data_ptr[i] = psc.int_value; //override with the value supplied for this pipeline
					break;
				}
			}

			VkSpecializationMapEntry entry;
			entry.constantID = sc.constant.constant_id;
			entry.offset = i * sizeof(uint32_t);
			entry.size = sizeof(uint32_t);
			specialization_map_entries.push_back(entry);
		}
		specialization_info.dataSize = specialization_constant_data.size() * sizeof(uint32_t);
		specialization_info.pData = data_ptr;
		specialization_info.mapEntryCount = specialization_map_entries.size();
		specialization_info.pMapEntries = specialization_map_entries.ptr();
		compute_pipeline_create_info.stage.pSpecializationInfo = &specialization_info;
	}
	ComputePipeline pipeline;
	VkResult err = vkCreateComputePipelines(device, VK_NULL_HANDLE, 1, &compute_pipeline_create_info, nullptr, &pipeline.pipeline);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateComputePipelines failed with error " + itos(err) + ".");

	pipeline.set_formats = shader->set_formats;
	pipeline.push_constant_stages = shader->push_constant.push_constants_vk_stage;
	pipeline.pipeline_layout = shader->pipeline_layout;
	pipeline.shader = p_shader;
	pipeline.push_constant_size = shader->push_constant.push_constant_size;
	pipeline.local_group_size[0] = shader->compute_local_size[0];
	pipeline.local_group_size[1] = shader->compute_local_size[1];
	pipeline.local_group_size[2] = shader->compute_local_size[2];

	//create ID to associate with this pipeline
	RID id = compute_pipeline_owner.make_rid(pipeline);
	//now add all the dependencies
	_add_dependency(id, p_shader);
	return id;
}
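//Example usage (a sketch, not engine code; `rd` stands in for this device): a compute
//pipeline only needs the shader plus optional specialization constants:
//	RID pipeline = rd->compute_pipeline_create(compute_shader, Vector<PipelineSpecializationConstant>());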

bool RenderingDeviceVulkan::compute_pipeline_is_valid(RID p_pipeline) {
	return compute_pipeline_owner.owns(p_pipeline);
}

/****************/
/**** SCREEN ****/
/****************/

int RenderingDeviceVulkan::screen_get_width(DisplayServer::WindowID p_screen) const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
	return context->window_get_width(p_screen);
}

int RenderingDeviceVulkan::screen_get_height(DisplayServer::WindowID p_screen) const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
	return context->window_get_height(p_screen);
}

RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::screen_get_framebuffer_format() const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");

	//very hacky, but not used often per frame so I guess ok
	VkFormat vkformat = context->get_screen_format();
	DataFormat format = DATA_FORMAT_MAX;
	for (int i = 0; i < DATA_FORMAT_MAX; i++) {
		if (vkformat == vulkan_formats[i]) {
			format = DataFormat(i);
			break;
		}
	}

	ERR_FAIL_COND_V(format == DATA_FORMAT_MAX, INVALID_ID);

	AttachmentFormat attachment;
	attachment.format = format;
	attachment.samples = TEXTURE_SAMPLES_1;
	attachment.usage_flags = TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
	Vector<AttachmentFormat> screen_attachment;
	screen_attachment.push_back(attachment);
	return const_cast<RenderingDeviceVulkan *>(this)->framebuffer_format_create(screen_attachment);
}

/*******************/
/**** DRAW LIST ****/
/*******************/

RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_begin_for_screen(DisplayServer::WindowID p_screen, const Color &p_clear_color) {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");

	ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;
	Size2i size = Size2i(context->window_get_width(p_screen), context->window_get_height(p_screen));

	_draw_list_allocate(Rect2i(Vector2i(), size), 0, 0);
#ifdef DEBUG_ENABLED
	draw_list_framebuffer_format = screen_get_framebuffer_format();
#endif
	draw_list_subpass_count = 1;

	VkRenderPassBeginInfo render_pass_begin;
	render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin.pNext = nullptr;
	render_pass_begin.renderPass = context->window_get_render_pass(p_screen);
	render_pass_begin.framebuffer = context->window_get_framebuffer(p_screen);

	render_pass_begin.renderArea.extent.width = size.width;
	render_pass_begin.renderArea.extent.height = size.height;
	render_pass_begin.renderArea.offset.x = 0;
	render_pass_begin.renderArea.offset.y = 0;

	render_pass_begin.clearValueCount = 1;

	VkClearValue clear_value;
	clear_value.color.float32[0] = p_clear_color.r;
	clear_value.color.float32[1] = p_clear_color.g;
	clear_value.color.float32[2] = p_clear_color.b;
	clear_value.color.float32[3] = p_clear_color.a;

	render_pass_begin.pClearValues = &clear_value;

	vkCmdBeginRenderPass(command_buffer, &render_pass_begin, VK_SUBPASS_CONTENTS_INLINE);

	uint32_t size_x = screen_get_width(p_screen);
	uint32_t size_y = screen_get_height(p_screen);

	VkViewport viewport;
	viewport.x = 0;
	viewport.y = 0;
	viewport.width = size_x;
	viewport.height = size_y;
	viewport.minDepth = 0;
	viewport.maxDepth = 1.0;

	vkCmdSetViewport(command_buffer, 0, 1, &viewport);

	VkRect2D scissor;
	scissor.offset.x = 0;
	scissor.offset.y = 0;
	scissor.extent.width = size_x;
	scissor.extent.height = size_y;

	vkCmdSetScissor(command_buffer, 0, 1, &scissor);

	return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
}
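//Draw list IDs encode their type in the high bits (ID_TYPE_DRAW_LIST or
//ID_TYPE_SPLIT_DRAW_LIST shifted by ID_BASE_SHIFT); split lists also carry the split
//index in the low bits. _get_draw_list_ptr() further below decodes this.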

Error RenderingDeviceVulkan::_draw_list_setup_framebuffer(Framebuffer *p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, VkFramebuffer *r_framebuffer, VkRenderPass *r_render_pass, uint32_t *r_subpass_count) {
	Framebuffer::VersionKey vk;
	vk.initial_color_action = p_initial_color_action;
	vk.final_color_action = p_final_color_action;
	vk.initial_depth_action = p_initial_depth_action;
	vk.final_depth_action = p_final_depth_action;
	vk.view_count = p_framebuffer->view_count;

	if (!p_framebuffer->framebuffers.has(vk)) {
		//need to create this version
		Framebuffer::Version version;

		version.render_pass = _render_pass_create(framebuffer_formats[p_framebuffer->format_id].E->key().attachments, framebuffer_formats[p_framebuffer->format_id].E->key().passes, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_framebuffer->view_count);

		VkFramebufferCreateInfo framebuffer_create_info;
		framebuffer_create_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
		framebuffer_create_info.pNext = nullptr;
		framebuffer_create_info.flags = 0;
		framebuffer_create_info.renderPass = version.render_pass;
		Vector<VkImageView> attachments;
		for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.getornull(p_framebuffer->texture_ids[i]);
			ERR_FAIL_COND_V(!texture, ERR_BUG);
			attachments.push_back(texture->view);
			ERR_FAIL_COND_V(texture->width != p_framebuffer->size.width, ERR_BUG);
			ERR_FAIL_COND_V(texture->height != p_framebuffer->size.height, ERR_BUG);
		}
		framebuffer_create_info.attachmentCount = attachments.size();
		framebuffer_create_info.pAttachments = attachments.ptr();
		framebuffer_create_info.width = p_framebuffer->size.width;
		framebuffer_create_info.height = p_framebuffer->size.height;
		framebuffer_create_info.layers = 1;

		VkResult err = vkCreateFramebuffer(device, &framebuffer_create_info, nullptr, &version.framebuffer);
		ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vkCreateFramebuffer failed with error " + itos(err) + ".");

		version.subpass_count = framebuffer_formats[p_framebuffer->format_id].E->key().passes.size();

		p_framebuffer->framebuffers.insert(vk, version);
	}
	const Framebuffer::Version &version = p_framebuffer->framebuffers[vk];
	*r_framebuffer = version.framebuffer;
	*r_render_pass = version.render_pass;
	*r_subpass_count = version.subpass_count;
	return OK;
}
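//Framebuffer versions (a VkRenderPass plus a VkFramebuffer) are cached per combination of
//load/store actions and view count, since Vulkan bakes those choices into the render pass
//object itself; this avoids recreating them every time the framebuffer is drawn to.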

Error RenderingDeviceVulkan::_draw_list_render_pass_begin(Framebuffer *framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_colors, float p_clear_depth, uint32_t p_clear_stencil, Point2i viewport_offset, Point2i viewport_size, VkFramebuffer vkframebuffer, VkRenderPass render_pass, VkCommandBuffer command_buffer, VkSubpassContents subpass_contents, const Vector<RID> &p_storage_textures) {
	VkRenderPassBeginInfo render_pass_begin;
	render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin.pNext = nullptr;
	render_pass_begin.renderPass = render_pass;
	render_pass_begin.framebuffer = vkframebuffer;
	/*
	 * Given how the API works, it makes sense to always fully operate on the whole framebuffer.
	 * This allows better "continue" operations for cases like shadowmapping.
	render_pass_begin.renderArea.extent.width = viewport_size.width;
	render_pass_begin.renderArea.extent.height = viewport_size.height;
	render_pass_begin.renderArea.offset.x = viewport_offset.x;
	render_pass_begin.renderArea.offset.y = viewport_offset.y;
	*/
	render_pass_begin.renderArea.extent.width = framebuffer->size.width;
	render_pass_begin.renderArea.extent.height = framebuffer->size.height;
	render_pass_begin.renderArea.offset.x = 0;
	render_pass_begin.renderArea.offset.y = 0;
	Vector<VkClearValue> clear_values;
	clear_values.resize(framebuffer->texture_ids.size());
	{
		int color_index = 0;
		for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
			VkClearValue clear_value;

			if (color_index < p_clear_colors.size() && texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
				ERR_FAIL_INDEX_V(color_index, p_clear_colors.size(), ERR_BUG); //a bug
				Color clear_color = p_clear_colors[color_index];
				clear_value.color.float32[0] = clear_color.r;
				clear_value.color.float32[1] = clear_color.g;
				clear_value.color.float32[2] = clear_color.b;
				clear_value.color.float32[3] = clear_color.a;
				color_index++;
			} else if (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
				clear_value.depthStencil.depth = p_clear_depth;
				clear_value.depthStencil.stencil = p_clear_stencil;
			} else {
				clear_value.color.float32[0] = 0;
				clear_value.color.float32[1] = 0;
				clear_value.color.float32[2] = 0;
				clear_value.color.float32[3] = 0;
			}
			clear_values.write[i] = clear_value;
		}
	}

	render_pass_begin.clearValueCount = clear_values.size();
	render_pass_begin.pClearValues = clear_values.ptr();
	for (int i = 0; i < p_storage_textures.size(); i++) {
		Texture *texture = texture_owner.getornull(p_storage_textures[i]);
		ERR_CONTINUE_MSG(!(texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT), "Supplied storage texture " + itos(i) + " for draw list is not set to be used for storage.");

		if (texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
			//must change layout to general
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
			image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
			image_memory_barrier.oldLayout = texture->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = texture->image;
			image_memory_barrier.subresourceRange.aspectMask = texture->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = texture->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
			image_memory_barrier.subresourceRange.baseArrayLayer = texture->base_layer;
			image_memory_barrier.subresourceRange.layerCount = texture->layers;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);

			texture->layout = VK_IMAGE_LAYOUT_GENERAL;

			draw_list_storage_textures.push_back(p_storage_textures[i]);
		}
	}
	vkCmdBeginRenderPass(command_buffer, &render_pass_begin, subpass_contents);

	//mark textures as bound
	draw_list_bound_textures.clear();
	draw_list_unbind_color_textures = p_final_color_action != FINAL_ACTION_CONTINUE;
	draw_list_unbind_depth_textures = p_final_depth_action != FINAL_ACTION_CONTINUE;

	for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
		Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
		texture->bound = true;
		draw_list_bound_textures.push_back(framebuffer->texture_ids[i]);
	}

	return OK;
}

void RenderingDeviceVulkan::_draw_list_insert_clear_region(DrawList *draw_list, Framebuffer *framebuffer, Point2i viewport_offset, Point2i viewport_size, bool p_clear_color, const Vector<Color> &p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil) {
	Vector<VkClearAttachment> clear_attachments;
	int color_index = 0;
	for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
		Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
		VkClearAttachment clear_at = {};

		if (p_clear_color && texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
			ERR_FAIL_INDEX(color_index, p_clear_colors.size()); //a bug
			Color clear_color = p_clear_colors[color_index];
			clear_at.clearValue.color.float32[0] = clear_color.r;
			clear_at.clearValue.color.float32[1] = clear_color.g;
			clear_at.clearValue.color.float32[2] = clear_color.b;
			clear_at.clearValue.color.float32[3] = clear_color.a;
			clear_at.colorAttachment = color_index++;
			clear_at.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		} else if (p_clear_depth && texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			clear_at.clearValue.depthStencil.depth = p_depth;
			clear_at.clearValue.depthStencil.stencil = p_stencil;
			clear_at.colorAttachment = 0;
			clear_at.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
			if (format_has_stencil(texture->format)) {
				clear_at.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
			}
		} else {
			ERR_CONTINUE(true);
		}
		clear_attachments.push_back(clear_at);
	}

	VkClearRect cr;
	cr.baseArrayLayer = 0;
	cr.layerCount = 1;
	cr.rect.offset.x = viewport_offset.x;
	cr.rect.offset.y = viewport_offset.y;
	cr.rect.extent.width = viewport_size.width;
	cr.rect.extent.height = viewport_size.height;

	vkCmdClearAttachments(draw_list->command_buffer, clear_attachments.size(), clear_attachments.ptr(), 1, &cr);
}
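//vkCmdClearAttachments runs inside the active render pass, which is what lets the
//INITIAL_ACTION_CLEAR_REGION paths in draw_list_begin()/draw_list_begin_split() below
//clear just the requested region instead of the whole attachment.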

RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_begin(RID p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && !compute_list->state.allow_draw_overlap, INVALID_ID, "Only one draw/compute list can be active at the same time.");

	Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
	ERR_FAIL_COND_V(!framebuffer, INVALID_ID);

	Point2i viewport_offset;
	Point2i viewport_size = framebuffer->size;
	bool needs_clear_color = false;
	bool needs_clear_depth = false;
	if (p_region != Rect2() && p_region != Rect2(Vector2(), viewport_size)) { //check custom region
		Rect2i viewport(viewport_offset, viewport_size);
		Rect2i regioni = p_region;
		if (!((regioni.position.x >= viewport.position.x) && (regioni.position.y >= viewport.position.y) &&
					((regioni.position.x + regioni.size.x) <= (viewport.position.x + viewport.size.x)) &&
					((regioni.position.y + regioni.size.y) <= (viewport.position.y + viewport.size.y)))) {
			ERR_FAIL_V_MSG(INVALID_ID, "When supplying a custom region, it must be contained within the framebuffer rectangle.");
		}

		viewport_offset = regioni.position;
		viewport_size = regioni.size;

		if (p_initial_color_action == INITIAL_ACTION_CLEAR_REGION_CONTINUE) {
			needs_clear_color = true;
			p_initial_color_action = INITIAL_ACTION_CONTINUE;
		}
		if (p_initial_depth_action == INITIAL_ACTION_CLEAR_REGION_CONTINUE) {
			needs_clear_depth = true;
			p_initial_depth_action = INITIAL_ACTION_CONTINUE;
		}
		if (p_initial_color_action == INITIAL_ACTION_CLEAR_REGION) {
			needs_clear_color = true;
			p_initial_color_action = INITIAL_ACTION_KEEP;
		}
		if (p_initial_depth_action == INITIAL_ACTION_CLEAR_REGION) {
			needs_clear_depth = true;
			p_initial_depth_action = INITIAL_ACTION_KEEP;
		}
	}
	if (p_initial_color_action == INITIAL_ACTION_CLEAR) { //check clear values
		int color_count = 0;
		for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
			if (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
				color_count++;
			}
		}
		ERR_FAIL_COND_V_MSG(p_clear_color_values.size() != color_count, INVALID_ID,
				"Clear color values supplied (" + itos(p_clear_color_values.size()) + ") differ from the amount required for framebuffer color attachments (" + itos(color_count) + ").");
	}

	VkFramebuffer vkframebuffer;
	VkRenderPass render_pass;

	Error err = _draw_list_setup_framebuffer(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, &vkframebuffer, &render_pass, &draw_list_subpass_count);
	ERR_FAIL_COND_V(err != OK, INVALID_ID);

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;
	err = _draw_list_render_pass_begin(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_clear_color_values, p_clear_depth, p_clear_stencil, viewport_offset, viewport_size, vkframebuffer, render_pass, command_buffer, VK_SUBPASS_CONTENTS_INLINE, p_storage_textures);

	if (err != OK) {
		return INVALID_ID;
	}

	draw_list_render_pass = render_pass;
	draw_list_vkframebuffer = vkframebuffer;

	_draw_list_allocate(Rect2i(viewport_offset, viewport_size), 0, 0);
#ifdef DEBUG_ENABLED
	draw_list_framebuffer_format = framebuffer->format_id;
#endif
	draw_list_current_subpass = 0;

	if (needs_clear_color || needs_clear_depth) {
		_draw_list_insert_clear_region(draw_list, framebuffer, viewport_offset, viewport_size, needs_clear_color, p_clear_color_values, needs_clear_depth, p_clear_depth, p_clear_stencil);
	}

	VkViewport viewport;
	viewport.x = viewport_offset.x;
	viewport.y = viewport_offset.y;
	viewport.width = viewport_size.width;
	viewport.height = viewport_size.height;
	viewport.minDepth = 0;
	viewport.maxDepth = 1.0;

	vkCmdSetViewport(command_buffer, 0, 1, &viewport);

	VkRect2D scissor;
	scissor.offset.x = viewport_offset.x;
	scissor.offset.y = viewport_offset.y;
	scissor.extent.width = viewport_size.width;
	scissor.extent.height = viewport_size.height;

	vkCmdSetScissor(command_buffer, 0, 1, &scissor);

	return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
}

Error RenderingDeviceVulkan::draw_list_begin_split(RID p_framebuffer, uint32_t p_splits, DrawListID *r_split_ids, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_splits < 1, ERR_INVALID_DECLARATION);

	Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
	ERR_FAIL_COND_V(!framebuffer, ERR_INVALID_DECLARATION);

	Point2i viewport_offset;
	Point2i viewport_size = framebuffer->size;
	bool needs_clear_color = false;
	bool needs_clear_depth = false;
	if (p_region != Rect2() && p_region != Rect2(Vector2(), viewport_size)) { //check custom region
		Rect2i viewport(viewport_offset, viewport_size);
		Rect2i regioni = p_region;
		if (!((regioni.position.x >= viewport.position.x) && (regioni.position.y >= viewport.position.y) &&
					((regioni.position.x + regioni.size.x) <= (viewport.position.x + viewport.size.x)) &&
					((regioni.position.y + regioni.size.y) <= (viewport.position.y + viewport.size.y)))) {
			ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "When supplying a custom region, it must be contained within the framebuffer rectangle.");
		}

		viewport_offset = regioni.position;
		viewport_size = regioni.size;

		if (p_initial_color_action == INITIAL_ACTION_CLEAR_REGION) {
			needs_clear_color = true;
			p_initial_color_action = INITIAL_ACTION_KEEP;
		}
		if (p_initial_depth_action == INITIAL_ACTION_CLEAR_REGION) {
			needs_clear_depth = true;
			p_initial_depth_action = INITIAL_ACTION_KEEP;
		}
	}
	if (p_initial_color_action == INITIAL_ACTION_CLEAR) { //check clear values
		int color_count = 0;
		for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
			if (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
				color_count++;
			}
		}
		ERR_FAIL_COND_V_MSG(p_clear_color_values.size() != color_count, ERR_INVALID_PARAMETER,
				"Clear color values supplied (" + itos(p_clear_color_values.size()) + ") differ from the amount required for framebuffer (" + itos(color_count) + ").");
	}

	VkFramebuffer vkframebuffer;
	VkRenderPass render_pass;

	Error err = _draw_list_setup_framebuffer(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, &vkframebuffer, &render_pass, &draw_list_subpass_count);
	ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);

	VkCommandBuffer frame_command_buffer = frames[frame].draw_command_buffer;
	err = _draw_list_render_pass_begin(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_clear_color_values, p_clear_depth, p_clear_stencil, viewport_offset, viewport_size, vkframebuffer, render_pass, frame_command_buffer, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, p_storage_textures);

	if (err != OK) {
		return ERR_CANT_CREATE;
	}

	draw_list_current_subpass = 0;

#ifdef DEBUG_ENABLED
	draw_list_framebuffer_format = framebuffer->format_id;
#endif
	draw_list_render_pass = render_pass;
	draw_list_vkframebuffer = vkframebuffer;

	err = _draw_list_allocate(Rect2i(viewport_offset, viewport_size), p_splits, 0);
	if (err != OK) {
		return err;
	}

	if (needs_clear_color || needs_clear_depth) {
		_draw_list_insert_clear_region(&draw_list[0], framebuffer, viewport_offset, viewport_size, needs_clear_color, p_clear_color_values, needs_clear_depth, p_clear_depth, p_clear_stencil);
	}

	for (uint32_t i = 0; i < p_splits; i++) {
		VkViewport viewport;
		viewport.x = viewport_offset.x;
		viewport.y = viewport_offset.y;
		viewport.width = viewport_size.width;
		viewport.height = viewport_size.height;
		viewport.minDepth = 0;
		viewport.maxDepth = 1.0;

		vkCmdSetViewport(draw_list[i].command_buffer, 0, 1, &viewport);

		VkRect2D scissor;
		scissor.offset.x = viewport_offset.x;
		scissor.offset.y = viewport_offset.y;
		scissor.extent.width = viewport_size.width;
		scissor.extent.height = viewport_size.height;

		vkCmdSetScissor(draw_list[i].command_buffer, 0, 1, &scissor);
		r_split_ids[i] = (int64_t(ID_TYPE_SPLIT_DRAW_LIST) << ID_BASE_SHIFT) + i;
	}

	return OK;
}
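//Split draw lists record into secondary command buffers (the render pass above begins
//with VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS), so each split can be filled from
//a different thread and then executed in order on the frame's primary command buffer.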

RenderingDeviceVulkan::DrawList *RenderingDeviceVulkan::_get_draw_list_ptr(DrawListID p_id) {
	if (p_id < 0) {
		return nullptr;
	}

	if (!draw_list) {
		return nullptr;
	} else if (p_id == (int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT)) {
		if (draw_list_split) {
			return nullptr;
		}
		return draw_list;
	} else if (p_id >> DrawListID(ID_BASE_SHIFT) == ID_TYPE_SPLIT_DRAW_LIST) {
		if (!draw_list_split) {
			return nullptr;
		}

		uint64_t index = p_id & ((DrawListID(1) << DrawListID(ID_BASE_SHIFT)) - 1); // Mask the index bits.

		if (index >= draw_list_count) {
			return nullptr;
		}

		return &draw_list[index];
	} else {
		return nullptr;
	}
}
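
// Note on the ID layout checked above: the list type (ID_TYPE_DRAW_LIST or
// ID_TYPE_SPLIT_DRAW_LIST) is stored in the bits at and above ID_BASE_SHIFT,
// and for split lists the bits below it carry the split index, so a single
// 64-bit DrawListID can be validated and decoded without any table lookup.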

void RenderingDeviceVulkan::draw_list_bind_render_pipeline(DrawListID p_list, RID p_render_pipeline) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const RenderPipeline *pipeline = render_pipeline_owner.getornull(p_render_pipeline);
	ERR_FAIL_COND(!pipeline);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND(pipeline->validation.framebuffer_format != draw_list_framebuffer_format && pipeline->validation.render_pass != draw_list_current_subpass);
#endif

	if (p_render_pipeline == dl->state.pipeline) {
		return; // Redundant state, return.
	}

	dl->state.pipeline = p_render_pipeline;
	dl->state.pipeline_layout = pipeline->pipeline_layout;

	vkCmdBindPipeline(dl->command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline->pipeline);

	if (dl->state.pipeline_shader != pipeline->shader) {
		// Shader changed, so descriptor sets may become incompatible.
		// Go through ALL sets, and unbind them (and all those above) if the format is different.
		uint32_t pcount = pipeline->set_formats.size(); // Formats count in this pipeline.
		dl->state.set_count = MAX(dl->state.set_count, pcount);
		const uint32_t *pformats = pipeline->set_formats.ptr(); // Pipeline set formats.

		bool sets_valid = true; // Once invalid, all above become invalid.
		for (uint32_t i = 0; i < pcount; i++) {
			// If a part of the format is different, invalidate it (and the rest).
			if (!sets_valid || dl->state.sets[i].pipeline_expected_format != pformats[i]) {
				dl->state.sets[i].bound = false;
				dl->state.sets[i].pipeline_expected_format = pformats[i];
				sets_valid = false;
			}
		}

		for (uint32_t i = pcount; i < dl->state.set_count; i++) {
			// Unbind the ones above (not used) if they exist.
			dl->state.sets[i].bound = false;
		}

		dl->state.set_count = pcount; // Update set count.

		if (pipeline->push_constant_size) {
			dl->state.pipeline_push_constant_stages = pipeline->push_constant_stages;
#ifdef DEBUG_ENABLED
			dl->validation.pipeline_push_constant_supplied = false;
#endif
		}

		dl->state.pipeline_shader = pipeline->shader;
	}

#ifdef DEBUG_ENABLED
	// Update render pass pipeline info.
	dl->validation.pipeline_active = true;
	dl->validation.pipeline_dynamic_state = pipeline->validation.dynamic_state;
	dl->validation.pipeline_vertex_format = pipeline->validation.vertex_format;
	dl->validation.pipeline_uses_restart_indices = pipeline->validation.uses_restart_indices;
	dl->validation.pipeline_primitive_divisor = pipeline->validation.primitive_divisor;
	dl->validation.pipeline_primitive_minimum = pipeline->validation.primitive_minimum;
	dl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
#endif
}
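
// The invalidation loop above mirrors Vulkan's pipeline layout compatibility
// rule: bound descriptor sets stay valid only while every lower-numbered set
// has a matching layout, so the first mismatching set disturbs itself and all
// sets above it, and those are re-bound lazily at the next draw.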

void RenderingDeviceVulkan::draw_list_bind_uniform_set(DrawListID p_list, RID p_uniform_set, uint32_t p_index) {
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_index >= limits.maxBoundDescriptorSets || p_index >= MAX_UNIFORM_SETS,
			"Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(limits.maxBoundDescriptorSets) + ").");
#endif
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const UniformSet *uniform_set = uniform_set_owner.getornull(p_uniform_set);
	ERR_FAIL_COND(!uniform_set);

	if (p_index > dl->state.set_count) {
		dl->state.set_count = p_index;
	}

	dl->state.sets[p_index].descriptor_set = uniform_set->descriptor_set; // Update set pointer.
	dl->state.sets[p_index].bound = false; // Needs rebind.
	dl->state.sets[p_index].uniform_set_format = uniform_set->format;
	dl->state.sets[p_index].uniform_set = p_uniform_set;

	uint32_t mst_count = uniform_set->mutable_storage_textures.size();
	if (mst_count) {
		Texture **mst_textures = const_cast<UniformSet *>(uniform_set)->mutable_storage_textures.ptrw();
		for (uint32_t i = 0; i < mst_count; i++) {
			if (mst_textures[i]->used_in_frame != frames_drawn) {
				mst_textures[i]->used_in_frame = frames_drawn;
				mst_textures[i]->used_in_transfer = false;
				mst_textures[i]->used_in_compute = false;
			}
			mst_textures[i]->used_in_raster = true;
		}
	}

#ifdef DEBUG_ENABLED
	{ // Validate that textures bound are not attached as framebuffer bindings.
		uint32_t attachable_count = uniform_set->attachable_textures.size();
		const UniformSet::AttachableTexture *attachable_ptr = uniform_set->attachable_textures.ptr();
		uint32_t bound_count = draw_list_bound_textures.size();
		const RID *bound_ptr = draw_list_bound_textures.ptr();
		for (uint32_t i = 0; i < attachable_count; i++) {
			for (uint32_t j = 0; j < bound_count; j++) {
				ERR_FAIL_COND_MSG(attachable_ptr[i].texture == bound_ptr[j],
						"Attempted to use the same texture in framebuffer attachment and a uniform (set: " + itos(p_index) + ", binding: " + itos(attachable_ptr[i].bind) + "), this is not allowed.");
			}
		}
	}
#endif
}

void RenderingDeviceVulkan::draw_list_bind_vertex_array(DrawListID p_list, RID p_vertex_array) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const VertexArray *vertex_array = vertex_array_owner.getornull(p_vertex_array);
	ERR_FAIL_COND(!vertex_array);

	if (dl->state.vertex_array == p_vertex_array) {
		return; // Already set.
	}

	dl->state.vertex_array = p_vertex_array;

#ifdef DEBUG_ENABLED
	dl->validation.vertex_format = vertex_array->description;
	dl->validation.vertex_max_instances_allowed = vertex_array->max_instances_allowed;
#endif
	dl->validation.vertex_array_size = vertex_array->vertex_count;

	vkCmdBindVertexBuffers(dl->command_buffer, 0, vertex_array->buffers.size(), vertex_array->buffers.ptr(), vertex_array->offsets.ptr());
}

void RenderingDeviceVulkan::draw_list_bind_index_array(DrawListID p_list, RID p_index_array) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const IndexArray *index_array = index_array_owner.getornull(p_index_array);
	ERR_FAIL_COND(!index_array);

	if (dl->state.index_array == p_index_array) {
		return; // Already set.
	}

	dl->state.index_array = p_index_array;

#ifdef DEBUG_ENABLED
	dl->validation.index_array_max_index = index_array->max_index;
#endif
	dl->validation.index_array_size = index_array->indices;
	dl->validation.index_array_offset = index_array->offset;

	vkCmdBindIndexBuffer(dl->command_buffer, index_array->buffer, index_array->offset, index_array->index_type);
}

void RenderingDeviceVulkan::draw_list_set_line_width(DrawListID p_list, float p_width) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	vkCmdSetLineWidth(dl->command_buffer, p_width);
}

void RenderingDeviceVulkan::draw_list_set_push_constant(DrawListID p_list, const void *p_data, uint32_t p_data_size) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
	ERR_FAIL_COND_MSG(p_data_size != dl->validation.pipeline_push_constant_size,
			"This render pipeline requires (" + itos(dl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
#endif
	vkCmdPushConstants(dl->command_buffer, dl->state.pipeline_layout, dl->state.pipeline_push_constant_stages, 0, p_data_size, p_data);
#ifdef DEBUG_ENABLED
	dl->validation.pipeline_push_constant_supplied = true;
#endif
}
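
// Illustrative usage (comment only, not engine code): push constants must be
// re-supplied after each pipeline bind, since binding a pipeline that uses a
// push constant resets the `pipeline_push_constant_supplied` validation flag.
//
//	struct PushData {
//		float time;
//		float pad[3];
//	} push_data = { 0.5f, {} };
//	rd->draw_list_set_push_constant(list, &push_data, sizeof(PushData));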

void RenderingDeviceVulkan::draw_list_draw(DrawListID p_list, bool p_use_indices, uint32_t p_instances, uint32_t p_procedural_vertices) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");

	ERR_FAIL_COND_MSG(!dl->validation.pipeline_active,
			"No render pipeline was set before attempting to draw.");
	if (dl->validation.pipeline_vertex_format != INVALID_ID) {
		// Pipeline uses vertices, validate format.
		ERR_FAIL_COND_MSG(dl->validation.vertex_format == INVALID_ID,
				"No vertex array was bound, and render pipeline expects vertices.");
		// Make sure format is right.
		ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != dl->validation.vertex_format,
				"The vertex format used to create the pipeline does not match the vertex format bound.");
		// Make sure the number of instances is valid.
		ERR_FAIL_COND_MSG(p_instances > dl->validation.vertex_max_instances_allowed,
				"Number of instances requested (" + itos(p_instances) + ") is larger than the maximum number supported by the bound vertex array (" + itos(dl->validation.vertex_max_instances_allowed) + ").");
	}

	if (dl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!dl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	// Bind descriptor sets.
	for (uint32_t i = 0; i < dl->state.set_count; i++) {
		if (dl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
#ifdef DEBUG_ENABLED
		if (dl->state.sets[i].pipeline_expected_format != dl->state.sets[i].uniform_set_format) {
			if (dl->state.sets[i].uniform_set_format == 0) {
				ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline.");
			} else if (uniform_set_owner.owns(dl->state.sets[i].uniform_set)) {
				UniformSet *us = uniform_set_owner.getornull(dl->state.sets[i].uniform_set);
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
			} else {
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
			}
		}
#endif
		if (!dl->state.sets[i].bound) {
			// All good, see if this requires re-binding.
			vkCmdBindDescriptorSets(dl->command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, dl->state.pipeline_layout, i, 1, &dl->state.sets[i].descriptor_set, 0, nullptr);
			dl->state.sets[i].bound = true;
		}
	}

	if (p_use_indices) {
#ifdef DEBUG_ENABLED
		ERR_FAIL_COND_MSG(p_procedural_vertices > 0,
				"Procedural vertices can't be used together with indices.");

		ERR_FAIL_COND_MSG(!dl->validation.index_array_size,
				"Draw command requested indices, but no index buffer was set.");

		if (dl->validation.pipeline_vertex_format != INVALID_ID) {
			// Uses vertices, do some vertex validations.
			ERR_FAIL_COND_MSG(dl->validation.vertex_array_size < dl->validation.index_array_max_index,
					"Index array references (max index: " + itos(dl->validation.index_array_max_index) + ") indices beyond the vertex array size (" + itos(dl->validation.vertex_array_size) + ").");
		}

		ERR_FAIL_COND_MSG(dl->validation.pipeline_uses_restart_indices != dl->validation.index_buffer_uses_restart_indices,
				"The usage of restart indices in index buffer does not match the render primitive in the pipeline.");
#endif
		uint32_t to_draw = dl->validation.index_array_size;

#ifdef DEBUG_ENABLED
		ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
				"Too few indices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");

		ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
				"Index amount (" + itos(to_draw) + ") must be a multiple of the amount of indices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
#endif
		vkCmdDrawIndexed(dl->command_buffer, to_draw, p_instances, dl->validation.index_array_offset, 0, 0);
	} else {
		uint32_t to_draw;

		if (p_procedural_vertices > 0) {
#ifdef DEBUG_ENABLED
			ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != INVALID_ID,
					"Procedural vertices requested, but pipeline expects a vertex array.");
#endif
			to_draw = p_procedural_vertices;
		} else {
#ifdef DEBUG_ENABLED
			ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format == INVALID_ID,
					"Draw command lacks indices, but pipeline format does not use vertices.");
#endif
			to_draw = dl->validation.vertex_array_size;
		}

#ifdef DEBUG_ENABLED
		ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
				"Too few vertices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");

		ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
				"Vertex amount (" + itos(to_draw) + ") must be a multiple of the amount of vertices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
#endif
		vkCmdDraw(dl->command_buffer, to_draw, p_instances, 0, 0);
	}
}
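
// Summary of how the element count is chosen above: indexed draws use the
// bound index array size, procedural draws use the caller-supplied vertex
// count, and plain draws fall back to the bound vertex array size; all three
// paths share the same primitive minimum/divisor validation in debug builds.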

void RenderingDeviceVulkan::draw_list_enable_scissor(DrawListID p_list, const Rect2 &p_rect) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif
	Rect2i rect = p_rect;
	rect.position += dl->viewport.position;

	rect = dl->viewport.intersection(rect);

	if (rect.get_area() == 0) {
		return;
	}

	VkRect2D scissor;
	scissor.offset.x = rect.position.x;
	scissor.offset.y = rect.position.y;
	scissor.extent.width = rect.size.width;
	scissor.extent.height = rect.size.height;

	vkCmdSetScissor(dl->command_buffer, 0, 1, &scissor);
}
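
// The scissor rect above is interpreted relative to the draw list viewport
// and clipped against it, so a request that falls entirely outside the
// viewport is silently ignored rather than submitted as an empty scissor.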

void RenderingDeviceVulkan::draw_list_disable_scissor(DrawListID p_list) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_COND(!dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	VkRect2D scissor;
	scissor.offset.x = dl->viewport.position.x;
	scissor.offset.y = dl->viewport.position.y;
	scissor.extent.width = dl->viewport.size.width;
	scissor.extent.height = dl->viewport.size.height;

	vkCmdSetScissor(dl->command_buffer, 0, 1, &scissor);
}

RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_switch_to_next_pass() {
	ERR_FAIL_COND_V(draw_list == nullptr, INVALID_ID);
	ERR_FAIL_COND_V(draw_list_current_subpass >= draw_list_subpass_count - 1, INVALID_ID);

	draw_list_current_subpass++;

	Rect2i viewport;
	_draw_list_free(&viewport);

	vkCmdNextSubpass(frames[frame].draw_command_buffer, VK_SUBPASS_CONTENTS_INLINE);

	_draw_list_allocate(viewport, 0, draw_list_current_subpass);

	return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
}

Error RenderingDeviceVulkan::draw_list_switch_to_next_pass_split(uint32_t p_splits, DrawListID *r_split_ids) {
	ERR_FAIL_COND_V(draw_list == nullptr, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(draw_list_current_subpass >= draw_list_subpass_count - 1, ERR_INVALID_PARAMETER);

	draw_list_current_subpass++;

	Rect2i viewport;
	_draw_list_free(&viewport);

	// Split lists record into secondary command buffers, so the next subpass
	// must be begun with secondary contents; inline contents would make the
	// later vkCmdExecuteCommands invalid.
	vkCmdNextSubpass(frames[frame].draw_command_buffer, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);

	_draw_list_allocate(viewport, p_splits, draw_list_current_subpass);

	for (uint32_t i = 0; i < p_splits; i++) {
		r_split_ids[i] = (int64_t(ID_TYPE_SPLIT_DRAW_LIST) << ID_BASE_SHIFT) + i;
	}

	return OK;
}

Error RenderingDeviceVulkan::_draw_list_allocate(const Rect2i &p_viewport, uint32_t p_splits, uint32_t p_subpass) {
	if (p_splits == 0) {
		draw_list = memnew(DrawList);
		draw_list->command_buffer = frames[frame].draw_command_buffer;
		draw_list->viewport = p_viewport;
		draw_list_count = 0;
		draw_list_split = false;
	} else {
		if (p_splits > (uint32_t)split_draw_list_allocators.size()) {
			uint32_t from = split_draw_list_allocators.size();
			split_draw_list_allocators.resize(p_splits);
			for (uint32_t i = from; i < p_splits; i++) {
				VkCommandPoolCreateInfo cmd_pool_info;
				cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
				cmd_pool_info.pNext = nullptr;
				cmd_pool_info.queueFamilyIndex = context->get_graphics_queue();
				cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;

				VkResult res = vkCreateCommandPool(device, &cmd_pool_info, nullptr, &split_draw_list_allocators.write[i].command_pool);
				ERR_FAIL_COND_V_MSG(res, ERR_CANT_CREATE, "vkCreateCommandPool failed with error " + itos(res) + ".");

				for (int j = 0; j < frame_count; j++) {
					VkCommandBuffer command_buffer;

					VkCommandBufferAllocateInfo cmdbuf;
					// No command buffer exists, create it.
					cmdbuf.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
					cmdbuf.pNext = nullptr;
					cmdbuf.commandPool = split_draw_list_allocators[i].command_pool;
					cmdbuf.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
					cmdbuf.commandBufferCount = 1;

					VkResult err = vkAllocateCommandBuffers(device, &cmdbuf, &command_buffer);
					ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");

					split_draw_list_allocators.write[i].command_buffers.push_back(command_buffer);
				}
			}
		}

		draw_list = memnew_arr(DrawList, p_splits);
		draw_list_count = p_splits;
		draw_list_split = true;

		for (uint32_t i = 0; i < p_splits; i++) {
			// Take a command buffer and initialize it.
			VkCommandBuffer command_buffer = split_draw_list_allocators[i].command_buffers[frame];

			VkCommandBufferInheritanceInfo inheritance_info;
			inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
			inheritance_info.pNext = nullptr;
			inheritance_info.renderPass = draw_list_render_pass;
			inheritance_info.subpass = p_subpass;
			inheritance_info.framebuffer = draw_list_vkframebuffer;
			inheritance_info.occlusionQueryEnable = false;
			inheritance_info.queryFlags = 0;
			inheritance_info.pipelineStatistics = 0;

			VkCommandBufferBeginInfo cmdbuf_begin;
			cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
			cmdbuf_begin.pNext = nullptr;
			cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
			cmdbuf_begin.pInheritanceInfo = &inheritance_info;

			VkResult res = vkResetCommandBuffer(command_buffer, 0);
			if (res) {
				memdelete_arr(draw_list);
				draw_list = nullptr;
				ERR_FAIL_V_MSG(ERR_CANT_CREATE, "vkResetCommandBuffer failed with error " + itos(res) + ".");
			}

			res = vkBeginCommandBuffer(command_buffer, &cmdbuf_begin);
			if (res) {
				memdelete_arr(draw_list);
				draw_list = nullptr;
				ERR_FAIL_V_MSG(ERR_CANT_CREATE, "vkBeginCommandBuffer failed with error " + itos(res) + ".");
			}

			draw_list[i].command_buffer = command_buffer;
			draw_list[i].viewport = p_viewport;
		}
	}

	return OK;
}
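
// Split lists record into secondary command buffers, which is why the begin
// info above uses VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT (required
// for secondary buffers executed inside a render pass) and carries the render
// pass, subpass and framebuffer through VkCommandBufferInheritanceInfo.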

void RenderingDeviceVulkan::_draw_list_free(Rect2i *r_last_viewport) {
	if (draw_list_split) {
		// Send all command buffers.
		VkCommandBuffer *command_buffers = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer) * draw_list_count);
		for (uint32_t i = 0; i < draw_list_count; i++) {
			vkEndCommandBuffer(draw_list[i].command_buffer);
			command_buffers[i] = draw_list[i].command_buffer;
			if (r_last_viewport) {
				if (i == 0 || draw_list[i].viewport_set) {
					*r_last_viewport = draw_list[i].viewport;
				}
			}
		}

		vkCmdExecuteCommands(frames[frame].draw_command_buffer, draw_list_count, command_buffers);
		memdelete_arr(draw_list);
		draw_list = nullptr;
	} else {
		if (r_last_viewport) {
			*r_last_viewport = draw_list->viewport;
		}
		// Just end the list.
		memdelete(draw_list);
		draw_list = nullptr;
	}
}

void RenderingDeviceVulkan::draw_list_end(uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_MSG(!draw_list, "Immediate draw list is already inactive.");

	_draw_list_free();

	vkCmdEndRenderPass(frames[frame].draw_command_buffer);

	for (int i = 0; i < draw_list_bound_textures.size(); i++) {
		Texture *texture = texture_owner.getornull(draw_list_bound_textures[i]);
		ERR_CONTINUE(!texture); // Bound textures should always resolve here.
		if (draw_list_unbind_color_textures && (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT)) {
			texture->bound = false;
		}
		if (draw_list_unbind_depth_textures && (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
			texture->bound = false;
		}
	}

	uint32_t barrier_flags = 0;
	uint32_t access_flags = 0;
	if (p_post_barrier & BARRIER_MASK_COMPUTE) {
		barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_post_barrier & BARRIER_MASK_RASTER) {
		barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/;
		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT /*| VK_ACCESS_INDIRECT_COMMAND_READ_BIT*/;
	}
	if (p_post_barrier & BARRIER_MASK_TRANSFER) {
		barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
	}

	if (barrier_flags == 0) {
		barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	draw_list_bound_textures.clear();

	VkImageMemoryBarrier *image_barriers = nullptr;

	uint32_t image_barrier_count = draw_list_storage_textures.size();

	if (image_barrier_count) {
		image_barriers = (VkImageMemoryBarrier *)alloca(sizeof(VkImageMemoryBarrier) * draw_list_storage_textures.size());
	}

	uint32_t src_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
	uint32_t src_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;

	if (image_barrier_count) {
		src_stage |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		src_access |= VK_ACCESS_SHADER_WRITE_BIT;
	}

	for (uint32_t i = 0; i < image_barrier_count; i++) {
		Texture *texture = texture_owner.getornull(draw_list_storage_textures[i]);

		VkImageMemoryBarrier &image_memory_barrier = image_barriers[i];
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = src_access;
		image_memory_barrier.dstAccessMask = access_flags;
		image_memory_barrier.oldLayout = texture->layout;
		image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = texture->image;
		image_memory_barrier.subresourceRange.aspectMask = texture->read_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = texture->base_mipmap;
		image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = texture->base_layer;
		image_memory_barrier.subresourceRange.layerCount = texture->layers;

		texture->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
	}

	draw_list_storage_textures.clear();

	// To ensure proper synchronization, we must make sure rendering is done before:
	// * Some buffer is copied.
	// * Another render pass happens (since we may be done).

#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	VkMemoryBarrier mem_barrier;
	mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
	mem_barrier.pNext = nullptr;
	mem_barrier.srcAccessMask = src_access;
	mem_barrier.dstAccessMask = access_flags;

	if (image_barrier_count > 0 || p_post_barrier != BARRIER_MASK_NO_BARRIER) {
		vkCmdPipelineBarrier(frames[frame].draw_command_buffer, src_stage, barrier_flags, 0, 1, &mem_barrier, 0, nullptr, image_barrier_count, image_barriers);
	}
#endif
}

/***********************/
/**** COMPUTE LISTS ****/
/***********************/

RenderingDevice::ComputeListID RenderingDeviceVulkan::compute_list_begin(bool p_allow_draw_overlap) {
	ERR_FAIL_COND_V_MSG(!p_allow_draw_overlap && draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");

	compute_list = memnew(ComputeList);
	compute_list->command_buffer = frames[frame].draw_command_buffer;
	compute_list->state.allow_draw_overlap = p_allow_draw_overlap;

	return ID_TYPE_COMPUTE_LIST;
}
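
// Illustrative usage (comment only, not engine code): a complete compute
// pass, assuming a compute pipeline and uniform set created elsewhere.
//
//	ComputeListID cl = rd->compute_list_begin();
//	rd->compute_list_bind_compute_pipeline(cl, compute_pipeline);
//	rd->compute_list_bind_uniform_set(cl, uniform_set, 0);
//	rd->compute_list_dispatch(cl, 8, 8, 1);
//	rd->compute_list_end();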

void RenderingDeviceVulkan::compute_list_bind_compute_pipeline(ComputeListID p_list, RID p_compute_pipeline) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_COND(!compute_list);

	ComputeList *cl = compute_list;

	const ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_compute_pipeline);
	ERR_FAIL_COND(!pipeline);

	if (p_compute_pipeline == cl->state.pipeline) {
		return; // Redundant state, return.
	}

	cl->state.pipeline = p_compute_pipeline;
	cl->state.pipeline_layout = pipeline->pipeline_layout;

	vkCmdBindPipeline(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline->pipeline);

	if (cl->state.pipeline_shader != pipeline->shader) {
		// Shader changed, so descriptor sets may become incompatible.
		// Go through ALL sets, and unbind them (and all those above) if the format is different.
		uint32_t pcount = pipeline->set_formats.size(); // Formats count in this pipeline.
		cl->state.set_count = MAX(cl->state.set_count, pcount);
		const uint32_t *pformats = pipeline->set_formats.ptr(); // Pipeline set formats.

		bool sets_valid = true; // Once invalid, all above become invalid.
		for (uint32_t i = 0; i < pcount; i++) {
			// If a part of the format is different, invalidate it (and the rest).
			if (!sets_valid || cl->state.sets[i].pipeline_expected_format != pformats[i]) {
				cl->state.sets[i].bound = false;
				cl->state.sets[i].pipeline_expected_format = pformats[i];
				sets_valid = false;
			}
		}

		for (uint32_t i = pcount; i < cl->state.set_count; i++) {
			// Unbind the ones above (not used) if they exist.
			cl->state.sets[i].bound = false;
		}

		cl->state.set_count = pcount; // Update set count.

		if (pipeline->push_constant_size) {
			cl->state.pipeline_push_constant_stages = pipeline->push_constant_stages;
#ifdef DEBUG_ENABLED
			cl->validation.pipeline_push_constant_supplied = false;
#endif
		}

		cl->state.pipeline_shader = pipeline->shader;
		cl->state.local_group_size[0] = pipeline->local_group_size[0];
		cl->state.local_group_size[1] = pipeline->local_group_size[1];
		cl->state.local_group_size[2] = pipeline->local_group_size[2];
	}

#ifdef DEBUG_ENABLED
	// Update compute pass pipeline info.
	cl->validation.pipeline_active = true;
	cl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
#endif
}

void RenderingDeviceVulkan::compute_list_bind_uniform_set(ComputeListID p_list, RID p_uniform_set, uint32_t p_index) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_COND(!compute_list);

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_index >= limits.maxBoundDescriptorSets || p_index >= MAX_UNIFORM_SETS,
			"Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(limits.maxBoundDescriptorSets) + ").");

	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
#endif

	UniformSet *uniform_set = uniform_set_owner.getornull(p_uniform_set);
	ERR_FAIL_COND(!uniform_set);

	if (p_index > cl->state.set_count) {
		cl->state.set_count = p_index;
	}

	cl->state.sets[p_index].descriptor_set = uniform_set->descriptor_set; // Update set pointer.
	cl->state.sets[p_index].bound = false; // Needs rebind.
	cl->state.sets[p_index].uniform_set_format = uniform_set->format;
	cl->state.sets[p_index].uniform_set = p_uniform_set;

	uint32_t textures_to_sampled_count = uniform_set->mutable_sampled_textures.size();
	uint32_t textures_to_storage_count = uniform_set->mutable_storage_textures.size();

	Texture **textures_to_sampled = uniform_set->mutable_sampled_textures.ptrw();

	VkImageMemoryBarrier *texture_barriers = nullptr;

	if (textures_to_sampled_count + textures_to_storage_count) {
		texture_barriers = (VkImageMemoryBarrier *)alloca(sizeof(VkImageMemoryBarrier) * (textures_to_sampled_count + textures_to_storage_count));
	}
	uint32_t texture_barrier_count = 0;

	uint32_t src_stage_flags = 0;

	for (uint32_t i = 0; i < textures_to_sampled_count; i++) {
		if (textures_to_sampled[i]->layout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
			src_stage_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;

			VkImageMemoryBarrier &image_memory_barrier = texture_barriers[texture_barrier_count++];
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
			image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
			image_memory_barrier.oldLayout = textures_to_sampled[i]->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = textures_to_sampled[i]->image;
			image_memory_barrier.subresourceRange.aspectMask = textures_to_sampled[i]->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = textures_to_sampled[i]->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = textures_to_sampled[i]->mipmaps;
			image_memory_barrier.subresourceRange.baseArrayLayer = textures_to_sampled[i]->base_layer;
			image_memory_barrier.subresourceRange.layerCount = textures_to_sampled[i]->layers;

			textures_to_sampled[i]->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

			cl->state.textures_to_sampled_layout.erase(textures_to_sampled[i]);
		}

		if (textures_to_sampled[i]->used_in_frame != frames_drawn) {
			textures_to_sampled[i]->used_in_frame = frames_drawn;
			textures_to_sampled[i]->used_in_transfer = false;
			textures_to_sampled[i]->used_in_raster = false;
		}
		textures_to_sampled[i]->used_in_compute = true;
	}

	Texture **textures_to_storage = uniform_set->mutable_storage_textures.ptrw();

	for (uint32_t i = 0; i < textures_to_storage_count; i++) {
		if (textures_to_storage[i]->layout != VK_IMAGE_LAYOUT_GENERAL) {
			uint32_t src_access_flags = 0;

			if (textures_to_storage[i]->used_in_frame == frames_drawn) {
				if (textures_to_storage[i]->used_in_compute) {
					src_stage_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
					src_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
				}
				if (textures_to_storage[i]->used_in_raster) {
					src_stage_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
					src_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
				}
				if (textures_to_storage[i]->used_in_transfer) {
					src_stage_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
					src_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
				}

				textures_to_storage[i]->used_in_compute = false;
				textures_to_storage[i]->used_in_raster = false;
				textures_to_storage[i]->used_in_transfer = false;
			} else {
				src_access_flags = 0;
				textures_to_storage[i]->used_in_compute = false;
				textures_to_storage[i]->used_in_raster = false;
				textures_to_storage[i]->used_in_transfer = false;
				textures_to_storage[i]->used_in_frame = frames_drawn;
			}

			VkImageMemoryBarrier &image_memory_barrier = texture_barriers[texture_barrier_count++];
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = src_access_flags;
			image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
			image_memory_barrier.oldLayout = textures_to_storage[i]->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = textures_to_storage[i]->image;
			image_memory_barrier.subresourceRange.aspectMask = textures_to_storage[i]->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = textures_to_storage[i]->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = textures_to_storage[i]->mipmaps;
			image_memory_barrier.subresourceRange.baseArrayLayer = textures_to_storage[i]->base_layer;
			image_memory_barrier.subresourceRange.layerCount = textures_to_storage[i]->layers;

			textures_to_storage[i]->layout = VK_IMAGE_LAYOUT_GENERAL;

			cl->state.textures_to_sampled_layout.insert(textures_to_storage[i]); // Needs to go back to sampled layout afterwards.
		}
	}

	if (texture_barrier_count) {
		if (src_stage_flags == 0) {
			src_stage_flags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
		}

		vkCmdPipelineBarrier(cl->command_buffer, src_stage_flags, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, texture_barrier_count, texture_barriers);
	}

#if 0
	{ // Validate that textures bound are not attached as framebuffer bindings.
		uint32_t attachable_count = uniform_set->attachable_textures.size();
		const RID *attachable_ptr = uniform_set->attachable_textures.ptr();
		uint32_t bound_count = draw_list_bound_textures.size();
		const RID *bound_ptr = draw_list_bound_textures.ptr();
		for (uint32_t i = 0; i < attachable_count; i++) {
			for (uint32_t j = 0; j < bound_count; j++) {
				ERR_FAIL_COND_MSG(attachable_ptr[i] == bound_ptr[j],
						"Attempted to use the same texture in framebuffer attachment and a uniform set, this is not allowed.");
			}
		}
	}
#endif
}

void RenderingDeviceVulkan::compute_list_set_push_constant(ComputeListID p_list, const void *p_data, uint32_t p_data_size) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_COND(!compute_list);

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
	ERR_FAIL_COND_MSG(p_data_size != cl->validation.pipeline_push_constant_size,
			"This compute pipeline requires (" + itos(cl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
#endif
	vkCmdPushConstants(cl->command_buffer, cl->state.pipeline_layout, cl->state.pipeline_push_constant_stages, 0, p_data_size, p_data);
#ifdef DEBUG_ENABLED
	cl->validation.pipeline_push_constant_supplied = true;
#endif
}

void RenderingDeviceVulkan::compute_list_dispatch(ComputeListID p_list, uint32_t p_x_groups, uint32_t p_y_groups, uint32_t p_z_groups) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_COND(!compute_list);

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_x_groups > limits.maxComputeWorkGroupCount[0],
			"Dispatch amount of X compute groups (" + itos(p_x_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[0]) + ")");
	ERR_FAIL_COND_MSG(p_y_groups > limits.maxComputeWorkGroupCount[1],
			"Dispatch amount of Y compute groups (" + itos(p_y_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[1]) + ")");
	ERR_FAIL_COND_MSG(p_z_groups > limits.maxComputeWorkGroupCount[2],
			"Dispatch amount of Z compute groups (" + itos(p_z_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[2]) + ")");

	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");

	ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to dispatch.");

	if (cl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	// Bind descriptor sets.
	for (uint32_t i = 0; i < cl->state.set_count; i++) {
		if (cl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
#ifdef DEBUG_ENABLED
		if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
			if (cl->state.sets[i].uniform_set_format == 0) {
				ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline.");
			} else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
				UniformSet *us = uniform_set_owner.getornull(cl->state.sets[i].uniform_set);
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			} else {
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			}
		}
#endif
		if (!cl->state.sets[i].bound) {
			// All good, see if this requires re-binding.
			vkCmdBindDescriptorSets(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, cl->state.pipeline_layout, i, 1, &cl->state.sets[i].descriptor_set, 0, nullptr);
			cl->state.sets[i].bound = true;
		}
	}

	vkCmdDispatch(cl->command_buffer, p_x_groups, p_y_groups, p_z_groups);
}

void RenderingDeviceVulkan::compute_list_dispatch_threads(ComputeListID p_list, uint32_t p_x_threads, uint32_t p_y_threads, uint32_t p_z_threads) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_COND(!compute_list);

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to dispatch.");

	if (cl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	compute_list_dispatch(p_list, (p_x_threads - 1) / cl->state.local_group_size[0] + 1, (p_y_threads - 1) / cl->state.local_group_size[1] + 1, (p_z_threads - 1) / cl->state.local_group_size[2] + 1);
}
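
// The expression above is integer ceiling division: with a local group size
// of 8 on an axis and, say, 30 threads requested, (30 - 1) / 8 + 1 = 4 groups
// are dispatched (covering 32 invocations), so the requested thread count is
// always fully covered and the shader is expected to discard the overshoot.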

void RenderingDeviceVulkan::compute_list_dispatch_indirect(ComputeListID p_list, RID p_buffer, uint32_t p_offset) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_COND(!compute_list);

	ComputeList *cl = compute_list;
	Buffer *buffer = storage_buffer_owner.getornull(p_buffer);
	ERR_FAIL_COND(!buffer);

	ERR_FAIL_COND_MSG(!(buffer->usage & STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT), "Buffer provided was not created to do indirect dispatch.");

	ERR_FAIL_COND_MSG(p_offset + 12 > buffer->size, "Offset provided (+12) is past the end of buffer.");

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");

	ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to dispatch.");

	if (cl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	// Bind descriptor sets.
	for (uint32_t i = 0; i < cl->state.set_count; i++) {
		if (cl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
#ifdef DEBUG_ENABLED
		if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
			if (cl->state.sets[i].uniform_set_format == 0) {
				ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline.");
			} else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
				UniformSet *us = uniform_set_owner.getornull(cl->state.sets[i].uniform_set);
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			} else {
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			}
		}
#endif
		if (!cl->state.sets[i].bound) {
			// All good, see if this requires re-binding.
			vkCmdBindDescriptorSets(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, cl->state.pipeline_layout, i, 1, &cl->state.sets[i].descriptor_set, 0, nullptr);
			cl->state.sets[i].bound = true;
		}
	}

	vkCmdDispatchIndirect(cl->command_buffer, buffer->buffer, p_offset);
}
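
// The +12 bounds check above corresponds to VkDispatchIndirectCommand, which
// is three consecutive uint32_t values (x, y, z group counts) read from the
// buffer at p_offset, for a total of 12 bytes.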

void RenderingDeviceVulkan::compute_list_add_barrier(ComputeListID p_list) {
#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	_memory_barrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, true);
#endif
}
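
// compute_list_add_barrier is the intra-list synchronization point: it makes
// shader writes from previously recorded dispatches visible to shader reads
// in dispatches recorded after it, without ending the compute list.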

void RenderingDeviceVulkan::compute_list_end(uint32_t p_post_barrier) {
	ERR_FAIL_COND(!compute_list);

	uint32_t barrier_flags = 0;
	uint32_t access_flags = 0;
	if (p_post_barrier & BARRIER_MASK_COMPUTE) {
		barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_post_barrier & BARRIER_MASK_RASTER) {
		barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
	}
	if (p_post_barrier & BARRIER_MASK_TRANSFER) {
		barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
	}

	if (barrier_flags == 0) {
		barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	VkImageMemoryBarrier *image_barriers = nullptr;

	uint32_t image_barrier_count = compute_list->state.textures_to_sampled_layout.size();

	if (image_barrier_count) {
		image_barriers = (VkImageMemoryBarrier *)alloca(sizeof(VkImageMemoryBarrier) * image_barrier_count);
	}

	uint32_t barrier_idx = 0;

	for (Set<Texture *>::Element *E = compute_list->state.textures_to_sampled_layout.front(); E; E = E->next()) {
		VkImageMemoryBarrier &image_memory_barrier = image_barriers[barrier_idx++];
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		image_memory_barrier.dstAccessMask = access_flags;
		image_memory_barrier.oldLayout = E->get()->layout;
		image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = E->get()->image;
		image_memory_barrier.subresourceRange.aspectMask = E->get()->read_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = E->get()->base_mipmap;
		image_memory_barrier.subresourceRange.levelCount = E->get()->mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = E->get()->base_layer;
		image_memory_barrier.subresourceRange.layerCount = E->get()->layers;

		E->get()->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

		if (E->get()->used_in_frame != frames_drawn) {
			E->get()->used_in_transfer = false;
			E->get()->used_in_raster = false;
			E->get()->used_in_compute = false;
			E->get()->used_in_frame = frames_drawn;
		}
	}

#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	VkMemoryBarrier mem_barrier;
	mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
	mem_barrier.pNext = nullptr;
	mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
	mem_barrier.dstAccessMask = access_flags;

	if (image_barrier_count > 0 || p_post_barrier != BARRIER_MASK_NO_BARRIER) {
		vkCmdPipelineBarrier(compute_list->command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, barrier_flags, 0, 1, &mem_barrier, 0, nullptr, image_barrier_count, image_barriers);
	}
#endif

	memdelete(compute_list);
	compute_list = nullptr;
}
void RenderingDeviceVulkan::barrier(uint32_t p_from, uint32_t p_to) {
	uint32_t src_barrier_flags = 0;
	uint32_t src_access_flags = 0;
	if (p_from & BARRIER_MASK_COMPUTE) {
		src_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		src_access_flags |= VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_from & BARRIER_MASK_RASTER) {
		src_barrier_flags |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
		src_access_flags |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
	}
	if (p_from & BARRIER_MASK_TRANSFER) {
		src_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		src_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
	}

	if (p_from == 0) {
		src_barrier_flags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
	}

	uint32_t dst_barrier_flags = 0;
	uint32_t dst_access_flags = 0;
	if (p_to & BARRIER_MASK_COMPUTE) {
		dst_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_to & BARRIER_MASK_RASTER) {
		dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
		dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
	}
	if (p_to & BARRIER_MASK_TRANSFER) {
		dst_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		dst_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
	}

	if (p_to == 0) {
		dst_barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	_memory_barrier(src_barrier_flags, dst_barrier_flags, src_access_flags, dst_access_flags, true);
}
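
// Usage sketch for barrier() (illustrative; only barrier() itself is real API
// here): after a compute pass writes resources that a later raster pass reads,
// a caller could issue:
//   barrier(BARRIER_MASK_COMPUTE, BARRIER_MASK_RASTER);
// which resolves to a compute-shader -> vertex-input/vertex/fragment pipeline
// barrier with the access masks built above.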

void RenderingDeviceVulkan::full_barrier() {
#ifndef DEBUG_ENABLED
	ERR_PRINT("Full barrier is debug-only, should not be used in production.");
#endif
	_full_barrier(true);
}

#if 0
void RenderingDeviceVulkan::draw_list_render_secondary_to_framebuffer(ID p_framebuffer, ID *p_draw_lists, uint32_t p_draw_list_count, InitialAction p_initial_action, FinalAction p_final_action, const Vector<Variant> &p_clear_colors) {
	VkCommandBuffer frame_cmdbuf = frames[frame].frame_buffer;
	ERR_FAIL_COND(!frame_cmdbuf);

	VkRenderPassBeginInfo render_pass_begin;
	render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin.pNext = nullptr;
	render_pass_begin.renderPass = context->get_render_pass();
	render_pass_begin.framebuffer = context->get_frame_framebuffer(frame);

	render_pass_begin.renderArea.extent.width = context->get_screen_width(p_screen);
	render_pass_begin.renderArea.extent.height = context->get_screen_height(p_screen);
	render_pass_begin.renderArea.offset.x = 0;
	render_pass_begin.renderArea.offset.y = 0;

	render_pass_begin.clearValueCount = 1;

	VkClearValue clear_value;
	clear_value.color.float32[0] = p_clear_color.r;
	clear_value.color.float32[1] = p_clear_color.g;
	clear_value.color.float32[2] = p_clear_color.b;
	clear_value.color.float32[3] = p_clear_color.a;

	render_pass_begin.pClearValues = &clear_value;

	vkCmdBeginRenderPass(frame_cmdbuf, &render_pass_begin, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);

	ID screen_format = screen_get_framebuffer_format();
	{
		VkCommandBuffer *command_buffers = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer) * p_draw_list_count);
		uint32_t command_buffer_count = 0;

		for (uint32_t i = 0; i < p_draw_list_count; i++) {
			DrawList *dl = _get_draw_list_ptr(p_draw_lists[i]);
			ERR_CONTINUE_MSG(!dl, "Draw list index (" + itos(i) + ") is not a valid draw list ID.");
			ERR_CONTINUE_MSG(dl->validation.framebuffer_format != p_format_check,
					"Draw list index (" + itos(i) + ") is created with a framebuffer format incompatible with this render pass.");

			if (dl->validation.active) {
				//needs to be closed, so close it.
				vkEndCommandBuffer(dl->command_buffer);
				dl->validation.active = false;
			}

			command_buffers[command_buffer_count++] = dl->command_buffer;
		}

		print_line("to draw: " + itos(command_buffer_count));
		vkCmdExecuteCommands(p_primary, command_buffer_count, command_buffers);
	}

	vkCmdEndRenderPass(frame_cmdbuf);
}
#endif

void RenderingDeviceVulkan::_free_internal(RID p_id) {
	// Push everything so it's disposed of the next time this frame index is processed (which means it's safe to do so).
	if (texture_owner.owns(p_id)) {
		Texture *texture = texture_owner.getornull(p_id);
		frames[frame].textures_to_dispose_of.push_back(*texture);
		texture_owner.free(p_id);
	} else if (framebuffer_owner.owns(p_id)) {
		Framebuffer *framebuffer = framebuffer_owner.getornull(p_id);
		frames[frame].framebuffers_to_dispose_of.push_back(*framebuffer);
		framebuffer_owner.free(p_id);
	} else if (sampler_owner.owns(p_id)) {
		VkSampler *sampler = sampler_owner.getornull(p_id);
		frames[frame].samplers_to_dispose_of.push_back(*sampler);
		sampler_owner.free(p_id);
	} else if (vertex_buffer_owner.owns(p_id)) {
		Buffer *vertex_buffer = vertex_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(*vertex_buffer);
		vertex_buffer_owner.free(p_id);
	} else if (vertex_array_owner.owns(p_id)) {
		vertex_array_owner.free(p_id);
	} else if (index_buffer_owner.owns(p_id)) {
		IndexBuffer *index_buffer = index_buffer_owner.getornull(p_id);
		Buffer b;
		b.allocation = index_buffer->allocation;
		b.buffer = index_buffer->buffer;
		b.size = index_buffer->size;
		frames[frame].buffers_to_dispose_of.push_back(b);
		index_buffer_owner.free(p_id);
	} else if (index_array_owner.owns(p_id)) {
		index_array_owner.free(p_id);
	} else if (shader_owner.owns(p_id)) {
		Shader *shader = shader_owner.getornull(p_id);
		frames[frame].shaders_to_dispose_of.push_back(*shader);
		shader_owner.free(p_id);
	} else if (uniform_buffer_owner.owns(p_id)) {
		Buffer *uniform_buffer = uniform_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(*uniform_buffer);
		uniform_buffer_owner.free(p_id);
	} else if (texture_buffer_owner.owns(p_id)) {
		TextureBuffer *texture_buffer = texture_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(texture_buffer->buffer);
		frames[frame].buffer_views_to_dispose_of.push_back(texture_buffer->view);
		texture_buffer_owner.free(p_id);
	} else if (storage_buffer_owner.owns(p_id)) {
		Buffer *storage_buffer = storage_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(*storage_buffer);
		storage_buffer_owner.free(p_id);
	} else if (uniform_set_owner.owns(p_id)) {
		UniformSet *uniform_set = uniform_set_owner.getornull(p_id);
		frames[frame].uniform_sets_to_dispose_of.push_back(*uniform_set);
		if (uniform_set->invalidated_callback != nullptr) {
			uniform_set->invalidated_callback(p_id, uniform_set->invalidated_callback_userdata);
		}
		uniform_set_owner.free(p_id);
	} else if (render_pipeline_owner.owns(p_id)) {
		RenderPipeline *pipeline = render_pipeline_owner.getornull(p_id);
		frames[frame].render_pipelines_to_dispose_of.push_back(*pipeline);
		render_pipeline_owner.free(p_id);
	} else if (compute_pipeline_owner.owns(p_id)) {
		ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_id);
		frames[frame].compute_pipelines_to_dispose_of.push_back(*pipeline);
		compute_pipeline_owner.free(p_id);
	} else {
		ERR_PRINT("Attempted to free invalid ID: " + itos(p_id.get_id()));
	}
}

void RenderingDeviceVulkan::free(RID p_id) {
	_THREAD_SAFE_METHOD_

	_free_dependencies(p_id); //recursively erase dependencies first, to avoid potential API problems
	_free_internal(p_id);
}

// The full list of resources that can be named is in the VkObjectType enum.
// We just expose the resources that are owned and can be accessed easily.
void RenderingDeviceVulkan::set_resource_name(RID p_id, const String p_name) {
	if (texture_owner.owns(p_id)) {
		Texture *texture = texture_owner.getornull(p_id);
		if (texture->owner.is_null()) {
			// Don't set the source texture's name when calling on a texture view.
			context->set_object_name(VK_OBJECT_TYPE_IMAGE, uint64_t(texture->image), p_name);
		}
		context->set_object_name(VK_OBJECT_TYPE_IMAGE_VIEW, uint64_t(texture->view), p_name + " View");
	} else if (framebuffer_owner.owns(p_id)) {
		//Framebuffer *framebuffer = framebuffer_owner.getornull(p_id);
		// Not implemented for now as the relationship between Framebuffer and RenderPass is very complex.
	} else if (sampler_owner.owns(p_id)) {
		VkSampler *sampler = sampler_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_SAMPLER, uint64_t(*sampler), p_name);
	} else if (vertex_buffer_owner.owns(p_id)) {
		Buffer *vertex_buffer = vertex_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(vertex_buffer->buffer), p_name);
	} else if (index_buffer_owner.owns(p_id)) {
		IndexBuffer *index_buffer = index_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(index_buffer->buffer), p_name);
	} else if (shader_owner.owns(p_id)) {
		Shader *shader = shader_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE_LAYOUT, uint64_t(shader->pipeline_layout), p_name + " Pipeline Layout");
		for (int i = 0; i < shader->sets.size(); i++) {
			context->set_object_name(VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, uint64_t(shader->sets[i].descriptor_set_layout), p_name);
		}
	} else if (uniform_buffer_owner.owns(p_id)) {
		Buffer *uniform_buffer = uniform_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(uniform_buffer->buffer), p_name);
	} else if (texture_buffer_owner.owns(p_id)) {
		TextureBuffer *texture_buffer = texture_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(texture_buffer->buffer.buffer), p_name);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER_VIEW, uint64_t(texture_buffer->view), p_name + " View");
	} else if (storage_buffer_owner.owns(p_id)) {
		Buffer *storage_buffer = storage_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(storage_buffer->buffer), p_name);
	} else if (uniform_set_owner.owns(p_id)) {
		UniformSet *uniform_set = uniform_set_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_DESCRIPTOR_SET, uint64_t(uniform_set->descriptor_set), p_name);
	} else if (render_pipeline_owner.owns(p_id)) {
		RenderPipeline *pipeline = render_pipeline_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE, uint64_t(pipeline->pipeline), p_name);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE_LAYOUT, uint64_t(pipeline->pipeline_layout), p_name + " Layout");
	} else if (compute_pipeline_owner.owns(p_id)) {
		ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE, uint64_t(pipeline->pipeline), p_name);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE_LAYOUT, uint64_t(pipeline->pipeline_layout), p_name + " Layout");
	} else {
		ERR_PRINT("Attempted to name invalid ID: " + itos(p_id.get_id()));
	}
}
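
// Usage sketch for set_resource_name() (illustrative; the RID and name are
// hypothetical): labels set this way show up in debuggers such as RenderDoc.
//   set_resource_name(shadow_atlas_texture, "Shadow Atlas");
// For a texture this names both the VkImage and its VkImageView ("Shadow Atlas View").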

void RenderingDeviceVulkan::draw_command_begin_label(String p_label_name, const Color p_color) {
	context->command_begin_label(frames[frame].draw_command_buffer, p_label_name, p_color);
}

void RenderingDeviceVulkan::draw_command_insert_label(String p_label_name, const Color p_color) {
	context->command_insert_label(frames[frame].draw_command_buffer, p_label_name, p_color);
}

void RenderingDeviceVulkan::draw_command_end_label() {
	context->command_end_label(frames[frame].draw_command_buffer);
}

String RenderingDeviceVulkan::get_device_vendor_name() const {
	return context->get_device_vendor_name();
}

String RenderingDeviceVulkan::get_device_name() const {
	return context->get_device_name();
}

String RenderingDeviceVulkan::get_device_pipeline_cache_uuid() const {
	return context->get_device_pipeline_cache_uuid();
}

void RenderingDeviceVulkan::_finalize_command_buffers() {
	if (draw_list) {
		ERR_PRINT("Found open draw list at the end of the frame, this should never happen (further drawing will likely not work).");
	}

	if (compute_list) {
		ERR_PRINT("Found open compute list at the end of the frame, this should never happen (further compute will likely not work).");
	}

	{ //complete the setup buffer (that needs to be processed before anything else), then the draw buffer
		vkEndCommandBuffer(frames[frame].setup_command_buffer);
		vkEndCommandBuffer(frames[frame].draw_command_buffer);
	}
}

void RenderingDeviceVulkan::_begin_frame() {
	//erase pending resources
	_free_pending_resources(frame);

	//create setup command buffer and set as the setup buffer
	{
		VkCommandBufferBeginInfo cmdbuf_begin;
		cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		cmdbuf_begin.pNext = nullptr;
		cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
		cmdbuf_begin.pInheritanceInfo = nullptr;

		VkResult err = vkResetCommandBuffer(frames[frame].setup_command_buffer, 0);
		ERR_FAIL_COND_MSG(err, "vkResetCommandBuffer failed with error " + itos(err) + ".");

		err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
		err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");

		if (local_device.is_null()) {
			context->append_command_buffer(frames[frame].draw_command_buffer);
			context->set_setup_buffer(frames[frame].setup_command_buffer); //append now so it's added before everything else
		}
	}

	//advance current frame
	frames_drawn++;
	//advance staging buffer if used
	if (staging_buffer_used) {
		staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();
		staging_buffer_used = false;
	}

	if (frames[frame].timestamp_count) {
		vkGetQueryPoolResults(device, frames[frame].timestamp_pool, 0, frames[frame].timestamp_count, sizeof(uint64_t) * max_timestamp_query_elements, frames[frame].timestamp_result_values, sizeof(uint64_t), VK_QUERY_RESULT_64_BIT);
		vkCmdResetQueryPool(frames[frame].setup_command_buffer, frames[frame].timestamp_pool, 0, frames[frame].timestamp_count);
		SWAP(frames[frame].timestamp_names, frames[frame].timestamp_result_names);
		SWAP(frames[frame].timestamp_cpu_values, frames[frame].timestamp_cpu_result_values);
	}

	frames[frame].timestamp_result_count = frames[frame].timestamp_count;
	frames[frame].timestamp_count = 0;
	frames[frame].index = Engine::get_singleton()->get_frames_drawn();
}
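
// Timestamps are double-buffered: the values read back in _begin_frame() become
// the "result" set exposed by the get_captured_timestamp_*() getters below,
// while the live query slots are reset for the frame about to be recorded.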

void RenderingDeviceVulkan::swap_buffers() {
	ERR_FAIL_COND_MSG(local_device.is_valid(), "Local devices can't swap buffers.");
	_THREAD_SAFE_METHOD_

	_finalize_command_buffers();

	screen_prepared = false;
	//swap buffers
	context->swap_buffers();

	frame = (frame + 1) % frame_count;

	_begin_frame();
}

void RenderingDeviceVulkan::submit() {
	ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
	ERR_FAIL_COND_MSG(local_device_processing, "Device already submitted, call sync to wait until done.");

	_finalize_command_buffers();

	VkCommandBuffer command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
	context->local_device_push_command_buffers(local_device, command_buffers, 2);
	local_device_processing = true;
}

void RenderingDeviceVulkan::sync() {
	ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
	ERR_FAIL_COND_MSG(!local_device_processing, "Sync can only be called after a submit.");

	context->local_device_sync(local_device);
	_begin_frame();
	local_device_processing = false;
}
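
// Usage sketch for submit()/sync() (illustrative) on a local, headless device:
//   RenderingDevice *local_rd = rd->create_local_device();
//   /* record compute or transfer work on local_rd */
//   local_rd->submit();
//   local_rd->sync(); // blocks until the submitted work completes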

void RenderingDeviceVulkan::_free_pending_resources(int p_frame) {
	//free in dependency usage order, so nothing weird happens
	//pipelines
	while (frames[p_frame].render_pipelines_to_dispose_of.front()) {
		RenderPipeline *pipeline = &frames[p_frame].render_pipelines_to_dispose_of.front()->get();

		vkDestroyPipeline(device, pipeline->pipeline, nullptr);

		frames[p_frame].render_pipelines_to_dispose_of.pop_front();
	}

	while (frames[p_frame].compute_pipelines_to_dispose_of.front()) {
		ComputePipeline *pipeline = &frames[p_frame].compute_pipelines_to_dispose_of.front()->get();

		vkDestroyPipeline(device, pipeline->pipeline, nullptr);

		frames[p_frame].compute_pipelines_to_dispose_of.pop_front();
	}

	//uniform sets
	while (frames[p_frame].uniform_sets_to_dispose_of.front()) {
		UniformSet *uniform_set = &frames[p_frame].uniform_sets_to_dispose_of.front()->get();

		vkFreeDescriptorSets(device, uniform_set->pool->pool, 1, &uniform_set->descriptor_set);
		_descriptor_pool_free(uniform_set->pool_key, uniform_set->pool);

		frames[p_frame].uniform_sets_to_dispose_of.pop_front();
	}

	//buffer views
	while (frames[p_frame].buffer_views_to_dispose_of.front()) {
		VkBufferView buffer_view = frames[p_frame].buffer_views_to_dispose_of.front()->get();

		vkDestroyBufferView(device, buffer_view, nullptr);

		frames[p_frame].buffer_views_to_dispose_of.pop_front();
	}

	//shaders
	while (frames[p_frame].shaders_to_dispose_of.front()) {
		Shader *shader = &frames[p_frame].shaders_to_dispose_of.front()->get();

		//descriptor set layout for each set
		for (int i = 0; i < shader->sets.size(); i++) {
			vkDestroyDescriptorSetLayout(device, shader->sets[i].descriptor_set_layout, nullptr);
		}

		//pipeline layout
		vkDestroyPipelineLayout(device, shader->pipeline_layout, nullptr);

		//shaders themselves
		for (int i = 0; i < shader->pipeline_stages.size(); i++) {
			vkDestroyShaderModule(device, shader->pipeline_stages[i].module, nullptr);
		}

		frames[p_frame].shaders_to_dispose_of.pop_front();
	}

	//samplers
	while (frames[p_frame].samplers_to_dispose_of.front()) {
		VkSampler sampler = frames[p_frame].samplers_to_dispose_of.front()->get();

		vkDestroySampler(device, sampler, nullptr);

		frames[p_frame].samplers_to_dispose_of.pop_front();
	}

	//framebuffers
	while (frames[p_frame].framebuffers_to_dispose_of.front()) {
		Framebuffer *framebuffer = &frames[p_frame].framebuffers_to_dispose_of.front()->get();

		for (Map<Framebuffer::VersionKey, Framebuffer::Version>::Element *E = framebuffer->framebuffers.front(); E; E = E->next()) {
			//first framebuffer, then render pass because it depends on it
			vkDestroyFramebuffer(device, E->get().framebuffer, nullptr);
			vkDestroyRenderPass(device, E->get().render_pass, nullptr);
		}

		frames[p_frame].framebuffers_to_dispose_of.pop_front();
	}

	//textures
	while (frames[p_frame].textures_to_dispose_of.front()) {
		Texture *texture = &frames[p_frame].textures_to_dispose_of.front()->get();

		if (texture->bound) {
			WARN_PRINT("Deleted a texture while it was bound.");
		}
		vkDestroyImageView(device, texture->view, nullptr);
		if (texture->owner.is_null()) {
			//actually owns the image and the allocation too
			image_memory -= texture->allocation_info.size;
			vmaDestroyImage(allocator, texture->image, texture->allocation);
		}
		frames[p_frame].textures_to_dispose_of.pop_front();
	}

	//buffers
	while (frames[p_frame].buffers_to_dispose_of.front()) {
		_buffer_free(&frames[p_frame].buffers_to_dispose_of.front()->get());

		frames[p_frame].buffers_to_dispose_of.pop_front();
	}
}

void RenderingDeviceVulkan::prepare_screen_for_drawing() {
	_THREAD_SAFE_METHOD_
	context->prepare_buffers();
	screen_prepared = true;
}

uint32_t RenderingDeviceVulkan::get_frame_delay() const {
	return frame_count;
}

uint64_t RenderingDeviceVulkan::get_memory_usage(MemoryType p_type) const {
	if (p_type == MEMORY_BUFFERS) {
		return buffer_memory;
	} else if (p_type == MEMORY_TEXTURES) {
		return image_memory;
	} else {
		VmaStats stats;
		vmaCalculateStats(allocator, &stats);
		return stats.total.usedBytes;
	}
}
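
// Note on get_memory_usage(): the final (total memory) branch asks VMA to walk
// its heaps via vmaCalculateStats(), which is heavier than reading the cached
// buffer_memory and image_memory counters above.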

void RenderingDeviceVulkan::_flush(bool p_current_frame) {
	if (local_device.is_valid() && !p_current_frame) {
		return; //flushing previous frames has no effect with local device
	}

	//not doing this crashes RADV (undefined behavior)
	if (p_current_frame) {
		vkEndCommandBuffer(frames[frame].setup_command_buffer);
		vkEndCommandBuffer(frames[frame].draw_command_buffer);
	}

	if (local_device.is_valid()) {
		VkCommandBuffer command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
		context->local_device_push_command_buffers(local_device, command_buffers, 2);
		context->local_device_sync(local_device);

		VkCommandBufferBeginInfo cmdbuf_begin;
		cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		cmdbuf_begin.pNext = nullptr;
		cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
		cmdbuf_begin.pInheritanceInfo = nullptr;

		VkResult err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
		err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
	} else {
		context->flush(p_current_frame, p_current_frame);
		//re-create the setup command
		if (p_current_frame) {
			VkCommandBufferBeginInfo cmdbuf_begin;
			cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
			cmdbuf_begin.pNext = nullptr;
			cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
			cmdbuf_begin.pInheritanceInfo = nullptr;

			VkResult err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
			ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
			context->set_setup_buffer(frames[frame].setup_command_buffer); //append now so it's added before everything else
		}

		if (p_current_frame) {
			VkCommandBufferBeginInfo cmdbuf_begin;
			cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
			cmdbuf_begin.pNext = nullptr;
			cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
			cmdbuf_begin.pInheritanceInfo = nullptr;

			VkResult err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
			ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
			context->append_command_buffer(frames[frame].draw_command_buffer);
		}
	}
}

void RenderingDeviceVulkan::initialize(VulkanContext *p_context, bool p_local_device) {
	// get our device capabilities
	{
		device_capabilities.version_major = p_context->get_vulkan_major();
		device_capabilities.version_minor = p_context->get_vulkan_minor();

		// get info about subgroups
		VulkanContext::SubgroupCapabilities subgroup_capabilities = p_context->get_subgroup_capabilities();
		device_capabilities.subgroup_size = subgroup_capabilities.size;
		device_capabilities.subgroup_in_shaders = subgroup_capabilities.supported_stages_flags_rd();
		device_capabilities.subgroup_operations = subgroup_capabilities.supported_operations_flags_rd();

		// get info about further features
		VulkanContext::MultiviewCapabilities multiview_capabilities = p_context->get_multiview_capabilities();
		device_capabilities.supports_multiview = multiview_capabilities.is_supported && multiview_capabilities.max_view_count > 1;
	}
	context = p_context;
	device = p_context->get_device();
	if (p_local_device) {
		frame_count = 1;
		local_device = p_context->local_device_create();
		device = p_context->local_device_get_vk_device(local_device);
	} else {
		frame_count = p_context->get_swapchain_image_count() + 1; //always need one extra to ensure it's unused at any time, without having to use a fence for this.
	}
	limits = p_context->get_device_limits();
	max_timestamp_query_elements = 256;
	{ //initialize allocator
		VmaAllocatorCreateInfo allocatorInfo;
		memset(&allocatorInfo, 0, sizeof(VmaAllocatorCreateInfo));
		allocatorInfo.physicalDevice = p_context->get_physical_device();
		allocatorInfo.device = device;
		vmaCreateAllocator(&allocatorInfo, &allocator);
	}

	frames = memnew_arr(Frame, frame_count);
	frame = 0;
	//create setup and frame buffers
	for (int i = 0; i < frame_count; i++) {
		frames[i].index = 0;

		{ //create command pool, one per frame is recommended
			VkCommandPoolCreateInfo cmd_pool_info;
			cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
			cmd_pool_info.pNext = nullptr;
			cmd_pool_info.queueFamilyIndex = p_context->get_graphics_queue();
			cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;

			VkResult res = vkCreateCommandPool(device, &cmd_pool_info, nullptr, &frames[i].command_pool);
			ERR_FAIL_COND_MSG(res, "vkCreateCommandPool failed with error " + itos(res) + ".");
		}

		{ //create command buffers
			VkCommandBufferAllocateInfo cmdbuf;
			//no command buffer exists, create it.
			cmdbuf.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
			cmdbuf.pNext = nullptr;
			cmdbuf.commandPool = frames[i].command_pool;
			cmdbuf.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
			cmdbuf.commandBufferCount = 1;

			VkResult err = vkAllocateCommandBuffers(device, &cmdbuf, &frames[i].setup_command_buffer);
			ERR_CONTINUE_MSG(err, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
			err = vkAllocateCommandBuffers(device, &cmdbuf, &frames[i].draw_command_buffer);
			ERR_CONTINUE_MSG(err, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
		}

		{
			//create query pool
			VkQueryPoolCreateInfo query_pool_create_info;
			query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
			query_pool_create_info.flags = 0;
			query_pool_create_info.pNext = nullptr;
			query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
			query_pool_create_info.queryCount = max_timestamp_query_elements;
			query_pool_create_info.pipelineStatistics = 0;

			vkCreateQueryPool(device, &query_pool_create_info, nullptr, &frames[i].timestamp_pool);

			frames[i].timestamp_names = memnew_arr(String, max_timestamp_query_elements);
			frames[i].timestamp_cpu_values = memnew_arr(uint64_t, max_timestamp_query_elements);
			frames[i].timestamp_count = 0;
			frames[i].timestamp_result_names = memnew_arr(String, max_timestamp_query_elements);
			frames[i].timestamp_cpu_result_values = memnew_arr(uint64_t, max_timestamp_query_elements);
			frames[i].timestamp_result_values = memnew_arr(uint64_t, max_timestamp_query_elements);
			frames[i].timestamp_result_count = 0;
		}
	}
	{
		//begin the first command buffer for the first frame, so
		//setting up things can be done in the meantime until swap_buffers(), which is called before advance.
		VkCommandBufferBeginInfo cmdbuf_begin;
		cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		cmdbuf_begin.pNext = nullptr;
		cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
		cmdbuf_begin.pInheritanceInfo = nullptr;

		VkResult err = vkBeginCommandBuffer(frames[0].setup_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
		err = vkBeginCommandBuffer(frames[0].draw_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");

		if (local_device.is_null()) {
			context->set_setup_buffer(frames[0].setup_command_buffer); //append now so it's added before everything else
			context->append_command_buffer(frames[0].draw_command_buffer);
		}
	}
	staging_buffer_block_size = GLOBAL_DEF("rendering/vulkan/staging_buffer/block_size_kb", 256);
	staging_buffer_block_size = MAX(4, staging_buffer_block_size);
	staging_buffer_block_size *= 1024; //kb -> bytes
	staging_buffer_max_size = GLOBAL_DEF("rendering/vulkan/staging_buffer/max_size_mb", 128);
	staging_buffer_max_size = MAX(1, staging_buffer_max_size);
	staging_buffer_max_size *= 1024 * 1024;

	if (staging_buffer_max_size < staging_buffer_block_size * 4) {
		//validate enough blocks
		staging_buffer_max_size = staging_buffer_block_size * 4;
	}

	texture_upload_region_size_px = GLOBAL_DEF("rendering/vulkan/staging_buffer/texture_upload_region_size_px", 64);
	texture_upload_region_size_px = nearest_power_of_2_templated(texture_upload_region_size_px);

	frames_drawn = frame_count; //start from frame count, so everything else is immediately old

	//ensure current staging block is valid and at least one per frame exists
	staging_buffer_current = 0;
	staging_buffer_used = false;

	for (int i = 0; i < frame_count; i++) {
		//staging was never used, create a block
		Error err = _insert_staging_block();
		ERR_CONTINUE(err != OK);
	}

	max_descriptors_per_pool = GLOBAL_DEF("rendering/vulkan/descriptor_pools/max_descriptors_per_pool", 64);

	//check to make sure DescriptorPoolKey is good
	static_assert(sizeof(uint64_t) * 3 >= UNIFORM_TYPE_MAX * sizeof(uint16_t));

	draw_list = nullptr;
	draw_list_count = 0;
	draw_list_split = false;

	compute_list = nullptr;
}

template <class T>
void RenderingDeviceVulkan::_free_rids(T &p_owner, const char *p_type) {
	List<RID> owned;
	p_owner.get_owned_list(&owned);
	if (owned.size()) {
		if (owned.size() == 1) {
			WARN_PRINT(vformat("1 RID of type \"%s\" was leaked.", p_type));
		} else {
			WARN_PRINT(vformat("%d RIDs of type \"%s\" were leaked.", owned.size(), p_type));
		}
		for (List<RID>::Element *E = owned.front(); E; E = E->next()) {
			free(E->get());
		}
	}
}

void RenderingDeviceVulkan::capture_timestamp(const String &p_name) {
	ERR_FAIL_COND(frames[frame].timestamp_count >= max_timestamp_query_elements);

	//this should be optional for profiling, else it will slow things down
	{
		VkMemoryBarrier memoryBarrier;
		memoryBarrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
		memoryBarrier.pNext = nullptr;
		memoryBarrier.srcAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
				VK_ACCESS_INDEX_READ_BIT |
				VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
				VK_ACCESS_UNIFORM_READ_BIT |
				VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
				VK_ACCESS_SHADER_READ_BIT |
				VK_ACCESS_SHADER_WRITE_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_TRANSFER_READ_BIT |
				VK_ACCESS_TRANSFER_WRITE_BIT |
				VK_ACCESS_HOST_READ_BIT |
				VK_ACCESS_HOST_WRITE_BIT;
		memoryBarrier.dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
				VK_ACCESS_INDEX_READ_BIT |
				VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
				VK_ACCESS_UNIFORM_READ_BIT |
				VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
				VK_ACCESS_SHADER_READ_BIT |
				VK_ACCESS_SHADER_WRITE_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_TRANSFER_READ_BIT |
				VK_ACCESS_TRANSFER_WRITE_BIT |
				VK_ACCESS_HOST_READ_BIT |
				VK_ACCESS_HOST_WRITE_BIT;

		vkCmdPipelineBarrier(frames[frame].draw_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 1, &memoryBarrier, 0, nullptr, 0, nullptr);
	}

	vkCmdWriteTimestamp(frames[frame].draw_command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, frames[frame].timestamp_pool, frames[frame].timestamp_count);
	frames[frame].timestamp_names[frames[frame].timestamp_count] = p_name;
	frames[frame].timestamp_cpu_values[frames[frame].timestamp_count] = OS::get_singleton()->get_ticks_usec();
	frames[frame].timestamp_count++;
}

uint32_t RenderingDeviceVulkan::get_captured_timestamps_count() const {
	return frames[frame].timestamp_result_count;
}

uint64_t RenderingDeviceVulkan::get_captured_timestamps_frame() const {
	return frames[frame].index;
}
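
// Schoolbook 64x64 -> 128-bit multiply: u and v are split into 32-bit halves
// and the four partial products are accumulated into a high word (h) and a low
// word (l). For example, mult64to128(0xFFFFFFFFFFFFFFFF, 2, h, l) yields
// h == 1 and l == 0xFFFFFFFFFFFFFFFE.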
static void mult64to128(uint64_t u, uint64_t v, uint64_t &h, uint64_t &l) {
	uint64_t u1 = (u & 0xffffffff);
	uint64_t v1 = (v & 0xffffffff);
	uint64_t t = (u1 * v1);
	uint64_t w3 = (t & 0xffffffff);
	uint64_t k = (t >> 32);

	u >>= 32;
	t = (u * v1) + k;
	k = (t & 0xffffffff);
	uint64_t w1 = (t >> 32);

	v >>= 32;
	t = (u1 * v) + k;
	k = (t >> 32);

	h = (u * v) + w1 + k;
	l = (t << 32) + w3;
}

uint64_t RenderingDeviceVulkan::get_captured_timestamp_gpu_time(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);

	// This is tricky: the timestampPeriod multiplier is a float, while the timestamp itself is a 64-bit integer.
	// In cases like NVIDIA, which gives you enormous tick values and 1.0 as the multiplier, plain floating-point
	// multiplication would lose precision, so a 128-bit fixed-point multiplication is needed to get the right value.
	uint64_t shift_bits = 16;

	uint64_t h, l;
	mult64to128(frames[frame].timestamp_result_values[p_index], uint64_t(double(limits.timestampPeriod) * double(1 << shift_bits)), h, l);
	l >>= shift_bits;
	l |= h << (64 - shift_bits);
	return l;
}
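
// Worked example (assuming timestampPeriod == 1.0, as NVIDIA drivers commonly
// report): the fixed-point factor is exactly 1 << 16, so the shift/merge in
// get_captured_timestamp_gpu_time() returns the raw tick value unchanged. With
// a fractional period (e.g. ~52.08 ns for a 19.2 MHz clock) ticks are scaled to
// nanoseconds with 16 fractional bits of precision.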

uint64_t RenderingDeviceVulkan::get_captured_timestamp_cpu_time(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);
	return frames[frame].timestamp_cpu_result_values[p_index];
}

String RenderingDeviceVulkan::get_captured_timestamp_name(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, String());
	return frames[frame].timestamp_result_names[p_index];
}

int RenderingDeviceVulkan::limit_get(Limit p_limit) {
	switch (p_limit) {
		case LIMIT_MAX_BOUND_UNIFORM_SETS:
			return limits.maxBoundDescriptorSets;
		case LIMIT_MAX_FRAMEBUFFER_COLOR_ATTACHMENTS:
			return limits.maxColorAttachments;
		case LIMIT_MAX_TEXTURES_PER_UNIFORM_SET:
			return limits.maxDescriptorSetSampledImages;
		case LIMIT_MAX_SAMPLERS_PER_UNIFORM_SET:
			return limits.maxDescriptorSetSamplers;
		case LIMIT_MAX_STORAGE_BUFFERS_PER_UNIFORM_SET:
			return limits.maxDescriptorSetStorageBuffers;
		case LIMIT_MAX_STORAGE_IMAGES_PER_UNIFORM_SET:
			return limits.maxDescriptorSetStorageImages;
		case LIMIT_MAX_UNIFORM_BUFFERS_PER_UNIFORM_SET:
			return limits.maxDescriptorSetUniformBuffers;
		case LIMIT_MAX_DRAW_INDEXED_INDEX:
			return limits.maxDrawIndexedIndexValue;
		case LIMIT_MAX_FRAMEBUFFER_HEIGHT:
			return limits.maxFramebufferHeight;
		case LIMIT_MAX_FRAMEBUFFER_WIDTH:
			return limits.maxFramebufferWidth;
		case LIMIT_MAX_TEXTURE_ARRAY_LAYERS:
			return limits.maxImageArrayLayers;
		case LIMIT_MAX_TEXTURE_SIZE_1D:
			return limits.maxImageDimension1D;
		case LIMIT_MAX_TEXTURE_SIZE_2D:
			return limits.maxImageDimension2D;
		case LIMIT_MAX_TEXTURE_SIZE_3D:
			return limits.maxImageDimension3D;
		case LIMIT_MAX_TEXTURE_SIZE_CUBE:
			return limits.maxImageDimensionCube;
		case LIMIT_MAX_TEXTURES_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorSampledImages;
		case LIMIT_MAX_SAMPLERS_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorSamplers;
		case LIMIT_MAX_STORAGE_BUFFERS_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorStorageBuffers;
		case LIMIT_MAX_STORAGE_IMAGES_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorStorageImages;
		case LIMIT_MAX_UNIFORM_BUFFERS_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorUniformBuffers;
		case LIMIT_MAX_PUSH_CONSTANT_SIZE:
			return limits.maxPushConstantsSize;
		case LIMIT_MAX_UNIFORM_BUFFER_SIZE:
			return limits.maxUniformBufferRange;
		case LIMIT_MAX_VERTEX_INPUT_ATTRIBUTE_OFFSET:
			return limits.maxVertexInputAttributeOffset;
		case LIMIT_MAX_VERTEX_INPUT_ATTRIBUTES:
			return limits.maxVertexInputAttributes;
		case LIMIT_MAX_VERTEX_INPUT_BINDINGS:
			return limits.maxVertexInputBindings;
		case LIMIT_MAX_VERTEX_INPUT_BINDING_STRIDE:
			return limits.maxVertexInputBindingStride;
		case LIMIT_MIN_UNIFORM_BUFFER_OFFSET_ALIGNMENT:
			return limits.minUniformBufferOffsetAlignment;
		case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_X:
			return limits.maxComputeWorkGroupCount[0];
		case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Y:
			return limits.maxComputeWorkGroupCount[1];
		case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Z:
			return limits.maxComputeWorkGroupCount[2];
		case LIMIT_MAX_COMPUTE_WORKGROUP_INVOCATIONS:
			return limits.maxComputeWorkGroupInvocations;
		case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_X:
			return limits.maxComputeWorkGroupSize[0];
		case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Y:
			return limits.maxComputeWorkGroupSize[1];
		case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Z:
			return limits.maxComputeWorkGroupSize[2];
		default:
			ERR_FAIL_V(0);
	}

	return 0;
}
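
// Usage sketch for limit_get() (illustrative): callers can clamp their own
// resource usage to what the device reports, e.g.
//   int max_push = limit_get(LIMIT_MAX_PUSH_CONSTANT_SIZE);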

void RenderingDeviceVulkan::finalize() {
	//free all resources
	_flush(false);

	_free_rids(render_pipeline_owner, "Pipeline");
	_free_rids(compute_pipeline_owner, "Compute");
	_free_rids(uniform_set_owner, "UniformSet");
	_free_rids(texture_buffer_owner, "TextureBuffer");
	_free_rids(storage_buffer_owner, "StorageBuffer");
	_free_rids(uniform_buffer_owner, "UniformBuffer");
	_free_rids(shader_owner, "Shader");
	_free_rids(index_array_owner, "IndexArray");
	_free_rids(index_buffer_owner, "IndexBuffer");
	_free_rids(vertex_array_owner, "VertexArray");
	_free_rids(vertex_buffer_owner, "VertexBuffer");
	_free_rids(framebuffer_owner, "Framebuffer");
	_free_rids(sampler_owner, "Sampler");

	{
		//for textures it's a bit more difficult because they may be shared
		List<RID> owned;
		texture_owner.get_owned_list(&owned);
		if (owned.size()) {
			if (owned.size() == 1) {
				WARN_PRINT("1 RID of type \"Texture\" was leaked.");
			} else {
				WARN_PRINT(vformat("%d RIDs of type \"Texture\" were leaked.", owned.size()));
			}
			//free shared first
			for (List<RID>::Element *E = owned.front(); E;) {
				List<RID>::Element *N = E->next();
				if (texture_is_shared(E->get())) {
					free(E->get());
					owned.erase(E->get());
				}
				E = N;
			}
			//free non-shared second; this avoids an error when trying to free nonexistent textures due to dependencies.
			for (List<RID>::Element *E = owned.front(); E; E = E->next()) {
				free(E->get());
			}
		}
	}

	//free everything pending
	for (int i = 0; i < frame_count; i++) {
		int f = (frame + i) % frame_count;
		_free_pending_resources(f);
		vkDestroyCommandPool(device, frames[i].command_pool, nullptr);
		vkDestroyQueryPool(device, frames[i].timestamp_pool, nullptr);
		memdelete_arr(frames[i].timestamp_names);
		memdelete_arr(frames[i].timestamp_cpu_values);
		memdelete_arr(frames[i].timestamp_result_names);
		memdelete_arr(frames[i].timestamp_result_values);
		memdelete_arr(frames[i].timestamp_cpu_result_values);
	}

	for (int i = 0; i < split_draw_list_allocators.size(); i++) {
		vkDestroyCommandPool(device, split_draw_list_allocators[i].command_pool, nullptr);
	}

	memdelete_arr(frames);

	for (int i = 0; i < staging_buffer_blocks.size(); i++) {
		vmaDestroyBuffer(allocator, staging_buffer_blocks[i].buffer, staging_buffer_blocks[i].allocation);
	}

	vmaDestroyAllocator(allocator);

	while (vertex_formats.size()) {
		Map<VertexFormatID, VertexDescriptionCache>::Element *temp = vertex_formats.front();
		memdelete_arr(temp->get().bindings);
		memdelete_arr(temp->get().attributes);
		vertex_formats.erase(temp);
	}

	for (int i = 0; i < framebuffer_formats.size(); i++) {
		vkDestroyRenderPass(device, framebuffer_formats[i].render_pass, nullptr);
	}
	framebuffer_formats.clear();

	//all these should be clear at this point
	ERR_FAIL_COND(descriptor_pools.size());
	ERR_FAIL_COND(dependency_map.size());
	ERR_FAIL_COND(reverse_dependency_map.size());
}

RenderingDevice *RenderingDeviceVulkan::create_local_device() {
	RenderingDeviceVulkan *rd = memnew(RenderingDeviceVulkan);
	rd->initialize(context, true);
	return rd;
}

RenderingDeviceVulkan::RenderingDeviceVulkan() {
	device_capabilities.device_family = DEVICE_VULKAN;
}

RenderingDeviceVulkan::~RenderingDeviceVulkan() {
	if (local_device.is_valid()) {
		finalize();
		context->local_device_free(local_device);
	}
}