1/*
2 Simple DirectMedia Layer
3 Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>
4
5 This software is provided 'as-is', without any express or implied
6 warranty. In no event will the authors be held liable for any damages
7 arising from the use of this software.
8
9 Permission is granted to anyone to use this software for any purpose,
10 including commercial applications, and to alter it and redistribute it
11 freely, subject to the following restrictions:
12
13 1. The origin of this software must not be misrepresented; you must not
14 claim that you wrote the original software. If you use this software
15 in a product, an acknowledgment in the product documentation would be
16 appreciated but is not required.
17 2. Altered source versions must be plainly marked as such, and must not be
18 misrepresented as being the original software.
19 3. This notice may not be removed or altered from any source distribution.
20*/
21
22#include "SDL_internal.h"
23
24#ifdef SDL_GPU_VULKAN
25
26// Needed for VK_KHR_portability_subset
27#define VK_ENABLE_BETA_EXTENSIONS
28
29#define VK_NO_PROTOTYPES
30#include "../../video/khronos/vulkan/vulkan.h"
31
32#include <SDL3/SDL_vulkan.h>
33
34#include "../SDL_sysgpu.h"
35
// Clamp val into [min, max]. Built on SDL_min/SDL_max, so arguments may be
// evaluated more than once — do not pass expressions with side effects.
#define VULKAN_INTERNAL_clamp(val, min, max) SDL_max(min, SDL_min(val, max))

// Global Vulkan Loader Entry Points

// The one entry point resolved directly from the Vulkan loader; all other
// global functions are fetched through it.
static PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = NULL;

// Declare a static function pointer for every global-scope Vulkan function
// listed in SDL_gpu_vulkan_vkfuncs.h (X-macro include pattern).
#define VULKAN_GLOBAL_FUNCTION(name) \
    static PFN_##name name = NULL;
#include "SDL_gpu_vulkan_vkfuncs.h"
45
// Per-physical-device flags (1 = present) for the Vulkan device extensions
// this backend cares about.
typedef struct VulkanExtensions
{
    // These extensions are required!

    // Globally supported
    Uint8 KHR_swapchain;
    // Core since 1.1, needed for negative VkViewport::height
    Uint8 KHR_maintenance1;

    // These extensions are optional!

    // Core since 1.2, but requires annoying paperwork to implement
    Uint8 KHR_driver_properties;
    // EXT, probably not going to be Core
    Uint8 EXT_vertex_attribute_divisor;
    // Only required for special implementations (i.e. MoltenVK)
    Uint8 KHR_portability_subset;
    // Only required for decoding HDR ASTC textures
    Uint8 EXT_texture_compression_astc_hdr;
} VulkanExtensions;
66
67// Defines
68
// Memory sub-allocator tuning: resources at or below the threshold are packed
// into small allocation blocks; larger ones grow by LARGE_ALLOCATION_INCREMENT.
#define SMALL_ALLOCATION_THRESHOLD 2097152  // 2   MiB
#define SMALL_ALLOCATION_SIZE      16777216 // 16  MiB
#define LARGE_ALLOCATION_INCREMENT 67108864 // 64  MiB
#define MAX_UBO_SECTION_SIZE       4096     // 4   KiB
// Number of descriptor sets allocated per VkDescriptorPool.
#define DESCRIPTOR_POOL_SIZE       128
// SDL property key under which per-window backend data is stored.
#define WINDOW_PROPERTY_DATA       "SDL_GPUVulkanWindowPropertyData"

// VkComponentMapping initializer that passes all four channels through as-is.
#define IDENTITY_SWIZZLE               \
    {                                  \
        VK_COMPONENT_SWIZZLE_IDENTITY, \
        VK_COMPONENT_SWIZZLE_IDENTITY, \
        VK_COMPONENT_SWIZZLE_IDENTITY, \
        VK_COMPONENT_SWIZZLE_IDENTITY  \
    }

// Typed null handles, used to keep call sites explicit about what is absent.
#define NULL_DESC_LAYOUT     (VkDescriptorSetLayout)0
#define NULL_PIPELINE_LAYOUT (VkPipelineLayout)0
#define NULL_RENDER_PASS     (SDL_GPURenderPass *)0
87
// Ensure a dynamic array (a struct with `elements`, `count`, `capacity`
// members) has room for one more element: capacity starts at initialValue
// and doubles thereafter.
// NOTE(review): the SDL_realloc result is not checked for NULL — presumably
// relying on SDL's allocation-failure policy; confirm before reuse elsewhere.
#define EXPAND_ELEMENTS_IF_NEEDED(arr, initialValue, type) \
    do {                                                   \
        if (arr->count == arr->capacity) {                 \
            if (arr->capacity == 0) {                      \
                arr->capacity = initialValue;              \
            } else {                                       \
                arr->capacity *= 2;                        \
            }                                              \
            arr->elements = (type *)SDL_realloc(           \
                arr->elements,                             \
                arr->capacity * sizeof(type));             \
        }                                                  \
    } while (0)
101
// Copy srcCount elements from srcArr to dstArr, set dstCount to match, and
// reset srcCount to zero. `i` is a caller-provided loop index so the macro
// declares no variables of its own.
// Fix: the original expansion opened `do {` but ended with a bare
// `while (0)` — the closing `}` was missing, making every expansion a
// syntax error. The do/while (0) wrapper makes the macro a single statement
// (safe inside un-braced if/else).
#define MOVE_ARRAY_CONTENTS_AND_RESET(i, dstArr, dstCount, srcArr, srcCount) \
    do {                                                                     \
        for ((i) = 0; (i) < (srcCount); (i) += 1) {                          \
            (dstArr)[i] = (srcArr)[i];                                       \
        }                                                                    \
        (dstCount) = (srcCount);                                             \
        (srcCount) = 0;                                                      \
    } while (0)
110
111// Conversions
112
// Device-selection scores indexed by VkPhysicalDeviceType (higher = preferred).
static const Uint8 DEVICE_PRIORITY_HIGHPERFORMANCE[] = {
    0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
    3, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
    4, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
    2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
    1  // VK_PHYSICAL_DEVICE_TYPE_CPU
};

// Same table for low-power preference: integrated GPUs outrank discrete ones.
static const Uint8 DEVICE_PRIORITY_LOWPOWER[] = {
    0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
    4, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
    3, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
    2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
    1  // VK_PHYSICAL_DEVICE_TYPE_CPU
};

// SDL_GPUPresentMode -> VkPresentModeKHR (VSYNC maps to FIFO).
static VkPresentModeKHR SDLToVK_PresentMode[] = {
    VK_PRESENT_MODE_FIFO_KHR,
    VK_PRESENT_MODE_IMMEDIATE_KHR,
    VK_PRESENT_MODE_MAILBOX_KHR
};
134
// SDL_GPUTextureFormat -> VkFormat, indexed by the SDL enum value.
// The compile-time assert below keeps this table in lockstep with the enum.
// Formats with no exact Vulkan equivalent (A8_UNORM, B4G4R4A4_UNORM) use the
// closest VkFormat plus a component swizzle — see SwizzleForSDLFormat.
static VkFormat SDLToVK_TextureFormat[] = {
    VK_FORMAT_UNDEFINED,                   // INVALID
    VK_FORMAT_R8_UNORM,                    // A8_UNORM
    VK_FORMAT_R8_UNORM,                    // R8_UNORM
    VK_FORMAT_R8G8_UNORM,                  // R8G8_UNORM
    VK_FORMAT_R8G8B8A8_UNORM,              // R8G8B8A8_UNORM
    VK_FORMAT_R16_UNORM,                   // R16_UNORM
    VK_FORMAT_R16G16_UNORM,                // R16G16_UNORM
    VK_FORMAT_R16G16B16A16_UNORM,          // R16G16B16A16_UNORM
    VK_FORMAT_A2B10G10R10_UNORM_PACK32,    // R10G10B10A2_UNORM
    VK_FORMAT_R5G6B5_UNORM_PACK16,         // B5G6R5_UNORM
    VK_FORMAT_A1R5G5B5_UNORM_PACK16,       // B5G5R5A1_UNORM
    VK_FORMAT_B4G4R4A4_UNORM_PACK16,       // B4G4R4A4_UNORM
    VK_FORMAT_B8G8R8A8_UNORM,              // B8G8R8A8_UNORM
    VK_FORMAT_BC1_RGBA_UNORM_BLOCK,        // BC1_UNORM
    VK_FORMAT_BC2_UNORM_BLOCK,             // BC2_UNORM
    VK_FORMAT_BC3_UNORM_BLOCK,             // BC3_UNORM
    VK_FORMAT_BC4_UNORM_BLOCK,             // BC4_UNORM
    VK_FORMAT_BC5_UNORM_BLOCK,             // BC5_UNORM
    VK_FORMAT_BC7_UNORM_BLOCK,             // BC7_UNORM
    VK_FORMAT_BC6H_SFLOAT_BLOCK,           // BC6H_FLOAT
    VK_FORMAT_BC6H_UFLOAT_BLOCK,           // BC6H_UFLOAT
    VK_FORMAT_R8_SNORM,                    // R8_SNORM
    VK_FORMAT_R8G8_SNORM,                  // R8G8_SNORM
    VK_FORMAT_R8G8B8A8_SNORM,              // R8G8B8A8_SNORM
    VK_FORMAT_R16_SNORM,                   // R16_SNORM
    VK_FORMAT_R16G16_SNORM,                // R16G16_SNORM
    VK_FORMAT_R16G16B16A16_SNORM,          // R16G16B16A16_SNORM
    VK_FORMAT_R16_SFLOAT,                  // R16_FLOAT
    VK_FORMAT_R16G16_SFLOAT,               // R16G16_FLOAT
    VK_FORMAT_R16G16B16A16_SFLOAT,         // R16G16B16A16_FLOAT
    VK_FORMAT_R32_SFLOAT,                  // R32_FLOAT
    VK_FORMAT_R32G32_SFLOAT,               // R32G32_FLOAT
    VK_FORMAT_R32G32B32A32_SFLOAT,         // R32G32B32A32_FLOAT
    VK_FORMAT_B10G11R11_UFLOAT_PACK32,     // R11G11B10_UFLOAT
    VK_FORMAT_R8_UINT,                     // R8_UINT
    VK_FORMAT_R8G8_UINT,                   // R8G8_UINT
    VK_FORMAT_R8G8B8A8_UINT,               // R8G8B8A8_UINT
    VK_FORMAT_R16_UINT,                    // R16_UINT
    VK_FORMAT_R16G16_UINT,                 // R16G16_UINT
    VK_FORMAT_R16G16B16A16_UINT,           // R16G16B16A16_UINT
    VK_FORMAT_R32_UINT,                    // R32_UINT
    VK_FORMAT_R32G32_UINT,                 // R32G32_UINT
    VK_FORMAT_R32G32B32A32_UINT,           // R32G32B32A32_UINT
    VK_FORMAT_R8_SINT,                     // R8_INT
    VK_FORMAT_R8G8_SINT,                   // R8G8_INT
    VK_FORMAT_R8G8B8A8_SINT,               // R8G8B8A8_INT
    VK_FORMAT_R16_SINT,                    // R16_INT
    VK_FORMAT_R16G16_SINT,                 // R16G16_INT
    VK_FORMAT_R16G16B16A16_SINT,           // R16G16B16A16_INT
    VK_FORMAT_R32_SINT,                    // R32_INT
    VK_FORMAT_R32G32_SINT,                 // R32G32_INT
    VK_FORMAT_R32G32B32A32_SINT,           // R32G32B32A32_INT
    VK_FORMAT_R8G8B8A8_SRGB,               // R8G8B8A8_UNORM_SRGB
    VK_FORMAT_B8G8R8A8_SRGB,               // B8G8R8A8_UNORM_SRGB
    VK_FORMAT_BC1_RGBA_SRGB_BLOCK,         // BC1_UNORM_SRGB
    VK_FORMAT_BC2_SRGB_BLOCK,              // BC2_UNORM_SRGB
    VK_FORMAT_BC3_SRGB_BLOCK,              // BC3_UNORM_SRGB
    VK_FORMAT_BC7_SRGB_BLOCK,              // BC7_UNORM_SRGB
    VK_FORMAT_D16_UNORM,                   // D16_UNORM
    VK_FORMAT_X8_D24_UNORM_PACK32,         // D24_UNORM
    VK_FORMAT_D32_SFLOAT,                  // D32_FLOAT
    VK_FORMAT_D24_UNORM_S8_UINT,           // D24_UNORM_S8_UINT
    VK_FORMAT_D32_SFLOAT_S8_UINT,          // D32_FLOAT_S8_UINT
    VK_FORMAT_ASTC_4x4_UNORM_BLOCK,        // ASTC_4x4_UNORM
    VK_FORMAT_ASTC_5x4_UNORM_BLOCK,        // ASTC_5x4_UNORM
    VK_FORMAT_ASTC_5x5_UNORM_BLOCK,        // ASTC_5x5_UNORM
    VK_FORMAT_ASTC_6x5_UNORM_BLOCK,        // ASTC_6x5_UNORM
    VK_FORMAT_ASTC_6x6_UNORM_BLOCK,        // ASTC_6x6_UNORM
    VK_FORMAT_ASTC_8x5_UNORM_BLOCK,        // ASTC_8x5_UNORM
    VK_FORMAT_ASTC_8x6_UNORM_BLOCK,        // ASTC_8x6_UNORM
    VK_FORMAT_ASTC_8x8_UNORM_BLOCK,        // ASTC_8x8_UNORM
    VK_FORMAT_ASTC_10x5_UNORM_BLOCK,       // ASTC_10x5_UNORM
    VK_FORMAT_ASTC_10x6_UNORM_BLOCK,       // ASTC_10x6_UNORM
    VK_FORMAT_ASTC_10x8_UNORM_BLOCK,       // ASTC_10x8_UNORM
    VK_FORMAT_ASTC_10x10_UNORM_BLOCK,      // ASTC_10x10_UNORM
    VK_FORMAT_ASTC_12x10_UNORM_BLOCK,      // ASTC_12x10_UNORM
    VK_FORMAT_ASTC_12x12_UNORM_BLOCK,      // ASTC_12x12_UNORM
    VK_FORMAT_ASTC_4x4_SRGB_BLOCK,         // ASTC_4x4_UNORM_SRGB
    VK_FORMAT_ASTC_5x4_SRGB_BLOCK,         // ASTC_5x4_UNORM_SRGB
    VK_FORMAT_ASTC_5x5_SRGB_BLOCK,         // ASTC_5x5_UNORM_SRGB
    VK_FORMAT_ASTC_6x5_SRGB_BLOCK,         // ASTC_6x5_UNORM_SRGB
    VK_FORMAT_ASTC_6x6_SRGB_BLOCK,         // ASTC_6x6_UNORM_SRGB
    VK_FORMAT_ASTC_8x5_SRGB_BLOCK,         // ASTC_8x5_UNORM_SRGB
    VK_FORMAT_ASTC_8x6_SRGB_BLOCK,         // ASTC_8x6_UNORM_SRGB
    VK_FORMAT_ASTC_8x8_SRGB_BLOCK,         // ASTC_8x8_UNORM_SRGB
    VK_FORMAT_ASTC_10x5_SRGB_BLOCK,        // ASTC_10x5_UNORM_SRGB
    VK_FORMAT_ASTC_10x6_SRGB_BLOCK,        // ASTC_10x6_UNORM_SRGB
    VK_FORMAT_ASTC_10x8_SRGB_BLOCK,        // ASTC_10x8_UNORM_SRGB
    VK_FORMAT_ASTC_10x10_SRGB_BLOCK,       // ASTC_10x10_UNORM_SRGB
    VK_FORMAT_ASTC_12x10_SRGB_BLOCK,       // ASTC_12x10_UNORM_SRGB
    VK_FORMAT_ASTC_12x12_SRGB_BLOCK,       // ASTC_12x12_UNORM_SRGB
    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,   // ASTC_4x4_FLOAT
    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,   // ASTC_5x4_FLOAT
    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,   // ASTC_5x5_FLOAT
    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,   // ASTC_6x5_FLOAT
    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,   // ASTC_6x6_FLOAT
    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,   // ASTC_8x5_FLOAT
    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,   // ASTC_8x6_FLOAT
    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,   // ASTC_8x8_FLOAT
    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,  // ASTC_10x5_FLOAT
    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,  // ASTC_10x6_FLOAT
    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,  // ASTC_10x8_FLOAT
    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, // ASTC_10x10_FLOAT
    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, // ASTC_12x10_FLOAT
    // NOTE(review): uses the core (1.3) enumerant name while the entries above
    // use the _EXT alias — same numeric value, but confirm the vendored header
    // defines it, and consider normalizing to one spelling.
    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK      // ASTC_12x12_FLOAT
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_TextureFormat, SDL_arraysize(SDLToVK_TextureFormat) == SDL_GPU_TEXTUREFORMAT_MAX_ENUM_VALUE);
243
244static VkComponentMapping SwizzleForSDLFormat(SDL_GPUTextureFormat format)
245{
246 if (format == SDL_GPU_TEXTUREFORMAT_A8_UNORM) {
247 // TODO: use VK_FORMAT_A8_UNORM_KHR from VK_KHR_maintenance5 when available
248 return (VkComponentMapping){
249 VK_COMPONENT_SWIZZLE_ZERO,
250 VK_COMPONENT_SWIZZLE_ZERO,
251 VK_COMPONENT_SWIZZLE_ZERO,
252 VK_COMPONENT_SWIZZLE_R,
253 };
254 }
255
256 if (format == SDL_GPU_TEXTUREFORMAT_B4G4R4A4_UNORM) {
257 // ARGB -> BGRA
258 // TODO: use VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT from VK_EXT_4444_formats when available
259 return (VkComponentMapping){
260 VK_COMPONENT_SWIZZLE_G,
261 VK_COMPONENT_SWIZZLE_R,
262 VK_COMPONENT_SWIZZLE_A,
263 VK_COMPONENT_SWIZZLE_B,
264 };
265 }
266
267 return (VkComponentMapping)IDENTITY_SWIZZLE;
268}
269
// Preferred swapchain surface format per SDL_GPUSwapchainComposition.
static VkFormat SwapchainCompositionToFormat[] = {
    VK_FORMAT_B8G8R8A8_UNORM,          // SDR
    VK_FORMAT_B8G8R8A8_SRGB,           // SDR_LINEAR
    VK_FORMAT_R16G16B16A16_SFLOAT,     // HDR_EXTENDED_LINEAR
    VK_FORMAT_A2B10G10R10_UNORM_PACK32 // HDR10_ST2084
};

// Fallback surface format when the preferred one is unsupported;
// VK_FORMAT_UNDEFINED means there is no fallback for that composition.
static VkFormat SwapchainCompositionToFallbackFormat[] = {
    VK_FORMAT_R8G8B8A8_UNORM, // SDR
    VK_FORMAT_R8G8B8A8_SRGB,  // SDR_LINEAR
    VK_FORMAT_UNDEFINED,      // HDR_EXTENDED_LINEAR (no fallback)
    VK_FORMAT_UNDEFINED       // HDR10_ST2084 (no fallback)
};
283
284static SDL_GPUTextureFormat SwapchainCompositionToSDLFormat(
285 SDL_GPUSwapchainComposition composition,
286 bool usingFallback)
287{
288 switch (composition) {
289 case SDL_GPU_SWAPCHAINCOMPOSITION_SDR:
290 return usingFallback ? SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM;
291 case SDL_GPU_SWAPCHAINCOMPOSITION_SDR_LINEAR:
292 return usingFallback ? SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM_SRGB : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM_SRGB;
293 case SDL_GPU_SWAPCHAINCOMPOSITION_HDR_EXTENDED_LINEAR:
294 return SDL_GPU_TEXTUREFORMAT_R16G16B16A16_FLOAT;
295 case SDL_GPU_SWAPCHAINCOMPOSITION_HDR10_ST2084:
296 return SDL_GPU_TEXTUREFORMAT_R10G10B10A2_UNORM;
297 default:
298 return SDL_GPU_TEXTUREFORMAT_INVALID;
299 }
300}
301
// Color space requested for each SDL_GPUSwapchainComposition.
static VkColorSpaceKHR SwapchainCompositionToColorSpace[] = {
    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,       // SDR
    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,       // SDR_LINEAR
    VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, // HDR_EXTENDED_LINEAR
    VK_COLOR_SPACE_HDR10_ST2084_EXT          // HDR10_ST2084
};

// Component swizzle applied to swapchain image views per composition.
static VkComponentMapping SwapchainCompositionSwizzle[] = {
    IDENTITY_SWIZZLE, // SDR
    IDENTITY_SWIZZLE, // SDR_LINEAR
    IDENTITY_SWIZZLE, // HDR_EXTENDED_LINEAR
    {
        // HDR10_ST2084
        VK_COMPONENT_SWIZZLE_R,
        VK_COMPONENT_SWIZZLE_G,
        VK_COMPONENT_SWIZZLE_B,
        VK_COMPONENT_SWIZZLE_A,
    }
};
321
// SDL_GPUVertexElementFormat -> VkFormat, indexed by the SDL enum value;
// the compile-time assert keeps it in sync with the enum.
static VkFormat SDLToVK_VertexFormat[] = {
    VK_FORMAT_UNDEFINED,           // INVALID
    VK_FORMAT_R32_SINT,            // INT
    VK_FORMAT_R32G32_SINT,         // INT2
    VK_FORMAT_R32G32B32_SINT,      // INT3
    VK_FORMAT_R32G32B32A32_SINT,   // INT4
    VK_FORMAT_R32_UINT,            // UINT
    VK_FORMAT_R32G32_UINT,         // UINT2
    VK_FORMAT_R32G32B32_UINT,      // UINT3
    VK_FORMAT_R32G32B32A32_UINT,   // UINT4
    VK_FORMAT_R32_SFLOAT,          // FLOAT
    VK_FORMAT_R32G32_SFLOAT,       // FLOAT2
    VK_FORMAT_R32G32B32_SFLOAT,    // FLOAT3
    VK_FORMAT_R32G32B32A32_SFLOAT, // FLOAT4
    VK_FORMAT_R8G8_SINT,           // BYTE2
    VK_FORMAT_R8G8B8A8_SINT,       // BYTE4
    VK_FORMAT_R8G8_UINT,           // UBYTE2
    VK_FORMAT_R8G8B8A8_UINT,       // UBYTE4
    VK_FORMAT_R8G8_SNORM,          // BYTE2_NORM
    VK_FORMAT_R8G8B8A8_SNORM,      // BYTE4_NORM
    VK_FORMAT_R8G8_UNORM,          // UBYTE2_NORM
    VK_FORMAT_R8G8B8A8_UNORM,      // UBYTE4_NORM
    VK_FORMAT_R16G16_SINT,         // SHORT2
    VK_FORMAT_R16G16B16A16_SINT,   // SHORT4
    VK_FORMAT_R16G16_UINT,         // USHORT2
    VK_FORMAT_R16G16B16A16_UINT,   // USHORT4
    VK_FORMAT_R16G16_SNORM,        // SHORT2_NORM
    VK_FORMAT_R16G16B16A16_SNORM,  // SHORT4_NORM
    VK_FORMAT_R16G16_UNORM,        // USHORT2_NORM
    VK_FORMAT_R16G16B16A16_UNORM,  // USHORT4_NORM
    VK_FORMAT_R16G16_SFLOAT,       // HALF2
    VK_FORMAT_R16G16B16A16_SFLOAT  // HALF4
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_VertexFormat, SDL_arraysize(SDLToVK_VertexFormat) == SDL_GPU_VERTEXELEMENTFORMAT_MAX_ENUM_VALUE);
356
// The tables below translate SDL GPU enums to their Vulkan equivalents,
// indexed by the SDL enum value. Tables whose SDL enum reserves slot 0 for
// INVALID carry a placeholder first entry; compile-time asserts pin the
// lengths where the SDL enum exposes a MAX_ENUM_VALUE.

static VkIndexType SDLToVK_IndexType[] = {
    VK_INDEX_TYPE_UINT16,
    VK_INDEX_TYPE_UINT32
};

static VkPrimitiveTopology SDLToVK_PrimitiveType[] = {
    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
    VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
    VK_PRIMITIVE_TOPOLOGY_POINT_LIST
};

static VkCullModeFlags SDLToVK_CullMode[] = {
    VK_CULL_MODE_NONE,
    VK_CULL_MODE_FRONT_BIT,
    VK_CULL_MODE_BACK_BIT,
    VK_CULL_MODE_FRONT_AND_BACK
};

static VkFrontFace SDLToVK_FrontFace[] = {
    VK_FRONT_FACE_COUNTER_CLOCKWISE,
    VK_FRONT_FACE_CLOCKWISE
};

static VkBlendFactor SDLToVK_BlendFactor[] = {
    VK_BLEND_FACTOR_ZERO, // INVALID
    VK_BLEND_FACTOR_ZERO,
    VK_BLEND_FACTOR_ONE,
    VK_BLEND_FACTOR_SRC_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
    VK_BLEND_FACTOR_DST_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
    VK_BLEND_FACTOR_SRC_ALPHA,
    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
    VK_BLEND_FACTOR_DST_ALPHA,
    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
    VK_BLEND_FACTOR_CONSTANT_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_BlendFactor, SDL_arraysize(SDLToVK_BlendFactor) == SDL_GPU_BLENDFACTOR_MAX_ENUM_VALUE);

static VkBlendOp SDLToVK_BlendOp[] = {
    VK_BLEND_OP_ADD, // INVALID
    VK_BLEND_OP_ADD,
    VK_BLEND_OP_SUBTRACT,
    VK_BLEND_OP_REVERSE_SUBTRACT,
    VK_BLEND_OP_MIN,
    VK_BLEND_OP_MAX
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_BlendOp, SDL_arraysize(SDLToVK_BlendOp) == SDL_GPU_BLENDOP_MAX_ENUM_VALUE);

static VkCompareOp SDLToVK_CompareOp[] = {
    VK_COMPARE_OP_NEVER, // INVALID
    VK_COMPARE_OP_NEVER,
    VK_COMPARE_OP_LESS,
    VK_COMPARE_OP_EQUAL,
    VK_COMPARE_OP_LESS_OR_EQUAL,
    VK_COMPARE_OP_GREATER,
    VK_COMPARE_OP_NOT_EQUAL,
    VK_COMPARE_OP_GREATER_OR_EQUAL,
    VK_COMPARE_OP_ALWAYS
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_CompareOp, SDL_arraysize(SDLToVK_CompareOp) == SDL_GPU_COMPAREOP_MAX_ENUM_VALUE);

static VkStencilOp SDLToVK_StencilOp[] = {
    VK_STENCIL_OP_KEEP, // INVALID
    VK_STENCIL_OP_KEEP,
    VK_STENCIL_OP_ZERO,
    VK_STENCIL_OP_REPLACE,
    VK_STENCIL_OP_INCREMENT_AND_CLAMP,
    VK_STENCIL_OP_DECREMENT_AND_CLAMP,
    VK_STENCIL_OP_INVERT,
    VK_STENCIL_OP_INCREMENT_AND_WRAP,
    VK_STENCIL_OP_DECREMENT_AND_WRAP
};
SDL_COMPILE_TIME_ASSERT(SDLToVK_StencilOp, SDL_arraysize(SDLToVK_StencilOp) == SDL_GPU_STENCILOP_MAX_ENUM_VALUE);

static VkAttachmentLoadOp SDLToVK_LoadOp[] = {
    VK_ATTACHMENT_LOAD_OP_LOAD,
    VK_ATTACHMENT_LOAD_OP_CLEAR,
    VK_ATTACHMENT_LOAD_OP_DONT_CARE
};

// NOTE(review): four entries for a three-op-looking table — entries 2 and 3
// presumably cover the resolve variants of SDL_GPUStoreOp; confirm against
// the SDL_GPUStoreOp enum ordering.
static VkAttachmentStoreOp SDLToVK_StoreOp[] = {
    VK_ATTACHMENT_STORE_OP_STORE,
    VK_ATTACHMENT_STORE_OP_DONT_CARE,
    VK_ATTACHMENT_STORE_OP_DONT_CARE,
    VK_ATTACHMENT_STORE_OP_STORE
};

static VkSampleCountFlagBits SDLToVK_SampleCount[] = {
    VK_SAMPLE_COUNT_1_BIT,
    VK_SAMPLE_COUNT_2_BIT,
    VK_SAMPLE_COUNT_4_BIT,
    VK_SAMPLE_COUNT_8_BIT
};

static VkVertexInputRate SDLToVK_VertexInputRate[] = {
    VK_VERTEX_INPUT_RATE_VERTEX,
    VK_VERTEX_INPUT_RATE_INSTANCE
};

static VkFilter SDLToVK_Filter[] = {
    VK_FILTER_NEAREST,
    VK_FILTER_LINEAR
};

static VkSamplerMipmapMode SDLToVK_SamplerMipmapMode[] = {
    VK_SAMPLER_MIPMAP_MODE_NEAREST,
    VK_SAMPLER_MIPMAP_MODE_LINEAR
};

static VkSamplerAddressMode SDLToVK_SamplerAddressMode[] = {
    VK_SAMPLER_ADDRESS_MODE_REPEAT,
    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
};
476
477// Structures
478
// Forward declarations for types that reference each other below.
typedef struct VulkanMemoryAllocation VulkanMemoryAllocation;
typedef struct VulkanBuffer VulkanBuffer;
typedef struct VulkanBufferContainer VulkanBufferContainer;
typedef struct VulkanTexture VulkanTexture;
typedef struct VulkanTextureContainer VulkanTextureContainer;

// Reference-counted wrapper around a VkFence so fences can be pooled and
// shared between command buffers and client code.
typedef struct VulkanFenceHandle
{
    VkFence fence;
    SDL_AtomicInt referenceCount;
} VulkanFenceHandle;
490
491// Memory Allocation
492
// A hole inside a VulkanMemoryAllocation that is available for sub-allocation.
typedef struct VulkanMemoryFreeRegion
{
    VulkanMemoryAllocation *allocation;
    VkDeviceSize offset;
    VkDeviceSize size;
    Uint32 allocationIndex; // index in allocation->freeRegions
    Uint32 sortedIndex;     // index in the sub-allocator's size-sorted list
} VulkanMemoryFreeRegion;

// A region of a VulkanMemoryAllocation currently bound to a buffer or image.
typedef struct VulkanMemoryUsedRegion
{
    VulkanMemoryAllocation *allocation;
    VkDeviceSize offset;
    VkDeviceSize size;
    VkDeviceSize resourceOffset; // differs from offset based on alignment
    VkDeviceSize resourceSize;   // differs from size based on alignment
    VkDeviceSize alignment;
    Uint8 isBuffer; // selects which union member is active
    union
    {
        VulkanBuffer *vulkanBuffer;
        VulkanTexture *vulkanTexture;
    };
} VulkanMemoryUsedRegion;

// Manages all allocations for one Vulkan memory type index, keeping free
// regions sorted (by size, presumably for best-fit lookup — see usage).
typedef struct VulkanMemorySubAllocator
{
    Uint32 memoryTypeIndex;
    VulkanMemoryAllocation **allocations;
    Uint32 allocationCount;
    VulkanMemoryFreeRegion **sortedFreeRegions;
    Uint32 sortedFreeRegionCount;
    Uint32 sortedFreeRegionCapacity;
} VulkanMemorySubAllocator;

// One VkDeviceMemory block plus bookkeeping for the used/free regions
// carved out of it.
struct VulkanMemoryAllocation
{
    VulkanMemorySubAllocator *allocator;
    VkDeviceMemory memory;
    VkDeviceSize size;
    VulkanMemoryUsedRegion **usedRegions;
    Uint32 usedRegionCount;
    Uint32 usedRegionCapacity;
    VulkanMemoryFreeRegion **freeRegions;
    Uint32 freeRegionCount;
    Uint32 freeRegionCapacity;
    Uint8 availableForAllocation;
    VkDeviceSize freeSpace;
    VkDeviceSize usedSpace;
    Uint8 *mapPointer; // persistent map pointer for host-visible memory
    SDL_Mutex *memoryLock;
};

// Top-level allocator: one sub-allocator per possible Vulkan memory type.
typedef struct VulkanMemoryAllocator
{
    VulkanMemorySubAllocator subAllocators[VK_MAX_MEMORY_TYPES];
} VulkanMemoryAllocator;
550
551// Memory structures
552
// Distinguishes how a buffer's memory is chosen and how it is used internally.
typedef enum VulkanBufferType
{
    VULKAN_BUFFER_TYPE_GPU,
    VULKAN_BUFFER_TYPE_UNIFORM,
    VULKAN_BUFFER_TYPE_TRANSFER
} VulkanBufferType;

// One concrete VkBuffer plus its sub-allocated memory region.
struct VulkanBuffer
{
    VulkanBufferContainer *container; // owning container
    Uint32 containerIndex;            // index in container->buffers

    VkBuffer buffer;
    VulkanMemoryUsedRegion *usedRegion;

    // Needed for uniforms and defrag
    VulkanBufferType type;
    SDL_GPUBufferUsageFlags usage;
    VkDeviceSize size;

    SDL_AtomicInt referenceCount;
    bool transitioned;
    bool markedForDestroy; // so that defrag doesn't double-free
};

// Client-facing handle: may hold several VulkanBuffers so the active one can
// be cycled without stalling on in-flight work.
struct VulkanBufferContainer
{
    VulkanBuffer *activeBuffer;

    VulkanBuffer **buffers;
    Uint32 bufferCapacity;
    Uint32 bufferCount;

    bool dedicated; // uses its own dedicated device-memory allocation
    char *debugName;
};
589
590// Renderer Structure
591
// Queue family indices selected at device-creation time.
typedef struct QueueFamilyIndices
{
    Uint32 graphicsFamily;
    Uint32 presentFamily;
    Uint32 computeFamily;
    Uint32 transferFamily;
} QueueFamilyIndices;

// Reference-counted VkSampler wrapper.
typedef struct VulkanSampler
{
    VkSampler sampler;
    SDL_AtomicInt referenceCount;
} VulkanSampler;

// A compiled shader module plus the resource counts needed to build or look
// up its pipeline resource layout.
typedef struct VulkanShader
{
    VkShaderModule shaderModule;
    const char *entrypointName;
    Uint32 numSamplers;
    Uint32 numStorageTextures;
    Uint32 numStorageBuffers;
    Uint32 numUniformBuffers;
    SDL_AtomicInt referenceCount;
} VulkanShader;
616
/* Textures are made up of individual subresources.
 * This helps us barrier the resource efficiently.
 */
typedef struct VulkanTextureSubresource
{
    VulkanTexture *parent;
    Uint32 layer;
    Uint32 level; // mip level

    VkImageView *renderTargetViews; // One render target view per depth slice
    VkImageView computeWriteView;
    VkImageView depthStencilView;
} VulkanTextureSubresource;

// One concrete VkImage plus its memory region, views, and subresources.
struct VulkanTexture
{
    VulkanTextureContainer *container; // owning container
    Uint32 containerIndex;             // index in container->textures

    VulkanMemoryUsedRegion *usedRegion;

    VkImage image;
    VkImageView fullView; // used for samplers and storage reads
    VkComponentMapping swizzle;
    VkImageAspectFlags aspectFlags;
    Uint32 depth; // used for cleanup only

    // FIXME: It'd be nice if we didn't have to have this on the texture...
    SDL_GPUTextureUsageFlags usage; // used for defrag transitions only.

    Uint32 subresourceCount;
    VulkanTextureSubresource *subresources;

    bool markedForDestroy; // so that defrag doesn't double-free
    SDL_AtomicInt referenceCount;
};

// Client-facing texture handle; holds multiple VulkanTextures so the active
// one can be cycled, mirroring VulkanBufferContainer.
struct VulkanTextureContainer
{
    TextureCommonHeader header;

    VulkanTexture *activeTexture;

    Uint32 textureCapacity;
    Uint32 textureCount;
    VulkanTexture **textures;

    char *debugName;
    bool canBeCycled;
};
667
// Logical access states used to pick barrier parameters for buffers.
typedef enum VulkanBufferUsageMode
{
    VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
    VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
    VULKAN_BUFFER_USAGE_MODE_VERTEX_READ,
    VULKAN_BUFFER_USAGE_MODE_INDEX_READ,
    VULKAN_BUFFER_USAGE_MODE_INDIRECT,
    VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ,
    VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
    VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
} VulkanBufferUsageMode;

// Logical access states used to pick barrier parameters / image layouts
// for texture subresources.
typedef enum VulkanTextureUsageMode
{
    VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
    VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
    VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
    VULKAN_TEXTURE_USAGE_MODE_SAMPLER,
    VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ,
    VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
    VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
    VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
    VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT,
    VULKAN_TEXTURE_USAGE_MODE_PRESENT
} VulkanTextureUsageMode;

// Which shader stage a uniform buffer is bound to.
typedef enum VulkanUniformBufferStage
{
    VULKAN_UNIFORM_BUFFER_STAGE_VERTEX,
    VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT,
    VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE
} VulkanUniformBufferStage;

// Reference-counted VkFramebuffer wrapper.
typedef struct VulkanFramebuffer
{
    VkFramebuffer framebuffer;
    SDL_AtomicInt referenceCount;
} VulkanFramebuffer;
706
// Everything the backend tracks per claimed SDL_Window: surface, swapchain,
// its images (wrapped as texture containers), and per-frame sync objects.
typedef struct WindowData
{
    SDL_Window *window;
    SDL_GPUSwapchainComposition swapchainComposition;
    SDL_GPUPresentMode presentMode;
    bool needsSwapchainRecreate;
    Uint32 swapchainCreateWidth;  // size requested at (re)creation time
    Uint32 swapchainCreateHeight;

    // Window surface
    VkSurfaceKHR surface;

    // Swapchain for window surface
    VkSwapchainKHR swapchain;
    VkFormat format;
    VkColorSpaceKHR colorSpace;
    VkComponentMapping swapchainSwizzle;
    bool usingFallbackFormat;

    // Swapchain images
    VulkanTextureContainer *textureContainers; // use containers so that swapchain textures can use the same API as other textures
    Uint32 imageCount;
    Uint32 width;
    Uint32 height;

    // Synchronization primitives
    VkSemaphore imageAvailableSemaphore[MAX_FRAMES_IN_FLIGHT];
    VkSemaphore renderFinishedSemaphore[MAX_FRAMES_IN_FLIGHT];
    SDL_GPUFence *inFlightFences[MAX_FRAMES_IN_FLIGHT];

    Uint32 frameCounter; // selects which per-frame sync slot to use
} WindowData;

// Surface capabilities/formats/present-modes queried for swapchain creation.
typedef struct SwapchainSupportDetails
{
    VkSurfaceCapabilitiesKHR capabilities;
    VkSurfaceFormatKHR *formats;
    Uint32 formatsLength;
    VkPresentModeKHR *presentModes;
    Uint32 presentModesLength;
} SwapchainSupportDetails;

// A pending present recorded on a command buffer: which window and which
// swapchain image to present at submit time.
typedef struct VulkanPresentData
{
    WindowData *windowData;
    Uint32 swapchainImageIndex;
} VulkanPresentData;

// A uniform buffer with separate draw/write cursors for sub-allocating
// per-draw uniform data within one VulkanBuffer.
typedef struct VulkanUniformBuffer
{
    VulkanBuffer *buffer;
    Uint32 drawOffset;
    Uint32 writeOffset;
} VulkanUniformBuffer;
761
// Descriptor type + stage pair used when building set layouts.
typedef struct VulkanDescriptorInfo
{
    VkDescriptorType descriptorType;
    VkShaderStageFlagBits stageFlag;
} VulkanDescriptorInfo;

// Grows-on-demand supply of descriptor sets for one layout.
typedef struct DescriptorSetPool
{
    // It's a pool... of pools!!!
    Uint32 poolCount;
    VkDescriptorPool *descriptorPools;

    // We'll just manage the descriptor sets ourselves instead of freeing the sets
    VkDescriptorSet *descriptorSets;
    Uint32 descriptorSetCount;
    Uint32 descriptorSetIndex; // next set to hand out
} DescriptorSetPool;

// A command buffer acquires a cache at command buffer acquisition time
typedef struct DescriptorSetCache
{
    // Pools are indexed by DescriptorSetLayoutID which increases monotonically
    // There's only a certain number of maximum layouts possible since we de-duplicate them.
    DescriptorSetPool *pools;
    Uint32 poolCount;
} DescriptorSetCache;

// Hash key for de-duplicating descriptor set layouts by stage + resource counts.
typedef struct DescriptorSetLayoutHashTableKey
{
    VkShaderStageFlagBits shaderStage;
    // Category 1: read resources
    Uint32 samplerCount;
    Uint32 storageBufferCount;
    Uint32 storageTextureCount;
    // Category 2: write resources
    Uint32 writeStorageBufferCount;
    Uint32 writeStorageTextureCount;
    // Category 3: uniform buffers
    Uint32 uniformBufferCount;
} DescriptorSetLayoutHashTableKey;

// Monotonically increasing ID assigned to each unique layout; indexes
// DescriptorSetCache::pools.
typedef uint32_t DescriptorSetLayoutID;

// A de-duplicated VkDescriptorSetLayout plus the counts it was built from.
typedef struct DescriptorSetLayout
{
    DescriptorSetLayoutID ID;
    VkDescriptorSetLayout descriptorSetLayout;

    // Category 1: read resources
    Uint32 samplerCount;
    Uint32 storageBufferCount;
    Uint32 storageTextureCount;
    // Category 2: write resources
    Uint32 writeStorageBufferCount;
    Uint32 writeStorageTextureCount;
    // Category 3: uniform buffers
    Uint32 uniformBufferCount;
} DescriptorSetLayout;

// Hash key for de-duplicating graphics pipeline layouts by per-stage counts.
typedef struct GraphicsPipelineResourceLayoutHashTableKey
{
    Uint32 vertexSamplerCount;
    Uint32 vertexStorageBufferCount;
    Uint32 vertexStorageTextureCount;
    Uint32 vertexUniformBufferCount;

    Uint32 fragmentSamplerCount;
    Uint32 fragmentStorageBufferCount;
    Uint32 fragmentStorageTextureCount;
    Uint32 fragmentUniformBufferCount;
} GraphicsPipelineResourceLayoutHashTableKey;
833
// Pipeline layout and descriptor set layouts for a graphics pipeline,
// plus the per-stage resource counts used to bind descriptors.
typedef struct VulkanGraphicsPipelineResourceLayout
{
    VkPipelineLayout pipelineLayout;

    /*
     * Descriptor set layout is as follows:
     * 0: vertex resources
     * 1: vertex uniform buffers
     * 2: fragment resources
     * 3: fragment uniform buffers
     */
    DescriptorSetLayout *descriptorSetLayouts[4];

    Uint32 vertexSamplerCount;
    Uint32 vertexStorageBufferCount;
    Uint32 vertexStorageTextureCount;
    Uint32 vertexUniformBufferCount;

    Uint32 fragmentSamplerCount;
    Uint32 fragmentStorageBufferCount;
    Uint32 fragmentStorageTextureCount;
    Uint32 fragmentUniformBufferCount;
} VulkanGraphicsPipelineResourceLayout;

// A compiled graphics pipeline; keeps its shaders alive via reference counts.
typedef struct VulkanGraphicsPipeline
{
    VkPipeline pipeline;
    SDL_GPUPrimitiveType primitiveType;

    VulkanGraphicsPipelineResourceLayout *resourceLayout;

    VulkanShader *vertexShader;
    VulkanShader *fragmentShader;

    SDL_AtomicInt referenceCount;
} VulkanGraphicsPipeline;

// Hash key for de-duplicating compute pipeline layouts by resource counts.
typedef struct ComputePipelineResourceLayoutHashTableKey
{
    Uint32 samplerCount;
    Uint32 readonlyStorageTextureCount;
    Uint32 readonlyStorageBufferCount;
    Uint32 readWriteStorageTextureCount;
    Uint32 readWriteStorageBufferCount;
    Uint32 uniformBufferCount;
} ComputePipelineResourceLayoutHashTableKey;

// Pipeline layout and descriptor set layouts for a compute pipeline.
typedef struct VulkanComputePipelineResourceLayout
{
    VkPipelineLayout pipelineLayout;

    /*
     * Descriptor set layout is as follows:
     * 0: samplers, then read-only textures, then read-only buffers
     * 1: write-only textures, then write-only buffers
     * 2: uniform buffers
     */
    DescriptorSetLayout *descriptorSetLayouts[3];

    Uint32 numSamplers;
    Uint32 numReadonlyStorageTextures;
    Uint32 numReadonlyStorageBuffers;
    Uint32 numReadWriteStorageTextures;
    Uint32 numReadWriteStorageBuffers;
    Uint32 numUniformBuffers;
} VulkanComputePipelineResourceLayout;

// A compiled compute pipeline and the shader module it was built from.
typedef struct VulkanComputePipeline
{
    VkShaderModule shaderModule;
    VkPipeline pipeline;
    VulkanComputePipelineResourceLayout *resourceLayout;
    SDL_AtomicInt referenceCount;
} VulkanComputePipeline;
908
// Format + load/store ops for one color target (part of render pass hashing).
typedef struct RenderPassColorTargetDescription
{
    VkFormat format;
    SDL_GPULoadOp loadOp;
    SDL_GPUStoreOp storeOp;
} RenderPassColorTargetDescription;

// Format + depth and stencil load/store ops (part of render pass hashing).
typedef struct RenderPassDepthStencilTargetDescription
{
    VkFormat format;
    SDL_GPULoadOp loadOp;
    SDL_GPUStoreOp storeOp;
    SDL_GPULoadOp stencilLoadOp;
    SDL_GPUStoreOp stencilStoreOp;
} RenderPassDepthStencilTargetDescription;

// Command pools are cached per thread.
typedef struct CommandPoolHashTableKey
{
    SDL_ThreadID threadID;
} CommandPoolHashTableKey;

// Hash key describing everything that makes a VkRenderPass unique.
typedef struct RenderPassHashTableKey
{
    RenderPassColorTargetDescription colorTargetDescriptions[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numColorTargets;
    VkFormat resolveTargetFormats[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numResolveTargets;
    RenderPassDepthStencilTargetDescription depthStencilTargetDescription;
    VkSampleCountFlagBits sampleCount;
} RenderPassHashTableKey;

// Cached VkRenderPass looked up by RenderPassHashTableKey.
typedef struct VulkanRenderPassHashTableValue
{
    VkRenderPass handle;
} VulkanRenderPassHashTableValue;

// Hash key describing everything that makes a VkFramebuffer unique.
typedef struct FramebufferHashTableKey
{
    VkImageView colorAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numColorTargets;
    VkImageView resolveAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
    Uint32 numResolveAttachments;
    VkImageView depthStencilAttachmentView;
    Uint32 width;
    Uint32 height;
} FramebufferHashTableKey;
955
956// Command structures
957
// Pool of reusable fence handles, guarded by its own lock.
typedef struct VulkanFencePool
{
    SDL_Mutex *lock;

    VulkanFenceHandle **availableFences;
    Uint32 availableFenceCount;
    Uint32 availableFenceCapacity;
} VulkanFencePool;

// Forward declarations; definitions follow later in the file.
typedef struct VulkanCommandPool VulkanCommandPool;

typedef struct VulkanRenderer VulkanRenderer;
970
// Per-command-buffer recording state. Wraps a VkCommandBuffer together with
// all bookkeeping needed to build its submission (semaphores, presents),
// track bound resources, and hold references that keep resources alive
// until the in-flight fence signals.
typedef struct VulkanCommandBuffer
{
    CommandBufferCommonHeader common;
    VulkanRenderer *renderer;

    VkCommandBuffer commandBuffer;
    VulkanCommandPool *commandPool; // pool this buffer was allocated from

    // Swapchain presents queued for this command buffer's submission
    VulkanPresentData *presentDatas;
    Uint32 presentDataCount;
    Uint32 presentDataCapacity;

    // Semaphores the submission waits on before executing
    VkSemaphore *waitSemaphores;
    Uint32 waitSemaphoreCount;
    Uint32 waitSemaphoreCapacity;

    // Semaphores the submission signals on completion
    VkSemaphore *signalSemaphores;
    Uint32 signalSemaphoreCount;
    Uint32 signalSemaphoreCapacity;

    VulkanComputePipeline *currentComputePipeline;
    VulkanGraphicsPipeline *currentGraphicsPipeline;

    // Keep track of resources transitioned away from their default state to barrier them on pass end

    VulkanTextureSubresource *colorAttachmentSubresources[MAX_COLOR_TARGET_BINDINGS];
    Uint32 colorAttachmentSubresourceCount;
    VulkanTextureSubresource *resolveAttachmentSubresources[MAX_COLOR_TARGET_BINDINGS];
    Uint32 resolveAttachmentSubresourceCount;

    VulkanTextureSubresource *depthStencilAttachmentSubresource; // may be NULL

    // Dynamic state

    VkViewport currentViewport;
    VkRect2D currentScissor;
    float blendConstants[4];
    Uint8 stencilRef;

    // Resource bind state

    DescriptorSetCache *descriptorSetCache; // acquired when command buffer is acquired

    // Dirty flags: set when a binding changes, cleared when the
    // corresponding descriptor set / offsets are rewritten.
    bool needNewVertexResourceDescriptorSet;
    bool needNewVertexUniformDescriptorSet;
    bool needNewVertexUniformOffsets;
    bool needNewFragmentResourceDescriptorSet;
    bool needNewFragmentUniformDescriptorSet;
    bool needNewFragmentUniformOffsets;

    bool needNewComputeReadOnlyDescriptorSet;
    bool needNewComputeReadWriteDescriptorSet;
    bool needNewComputeUniformDescriptorSet;
    bool needNewComputeUniformOffsets;

    // Currently bound descriptor sets, rebuilt on demand from the arrays below
    VkDescriptorSet vertexResourceDescriptorSet;
    VkDescriptorSet vertexUniformDescriptorSet;
    VkDescriptorSet fragmentResourceDescriptorSet;
    VkDescriptorSet fragmentUniformDescriptorSet;

    VkDescriptorSet computeReadOnlyDescriptorSet;
    VkDescriptorSet computeReadWriteDescriptorSet;
    VkDescriptorSet computeUniformDescriptorSet;

    VulkanTexture *vertexSamplerTextures[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanSampler *vertexSamplers[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanTexture *vertexStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE];
    VulkanBuffer *vertexStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE];

    VulkanTexture *fragmentSamplerTextures[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanSampler *fragmentSamplers[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanTexture *fragmentStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE];
    VulkanBuffer *fragmentStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE];

    VulkanTextureSubresource *readWriteComputeStorageTextureSubresources[MAX_COMPUTE_WRITE_TEXTURES];
    Uint32 readWriteComputeStorageTextureSubresourceCount;
    VulkanBuffer *readWriteComputeStorageBuffers[MAX_COMPUTE_WRITE_BUFFERS];

    VulkanTexture *computeSamplerTextures[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanSampler *computeSamplers[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanTexture *readOnlyComputeStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE];
    VulkanBuffer *readOnlyComputeStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE];

    // Uniform buffers

    VulkanUniformBuffer *vertexUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    VulkanUniformBuffer *fragmentUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    VulkanUniformBuffer *computeUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];

    // Track used resources
    // Reference lists keeping resources alive until this command buffer
    // finishes execution.

    VulkanBuffer **usedBuffers;
    Sint32 usedBufferCount;
    Sint32 usedBufferCapacity;

    VulkanTexture **usedTextures;
    Sint32 usedTextureCount;
    Sint32 usedTextureCapacity;

    VulkanSampler **usedSamplers;
    Sint32 usedSamplerCount;
    Sint32 usedSamplerCapacity;

    VulkanGraphicsPipeline **usedGraphicsPipelines;
    Sint32 usedGraphicsPipelineCount;
    Sint32 usedGraphicsPipelineCapacity;

    VulkanComputePipeline **usedComputePipelines;
    Sint32 usedComputePipelineCount;
    Sint32 usedComputePipelineCapacity;

    VulkanFramebuffer **usedFramebuffers;
    Sint32 usedFramebufferCount;
    Sint32 usedFramebufferCapacity;

    VulkanUniformBuffer **usedUniformBuffers;
    Sint32 usedUniformBufferCount;
    Sint32 usedUniformBufferCapacity;

    VulkanFenceHandle *inFlightFence; // signaled when the GPU finishes this submission
    bool autoReleaseFence;            // release the fence back to the pool automatically?

    bool isDefrag; // Whether this CB was created for defragging
} VulkanCommandBuffer;
1095
// A VkCommandPool plus a recycle list of command buffers allocated from it.
// NOTE(review): the threadID field suggests one pool per thread (fetched via
// renderer->commandPoolHashTable) — confirm against the fetch path.
struct VulkanCommandPool
{
    SDL_ThreadID threadID;
    VkCommandPool commandPool;

    VulkanCommandBuffer **inactiveCommandBuffers;
    Uint32 inactiveCommandBufferCapacity;
    Uint32 inactiveCommandBufferCount;
};
1105
1106// Context
1107
// Top-level state for the Vulkan GPU backend: device handles, feature and
// extension support, the sub-allocating memory system, object caches,
// deferred-destruction queues, and the loaded Vulkan entry points.
struct VulkanRenderer
{
    VkInstance instance;
    VkPhysicalDevice physicalDevice;
    VkPhysicalDeviceProperties2KHR physicalDeviceProperties;
    VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties;
    VkDevice logicalDevice;

    // One-shot warning latches so we only log each condition once
    Uint8 integratedMemoryNotification;
    Uint8 outOfDeviceLocalMemoryWarning;
    Uint8 outofBARMemoryWarning;
    Uint8 fillModeOnlyWarning;

    bool debugMode;
    bool preferLowPower;
    Uint32 allowedFramesInFlight;

    // Extension / feature support detected at device creation
    VulkanExtensions supports;
    bool supportsDebugUtils;
    bool supportsColorspace;
    bool supportsFillModeNonSolid;
    bool supportsMultiDrawIndirect;

    VulkanMemoryAllocator *memoryAllocator;
    VkPhysicalDeviceMemoryProperties memoryProperties;

    WindowData **claimedWindows;
    Uint32 claimedWindowCount;
    Uint32 claimedWindowCapacity;

    // Single unified graphics/present queue
    Uint32 queueFamilyIndex;
    VkQueue unifiedQueue;

    // Command buffers submitted and not yet known to be finished
    VulkanCommandBuffer **submittedCommandBuffers;
    Uint32 submittedCommandBufferCount;
    Uint32 submittedCommandBufferCapacity;

    VulkanFencePool fencePool;

    // Object caches
    SDL_HashTable *commandPoolHashTable;
    SDL_HashTable *renderPassHashTable;
    SDL_HashTable *framebufferHashTable;
    SDL_HashTable *graphicsPipelineResourceLayoutHashTable;
    SDL_HashTable *computePipelineResourceLayoutHashTable;
    SDL_HashTable *descriptorSetLayoutHashTable;

    VulkanUniformBuffer **uniformBufferPool;
    Uint32 uniformBufferPoolCount;
    Uint32 uniformBufferPoolCapacity;

    DescriptorSetCache **descriptorSetCachePool;
    Uint32 descriptorSetCachePoolCount;
    Uint32 descriptorSetCachePoolCapacity;

    SDL_AtomicInt layoutResourceID;

    Uint32 minUBOAlignment; // device minUniformBufferOffsetAlignment

    // Deferred resource destruction
    // Resources released by the application are queued here and destroyed
    // once no submitted command buffer references them.

    VulkanTexture **texturesToDestroy;
    Uint32 texturesToDestroyCount;
    Uint32 texturesToDestroyCapacity;

    VulkanBuffer **buffersToDestroy;
    Uint32 buffersToDestroyCount;
    Uint32 buffersToDestroyCapacity;

    VulkanSampler **samplersToDestroy;
    Uint32 samplersToDestroyCount;
    Uint32 samplersToDestroyCapacity;

    VulkanGraphicsPipeline **graphicsPipelinesToDestroy;
    Uint32 graphicsPipelinesToDestroyCount;
    Uint32 graphicsPipelinesToDestroyCapacity;

    VulkanComputePipeline **computePipelinesToDestroy;
    Uint32 computePipelinesToDestroyCount;
    Uint32 computePipelinesToDestroyCapacity;

    VulkanShader **shadersToDestroy;
    Uint32 shadersToDestroyCount;
    Uint32 shadersToDestroyCapacity;

    VulkanFramebuffer **framebuffersToDestroy;
    Uint32 framebuffersToDestroyCount;
    Uint32 framebuffersToDestroyCapacity;

    // Locks, one per independently contended subsystem
    SDL_Mutex *allocatorLock;
    SDL_Mutex *disposeLock;
    SDL_Mutex *submitLock;
    SDL_Mutex *acquireCommandBufferLock;
    SDL_Mutex *acquireUniformBufferLock;
    SDL_Mutex *renderPassFetchLock;
    SDL_Mutex *framebufferFetchLock;
    SDL_Mutex *windowLock;

    Uint8 defragInProgress;

    VulkanMemoryAllocation **allocationsToDefrag;
    Uint32 allocationsToDefragCount;
    Uint32 allocationsToDefragCapacity;

// Expand every loaded Vulkan entry point into a function-pointer member
#define VULKAN_INSTANCE_FUNCTION(func) \
    PFN_##func func;
#define VULKAN_DEVICE_FUNCTION(func) \
    PFN_##func func;
#include "SDL_gpu_vulkan_vkfuncs.h"
};
1216
1217// Forward declarations
1218
1219static bool VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer);
1220static bool VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
1221static void VULKAN_ReleaseWindow(SDL_GPURenderer *driverData, SDL_Window *window);
1222static bool VULKAN_Wait(SDL_GPURenderer *driverData);
1223static bool VULKAN_WaitForFences(SDL_GPURenderer *driverData, bool waitAll, SDL_GPUFence *const *fences, Uint32 numFences);
1224static bool VULKAN_Submit(SDL_GPUCommandBuffer *commandBuffer);
1225static SDL_GPUCommandBuffer *VULKAN_AcquireCommandBuffer(SDL_GPURenderer *driverData);
1226
1227// Error Handling
1228
1229static inline const char *VkErrorMessages(VkResult code)
1230{
1231#define ERR_TO_STR(e) \
1232 case e: \
1233 return #e;
1234 switch (code) {
1235 ERR_TO_STR(VK_ERROR_OUT_OF_HOST_MEMORY)
1236 ERR_TO_STR(VK_ERROR_OUT_OF_DEVICE_MEMORY)
1237 ERR_TO_STR(VK_ERROR_FRAGMENTED_POOL)
1238 ERR_TO_STR(VK_ERROR_OUT_OF_POOL_MEMORY)
1239 ERR_TO_STR(VK_ERROR_INITIALIZATION_FAILED)
1240 ERR_TO_STR(VK_ERROR_LAYER_NOT_PRESENT)
1241 ERR_TO_STR(VK_ERROR_EXTENSION_NOT_PRESENT)
1242 ERR_TO_STR(VK_ERROR_FEATURE_NOT_PRESENT)
1243 ERR_TO_STR(VK_ERROR_TOO_MANY_OBJECTS)
1244 ERR_TO_STR(VK_ERROR_DEVICE_LOST)
1245 ERR_TO_STR(VK_ERROR_INCOMPATIBLE_DRIVER)
1246 ERR_TO_STR(VK_ERROR_OUT_OF_DATE_KHR)
1247 ERR_TO_STR(VK_ERROR_SURFACE_LOST_KHR)
1248 ERR_TO_STR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT)
1249 ERR_TO_STR(VK_SUBOPTIMAL_KHR)
1250 ERR_TO_STR(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR)
1251 default:
1252 return "Unhandled VkResult!";
1253 }
1254#undef ERR_TO_STR
1255}
1256
// Set the SDL error to `fmt` formatted with `msg` (also logging it when
// debug mode is on) and return `ret` from the enclosing function.
// Requires a `VulkanRenderer *renderer` in scope at the expansion site.
#define SET_ERROR_AND_RETURN(fmt, msg, ret) \
    do { \
        if (renderer->debugMode) { \
            SDL_LogError(SDL_LOG_CATEGORY_GPU, fmt, msg); \
        } \
        SDL_SetError((fmt), (msg)); \
        return ret; \
    } while (0)

// Convenience wrapper for a plain string message.
#define SET_STRING_ERROR_AND_RETURN(msg, ret) SET_ERROR_AND_RETURN("%s", msg, ret)

// If `res` is not VK_SUCCESS, set/log "<fn> <result name>" and return `ret`.
// Requires a `VulkanRenderer *renderer` in scope at the expansion site.
#define CHECK_VULKAN_ERROR_AND_RETURN(res, fn, ret) \
    do { \
        if ((res) != VK_SUCCESS) { \
            if (renderer->debugMode) { \
                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s %s", #fn, VkErrorMessages(res)); \
            } \
            SDL_SetError("%s %s", #fn, VkErrorMessages(res)); \
            return (ret); \
        } \
    } while (0)
1278
1279// Utility
1280
1281static inline VkPolygonMode SDLToVK_PolygonMode(
1282 VulkanRenderer *renderer,
1283 SDL_GPUFillMode mode)
1284{
1285 if (mode == SDL_GPU_FILLMODE_FILL) {
1286 return VK_POLYGON_MODE_FILL; // always available!
1287 }
1288
1289 if (renderer->supportsFillModeNonSolid && mode == SDL_GPU_FILLMODE_LINE) {
1290 return VK_POLYGON_MODE_LINE;
1291 }
1292
1293 if (!renderer->fillModeOnlyWarning) {
1294 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Unsupported fill mode requested, using FILL!");
1295 renderer->fillModeOnlyWarning = 1;
1296 }
1297 return VK_POLYGON_MODE_FILL;
1298}
1299
1300// Memory Management
1301
1302// Vulkan: Memory Allocation
1303
1304static inline VkDeviceSize VULKAN_INTERNAL_NextHighestAlignment(
1305 VkDeviceSize n,
1306 VkDeviceSize align)
1307{
1308 return align * ((n + align - 1) / align);
1309}
1310
1311static inline Uint32 VULKAN_INTERNAL_NextHighestAlignment32(
1312 Uint32 n,
1313 Uint32 align)
1314{
1315 return align * ((n + align - 1) / align);
1316}
1317
// Take `allocation` out of circulation: clear its availableForAllocation
// flag and remove every one of its free regions from the sub-allocator's
// size-sorted free-region list so the bind path never selects it again.
// Used when marking allocations for defragmentation.
// NOTE(review): mutates the shared sorted list without taking
// renderer->allocatorLock — assumes the caller holds it; confirm.
// (`renderer` is unused here but kept for call-site symmetry.)
static void VULKAN_INTERNAL_MakeMemoryUnavailable(
    VulkanRenderer *renderer,
    VulkanMemoryAllocation *allocation)
{
    Uint32 i, j;
    VulkanMemoryFreeRegion *freeRegion;

    allocation->availableForAllocation = 0;

    for (i = 0; i < allocation->freeRegionCount; i += 1) {
        freeRegion = allocation->freeRegions[i];

        // close the gap in the sorted list
        if (allocation->allocator->sortedFreeRegionCount > 1) {
            // Shift everything after this region down one slot, keeping
            // each moved entry's sortedIndex in sync.
            for (j = freeRegion->sortedIndex; j < allocation->allocator->sortedFreeRegionCount - 1; j += 1) {
                allocation->allocator->sortedFreeRegions[j] =
                    allocation->allocator->sortedFreeRegions[j + 1];

                allocation->allocator->sortedFreeRegions[j]->sortedIndex = j;
            }
        }

        allocation->allocator->sortedFreeRegionCount -= 1;
    }
}
1343
// Scan every memory type's sub-allocator and queue fragmented allocations
// (more than one free region) for defragmentation, making each queued
// allocation unavailable for new binds.
// NOTE(review): touches shared allocator state without locking — assumes
// the caller holds renderer->allocatorLock; confirm.
static void VULKAN_INTERNAL_MarkAllocationsForDefrag(
    VulkanRenderer *renderer)
{
    Uint32 memoryType, allocationIndex;
    VulkanMemorySubAllocator *currentAllocator;

    for (memoryType = 0; memoryType < VK_MAX_MEMORY_TYPES; memoryType += 1) {
        currentAllocator = &renderer->memoryAllocator->subAllocators[memoryType];

        for (allocationIndex = 0; allocationIndex < currentAllocator->allocationCount; allocationIndex += 1) {
            if (currentAllocator->allocations[allocationIndex]->availableForAllocation == 1) {
                // More than one free region means the allocation is
                // fragmented and worth compacting.
                if (currentAllocator->allocations[allocationIndex]->freeRegionCount > 1) {
                    EXPAND_ARRAY_IF_NEEDED(
                        renderer->allocationsToDefrag,
                        VulkanMemoryAllocation *,
                        renderer->allocationsToDefragCount + 1,
                        renderer->allocationsToDefragCapacity,
                        renderer->allocationsToDefragCapacity * 2);

                    renderer->allocationsToDefrag[renderer->allocationsToDefragCount] =
                        currentAllocator->allocations[allocationIndex];

                    renderer->allocationsToDefragCount += 1;

                    VULKAN_INTERNAL_MakeMemoryUnavailable(
                        renderer,
                        currentAllocator->allocations[allocationIndex]);
                }
            }
        }
    }
}
1376
// Remove `freeRegion` from both the sub-allocator's size-sorted list (only
// if the owning allocation is still available for allocation — unavailable
// allocations were already pulled out by MakeMemoryUnavailable) and the
// allocation's own free-region array, then free it. Takes allocatorLock;
// SDL mutexes are recursive, so the nested lock taken when called from
// VULKAN_INTERNAL_NewMemoryFreeRegion is safe.
static void VULKAN_INTERNAL_RemoveMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryFreeRegion *freeRegion)
{
    Uint32 i;

    SDL_LockMutex(renderer->allocatorLock);

    if (freeRegion->allocation->availableForAllocation) {
        // close the gap in the sorted list
        if (freeRegion->allocation->allocator->sortedFreeRegionCount > 1) {
            for (i = freeRegion->sortedIndex; i < freeRegion->allocation->allocator->sortedFreeRegionCount - 1; i += 1) {
                freeRegion->allocation->allocator->sortedFreeRegions[i] =
                    freeRegion->allocation->allocator->sortedFreeRegions[i + 1];

                freeRegion->allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
            }
        }

        freeRegion->allocation->allocator->sortedFreeRegionCount -= 1;
    }

    // close the gap in the buffer list
    // (swap-remove: move the last region into this region's slot and fix
    // its back-pointer index)
    if (freeRegion->allocation->freeRegionCount > 1 && freeRegion->allocationIndex != freeRegion->allocation->freeRegionCount - 1) {
        freeRegion->allocation->freeRegions[freeRegion->allocationIndex] =
            freeRegion->allocation->freeRegions[freeRegion->allocation->freeRegionCount - 1];

        freeRegion->allocation->freeRegions[freeRegion->allocationIndex]->allocationIndex =
            freeRegion->allocationIndex;
    }

    freeRegion->allocation->freeRegionCount -= 1;

    freeRegion->allocation->freeSpace -= freeRegion->size;

    SDL_free(freeRegion);

    SDL_UnlockMutex(renderer->allocatorLock);
}
1416
// Record [offset, offset+size) as free space in `allocation`. If the range
// is adjacent to an existing free region, the two are coalesced by removing
// the neighbor and recursing with the merged range (recursion depth is
// bounded because each merge removes a region). Otherwise a new region is
// appended to the allocation's array and — if the allocation is available —
// inserted into the sub-allocator's sortedFreeRegions list, which is kept
// in descending size order. Takes allocatorLock (recursive, so the nested
// lock during merge recursion is safe).
static void VULKAN_INTERNAL_NewMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryAllocation *allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VulkanMemoryFreeRegion *newFreeRegion;
    VkDeviceSize newOffset, newSize;
    Sint32 insertionIndex = 0;

    SDL_LockMutex(renderer->allocatorLock);

    // look for an adjacent region to merge
    for (Sint32 i = allocation->freeRegionCount - 1; i >= 0; i -= 1) {
        // check left side
        if (allocation->freeRegions[i]->offset + allocation->freeRegions[i]->size == offset) {
            newOffset = allocation->freeRegions[i]->offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }

        // check right side
        if (allocation->freeRegions[i]->offset == offset + size) {
            newOffset = offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }
    }

    // region is not contiguous with another free region, make a new one
    allocation->freeRegionCount += 1;
    if (allocation->freeRegionCount > allocation->freeRegionCapacity) {
        allocation->freeRegionCapacity *= 2;
        allocation->freeRegions = SDL_realloc(
            allocation->freeRegions,
            sizeof(VulkanMemoryFreeRegion *) * allocation->freeRegionCapacity);
    }

    newFreeRegion = SDL_malloc(sizeof(VulkanMemoryFreeRegion));
    newFreeRegion->offset = offset;
    newFreeRegion->size = size;
    newFreeRegion->allocation = allocation;

    allocation->freeSpace += size;

    allocation->freeRegions[allocation->freeRegionCount - 1] = newFreeRegion;
    newFreeRegion->allocationIndex = allocation->freeRegionCount - 1;

    if (allocation->availableForAllocation) {
        // Find the first entry smaller than the new region; inserting
        // there keeps sortedFreeRegions in descending size order.
        for (Uint32 i = 0; i < allocation->allocator->sortedFreeRegionCount; i += 1) {
            if (allocation->allocator->sortedFreeRegions[i]->size < size) {
                // this is where the new region should go
                break;
            }

            insertionIndex += 1;
        }

        if (allocation->allocator->sortedFreeRegionCount + 1 > allocation->allocator->sortedFreeRegionCapacity) {
            allocation->allocator->sortedFreeRegionCapacity *= 2;
            allocation->allocator->sortedFreeRegions = SDL_realloc(
                allocation->allocator->sortedFreeRegions,
                sizeof(VulkanMemoryFreeRegion *) * allocation->allocator->sortedFreeRegionCapacity);
        }

        // perform insertion sort
        // (shift the tail up one slot, fixing each moved entry's sortedIndex)
        if (allocation->allocator->sortedFreeRegionCount > 0 && (Uint32)insertionIndex != allocation->allocator->sortedFreeRegionCount) {
            for (Sint32 i = allocation->allocator->sortedFreeRegionCount; i > insertionIndex && i > 0; i -= 1) {
                allocation->allocator->sortedFreeRegions[i] = allocation->allocator->sortedFreeRegions[i - 1];
                allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
            }
        }

        allocation->allocator->sortedFreeRegionCount += 1;
        allocation->allocator->sortedFreeRegions[insertionIndex] = newFreeRegion;
        newFreeRegion->sortedIndex = insertionIndex;
    }

    SDL_UnlockMutex(renderer->allocatorLock);
}
1507
1508static VulkanMemoryUsedRegion *VULKAN_INTERNAL_NewMemoryUsedRegion(
1509 VulkanRenderer *renderer,
1510 VulkanMemoryAllocation *allocation,
1511 VkDeviceSize offset,
1512 VkDeviceSize size,
1513 VkDeviceSize resourceOffset,
1514 VkDeviceSize resourceSize,
1515 VkDeviceSize alignment)
1516{
1517 VulkanMemoryUsedRegion *memoryUsedRegion;
1518
1519 SDL_LockMutex(renderer->allocatorLock);
1520
1521 if (allocation->usedRegionCount == allocation->usedRegionCapacity) {
1522 allocation->usedRegionCapacity *= 2;
1523 allocation->usedRegions = SDL_realloc(
1524 allocation->usedRegions,
1525 allocation->usedRegionCapacity * sizeof(VulkanMemoryUsedRegion *));
1526 }
1527
1528 memoryUsedRegion = SDL_malloc(sizeof(VulkanMemoryUsedRegion));
1529 memoryUsedRegion->allocation = allocation;
1530 memoryUsedRegion->offset = offset;
1531 memoryUsedRegion->size = size;
1532 memoryUsedRegion->resourceOffset = resourceOffset;
1533 memoryUsedRegion->resourceSize = resourceSize;
1534 memoryUsedRegion->alignment = alignment;
1535
1536 allocation->usedSpace += size;
1537
1538 allocation->usedRegions[allocation->usedRegionCount] = memoryUsedRegion;
1539 allocation->usedRegionCount += 1;
1540
1541 SDL_UnlockMutex(renderer->allocatorLock);
1542
1543 return memoryUsedRegion;
1544}
1545
// Unregister `usedRegion` from its allocation (swap-remove from the
// usedRegions array), return its span to the free list via
// VULKAN_INTERNAL_NewMemoryFreeRegion (which may coalesce with neighbors),
// and free the record. Takes allocatorLock (recursive, so the nested lock
// inside NewMemoryFreeRegion is safe).
static void VULKAN_INTERNAL_RemoveMemoryUsedRegion(
    VulkanRenderer *renderer,
    VulkanMemoryUsedRegion *usedRegion)
{
    Uint32 i;

    SDL_LockMutex(renderer->allocatorLock);

    for (i = 0; i < usedRegion->allocation->usedRegionCount; i += 1) {
        if (usedRegion->allocation->usedRegions[i] == usedRegion) {
            // plug the hole
            if (i != usedRegion->allocation->usedRegionCount - 1) {
                usedRegion->allocation->usedRegions[i] = usedRegion->allocation->usedRegions[usedRegion->allocation->usedRegionCount - 1];
            }

            break;
        }
    }

    usedRegion->allocation->usedSpace -= usedRegion->size;

    usedRegion->allocation->usedRegionCount -= 1;

    // Hand the span back to the free-region bookkeeping
    VULKAN_INTERNAL_NewMemoryFreeRegion(
        renderer,
        usedRegion->allocation,
        usedRegion->offset,
        usedRegion->size);

    SDL_free(usedRegion);

    SDL_UnlockMutex(renderer->allocatorLock);
}
1579
1580static bool VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
1581 Uint32 memoryTypeIndex,
1582 Uint32 *memoryTypeIndexArray,
1583 Uint32 count)
1584{
1585 Uint32 i = 0;
1586
1587 for (i = 0; i < count; i += 1) {
1588 if (memoryTypeIndexArray[i] == memoryTypeIndex) {
1589 return false;
1590 }
1591 }
1592
1593 return true;
1594}
1595
1596/* Returns an array of memory type indices in order of preference.
1597 * Memory types are requested with the following three guidelines:
1598 *
1599 * Required: Absolutely necessary
1600 * Preferred: Nice to have, but not necessary
1601 * Tolerable: Can be allowed if there are no other options
1602 *
1603 * We return memory types in this order:
1604 * 1. Required and preferred. This is the best category.
1605 * 2. Required only.
1606 * 3. Required, preferred, and tolerable.
1607 * 4. Required and tolerable. This is the worst category.
1608 */
1609static Uint32 *VULKAN_INTERNAL_FindBestMemoryTypes(
1610 VulkanRenderer *renderer,
1611 Uint32 typeFilter,
1612 VkMemoryPropertyFlags requiredProperties,
1613 VkMemoryPropertyFlags preferredProperties,
1614 VkMemoryPropertyFlags tolerableProperties,
1615 Uint32 *pCount)
1616{
1617 Uint32 i;
1618 Uint32 index = 0;
1619 Uint32 *result = SDL_malloc(sizeof(Uint32) * renderer->memoryProperties.memoryTypeCount);
1620
1621 // required + preferred + !tolerable
1622 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
1623 if ((typeFilter & (1 << i)) &&
1624 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
1625 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == preferredProperties &&
1626 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) {
1627 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
1628 i,
1629 result,
1630 index)) {
1631 result[index] = i;
1632 index += 1;
1633 }
1634 }
1635 }
1636
1637 // required + !preferred + !tolerable
1638 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
1639 if ((typeFilter & (1 << i)) &&
1640 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
1641 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 &&
1642 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) {
1643 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
1644 i,
1645 result,
1646 index)) {
1647 result[index] = i;
1648 index += 1;
1649 }
1650 }
1651 }
1652
1653 // required + preferred + tolerable
1654 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
1655 if ((typeFilter & (1 << i)) &&
1656 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
1657 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == preferredProperties &&
1658 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) {
1659 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
1660 i,
1661 result,
1662 index)) {
1663 result[index] = i;
1664 index += 1;
1665 }
1666 }
1667 }
1668
1669 // required + !preferred + tolerable
1670 for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
1671 if ((typeFilter & (1 << i)) &&
1672 (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
1673 (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 &&
1674 (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) {
1675 if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
1676 i,
1677 result,
1678 index)) {
1679 result[index] = i;
1680 index += 1;
1681 }
1682 }
1683 }
1684
1685 *pCount = index;
1686 return result;
1687}
1688
1689static Uint32 *VULKAN_INTERNAL_FindBestBufferMemoryTypes(
1690 VulkanRenderer *renderer,
1691 VkBuffer buffer,
1692 VkMemoryPropertyFlags requiredMemoryProperties,
1693 VkMemoryPropertyFlags preferredMemoryProperties,
1694 VkMemoryPropertyFlags tolerableMemoryProperties,
1695 VkMemoryRequirements *pMemoryRequirements,
1696 Uint32 *pCount)
1697{
1698 renderer->vkGetBufferMemoryRequirements(
1699 renderer->logicalDevice,
1700 buffer,
1701 pMemoryRequirements);
1702
1703 return VULKAN_INTERNAL_FindBestMemoryTypes(
1704 renderer,
1705 pMemoryRequirements->memoryTypeBits,
1706 requiredMemoryProperties,
1707 preferredMemoryProperties,
1708 tolerableMemoryProperties,
1709 pCount);
1710}
1711
1712static Uint32 *VULKAN_INTERNAL_FindBestImageMemoryTypes(
1713 VulkanRenderer *renderer,
1714 VkImage image,
1715 VkMemoryPropertyFlags preferredMemoryPropertyFlags,
1716 VkMemoryRequirements *pMemoryRequirements,
1717 Uint32 *pCount)
1718{
1719 renderer->vkGetImageMemoryRequirements(
1720 renderer->logicalDevice,
1721 image,
1722 pMemoryRequirements);
1723
1724 return VULKAN_INTERNAL_FindBestMemoryTypes(
1725 renderer,
1726 pMemoryRequirements->memoryTypeBits,
1727 0,
1728 preferredMemoryPropertyFlags,
1729 0,
1730 pCount);
1731}
1732
1733static void VULKAN_INTERNAL_DeallocateMemory(
1734 VulkanRenderer *renderer,
1735 VulkanMemorySubAllocator *allocator,
1736 Uint32 allocationIndex)
1737{
1738 Uint32 i;
1739
1740 VulkanMemoryAllocation *allocation = allocator->allocations[allocationIndex];
1741
1742 SDL_LockMutex(renderer->allocatorLock);
1743
1744 // If this allocation was marked for defrag, cancel that
1745 for (i = 0; i < renderer->allocationsToDefragCount; i += 1) {
1746 if (allocation == renderer->allocationsToDefrag[i]) {
1747 renderer->allocationsToDefrag[i] = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
1748 renderer->allocationsToDefragCount -= 1;
1749
1750 break;
1751 }
1752 }
1753
1754 for (i = 0; i < allocation->freeRegionCount; i += 1) {
1755 VULKAN_INTERNAL_RemoveMemoryFreeRegion(
1756 renderer,
1757 allocation->freeRegions[i]);
1758 }
1759 SDL_free(allocation->freeRegions);
1760
1761 /* no need to iterate used regions because deallocate
1762 * only happens when there are 0 used regions
1763 */
1764 SDL_free(allocation->usedRegions);
1765
1766 renderer->vkFreeMemory(
1767 renderer->logicalDevice,
1768 allocation->memory,
1769 NULL);
1770
1771 SDL_DestroyMutex(allocation->memoryLock);
1772 SDL_free(allocation);
1773
1774 if (allocationIndex != allocator->allocationCount - 1) {
1775 allocator->allocations[allocationIndex] = allocator->allocations[allocator->allocationCount - 1];
1776 }
1777
1778 allocator->allocationCount -= 1;
1779
1780 SDL_UnlockMutex(renderer->allocatorLock);
1781}
1782
1783static Uint8 VULKAN_INTERNAL_AllocateMemory(
1784 VulkanRenderer *renderer,
1785 VkBuffer buffer,
1786 VkImage image,
1787 Uint32 memoryTypeIndex,
1788 VkDeviceSize allocationSize,
1789 Uint8 isHostVisible,
1790 VulkanMemoryAllocation **pMemoryAllocation)
1791{
1792 VulkanMemoryAllocation *allocation;
1793 VulkanMemorySubAllocator *allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
1794 VkMemoryAllocateInfo allocInfo;
1795 VkResult result;
1796
1797 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1798 allocInfo.pNext = NULL;
1799 allocInfo.memoryTypeIndex = memoryTypeIndex;
1800 allocInfo.allocationSize = allocationSize;
1801
1802 allocation = SDL_malloc(sizeof(VulkanMemoryAllocation));
1803 allocation->size = allocationSize;
1804 allocation->freeSpace = 0; // added by FreeRegions
1805 allocation->usedSpace = 0; // added by UsedRegions
1806 allocation->memoryLock = SDL_CreateMutex();
1807
1808 allocator->allocationCount += 1;
1809 allocator->allocations = SDL_realloc(
1810 allocator->allocations,
1811 sizeof(VulkanMemoryAllocation *) * allocator->allocationCount);
1812
1813 allocator->allocations[allocator->allocationCount - 1] = allocation;
1814
1815 allocInfo.pNext = NULL;
1816 allocation->availableForAllocation = 1;
1817
1818 allocation->usedRegions = SDL_malloc(sizeof(VulkanMemoryUsedRegion *));
1819 allocation->usedRegionCount = 0;
1820 allocation->usedRegionCapacity = 1;
1821
1822 allocation->freeRegions = SDL_malloc(sizeof(VulkanMemoryFreeRegion *));
1823 allocation->freeRegionCount = 0;
1824 allocation->freeRegionCapacity = 1;
1825
1826 allocation->allocator = allocator;
1827
1828 result = renderer->vkAllocateMemory(
1829 renderer->logicalDevice,
1830 &allocInfo,
1831 NULL,
1832 &allocation->memory);
1833
1834 if (result != VK_SUCCESS) {
1835 // Uh oh, we couldn't allocate, time to clean up
1836 SDL_free(allocation->freeRegions);
1837
1838 allocator->allocationCount -= 1;
1839 allocator->allocations = SDL_realloc(
1840 allocator->allocations,
1841 sizeof(VulkanMemoryAllocation *) * allocator->allocationCount);
1842
1843 SDL_free(allocation);
1844
1845 return 0;
1846 }
1847
1848 // Persistent mapping for host-visible memory
1849 if (isHostVisible) {
1850 result = renderer->vkMapMemory(
1851 renderer->logicalDevice,
1852 allocation->memory,
1853 0,
1854 VK_WHOLE_SIZE,
1855 0,
1856 (void **)&allocation->mapPointer);
1857 CHECK_VULKAN_ERROR_AND_RETURN(result, vkMapMemory, 0);
1858 } else {
1859 allocation->mapPointer = NULL;
1860 }
1861
1862 VULKAN_INTERNAL_NewMemoryFreeRegion(
1863 renderer,
1864 allocation,
1865 0,
1866 allocation->size);
1867
1868 *pMemoryAllocation = allocation;
1869 return 1;
1870}
1871
1872static Uint8 VULKAN_INTERNAL_BindBufferMemory(
1873 VulkanRenderer *renderer,
1874 VulkanMemoryUsedRegion *usedRegion,
1875 VkDeviceSize alignedOffset,
1876 VkBuffer buffer)
1877{
1878 VkResult vulkanResult;
1879
1880 SDL_LockMutex(usedRegion->allocation->memoryLock);
1881
1882 vulkanResult = renderer->vkBindBufferMemory(
1883 renderer->logicalDevice,
1884 buffer,
1885 usedRegion->allocation->memory,
1886 alignedOffset);
1887
1888 SDL_UnlockMutex(usedRegion->allocation->memoryLock);
1889
1890 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkBindBufferMemory, 0);
1891
1892 return 1;
1893}
1894
1895static Uint8 VULKAN_INTERNAL_BindImageMemory(
1896 VulkanRenderer *renderer,
1897 VulkanMemoryUsedRegion *usedRegion,
1898 VkDeviceSize alignedOffset,
1899 VkImage image)
1900{
1901 VkResult vulkanResult;
1902
1903 SDL_LockMutex(usedRegion->allocation->memoryLock);
1904
1905 vulkanResult = renderer->vkBindImageMemory(
1906 renderer->logicalDevice,
1907 image,
1908 usedRegion->allocation->memory,
1909 alignedOffset);
1910
1911 SDL_UnlockMutex(usedRegion->allocation->memoryLock);
1912
1913 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkBindImageMemory, 0);
1914
1915 return 1;
1916}
1917
// Binds exactly one of `buffer` or `image` (the other must be VK_NULL_HANDLE)
// to device memory of the given memory type, sub-allocating from an existing
// allocation when a suitable free region exists, otherwise allocating a new
// memory block.
//
// Returns:
//   1 - success; *pMemoryUsedRegion is set to the new used region
//   0 - failure (invalid arguments or vkBind*Memory failed)
//   2 - out of memory for this memory type; the caller is expected to retry
//       with another memory type index
static Uint8 VULKAN_INTERNAL_BindResourceMemory(
    VulkanRenderer *renderer,
    Uint32 memoryTypeIndex,
    VkMemoryRequirements *memoryRequirements,
    VkDeviceSize resourceSize, // may be different from requirements size!
    bool dedicated, // the entire memory allocation should be used for this resource
    VkBuffer buffer, // may be VK_NULL_HANDLE
    VkImage image, // may be VK_NULL_HANDLE
    VulkanMemoryUsedRegion **pMemoryUsedRegion)
{
    VulkanMemoryAllocation *allocation;
    VulkanMemorySubAllocator *allocator;
    VulkanMemoryFreeRegion *region;
    VulkanMemoryFreeRegion *selectedRegion;
    VulkanMemoryUsedRegion *usedRegion;

    VkDeviceSize requiredSize, allocationSize;
    VkDeviceSize alignedOffset = 0;
    VkDeviceSize newRegionSize, newRegionOffset;
    Uint8 isHostVisible, smallAllocation, allocationResult;
    Sint32 i;

    isHostVisible =
        (renderer->memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags &
         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
    requiredSize = memoryRequirements->size;
    // Small resources are packed into fixed-size SMALL_ALLOCATION_SIZE blocks
    // so they don't fragment the large allocations.
    smallAllocation = requiredSize <= SMALL_ALLOCATION_THRESHOLD;

    if ((buffer == VK_NULL_HANDLE && image == VK_NULL_HANDLE) ||
        (buffer != VK_NULL_HANDLE && image != VK_NULL_HANDLE)) {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "BindResourceMemory must be given either a VulkanBuffer or a VulkanTexture");
        return 0;
    }

    SDL_LockMutex(renderer->allocatorLock);

    selectedRegion = NULL;

    if (dedicated) {
        // Force an allocation
        allocationSize = requiredSize;
    } else {
        // Search for a suitable existing free region
        // NOTE(review): iterates sortedFreeRegions from the back — presumably
        // the list is sorted so the best-fitting candidates are at the end;
        // confirm the sort order in VULKAN_INTERNAL_NewMemoryFreeRegion.
        for (i = allocator->sortedFreeRegionCount - 1; i >= 0; i -= 1) {
            region = allocator->sortedFreeRegions[i];

            if (smallAllocation && region->allocation->size != SMALL_ALLOCATION_SIZE) {
                // region is not in a small allocation
                continue;
            }

            if (!smallAllocation && region->allocation->size == SMALL_ALLOCATION_SIZE) {
                // allocation is not small and current region is in a small allocation
                continue;
            }

            alignedOffset = VULKAN_INTERNAL_NextHighestAlignment(
                region->offset,
                memoryRequirements->alignment);

            // The aligned resource must fit entirely inside the free region.
            if (alignedOffset + requiredSize <= region->offset + region->size) {
                selectedRegion = region;
                break;
            }
        }

        if (selectedRegion != NULL) {
            region = selectedRegion;
            allocation = region->allocation;

            // The used region starts at the free region's offset and covers
            // the alignment padding plus the resource itself.
            usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
                renderer,
                allocation,
                region->offset,
                requiredSize + (alignedOffset - region->offset),
                alignedOffset,
                resourceSize,
                memoryRequirements->alignment);

            usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

            // Whatever is left of the free region after the resource becomes
            // a new (smaller) free region.
            newRegionSize = region->size - ((alignedOffset - region->offset) + requiredSize);
            newRegionOffset = alignedOffset + requiredSize;

            // remove and add modified region to re-sort
            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

            // if size is 0, no need to re-insert
            if (newRegionSize != 0) {
                VULKAN_INTERNAL_NewMemoryFreeRegion(
                    renderer,
                    allocation,
                    newRegionOffset,
                    newRegionSize);
            }

            SDL_UnlockMutex(renderer->allocatorLock);

            // Bind outside the allocator lock; roll back the used region on failure.
            if (buffer != VK_NULL_HANDLE) {
                if (!VULKAN_INTERNAL_BindBufferMemory(
                        renderer,
                        usedRegion,
                        alignedOffset,
                        buffer)) {
                    VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                        renderer,
                        usedRegion);

                    return 0;
                }
            } else if (image != VK_NULL_HANDLE) {
                if (!VULKAN_INTERNAL_BindImageMemory(
                        renderer,
                        usedRegion,
                        alignedOffset,
                        image)) {
                    VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                        renderer,
                        usedRegion);

                    return 0;
                }
            }

            *pMemoryUsedRegion = usedRegion;
            return 1;
        }

        // No suitable free regions exist, allocate a new memory region
        if (
            renderer->allocationsToDefragCount == 0 &&
            !renderer->defragInProgress) {
            // Mark currently fragmented allocations for defrag
            VULKAN_INTERNAL_MarkAllocationsForDefrag(renderer);
        }

        if (requiredSize > SMALL_ALLOCATION_THRESHOLD) {
            // allocate a page of required size aligned to LARGE_ALLOCATION_INCREMENT increments
            allocationSize =
                VULKAN_INTERNAL_NextHighestAlignment(requiredSize, LARGE_ALLOCATION_INCREMENT);
        } else {
            allocationSize = SMALL_ALLOCATION_SIZE;
        }
    }

    allocationResult = VULKAN_INTERNAL_AllocateMemory(
        renderer,
        buffer,
        image,
        memoryTypeIndex,
        allocationSize,
        isHostVisible,
        &allocation);

    // Uh oh, we're out of memory
    if (allocationResult == 0) {
        SDL_UnlockMutex(renderer->allocatorLock);

        // Responsibility of the caller to handle being out of memory
        return 2;
    }

    // A fresh allocation starts with a single free region covering the whole
    // block; carve the resource out of its front (offset 0 is always aligned).
    usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
        renderer,
        allocation,
        0,
        requiredSize,
        0,
        resourceSize,
        memoryRequirements->alignment);

    usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

    region = allocation->freeRegions[0];

    newRegionOffset = region->offset + requiredSize;
    newRegionSize = region->size - requiredSize;

    VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

    if (newRegionSize != 0) {
        VULKAN_INTERNAL_NewMemoryFreeRegion(
            renderer,
            allocation,
            newRegionOffset,
            newRegionSize);
    }

    SDL_UnlockMutex(renderer->allocatorLock);

    if (buffer != VK_NULL_HANDLE) {
        if (!VULKAN_INTERNAL_BindBufferMemory(
                renderer,
                usedRegion,
                0,
                buffer)) {
            VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                renderer,
                usedRegion);

            return 0;
        }
    } else if (image != VK_NULL_HANDLE) {
        if (!VULKAN_INTERNAL_BindImageMemory(
                renderer,
                usedRegion,
                0,
                image)) {
            VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                renderer,
                usedRegion);

            return 0;
        }
    }

    *pMemoryUsedRegion = usedRegion;
    return 1;
}
2139
2140static Uint8 VULKAN_INTERNAL_BindMemoryForImage(
2141 VulkanRenderer *renderer,
2142 VkImage image,
2143 VulkanMemoryUsedRegion **usedRegion)
2144{
2145 Uint8 bindResult = 0;
2146 Uint32 memoryTypeCount = 0;
2147 Uint32 *memoryTypesToTry = NULL;
2148 Uint32 selectedMemoryTypeIndex = 0;
2149 Uint32 i;
2150 VkMemoryPropertyFlags preferredMemoryPropertyFlags;
2151 VkMemoryRequirements memoryRequirements;
2152
2153 /* Vulkan memory types have several memory properties.
2154 *
2155 * Unlike buffers, images are always optimally stored device-local,
2156 * so that is the only property we prefer here.
2157 *
2158 * If memory is constrained, it is fine for the texture to not
2159 * be device-local.
2160 */
2161 preferredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2162
2163 memoryTypesToTry = VULKAN_INTERNAL_FindBestImageMemoryTypes(
2164 renderer,
2165 image,
2166 preferredMemoryPropertyFlags,
2167 &memoryRequirements,
2168 &memoryTypeCount);
2169
2170 for (i = 0; i < memoryTypeCount; i += 1) {
2171 bindResult = VULKAN_INTERNAL_BindResourceMemory(
2172 renderer,
2173 memoryTypesToTry[i],
2174 &memoryRequirements,
2175 memoryRequirements.size,
2176 false,
2177 VK_NULL_HANDLE,
2178 image,
2179 usedRegion);
2180
2181 if (bindResult == 1) {
2182 selectedMemoryTypeIndex = memoryTypesToTry[i];
2183 break;
2184 }
2185 }
2186
2187 SDL_free(memoryTypesToTry);
2188
2189 // Check for warnings on success
2190 if (bindResult == 1) {
2191 if (!renderer->outOfDeviceLocalMemoryWarning) {
2192 if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
2193 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating textures on host-local memory!");
2194 renderer->outOfDeviceLocalMemoryWarning = 1;
2195 }
2196 }
2197 }
2198
2199 return bindResult;
2200}
2201
2202static Uint8 VULKAN_INTERNAL_BindMemoryForBuffer(
2203 VulkanRenderer *renderer,
2204 VkBuffer buffer,
2205 VkDeviceSize size,
2206 VulkanBufferType type,
2207 bool dedicated,
2208 VulkanMemoryUsedRegion **usedRegion)
2209{
2210 Uint8 bindResult = 0;
2211 Uint32 memoryTypeCount = 0;
2212 Uint32 *memoryTypesToTry = NULL;
2213 Uint32 selectedMemoryTypeIndex = 0;
2214 Uint32 i;
2215 VkMemoryPropertyFlags requiredMemoryPropertyFlags = 0;
2216 VkMemoryPropertyFlags preferredMemoryPropertyFlags = 0;
2217 VkMemoryPropertyFlags tolerableMemoryPropertyFlags = 0;
2218 VkMemoryRequirements memoryRequirements;
2219
2220 /* Buffers need to be optimally bound to a memory type
2221 * based on their use case and the architecture of the system.
2222 *
2223 * It is important to understand the distinction between device and host.
2224 *
2225 * On a traditional high-performance desktop computer,
2226 * the "device" would be the GPU, and the "host" would be the CPU.
2227 * Memory being copied between these two must cross the PCI bus.
2228 * On these systems we have to be concerned about bandwidth limitations
2229 * and causing memory stalls, so we have taken a great deal of care
2230 * to structure this API to guide the client towards optimal usage.
2231 *
2232 * Other kinds of devices do not necessarily have this distinction.
2233 * On an iPhone or Nintendo Switch, all memory is accessible both to the
2234 * GPU and the CPU at all times. These kinds of systems are known as
2235 * UMA, or Unified Memory Architecture. A desktop computer using the
2236 * CPU's integrated graphics can also be thought of as UMA.
2237 *
2238 * Vulkan memory types have several memory properties.
2239 * The relevant memory properties are as follows:
2240 *
2241 * DEVICE_LOCAL:
2242 * This memory is on-device and most efficient for device access.
2243 * On UMA systems all memory is device-local.
2244 * If memory is not device-local, then it is host-local.
2245 *
2246 * HOST_VISIBLE:
2247 * This memory can be mapped for host access, meaning we can obtain
2248 * a pointer to directly access the memory.
2249 *
2250 * HOST_COHERENT:
2251 * Host-coherent memory does not require cache management operations
2252 * when mapped, so we always set this alongside HOST_VISIBLE
2253 * to avoid extra record keeping.
2254 *
2255 * HOST_CACHED:
2256 * Host-cached memory is faster to access than uncached memory
2257 * but memory of this type might not always be available.
2258 *
2259 * GPU buffers, like vertex buffers, indirect buffers, etc
2260 * are optimally stored in device-local memory.
2261 * However, if device-local memory is low, these buffers
2262 * can be accessed from host-local memory with a performance penalty.
2263 *
2264 * Uniform buffers must be host-visible and coherent because
2265 * the client uses them to quickly push small amounts of data.
2266 * We prefer uniform buffers to also be device-local because
2267 * they are accessed by shaders, but the amount of memory
2268 * that is both device-local and host-visible
2269 * is often constrained, particularly on low-end devices.
2270 *
2271 * Transfer buffers must be host-visible and coherent because
2272 * the client uses them to stage data to be transferred
2273 * to device-local memory, or to read back data transferred
2274 * from the device. We prefer the cache bit for performance
2275 * but it isn't strictly necessary. We tolerate device-local
2276 * memory in this situation because, as mentioned above,
2277 * on certain devices all memory is device-local, and even
2278 * though the transfer isn't strictly necessary it is still
2279 * useful for correctly timelining data.
2280 */
2281 if (type == VULKAN_BUFFER_TYPE_GPU) {
2282 preferredMemoryPropertyFlags |=
2283 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2284 } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
2285 requiredMemoryPropertyFlags |=
2286 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
2287 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2288
2289 preferredMemoryPropertyFlags |=
2290 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2291 } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) {
2292 requiredMemoryPropertyFlags |=
2293 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
2294 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2295
2296 preferredMemoryPropertyFlags |=
2297 VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2298
2299 tolerableMemoryPropertyFlags |=
2300 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2301 } else {
2302 SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer type!");
2303 return 0;
2304 }
2305
2306 memoryTypesToTry = VULKAN_INTERNAL_FindBestBufferMemoryTypes(
2307 renderer,
2308 buffer,
2309 requiredMemoryPropertyFlags,
2310 preferredMemoryPropertyFlags,
2311 tolerableMemoryPropertyFlags,
2312 &memoryRequirements,
2313 &memoryTypeCount);
2314
2315 for (i = 0; i < memoryTypeCount; i += 1) {
2316 bindResult = VULKAN_INTERNAL_BindResourceMemory(
2317 renderer,
2318 memoryTypesToTry[i],
2319 &memoryRequirements,
2320 size,
2321 dedicated,
2322 buffer,
2323 VK_NULL_HANDLE,
2324 usedRegion);
2325
2326 if (bindResult == 1) {
2327 selectedMemoryTypeIndex = memoryTypesToTry[i];
2328 break;
2329 }
2330 }
2331
2332 SDL_free(memoryTypesToTry);
2333
2334 // Check for warnings on success
2335 if (bindResult == 1) {
2336 if (type == VULKAN_BUFFER_TYPE_GPU) {
2337 if (!renderer->outOfDeviceLocalMemoryWarning) {
2338 if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
2339 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating buffers on host-local memory, expect degraded performance!");
2340 renderer->outOfDeviceLocalMemoryWarning = 1;
2341 }
2342 }
2343 } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
2344 if (!renderer->outofBARMemoryWarning) {
2345 if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
2346 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of BAR memory, allocating uniform buffers on host-local memory, expect degraded performance!");
2347 renderer->outofBARMemoryWarning = 1;
2348 }
2349 }
2350 } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) {
2351 if (!renderer->integratedMemoryNotification) {
2352 if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
2353 SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Integrated memory detected, allocating TransferBuffers on device-local memory!");
2354 renderer->integratedMemoryNotification = 1;
2355 }
2356 }
2357 }
2358 }
2359
2360 return bindResult;
2361}
2362
2363// Resource tracking
2364
/* Appends `resource` to commandBuffer->array if it is not already present,
 * growing the array by one element when full. Unlike TRACK_RESOURCE, this
 * does NOT increment the resource's reference count.
 * Declares a local `i`, so the expansion must appear where a declaration is
 * valid in the enclosing function.
 * NOTE(review): SDL_realloc failure is not checked — assumes allocation
 * succeeds, matching the other tracking helpers in this file. */
#define ADD_TO_ARRAY_UNIQUE(resource, type, array, count, capacity) \
    Uint32 i;                                                       \
                                                                    \
    for (i = 0; i < commandBuffer->count; i += 1) {                 \
        if (commandBuffer->array[i] == resource) {                  \
            return;                                                 \
        }                                                           \
    }                                                               \
                                                                    \
    if (commandBuffer->count == commandBuffer->capacity) {          \
        commandBuffer->capacity += 1;                               \
        commandBuffer->array = SDL_realloc(                         \
            commandBuffer->array,                                   \
            commandBuffer->capacity * sizeof(type));                \
    }                                                               \
    commandBuffer->array[commandBuffer->count] = resource;          \
    commandBuffer->count += 1;
2382
/* Appends `resource` to commandBuffer->array if it is not already present
 * (scanning from the end, where a recently-tracked resource is most likely
 * to be found), growing the array by one element when full, and takes a
 * reference on the resource via SDL_AtomicIncRef.
 * NOTE(review): SDL_realloc failure is not checked — assumes allocation
 * succeeds. */
#define TRACK_RESOURCE(resource, type, array, count, capacity)  \
    for (Sint32 i = commandBuffer->count - 1; i >= 0; i -= 1) { \
        if (commandBuffer->array[i] == resource) {              \
            return;                                             \
        }                                                       \
    }                                                           \
                                                                \
    if (commandBuffer->count == commandBuffer->capacity) {      \
        commandBuffer->capacity += 1;                           \
        commandBuffer->array = SDL_realloc(                     \
            commandBuffer->array,                               \
            commandBuffer->capacity * sizeof(type));            \
    }                                                           \
    commandBuffer->array[commandBuffer->count] = resource;      \
    commandBuffer->count += 1;                                  \
    SDL_AtomicIncRef(&resource->referenceCount);
2399
// Records `buffer` in the command buffer's used-buffer list (deduplicated)
// and takes a reference on it for the command buffer's lifetime.
static void VULKAN_INTERNAL_TrackBuffer(
    VulkanCommandBuffer *commandBuffer,
    VulkanBuffer *buffer)
{
    TRACK_RESOURCE(
        buffer,
        VulkanBuffer *,
        usedBuffers,
        usedBufferCount,
        usedBufferCapacity)
}
2411
// Records `texture` in the command buffer's used-texture list (deduplicated)
// and takes a reference on it for the command buffer's lifetime.
static void VULKAN_INTERNAL_TrackTexture(
    VulkanCommandBuffer *commandBuffer,
    VulkanTexture *texture)
{
    TRACK_RESOURCE(
        texture,
        VulkanTexture *,
        usedTextures,
        usedTextureCount,
        usedTextureCapacity)
}
2423
// Records `sampler` in the command buffer's used-sampler list (deduplicated)
// and takes a reference on it for the command buffer's lifetime.
static void VULKAN_INTERNAL_TrackSampler(
    VulkanCommandBuffer *commandBuffer,
    VulkanSampler *sampler)
{
    TRACK_RESOURCE(
        sampler,
        VulkanSampler *,
        usedSamplers,
        usedSamplerCount,
        usedSamplerCapacity)
}
2435
// Records `graphicsPipeline` in the command buffer's used-pipeline list
// (deduplicated) and takes a reference on it for the command buffer's lifetime.
static void VULKAN_INTERNAL_TrackGraphicsPipeline(
    VulkanCommandBuffer *commandBuffer,
    VulkanGraphicsPipeline *graphicsPipeline)
{
    TRACK_RESOURCE(
        graphicsPipeline,
        VulkanGraphicsPipeline *,
        usedGraphicsPipelines,
        usedGraphicsPipelineCount,
        usedGraphicsPipelineCapacity)
}
2447
// Records `computePipeline` in the command buffer's used-pipeline list
// (deduplicated) and takes a reference on it for the command buffer's lifetime.
static void VULKAN_INTERNAL_TrackComputePipeline(
    VulkanCommandBuffer *commandBuffer,
    VulkanComputePipeline *computePipeline)
{
    TRACK_RESOURCE(
        computePipeline,
        VulkanComputePipeline *,
        usedComputePipelines,
        usedComputePipelineCount,
        usedComputePipelineCapacity)
}
2459
// Records `framebuffer` in the command buffer's used-framebuffer list
// (deduplicated) and takes a reference on it for the command buffer's
// lifetime. `renderer` is accepted for signature symmetry with callers but
// is not used in the expansion.
static void VULKAN_INTERNAL_TrackFramebuffer(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanFramebuffer *framebuffer)
{
    TRACK_RESOURCE(
        framebuffer,
        VulkanFramebuffer *,
        usedFramebuffers,
        usedFramebufferCount,
        usedFramebufferCapacity);
}
2472
2473static void VULKAN_INTERNAL_TrackUniformBuffer(
2474 VulkanCommandBuffer *commandBuffer,
2475 VulkanUniformBuffer *uniformBuffer)
2476{
2477 for (Sint32 i = commandBuffer->usedUniformBufferCount - 1; i >= 0; i -= 1) {
2478 if (commandBuffer->usedUniformBuffers[i] == uniformBuffer) {
2479 return;
2480 }
2481 }
2482
2483 if (commandBuffer->usedUniformBufferCount == commandBuffer->usedUniformBufferCapacity) {
2484 commandBuffer->usedUniformBufferCapacity += 1;
2485 commandBuffer->usedUniformBuffers = SDL_realloc(
2486 commandBuffer->usedUniformBuffers,
2487 commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *));
2488 }
2489 commandBuffer->usedUniformBuffers[commandBuffer->usedUniformBufferCount] = uniformBuffer;
2490 commandBuffer->usedUniformBufferCount += 1;
2491
2492 VULKAN_INTERNAL_TrackBuffer(
2493 commandBuffer,
2494 uniformBuffer->buffer);
2495}
2496
2497#undef TRACK_RESOURCE
2498
2499// Memory Barriers
2500
2501/*
2502 * In Vulkan, we must manually synchronize operations that write to resources on the GPU
2503 * so that read-after-write, write-after-read, and write-after-write hazards do not occur.
2504 * Additionally, textures are required to be in specific layouts for specific use cases.
2505 * Both of these tasks are accomplished with vkCmdPipelineBarrier.
2506 *
2507 * To insert the correct barriers, we keep track of "usage modes" for buffers and textures.
2508 * These indicate the current usage of that resource on the command buffer.
2509 * The transition from one usage mode to another indicates how the barrier should be constructed.
2510 *
2511 * Pipeline barriers cannot be inserted during a render pass, but they can be inserted
2512 * during a compute or copy pass.
2513 *
2514 * This means that the "default" usage mode of any given resource should be that it should be
2515 * ready for a graphics-read operation, because we cannot barrier during a render pass.
2516 * In the case where a resource is only used in compute, its default usage mode can be compute-read.
2517 * This strategy allows us to avoid expensive record keeping of command buffer/resource usage mode pairs,
2518 * and it fully covers synchronization between all combinations of stages.
2519 *
2520 * In Upload and Copy functions, we transition the resource immediately before and after the copy command.
2521 *
2522 * When binding a resource for compute, we transition when the Bind functions are called.
2523 * If a bind slot containing a resource is overwritten, we transition the resource in that slot back to its default.
2524 * When EndComputePass is called we transition all bound resources back to their default state.
2525 *
2526 * When binding a texture as a render pass attachment, we transition the resource on BeginRenderPass
2527 * and transition it back to its default on EndRenderPass.
2528 *
2529 * This strategy imposes certain limitations on resource usage flags.
2530 * For example, a texture cannot have both the SAMPLER and GRAPHICS_STORAGE usage flags,
2531 * because then it is impossible for the backend to infer which default usage mode the texture should use.
2532 *
2533 * Sync hazards can be detected by setting VK_KHRONOS_VALIDATION_VALIDATE_SYNC=1 when using validation layers.
2534 */
2535
2536static void VULKAN_INTERNAL_BufferMemoryBarrier(
2537 VulkanRenderer *renderer,
2538 VulkanCommandBuffer *commandBuffer,
2539 VulkanBufferUsageMode sourceUsageMode,
2540 VulkanBufferUsageMode destinationUsageMode,
2541 VulkanBuffer *buffer)
2542{
2543 VkPipelineStageFlags srcStages = 0;
2544 VkPipelineStageFlags dstStages = 0;
2545 VkBufferMemoryBarrier memoryBarrier;
2546
2547 memoryBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
2548 memoryBarrier.pNext = NULL;
2549 memoryBarrier.srcAccessMask = 0;
2550 memoryBarrier.dstAccessMask = 0;
2551 memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2552 memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2553 memoryBarrier.buffer = buffer->buffer;
2554 memoryBarrier.offset = 0;
2555 memoryBarrier.size = buffer->size;
2556
2557 if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
2558 srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2559 memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
2560 } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
2561 srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2562 memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2563 } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
2564 srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2565 memoryBarrier.srcAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
2566 } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
2567 srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2568 memoryBarrier.srcAccessMask = VK_ACCESS_INDEX_READ_BIT;
2569 } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
2570 srcStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
2571 memoryBarrier.srcAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
2572 } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
2573 srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
2574 memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
2575 } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
2576 srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2577 memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
2578 } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
2579 srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2580 memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
2581 } else {
2582 SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer source barrier type!");
2583 return;
2584 }
2585
2586 if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
2587 dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2588 memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
2589 } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
2590 dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2591 memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2592 } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
2593 dstStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2594 memoryBarrier.dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
2595 } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
2596 dstStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2597 memoryBarrier.dstAccessMask = VK_ACCESS_INDEX_READ_BIT;
2598 } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
2599 dstStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
2600 memoryBarrier.dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
2601 } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
2602 dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
2603 memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
2604 } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
2605 dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2606 memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
2607 } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
2608 dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2609 memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
2610 } else {
2611 SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer destination barrier type!");
2612 return;
2613 }
2614
2615 renderer->vkCmdPipelineBarrier(
2616 commandBuffer->commandBuffer,
2617 srcStages,
2618 dstStages,
2619 0,
2620 0,
2621 NULL,
2622 1,
2623 &memoryBarrier,
2624 0,
2625 NULL);
2626
2627 buffer->transitioned = true;
2628}
2629
2630static void VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
2631 VulkanRenderer *renderer,
2632 VulkanCommandBuffer *commandBuffer,
2633 VulkanTextureUsageMode sourceUsageMode,
2634 VulkanTextureUsageMode destinationUsageMode,
2635 VulkanTextureSubresource *textureSubresource)
2636{
2637 VkPipelineStageFlags srcStages = 0;
2638 VkPipelineStageFlags dstStages = 0;
2639 VkImageMemoryBarrier memoryBarrier;
2640
2641 memoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
2642 memoryBarrier.pNext = NULL;
2643 memoryBarrier.srcAccessMask = 0;
2644 memoryBarrier.dstAccessMask = 0;
2645 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2646 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2647 memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2648 memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2649 memoryBarrier.image = textureSubresource->parent->image;
2650 memoryBarrier.subresourceRange.aspectMask = textureSubresource->parent->aspectFlags;
2651 memoryBarrier.subresourceRange.baseArrayLayer = textureSubresource->layer;
2652 memoryBarrier.subresourceRange.layerCount = 1;
2653 memoryBarrier.subresourceRange.baseMipLevel = textureSubresource->level;
2654 memoryBarrier.subresourceRange.levelCount = 1;
2655
2656 if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED) {
2657 srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
2658 memoryBarrier.srcAccessMask = 0;
2659 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2660 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
2661 srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2662 memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
2663 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
2664 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
2665 srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2666 memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2667 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
2668 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
2669 srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
2670 memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
2671 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
2672 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
2673 srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
2674 memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
2675 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
2676 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
2677 srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2678 memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
2679 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
2680 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
2681 srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2682 memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
2683 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
2684 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
2685 srcStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
2686 memoryBarrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
2687 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2688 } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
2689 srcStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
2690 memoryBarrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
2691 memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
2692 } else {
2693 SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture source barrier type!");
2694 return;
2695 }
2696
2697 if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
2698 dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2699 memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
2700 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
2701 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
2702 dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
2703 memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2704 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
2705 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
2706 dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
2707 memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
2708 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
2709 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
2710 dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
2711 memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
2712 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
2713 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
2714 dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2715 memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
2716 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
2717 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
2718 dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
2719 memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
2720 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
2721 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
2722 dstStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
2723 memoryBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
2724 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2725 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
2726 dstStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
2727 memoryBarrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
2728 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
2729 } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_PRESENT) {
2730 dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
2731 memoryBarrier.dstAccessMask = 0;
2732 memoryBarrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
2733 } else {
2734 SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture destination barrier type!");
2735 return;
2736 }
2737
2738 renderer->vkCmdPipelineBarrier(
2739 commandBuffer->commandBuffer,
2740 srcStages,
2741 dstStages,
2742 0,
2743 0,
2744 NULL,
2745 0,
2746 NULL,
2747 1,
2748 &memoryBarrier);
2749}
2750
2751static VulkanBufferUsageMode VULKAN_INTERNAL_DefaultBufferUsageMode(
2752 VulkanBuffer *buffer)
2753{
2754 // NOTE: order matters here!
2755
2756 if (buffer->usage & SDL_GPU_BUFFERUSAGE_VERTEX) {
2757 return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
2758 } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_INDEX) {
2759 return VULKAN_BUFFER_USAGE_MODE_INDEX_READ;
2760 } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_INDIRECT) {
2761 return VULKAN_BUFFER_USAGE_MODE_INDIRECT;
2762 } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ) {
2763 return VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ;
2764 } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ) {
2765 return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ;
2766 } else if (buffer->usage & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE) {
2767 return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
2768 } else {
2769 SDL_LogError(SDL_LOG_CATEGORY_GPU, "Buffer has no default usage mode!");
2770 return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
2771 }
2772}
2773
2774static VulkanTextureUsageMode VULKAN_INTERNAL_DefaultTextureUsageMode(
2775 VulkanTexture *texture)
2776{
2777 // NOTE: order matters here!
2778 // NOTE: graphics storage bits and sampler bit are mutually exclusive!
2779
2780 if (texture->usage & SDL_GPU_TEXTUREUSAGE_SAMPLER) {
2781 return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
2782 } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ) {
2783 return VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ;
2784 } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
2785 return VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT;
2786 } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
2787 return VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT;
2788 } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ) {
2789 return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ;
2790 } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE) {
2791 return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
2792 } else if (texture->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE) {
2793 return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
2794 } else {
2795 SDL_LogError(SDL_LOG_CATEGORY_GPU, "Texture has no default usage mode!");
2796 return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
2797 }
2798}
2799
2800static void VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
2801 VulkanRenderer *renderer,
2802 VulkanCommandBuffer *commandBuffer,
2803 VulkanBufferUsageMode destinationUsageMode,
2804 VulkanBuffer *buffer)
2805{
2806 VULKAN_INTERNAL_BufferMemoryBarrier(
2807 renderer,
2808 commandBuffer,
2809 VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
2810 destinationUsageMode,
2811 buffer);
2812}
2813
2814static void VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
2815 VulkanRenderer *renderer,
2816 VulkanCommandBuffer *commandBuffer,
2817 VulkanBufferUsageMode sourceUsageMode,
2818 VulkanBuffer *buffer)
2819{
2820 VULKAN_INTERNAL_BufferMemoryBarrier(
2821 renderer,
2822 commandBuffer,
2823 sourceUsageMode,
2824 VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
2825 buffer);
2826}
2827
2828static void VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
2829 VulkanRenderer *renderer,
2830 VulkanCommandBuffer *commandBuffer,
2831 VulkanTextureUsageMode destinationUsageMode,
2832 VulkanTextureSubresource *textureSubresource)
2833{
2834 VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
2835 renderer,
2836 commandBuffer,
2837 VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
2838 destinationUsageMode,
2839 textureSubresource);
2840}
2841
2842static void VULKAN_INTERNAL_TextureTransitionFromDefaultUsage(
2843 VulkanRenderer *renderer,
2844 VulkanCommandBuffer *commandBuffer,
2845 VulkanTextureUsageMode destinationUsageMode,
2846 VulkanTexture *texture)
2847{
2848 for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
2849 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
2850 renderer,
2851 commandBuffer,
2852 destinationUsageMode,
2853 &texture->subresources[i]);
2854 }
2855}
2856
2857static void VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
2858 VulkanRenderer *renderer,
2859 VulkanCommandBuffer *commandBuffer,
2860 VulkanTextureUsageMode sourceUsageMode,
2861 VulkanTextureSubresource *textureSubresource)
2862{
2863 VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
2864 renderer,
2865 commandBuffer,
2866 sourceUsageMode,
2867 VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
2868 textureSubresource);
2869}
2870
2871static void VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
2872 VulkanRenderer *renderer,
2873 VulkanCommandBuffer *commandBuffer,
2874 VulkanTextureUsageMode sourceUsageMode,
2875 VulkanTexture *texture)
2876{
2877 // FIXME: could optimize this barrier
2878 for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
2879 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
2880 renderer,
2881 commandBuffer,
2882 sourceUsageMode,
2883 &texture->subresources[i]);
2884 }
2885}
2886
2887// Resource Disposal
2888
// Defers destruction of a framebuffer by queueing it on the renderer's
// destroy list under the dispose lock.
// NOTE(review): presumably the list is drained later once the framebuffer can
// no longer be referenced by in-flight GPU work — confirm against the drain site.
static void VULKAN_INTERNAL_ReleaseFramebuffer(
    VulkanRenderer *renderer,
    VulkanFramebuffer *framebuffer)
{
    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array (doubling) if it is full.
    EXPAND_ARRAY_IF_NEEDED(
        renderer->framebuffersToDestroy,
        VulkanFramebuffer *,
        renderer->framebuffersToDestroyCount + 1,
        renderer->framebuffersToDestroyCapacity,
        renderer->framebuffersToDestroyCapacity * 2);

    renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount] = framebuffer;
    renderer->framebuffersToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
2907
2908static void VULKAN_INTERNAL_DestroyFramebuffer(
2909 VulkanRenderer *renderer,
2910 VulkanFramebuffer *framebuffer)
2911{
2912 renderer->vkDestroyFramebuffer(
2913 renderer->logicalDevice,
2914 framebuffer->framebuffer,
2915 NULL);
2916
2917 SDL_free(framebuffer);
2918}
2919
2920static void VULKAN_INTERNAL_RemoveFramebuffersContainingView(
2921 VulkanRenderer *renderer,
2922 VkImageView view)
2923{
2924 FramebufferHashTableKey *key;
2925 VulkanFramebuffer *value;
2926 void *iter = NULL;
2927
2928 // Can't remove while iterating!
2929 Uint32 keysToRemoveCapacity = 8;
2930 Uint32 keysToRemoveCount = 0;
2931 FramebufferHashTableKey **keysToRemove = SDL_malloc(keysToRemoveCapacity * sizeof(FramebufferHashTableKey *));
2932
2933 SDL_LockMutex(renderer->framebufferFetchLock);
2934
2935 while (SDL_IterateHashTable(renderer->framebufferHashTable, (const void **)&key, (const void **)&value, &iter)) {
2936 bool remove = false;
2937 for (Uint32 i = 0; i < key->numColorTargets; i += 1) {
2938 if (key->colorAttachmentViews[i] == view) {
2939 remove = true;
2940 }
2941 }
2942 for (Uint32 i = 0; i < key->numResolveAttachments; i += 1) {
2943 if (key->resolveAttachmentViews[i] == view) {
2944 remove = true;
2945 }
2946 }
2947 if (key->depthStencilAttachmentView == view) {
2948 remove = true;
2949 }
2950
2951 if (remove) {
2952 if (keysToRemoveCount == keysToRemoveCapacity) {
2953 keysToRemoveCapacity *= 2;
2954 keysToRemove = SDL_realloc(
2955 keysToRemove,
2956 keysToRemoveCapacity * sizeof(FramebufferHashTableKey *));
2957 }
2958
2959 keysToRemove[keysToRemoveCount] = key;
2960 keysToRemoveCount += 1;
2961 }
2962 }
2963
2964 for (Uint32 i = 0; i < keysToRemoveCount; i += 1) {
2965 SDL_RemoveFromHashTable(renderer->framebufferHashTable, (void *)keysToRemove[i]);
2966 }
2967
2968 SDL_UnlockMutex(renderer->framebufferFetchLock);
2969
2970 SDL_free(keysToRemove);
2971}
2972
// Tears down a texture: per-subresource views (and any cached framebuffers
// referencing them), the full view, the VkImage, its memory region, and the
// struct itself.
static void VULKAN_INTERNAL_DestroyTexture(
    VulkanRenderer *renderer,
    VulkanTexture *texture)
{
    // Clean up subresources
    for (Uint32 subresourceIndex = 0; subresourceIndex < texture->subresourceCount; subresourceIndex += 1) {
        if (texture->subresources[subresourceIndex].renderTargetViews != NULL) {
            // First pass: drop all cached framebuffers that reference any of
            // this subresource's render target views...
            for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) {
                VULKAN_INTERNAL_RemoveFramebuffersContainingView(
                    renderer,
                    texture->subresources[subresourceIndex].renderTargetViews[depthIndex]);
            }

            // ...second pass: destroy the views themselves.
            for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) {
                renderer->vkDestroyImageView(
                    renderer->logicalDevice,
                    texture->subresources[subresourceIndex].renderTargetViews[depthIndex],
                    NULL);
            }
            SDL_free(texture->subresources[subresourceIndex].renderTargetViews);
        }

        if (texture->subresources[subresourceIndex].computeWriteView != VK_NULL_HANDLE) {
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].computeWriteView,
                NULL);
        }

        if (texture->subresources[subresourceIndex].depthStencilView != VK_NULL_HANDLE) {
            // Framebuffers may also reference the depth-stencil view.
            VULKAN_INTERNAL_RemoveFramebuffersContainingView(
                renderer,
                texture->subresources[subresourceIndex].depthStencilView);
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].depthStencilView,
                NULL);
        }
    }

    SDL_free(texture->subresources);

    if (texture->fullView) {
        renderer->vkDestroyImageView(
            renderer->logicalDevice,
            texture->fullView,
            NULL);
    }

    if (texture->image) {
        renderer->vkDestroyImage(
            renderer->logicalDevice,
            texture->image,
            NULL);
    }

    // Return the texture's backing memory to the allocator.
    if (texture->usedRegion) {
        VULKAN_INTERNAL_RemoveMemoryUsedRegion(
            renderer,
            texture->usedRegion);
    }

    SDL_free(texture);
}
3037
3038static void VULKAN_INTERNAL_DestroyBuffer(
3039 VulkanRenderer *renderer,
3040 VulkanBuffer *buffer)
3041{
3042 renderer->vkDestroyBuffer(
3043 renderer->logicalDevice,
3044 buffer->buffer,
3045 NULL);
3046
3047 VULKAN_INTERNAL_RemoveMemoryUsedRegion(
3048 renderer,
3049 buffer->usedRegion);
3050
3051 SDL_free(buffer);
3052}
3053
3054static void VULKAN_INTERNAL_DestroyCommandPool(
3055 VulkanRenderer *renderer,
3056 VulkanCommandPool *commandPool)
3057{
3058 Uint32 i;
3059 VulkanCommandBuffer *commandBuffer;
3060
3061 renderer->vkDestroyCommandPool(
3062 renderer->logicalDevice,
3063 commandPool->commandPool,
3064 NULL);
3065
3066 for (i = 0; i < commandPool->inactiveCommandBufferCount; i += 1) {
3067 commandBuffer = commandPool->inactiveCommandBuffers[i];
3068
3069 SDL_free(commandBuffer->presentDatas);
3070 SDL_free(commandBuffer->waitSemaphores);
3071 SDL_free(commandBuffer->signalSemaphores);
3072 SDL_free(commandBuffer->usedBuffers);
3073 SDL_free(commandBuffer->usedTextures);
3074 SDL_free(commandBuffer->usedSamplers);
3075 SDL_free(commandBuffer->usedGraphicsPipelines);
3076 SDL_free(commandBuffer->usedComputePipelines);
3077 SDL_free(commandBuffer->usedFramebuffers);
3078 SDL_free(commandBuffer->usedUniformBuffers);
3079
3080 SDL_free(commandBuffer);
3081 }
3082
3083 SDL_free(commandPool->inactiveCommandBuffers);
3084 SDL_free(commandPool);
3085}
3086
3087static void VULKAN_INTERNAL_DestroyDescriptorSetLayout(
3088 VulkanRenderer *renderer,
3089 DescriptorSetLayout *layout)
3090{
3091 if (layout == NULL) {
3092 return;
3093 }
3094
3095 if (layout->descriptorSetLayout != VK_NULL_HANDLE) {
3096 renderer->vkDestroyDescriptorSetLayout(
3097 renderer->logicalDevice,
3098 layout->descriptorSetLayout,
3099 NULL);
3100 }
3101
3102 SDL_free(layout);
3103}
3104
3105static void VULKAN_INTERNAL_DestroyGraphicsPipeline(
3106 VulkanRenderer *renderer,
3107 VulkanGraphicsPipeline *graphicsPipeline)
3108{
3109 renderer->vkDestroyPipeline(
3110 renderer->logicalDevice,
3111 graphicsPipeline->pipeline,
3112 NULL);
3113
3114 (void)SDL_AtomicDecRef(&graphicsPipeline->vertexShader->referenceCount);
3115 (void)SDL_AtomicDecRef(&graphicsPipeline->fragmentShader->referenceCount);
3116
3117 SDL_free(graphicsPipeline);
3118}
3119
3120static void VULKAN_INTERNAL_DestroyComputePipeline(
3121 VulkanRenderer *renderer,
3122 VulkanComputePipeline *computePipeline)
3123{
3124 if (computePipeline->pipeline != VK_NULL_HANDLE) {
3125 renderer->vkDestroyPipeline(
3126 renderer->logicalDevice,
3127 computePipeline->pipeline,
3128 NULL);
3129 }
3130
3131 if (computePipeline->shaderModule != VK_NULL_HANDLE) {
3132 renderer->vkDestroyShaderModule(
3133 renderer->logicalDevice,
3134 computePipeline->shaderModule,
3135 NULL);
3136 }
3137
3138 SDL_free(computePipeline);
3139}
3140
3141static void VULKAN_INTERNAL_DestroyShader(
3142 VulkanRenderer *renderer,
3143 VulkanShader *vulkanShader)
3144{
3145 renderer->vkDestroyShaderModule(
3146 renderer->logicalDevice,
3147 vulkanShader->shaderModule,
3148 NULL);
3149
3150 SDL_free((void *)vulkanShader->entrypointName);
3151 SDL_free(vulkanShader);
3152}
3153
3154static void VULKAN_INTERNAL_DestroySampler(
3155 VulkanRenderer *renderer,
3156 VulkanSampler *vulkanSampler)
3157{
3158 renderer->vkDestroySampler(
3159 renderer->logicalDevice,
3160 vulkanSampler->sampler,
3161 NULL);
3162
3163 SDL_free(vulkanSampler);
3164}
3165
// Tears down everything attached to a window's swapchain: per-image views and
// texture wrappers, the VkSwapchainKHR, the VkSurfaceKHR, and the per-frame
// synchronization semaphores. Handles are NULLed after destruction so the
// function can observe partially-initialized state on a later call.
static void VULKAN_INTERNAL_DestroySwapchain(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    Uint32 i;

    if (windowData == NULL) {
        return;
    }

    for (i = 0; i < windowData->imageCount; i += 1) {
        // Cached framebuffers may reference the swapchain image view; evict
        // them before destroying the view itself.
        VULKAN_INTERNAL_RemoveFramebuffersContainingView(
            renderer,
            windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0]);
        renderer->vkDestroyImageView(
            renderer->logicalDevice,
            windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0],
            NULL);
        // The VkImages themselves belong to the swapchain; only the host-side
        // wrappers are freed here.
        SDL_free(windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews);
        SDL_free(windowData->textureContainers[i].activeTexture->subresources);
        SDL_free(windowData->textureContainers[i].activeTexture);
    }
    windowData->imageCount = 0;

    SDL_free(windowData->textureContainers);
    windowData->textureContainers = NULL;

    if (windowData->swapchain) {
        renderer->vkDestroySwapchainKHR(
            renderer->logicalDevice,
            windowData->swapchain,
            NULL);
        windowData->swapchain = VK_NULL_HANDLE;
    }

    if (windowData->surface) {
        // The surface is an instance-level object, not a device-level one.
        renderer->vkDestroySurfaceKHR(
            renderer->instance,
            windowData->surface,
            NULL);
        windowData->surface = VK_NULL_HANDLE;
    }

    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        if (windowData->imageAvailableSemaphore[i]) {
            renderer->vkDestroySemaphore(
                renderer->logicalDevice,
                windowData->imageAvailableSemaphore[i],
                NULL);
            windowData->imageAvailableSemaphore[i] = VK_NULL_HANDLE;
        }

        if (windowData->renderFinishedSemaphore[i]) {
            renderer->vkDestroySemaphore(
                renderer->logicalDevice,
                windowData->renderFinishedSemaphore[i],
                NULL);
            windowData->renderFinishedSemaphore[i] = VK_NULL_HANDLE;
        }
    }
}
3227
3228static void VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(
3229 VulkanRenderer *renderer,
3230 VulkanGraphicsPipelineResourceLayout *resourceLayout)
3231{
3232 if (resourceLayout->pipelineLayout != VK_NULL_HANDLE) {
3233 renderer->vkDestroyPipelineLayout(
3234 renderer->logicalDevice,
3235 resourceLayout->pipelineLayout,
3236 NULL);
3237 }
3238
3239 SDL_free(resourceLayout);
3240}
3241
3242static void VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(
3243 VulkanRenderer *renderer,
3244 VulkanComputePipelineResourceLayout *resourceLayout)
3245{
3246 if (resourceLayout->pipelineLayout != VK_NULL_HANDLE) {
3247 renderer->vkDestroyPipelineLayout(
3248 renderer->logicalDevice,
3249 resourceLayout->pipelineLayout,
3250 NULL);
3251 }
3252
3253 SDL_free(resourceLayout);
3254}
3255
3256static void VULKAN_INTERNAL_DestroyDescriptorSetCache(
3257 VulkanRenderer *renderer,
3258 DescriptorSetCache *descriptorSetCache)
3259{
3260 for (Uint32 i = 0; i < descriptorSetCache->poolCount; i += 1) {
3261 for (Uint32 j = 0; j < descriptorSetCache->pools[i].poolCount; j += 1) {
3262 renderer->vkDestroyDescriptorPool(
3263 renderer->logicalDevice,
3264 descriptorSetCache->pools[i].descriptorPools[j],
3265 NULL);
3266 }
3267 SDL_free(descriptorSetCache->pools[i].descriptorSets);
3268 SDL_free(descriptorSetCache->pools[i].descriptorPools);
3269 }
3270 SDL_free(descriptorSetCache->pools);
3271 SDL_free(descriptorSetCache);
3272}
3273
3274// Hashtable functions
3275
3276static Uint32 VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashFunction(const void *key, void *data)
3277{
3278 GraphicsPipelineResourceLayoutHashTableKey *hashTableKey = (GraphicsPipelineResourceLayoutHashTableKey *)key;
3279 /* The algorithm for this hashing function
3280 * is taken from Josh Bloch's "Effective Java".
3281 * (https://stackoverflow.com/a/113600/12492383)
3282 */
3283 const Uint32 hashFactor = 31;
3284 Uint32 result = 1;
3285 result = result * hashFactor + hashTableKey->vertexSamplerCount;
3286 result = result * hashFactor + hashTableKey->vertexStorageBufferCount;
3287 result = result * hashFactor + hashTableKey->vertexStorageTextureCount;
3288 result = result * hashFactor + hashTableKey->vertexUniformBufferCount;
3289 result = result * hashFactor + hashTableKey->fragmentSamplerCount;
3290 result = result * hashFactor + hashTableKey->fragmentStorageBufferCount;
3291 result = result * hashFactor + hashTableKey->fragmentStorageTextureCount;
3292 result = result * hashFactor + hashTableKey->fragmentUniformBufferCount;
3293 return result;
3294}
3295static bool VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashKeyMatch(const void *aKey, const void *bKey, void *data)
3296{
3297 return SDL_memcmp(aKey, bKey, sizeof(GraphicsPipelineResourceLayoutHashTableKey)) == 0;
3298}
3299static void VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashNuke(const void *key, const void *value, void *data)
3300{
3301 VulkanRenderer *renderer = (VulkanRenderer *)data;
3302 VulkanGraphicsPipelineResourceLayout *resourceLayout = (VulkanGraphicsPipelineResourceLayout *)value;
3303 VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(renderer, resourceLayout);
3304 SDL_free((void*)key);
3305}
3306
3307static Uint32 VULKAN_INTERNAL_ComputePipelineResourceLayoutHashFunction(const void *key, void *data)
3308{
3309 ComputePipelineResourceLayoutHashTableKey *hashTableKey = (ComputePipelineResourceLayoutHashTableKey *)key;
3310 /* The algorithm for this hashing function
3311 * is taken from Josh Bloch's "Effective Java".
3312 * (https://stackoverflow.com/a/113600/12492383)
3313 */
3314 const Uint32 hashFactor = 31;
3315 Uint32 result = 1;
3316 result = result * hashFactor + hashTableKey->samplerCount;
3317 result = result * hashFactor + hashTableKey->readonlyStorageTextureCount;
3318 result = result * hashFactor + hashTableKey->readonlyStorageBufferCount;
3319 result = result * hashFactor + hashTableKey->readWriteStorageTextureCount;
3320 result = result * hashFactor + hashTableKey->readWriteStorageBufferCount;
3321 result = result * hashFactor + hashTableKey->uniformBufferCount;
3322 return result;
3323}
3324
3325static bool VULKAN_INTERNAL_ComputePipelineResourceLayoutHashKeyMatch(const void *aKey, const void *bKey, void *data)
3326{
3327 return SDL_memcmp(aKey, bKey, sizeof(ComputePipelineResourceLayoutHashTableKey)) == 0;
3328}
3329
3330static void VULKAN_INTERNAL_ComputePipelineResourceLayoutHashNuke(const void *key, const void *value, void *data)
3331{
3332 VulkanRenderer *renderer = (VulkanRenderer *)data;
3333 VulkanComputePipelineResourceLayout *resourceLayout = (VulkanComputePipelineResourceLayout *)value;
3334 VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(renderer, resourceLayout);
3335 SDL_free((void*)key);
3336}
3337
3338static Uint32 VULKAN_INTERNAL_DescriptorSetLayoutHashFunction(const void *key, void *data)
3339{
3340 DescriptorSetLayoutHashTableKey *hashTableKey = (DescriptorSetLayoutHashTableKey *)key;
3341
3342 /* The algorithm for this hashing function
3343 * is taken from Josh Bloch's "Effective Java".
3344 * (https://stackoverflow.com/a/113600/12492383)
3345 */
3346 const Uint32 hashFactor = 31;
3347 Uint32 result = 1;
3348 result = result * hashFactor + hashTableKey->shaderStage;
3349 result = result * hashFactor + hashTableKey->samplerCount;
3350 result = result * hashFactor + hashTableKey->storageTextureCount;
3351 result = result * hashFactor + hashTableKey->storageBufferCount;
3352 result = result * hashFactor + hashTableKey->writeStorageTextureCount;
3353 result = result * hashFactor + hashTableKey->writeStorageBufferCount;
3354 result = result * hashFactor + hashTableKey->uniformBufferCount;
3355 return result;
3356}
3357
3358static bool VULKAN_INTERNAL_DescriptorSetLayoutHashKeyMatch(const void *aKey, const void *bKey, void *data)
3359{
3360 return SDL_memcmp(aKey, bKey, sizeof(DescriptorSetLayoutHashTableKey)) == 0;
3361}
3362
3363static void VULKAN_INTERNAL_DescriptorSetLayoutHashNuke(const void *key, const void *value, void *data)
3364{
3365 VulkanRenderer *renderer = (VulkanRenderer *)data;
3366 DescriptorSetLayout *layout = (DescriptorSetLayout *)value;
3367 VULKAN_INTERNAL_DestroyDescriptorSetLayout(renderer, layout);
3368 SDL_free((void*)key);
3369}
3370
3371static Uint32 VULKAN_INTERNAL_CommandPoolHashFunction(const void *key, void *data)
3372{
3373 return (Uint32)((CommandPoolHashTableKey *)key)->threadID;
3374}
3375
3376static bool VULKAN_INTERNAL_CommandPoolHashKeyMatch(const void *aKey, const void *bKey, void *data)
3377{
3378 CommandPoolHashTableKey *a = (CommandPoolHashTableKey *)aKey;
3379 CommandPoolHashTableKey *b = (CommandPoolHashTableKey *)bKey;
3380 return a->threadID == b->threadID;
3381}
3382
3383static void VULKAN_INTERNAL_CommandPoolHashNuke(const void *key, const void *value, void *data)
3384{
3385 VulkanRenderer *renderer = (VulkanRenderer *)data;
3386 VulkanCommandPool *pool = (VulkanCommandPool *)value;
3387 VULKAN_INTERNAL_DestroyCommandPool(renderer, pool);
3388 SDL_free((void *)key);
3389}
3390
3391static Uint32 VULKAN_INTERNAL_RenderPassHashFunction(
3392 const void *key,
3393 void *data)
3394{
3395 RenderPassHashTableKey *hashTableKey = (RenderPassHashTableKey *)key;
3396
3397 /* The algorithm for this hashing function
3398 * is taken from Josh Bloch's "Effective Java".
3399 * (https://stackoverflow.com/a/113600/12492383)
3400 */
3401 const Uint32 hashFactor = 31;
3402 Uint32 result = 1;
3403
3404 for (Uint32 i = 0; i < hashTableKey->numColorTargets; i += 1) {
3405 result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].loadOp;
3406 result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].storeOp;
3407 result = result * hashFactor + hashTableKey->colorTargetDescriptions[i].format;
3408 }
3409
3410 for (Uint32 i = 0; i < hashTableKey->numResolveTargets; i += 1) {
3411 result = result * hashFactor + hashTableKey->resolveTargetFormats[i];
3412 }
3413
3414 result = result * hashFactor + hashTableKey->depthStencilTargetDescription.loadOp;
3415 result = result * hashFactor + hashTableKey->depthStencilTargetDescription.storeOp;
3416 result = result * hashFactor + hashTableKey->depthStencilTargetDescription.stencilLoadOp;
3417 result = result * hashFactor + hashTableKey->depthStencilTargetDescription.stencilStoreOp;
3418 result = result * hashFactor + hashTableKey->depthStencilTargetDescription.format;
3419
3420 result = result * hashFactor + hashTableKey->sampleCount;
3421
3422 return result;
3423}
3424
3425static bool VULKAN_INTERNAL_RenderPassHashKeyMatch(
3426 const void *aKey,
3427 const void *bKey,
3428 void *data)
3429{
3430 RenderPassHashTableKey *a = (RenderPassHashTableKey *)aKey;
3431 RenderPassHashTableKey *b = (RenderPassHashTableKey *)bKey;
3432
3433 if (a->numColorTargets != b->numColorTargets) {
3434 return 0;
3435 }
3436
3437 if (a->numResolveTargets != b->numResolveTargets) {
3438 return 0;
3439 }
3440
3441 if (a->sampleCount != b->sampleCount) {
3442 return 0;
3443 }
3444
3445 for (Uint32 i = 0; i < a->numColorTargets; i += 1) {
3446 if (a->colorTargetDescriptions[i].format != b->colorTargetDescriptions[i].format) {
3447 return 0;
3448 }
3449
3450 if (a->colorTargetDescriptions[i].loadOp != b->colorTargetDescriptions[i].loadOp) {
3451 return 0;
3452 }
3453
3454 if (a->colorTargetDescriptions[i].storeOp != b->colorTargetDescriptions[i].storeOp) {
3455 return 0;
3456 }
3457 }
3458
3459 for (Uint32 i = 0; i < a->numResolveTargets; i += 1) {
3460 if (a->resolveTargetFormats[i] != b->resolveTargetFormats[i]) {
3461 return 0;
3462 }
3463 }
3464
3465 if (a->depthStencilTargetDescription.format != b->depthStencilTargetDescription.format) {
3466 return 0;
3467 }
3468
3469 if (a->depthStencilTargetDescription.loadOp != b->depthStencilTargetDescription.loadOp) {
3470 return 0;
3471 }
3472
3473 if (a->depthStencilTargetDescription.storeOp != b->depthStencilTargetDescription.storeOp) {
3474 return 0;
3475 }
3476
3477 if (a->depthStencilTargetDescription.stencilLoadOp != b->depthStencilTargetDescription.stencilLoadOp) {
3478 return 0;
3479 }
3480
3481 if (a->depthStencilTargetDescription.stencilStoreOp != b->depthStencilTargetDescription.stencilStoreOp) {
3482 return 0;
3483 }
3484
3485 return 1;
3486}
3487
3488static void VULKAN_INTERNAL_RenderPassHashNuke(const void *key, const void *value, void *data)
3489{
3490 VulkanRenderer *renderer = (VulkanRenderer *)data;
3491 VulkanRenderPassHashTableValue *renderPassWrapper = (VulkanRenderPassHashTableValue *)value;
3492 renderer->vkDestroyRenderPass(
3493 renderer->logicalDevice,
3494 renderPassWrapper->handle,
3495 NULL);
3496 SDL_free(renderPassWrapper);
3497 SDL_free((void *)key);
3498}
3499
3500static Uint32 VULKAN_INTERNAL_FramebufferHashFunction(
3501 const void *key,
3502 void *data)
3503{
3504 FramebufferHashTableKey *hashTableKey = (FramebufferHashTableKey *)key;
3505
3506 /* The algorithm for this hashing function
3507 * is taken from Josh Bloch's "Effective Java".
3508 * (https://stackoverflow.com/a/113600/12492383)
3509 */
3510 const Uint32 hashFactor = 31;
3511 Uint32 result = 1;
3512
3513 for (Uint32 i = 0; i < hashTableKey->numColorTargets; i += 1) {
3514 result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->colorAttachmentViews[i];
3515 }
3516 for (Uint32 i = 0; i < hashTableKey->numResolveAttachments; i += 1) {
3517 result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->resolveAttachmentViews[i];
3518 }
3519
3520 result = result * hashFactor + (Uint32)(uintptr_t)hashTableKey->depthStencilAttachmentView;
3521 result = result * hashFactor + hashTableKey->width;
3522 result = result * hashFactor + hashTableKey->height;
3523
3524 return result;
3525}
3526
3527static bool VULKAN_INTERNAL_FramebufferHashKeyMatch(
3528 const void *aKey,
3529 const void *bKey,
3530 void *data)
3531{
3532 FramebufferHashTableKey *a = (FramebufferHashTableKey *)aKey;
3533 FramebufferHashTableKey *b = (FramebufferHashTableKey *)bKey;
3534
3535 if (a->numColorTargets != b->numColorTargets) {
3536 return 0;
3537 }
3538
3539 if (a->numResolveAttachments != b->numResolveAttachments) {
3540 return 0;
3541 }
3542
3543 for (Uint32 i = 0; i < a->numColorTargets; i += 1) {
3544 if (a->colorAttachmentViews[i] != b->colorAttachmentViews[i]) {
3545 return 0;
3546 }
3547 }
3548
3549 for (Uint32 i = 0; i < a->numResolveAttachments; i += 1) {
3550 if (a->resolveAttachmentViews[i] != b->resolveAttachmentViews[i]) {
3551 return 0;
3552 }
3553 }
3554
3555 if (a->depthStencilAttachmentView != b->depthStencilAttachmentView) {
3556 return 0;
3557 }
3558
3559 if (a->width != b->width) {
3560 return 0;
3561 }
3562
3563 if (a->height != b->height) {
3564 return 0;
3565 }
3566
3567 return 1;
3568}
3569
3570static void VULKAN_INTERNAL_FramebufferHashNuke(const void *key, const void *value, void *data)
3571{
3572 VulkanRenderer *renderer = (VulkanRenderer *)data;
3573 VulkanFramebuffer *framebuffer = (VulkanFramebuffer *)value;
3574 VULKAN_INTERNAL_ReleaseFramebuffer(renderer, framebuffer);
3575 SDL_free((void *)key);
3576}
3577
3578// Descriptor pools
3579
3580static bool VULKAN_INTERNAL_AllocateDescriptorSets(
3581 VulkanRenderer *renderer,
3582 VkDescriptorPool descriptorPool,
3583 VkDescriptorSetLayout descriptorSetLayout,
3584 Uint32 descriptorSetCount,
3585 VkDescriptorSet *descriptorSetArray)
3586{
3587 VkDescriptorSetAllocateInfo descriptorSetAllocateInfo;
3588 VkDescriptorSetLayout *descriptorSetLayouts = SDL_stack_alloc(VkDescriptorSetLayout, descriptorSetCount);
3589 VkResult vulkanResult;
3590 Uint32 i;
3591
3592 for (i = 0; i < descriptorSetCount; i += 1) {
3593 descriptorSetLayouts[i] = descriptorSetLayout;
3594 }
3595
3596 descriptorSetAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
3597 descriptorSetAllocateInfo.pNext = NULL;
3598 descriptorSetAllocateInfo.descriptorPool = descriptorPool;
3599 descriptorSetAllocateInfo.descriptorSetCount = descriptorSetCount;
3600 descriptorSetAllocateInfo.pSetLayouts = descriptorSetLayouts;
3601
3602 vulkanResult = renderer->vkAllocateDescriptorSets(
3603 renderer->logicalDevice,
3604 &descriptorSetAllocateInfo,
3605 descriptorSetArray);
3606
3607 SDL_stack_free(descriptorSetLayouts);
3608
3609 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkAllocateDescriptorSets, false);
3610
3611 return true;
3612}
3613
3614static bool VULKAN_INTERNAL_AllocateDescriptorsFromPool(
3615 VulkanRenderer *renderer,
3616 DescriptorSetLayout *descriptorSetLayout,
3617 DescriptorSetPool *descriptorSetPool)
3618{
3619 VkDescriptorPoolSize descriptorPoolSizes[
3620 MAX_TEXTURE_SAMPLERS_PER_STAGE +
3621 MAX_STORAGE_TEXTURES_PER_STAGE +
3622 MAX_STORAGE_BUFFERS_PER_STAGE +
3623 MAX_COMPUTE_WRITE_TEXTURES +
3624 MAX_COMPUTE_WRITE_BUFFERS +
3625 MAX_UNIFORM_BUFFERS_PER_STAGE];
3626 VkDescriptorPoolCreateInfo descriptorPoolInfo;
3627 VkDescriptorPool pool;
3628 VkResult vulkanResult;
3629
3630 // Category 1
3631 for (Uint32 i = 0; i < descriptorSetLayout->samplerCount; i += 1) {
3632 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
3633 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
3634 }
3635
3636 for (Uint32 i = descriptorSetLayout->samplerCount; i < descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount; i += 1) {
3637 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
3638 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
3639 }
3640
3641 for (Uint32 i = descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount; i < descriptorSetLayout->samplerCount + descriptorSetLayout->storageTextureCount + descriptorSetLayout->storageBufferCount; i += 1) {
3642 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
3643 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
3644 }
3645
3646 // Category 2
3647 for (Uint32 i = 0; i < descriptorSetLayout->writeStorageTextureCount; i += 1) {
3648 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
3649 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
3650 }
3651
3652 for (Uint32 i = descriptorSetLayout->writeStorageTextureCount; i < descriptorSetLayout->writeStorageTextureCount + descriptorSetLayout->writeStorageBufferCount; i += 1) {
3653 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
3654 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
3655 }
3656
3657 // Category 3
3658 for (Uint32 i = 0; i < descriptorSetLayout->uniformBufferCount; i += 1) {
3659 descriptorPoolSizes[i].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
3660 descriptorPoolSizes[i].descriptorCount = DESCRIPTOR_POOL_SIZE;
3661 }
3662
3663 descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
3664 descriptorPoolInfo.pNext = NULL;
3665 descriptorPoolInfo.flags = 0;
3666 descriptorPoolInfo.maxSets = DESCRIPTOR_POOL_SIZE;
3667 descriptorPoolInfo.poolSizeCount =
3668 descriptorSetLayout->samplerCount +
3669 descriptorSetLayout->storageTextureCount +
3670 descriptorSetLayout->storageBufferCount +
3671 descriptorSetLayout->writeStorageTextureCount +
3672 descriptorSetLayout->writeStorageBufferCount +
3673 descriptorSetLayout->uniformBufferCount;
3674 descriptorPoolInfo.pPoolSizes = descriptorPoolSizes;
3675
3676 vulkanResult = renderer->vkCreateDescriptorPool(
3677 renderer->logicalDevice,
3678 &descriptorPoolInfo,
3679 NULL,
3680 &pool);
3681
3682 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDescriptorPool, false);
3683
3684 descriptorSetPool->poolCount += 1;
3685 descriptorSetPool->descriptorPools = SDL_realloc(
3686 descriptorSetPool->descriptorPools,
3687 sizeof(VkDescriptorPool) * descriptorSetPool->poolCount);
3688
3689 descriptorSetPool->descriptorPools[descriptorSetPool->poolCount - 1] = pool;
3690
3691 descriptorSetPool->descriptorSets = SDL_realloc(
3692 descriptorSetPool->descriptorSets,
3693 sizeof(VkDescriptorSet) * descriptorSetPool->poolCount * DESCRIPTOR_POOL_SIZE);
3694
3695 if (!VULKAN_INTERNAL_AllocateDescriptorSets(
3696 renderer,
3697 pool,
3698 descriptorSetLayout->descriptorSetLayout,
3699 DESCRIPTOR_POOL_SIZE,
3700 &descriptorSetPool->descriptorSets[descriptorSetPool->descriptorSetCount])) {
3701 return false;
3702 }
3703
3704 descriptorSetPool->descriptorSetCount += DESCRIPTOR_POOL_SIZE;
3705
3706 return true;
3707}
3708
3709// NOTE: these categories should be mutually exclusive
3710static DescriptorSetLayout *VULKAN_INTERNAL_FetchDescriptorSetLayout(
3711 VulkanRenderer *renderer,
3712 VkShaderStageFlagBits shaderStage,
3713 // Category 1: read resources
3714 Uint32 samplerCount,
3715 Uint32 storageTextureCount,
3716 Uint32 storageBufferCount,
3717 // Category 2: write resources
3718 Uint32 writeStorageTextureCount,
3719 Uint32 writeStorageBufferCount,
3720 // Category 3: uniform buffers
3721 Uint32 uniformBufferCount)
3722{
3723 DescriptorSetLayoutHashTableKey key;
3724 SDL_zero(key);
3725 DescriptorSetLayout *layout = NULL;
3726
3727 key.shaderStage = shaderStage;
3728 key.samplerCount = samplerCount;
3729 key.storageTextureCount = storageTextureCount;
3730 key.storageBufferCount = storageBufferCount;
3731 key.writeStorageTextureCount = writeStorageTextureCount;
3732 key.writeStorageBufferCount = writeStorageBufferCount;
3733 key.uniformBufferCount = uniformBufferCount;
3734
3735 if (SDL_FindInHashTable(
3736 renderer->descriptorSetLayoutHashTable,
3737 (const void *)&key,
3738 (const void **)&layout)) {
3739 return layout;
3740 }
3741
3742 VkDescriptorSetLayout descriptorSetLayout;
3743 VkDescriptorSetLayoutBinding descriptorSetLayoutBindings[
3744 MAX_TEXTURE_SAMPLERS_PER_STAGE +
3745 MAX_STORAGE_TEXTURES_PER_STAGE +
3746 MAX_STORAGE_BUFFERS_PER_STAGE +
3747 MAX_COMPUTE_WRITE_TEXTURES +
3748 MAX_COMPUTE_WRITE_BUFFERS];
3749
3750 VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo;
3751 descriptorSetLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
3752 descriptorSetLayoutCreateInfo.pNext = NULL;
3753 descriptorSetLayoutCreateInfo.flags = 0;
3754
3755 // Category 1
3756 for (Uint32 i = 0; i < samplerCount; i += 1) {
3757 descriptorSetLayoutBindings[i].binding = i;
3758 descriptorSetLayoutBindings[i].descriptorCount = 1;
3759 descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
3760 descriptorSetLayoutBindings[i].stageFlags = shaderStage;
3761 descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
3762 }
3763
3764 for (Uint32 i = samplerCount; i < samplerCount + storageTextureCount; i += 1) {
3765 descriptorSetLayoutBindings[i].binding = i;
3766 descriptorSetLayoutBindings[i].descriptorCount = 1;
3767 descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
3768 descriptorSetLayoutBindings[i].stageFlags = shaderStage;
3769 descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
3770 }
3771
3772 for (Uint32 i = samplerCount + storageTextureCount; i < samplerCount + storageTextureCount + storageBufferCount; i += 1) {
3773 descriptorSetLayoutBindings[i].binding = i;
3774 descriptorSetLayoutBindings[i].descriptorCount = 1;
3775 descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
3776 descriptorSetLayoutBindings[i].stageFlags = shaderStage;
3777 descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
3778 }
3779
3780 // Category 2
3781 for (Uint32 i = 0; i < writeStorageTextureCount; i += 1) {
3782 descriptorSetLayoutBindings[i].binding = i;
3783 descriptorSetLayoutBindings[i].descriptorCount = 1;
3784 descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
3785 descriptorSetLayoutBindings[i].stageFlags = shaderStage;
3786 descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
3787 }
3788
3789 for (Uint32 i = writeStorageTextureCount; i < writeStorageTextureCount + writeStorageBufferCount; i += 1) {
3790 descriptorSetLayoutBindings[i].binding = i;
3791 descriptorSetLayoutBindings[i].descriptorCount = 1;
3792 descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
3793 descriptorSetLayoutBindings[i].stageFlags = shaderStage;
3794 descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
3795 }
3796
3797 // Category 3
3798 for (Uint32 i = 0; i < uniformBufferCount; i += 1) {
3799 descriptorSetLayoutBindings[i].binding = i;
3800 descriptorSetLayoutBindings[i].descriptorCount = 1;
3801 descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
3802 descriptorSetLayoutBindings[i].stageFlags = shaderStage;
3803 descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
3804 }
3805
3806 descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
3807 descriptorSetLayoutCreateInfo.bindingCount =
3808 samplerCount +
3809 storageTextureCount +
3810 storageBufferCount +
3811 writeStorageTextureCount +
3812 writeStorageBufferCount +
3813 uniformBufferCount;
3814
3815 VkResult vulkanResult = renderer->vkCreateDescriptorSetLayout(
3816 renderer->logicalDevice,
3817 &descriptorSetLayoutCreateInfo,
3818 NULL,
3819 &descriptorSetLayout);
3820
3821 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDescriptorSetLayout, NULL);
3822
3823 layout = SDL_malloc(sizeof(DescriptorSetLayout));
3824 layout->descriptorSetLayout = descriptorSetLayout;
3825
3826 layout->samplerCount = samplerCount;
3827 layout->storageBufferCount = storageBufferCount;
3828 layout->storageTextureCount = storageTextureCount;
3829 layout->writeStorageBufferCount = writeStorageBufferCount;
3830 layout->writeStorageTextureCount = writeStorageTextureCount;
3831 layout->uniformBufferCount = uniformBufferCount;
3832
3833 layout->ID = SDL_AtomicIncRef(&renderer->layoutResourceID);
3834
3835 DescriptorSetLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(DescriptorSetLayoutHashTableKey));
3836 SDL_memcpy(allocedKey, &key, sizeof(DescriptorSetLayoutHashTableKey));
3837
3838 SDL_InsertIntoHashTable(
3839 renderer->descriptorSetLayoutHashTable,
3840 (const void *)allocedKey,
3841 (const void *)layout);
3842
3843 return layout;
3844}
3845
3846static VulkanGraphicsPipelineResourceLayout *VULKAN_INTERNAL_FetchGraphicsPipelineResourceLayout(
3847 VulkanRenderer *renderer,
3848 VulkanShader *vertexShader,
3849 VulkanShader *fragmentShader)
3850{
3851 GraphicsPipelineResourceLayoutHashTableKey key;
3852 SDL_zero(key);
3853 VulkanGraphicsPipelineResourceLayout *pipelineResourceLayout = NULL;
3854
3855 key.vertexSamplerCount = vertexShader->numSamplers;
3856 key.vertexStorageTextureCount = vertexShader->numStorageTextures;
3857 key.vertexStorageBufferCount = vertexShader->numStorageBuffers;
3858 key.vertexUniformBufferCount = vertexShader->numUniformBuffers;
3859 key.fragmentSamplerCount = fragmentShader->numSamplers;
3860 key.fragmentStorageTextureCount = fragmentShader->numStorageTextures;
3861 key.fragmentStorageBufferCount = fragmentShader->numStorageBuffers;
3862 key.fragmentUniformBufferCount = fragmentShader->numUniformBuffers;
3863 if (SDL_FindInHashTable(
3864 renderer->graphicsPipelineResourceLayoutHashTable,
3865 (const void *)&key,
3866 (const void **)&pipelineResourceLayout)) {
3867 return pipelineResourceLayout;
3868 }
3869
3870 VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
3871 VkDescriptorSetLayout descriptorSetLayouts[4];
3872 VkResult vulkanResult;
3873
3874 pipelineResourceLayout = SDL_calloc(1, sizeof(VulkanGraphicsPipelineResourceLayout));
3875
3876 pipelineResourceLayout->descriptorSetLayouts[0] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
3877 renderer,
3878 VK_SHADER_STAGE_VERTEX_BIT,
3879 vertexShader->numSamplers,
3880 vertexShader->numStorageTextures,
3881 vertexShader->numStorageBuffers,
3882 0,
3883 0,
3884 0);
3885
3886 pipelineResourceLayout->descriptorSetLayouts[1] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
3887 renderer,
3888 VK_SHADER_STAGE_VERTEX_BIT,
3889 0,
3890 0,
3891 0,
3892 0,
3893 0,
3894 vertexShader->numUniformBuffers);
3895
3896 pipelineResourceLayout->descriptorSetLayouts[2] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
3897 renderer,
3898 VK_SHADER_STAGE_FRAGMENT_BIT,
3899 fragmentShader->numSamplers,
3900 fragmentShader->numStorageTextures,
3901 fragmentShader->numStorageBuffers,
3902 0,
3903 0,
3904 0);
3905
3906 pipelineResourceLayout->descriptorSetLayouts[3] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
3907 renderer,
3908 VK_SHADER_STAGE_FRAGMENT_BIT,
3909 0,
3910 0,
3911 0,
3912 0,
3913 0,
3914 fragmentShader->numUniformBuffers);
3915
3916 descriptorSetLayouts[0] = pipelineResourceLayout->descriptorSetLayouts[0]->descriptorSetLayout;
3917 descriptorSetLayouts[1] = pipelineResourceLayout->descriptorSetLayouts[1]->descriptorSetLayout;
3918 descriptorSetLayouts[2] = pipelineResourceLayout->descriptorSetLayouts[2]->descriptorSetLayout;
3919 descriptorSetLayouts[3] = pipelineResourceLayout->descriptorSetLayouts[3]->descriptorSetLayout;
3920
3921 pipelineResourceLayout->vertexSamplerCount = vertexShader->numSamplers;
3922 pipelineResourceLayout->vertexStorageTextureCount = vertexShader->numStorageTextures;
3923 pipelineResourceLayout->vertexStorageBufferCount = vertexShader->numStorageBuffers;
3924 pipelineResourceLayout->vertexUniformBufferCount = vertexShader->numUniformBuffers;
3925
3926 pipelineResourceLayout->fragmentSamplerCount = fragmentShader->numSamplers;
3927 pipelineResourceLayout->fragmentStorageTextureCount = fragmentShader->numStorageTextures;
3928 pipelineResourceLayout->fragmentStorageBufferCount = fragmentShader->numStorageBuffers;
3929 pipelineResourceLayout->fragmentUniformBufferCount = fragmentShader->numUniformBuffers;
3930
3931 // Create the pipeline layout
3932
3933 pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
3934 pipelineLayoutCreateInfo.pNext = NULL;
3935 pipelineLayoutCreateInfo.flags = 0;
3936 pipelineLayoutCreateInfo.setLayoutCount = 4;
3937 pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts;
3938 pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
3939 pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
3940
3941 vulkanResult = renderer->vkCreatePipelineLayout(
3942 renderer->logicalDevice,
3943 &pipelineLayoutCreateInfo,
3944 NULL,
3945 &pipelineResourceLayout->pipelineLayout);
3946
3947 if (vulkanResult != VK_SUCCESS) {
3948 VULKAN_INTERNAL_DestroyGraphicsPipelineResourceLayout(renderer, pipelineResourceLayout);
3949 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreatePipelineLayout, NULL);
3950 }
3951
3952 GraphicsPipelineResourceLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(GraphicsPipelineResourceLayoutHashTableKey));
3953 SDL_memcpy(allocedKey, &key, sizeof(GraphicsPipelineResourceLayoutHashTableKey));
3954
3955 SDL_InsertIntoHashTable(
3956 renderer->graphicsPipelineResourceLayoutHashTable,
3957 (const void *)allocedKey,
3958 (const void *)pipelineResourceLayout);
3959
3960 return pipelineResourceLayout;
3961}
3962
3963static VulkanComputePipelineResourceLayout *VULKAN_INTERNAL_FetchComputePipelineResourceLayout(
3964 VulkanRenderer *renderer,
3965 const SDL_GPUComputePipelineCreateInfo *createinfo)
3966{
3967 ComputePipelineResourceLayoutHashTableKey key;
3968 SDL_zero(key);
3969 VulkanComputePipelineResourceLayout *pipelineResourceLayout = NULL;
3970
3971 key.samplerCount = createinfo->num_samplers;
3972 key.readonlyStorageTextureCount = createinfo->num_readonly_storage_textures;
3973 key.readonlyStorageBufferCount = createinfo->num_readonly_storage_buffers;
3974 key.readWriteStorageTextureCount = createinfo->num_readwrite_storage_textures;
3975 key.readWriteStorageBufferCount = createinfo->num_readwrite_storage_buffers;
3976 key.uniformBufferCount = createinfo->num_uniform_buffers;
3977
3978 if (SDL_FindInHashTable(
3979 renderer->computePipelineResourceLayoutHashTable,
3980 (const void *)&key,
3981 (const void **)&pipelineResourceLayout)) {
3982 return pipelineResourceLayout;
3983 }
3984
3985 VkDescriptorSetLayout descriptorSetLayouts[3];
3986 VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
3987 VkResult vulkanResult;
3988
3989 pipelineResourceLayout = SDL_calloc(1, sizeof(VulkanComputePipelineResourceLayout));
3990
3991 pipelineResourceLayout->descriptorSetLayouts[0] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
3992 renderer,
3993 VK_SHADER_STAGE_COMPUTE_BIT,
3994 createinfo->num_samplers,
3995 createinfo->num_readonly_storage_textures,
3996 createinfo->num_readonly_storage_buffers,
3997 0,
3998 0,
3999 0);
4000
4001 pipelineResourceLayout->descriptorSetLayouts[1] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
4002 renderer,
4003 VK_SHADER_STAGE_COMPUTE_BIT,
4004 0,
4005 0,
4006 0,
4007 createinfo->num_readwrite_storage_textures,
4008 createinfo->num_readwrite_storage_buffers,
4009 0);
4010
4011 pipelineResourceLayout->descriptorSetLayouts[2] = VULKAN_INTERNAL_FetchDescriptorSetLayout(
4012 renderer,
4013 VK_SHADER_STAGE_COMPUTE_BIT,
4014 0,
4015 0,
4016 0,
4017 0,
4018 0,
4019 createinfo->num_uniform_buffers);
4020
4021 descriptorSetLayouts[0] = pipelineResourceLayout->descriptorSetLayouts[0]->descriptorSetLayout;
4022 descriptorSetLayouts[1] = pipelineResourceLayout->descriptorSetLayouts[1]->descriptorSetLayout;
4023 descriptorSetLayouts[2] = pipelineResourceLayout->descriptorSetLayouts[2]->descriptorSetLayout;
4024
4025 pipelineResourceLayout->numSamplers = createinfo->num_samplers;
4026 pipelineResourceLayout->numReadonlyStorageTextures = createinfo->num_readonly_storage_textures;
4027 pipelineResourceLayout->numReadonlyStorageBuffers = createinfo->num_readonly_storage_buffers;
4028 pipelineResourceLayout->numReadWriteStorageTextures = createinfo->num_readwrite_storage_textures;
4029 pipelineResourceLayout->numReadWriteStorageBuffers = createinfo->num_readwrite_storage_buffers;
4030 pipelineResourceLayout->numUniformBuffers = createinfo->num_uniform_buffers;
4031
4032 // Create the pipeline layout
4033
4034 pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
4035 pipelineLayoutCreateInfo.pNext = NULL;
4036 pipelineLayoutCreateInfo.flags = 0;
4037 pipelineLayoutCreateInfo.setLayoutCount = 3;
4038 pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts;
4039 pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
4040 pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
4041
4042 vulkanResult = renderer->vkCreatePipelineLayout(
4043 renderer->logicalDevice,
4044 &pipelineLayoutCreateInfo,
4045 NULL,
4046 &pipelineResourceLayout->pipelineLayout);
4047
4048 if (vulkanResult != VK_SUCCESS) {
4049 VULKAN_INTERNAL_DestroyComputePipelineResourceLayout(renderer, pipelineResourceLayout);
4050 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreatePipelineLayout, NULL);
4051 }
4052
4053 ComputePipelineResourceLayoutHashTableKey *allocedKey = SDL_malloc(sizeof(ComputePipelineResourceLayoutHashTableKey));
4054 SDL_memcpy(allocedKey, &key, sizeof(ComputePipelineResourceLayoutHashTableKey));
4055
4056 SDL_InsertIntoHashTable(
4057 renderer->computePipelineResourceLayoutHashTable,
4058 (const void *)allocedKey,
4059 (const void *)pipelineResourceLayout);
4060
4061 return pipelineResourceLayout;
4062}
4063
4064// Data Buffer
4065
4066static VulkanBuffer *VULKAN_INTERNAL_CreateBuffer(
4067 VulkanRenderer *renderer,
4068 VkDeviceSize size,
4069 SDL_GPUBufferUsageFlags usageFlags,
4070 VulkanBufferType type,
4071 bool dedicated)
4072{
4073 VulkanBuffer *buffer;
4074 VkResult vulkanResult;
4075 VkBufferCreateInfo createinfo;
4076 VkBufferUsageFlags vulkanUsageFlags = 0;
4077 Uint8 bindResult;
4078
4079 if (usageFlags & SDL_GPU_BUFFERUSAGE_VERTEX) {
4080 vulkanUsageFlags |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4081 }
4082
4083 if (usageFlags & SDL_GPU_BUFFERUSAGE_INDEX) {
4084 vulkanUsageFlags |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
4085 }
4086
4087 if (usageFlags & (SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ |
4088 SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ |
4089 SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE)) {
4090 vulkanUsageFlags |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
4091 }
4092
4093 if (usageFlags & SDL_GPU_BUFFERUSAGE_INDIRECT) {
4094 vulkanUsageFlags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
4095 }
4096
4097 if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
4098 vulkanUsageFlags |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
4099 } else {
4100 // GPU buffers need transfer bits for defrag, transfer buffers need them for transfers
4101 vulkanUsageFlags |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4102 }
4103
4104 buffer = SDL_malloc(sizeof(VulkanBuffer));
4105
4106 buffer->size = size;
4107 buffer->usage = usageFlags;
4108 buffer->type = type;
4109 buffer->markedForDestroy = false;
4110 buffer->transitioned = false;
4111
4112 createinfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4113 createinfo.pNext = NULL;
4114 createinfo.flags = 0;
4115 createinfo.size = size;
4116 createinfo.usage = vulkanUsageFlags;
4117 createinfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
4118 createinfo.queueFamilyIndexCount = 1;
4119 createinfo.pQueueFamilyIndices = &renderer->queueFamilyIndex;
4120
4121 // Set transfer bits so we can defrag
4122 createinfo.usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4123
4124 vulkanResult = renderer->vkCreateBuffer(
4125 renderer->logicalDevice,
4126 &createinfo,
4127 NULL,
4128 &buffer->buffer);
4129
4130 if (vulkanResult != VK_SUCCESS) {
4131 SDL_free(buffer);
4132 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateBuffer, NULL);
4133 }
4134
4135 bindResult = VULKAN_INTERNAL_BindMemoryForBuffer(
4136 renderer,
4137 buffer->buffer,
4138 buffer->size,
4139 buffer->type,
4140 dedicated,
4141 &buffer->usedRegion);
4142
4143 if (bindResult != 1) {
4144 renderer->vkDestroyBuffer(
4145 renderer->logicalDevice,
4146 buffer->buffer,
4147 NULL);
4148
4149 SDL_free(buffer);
4150 return NULL;
4151 }
4152
4153 buffer->usedRegion->vulkanBuffer = buffer; // lol
4154
4155 SDL_SetAtomicInt(&buffer->referenceCount, 0);
4156
4157 return buffer;
4158}
4159
4160static VulkanBufferContainer *VULKAN_INTERNAL_CreateBufferContainer(
4161 VulkanRenderer *renderer,
4162 VkDeviceSize size,
4163 SDL_GPUBufferUsageFlags usageFlags,
4164 VulkanBufferType type,
4165 bool dedicated)
4166{
4167 VulkanBufferContainer *bufferContainer;
4168 VulkanBuffer *buffer;
4169
4170 buffer = VULKAN_INTERNAL_CreateBuffer(
4171 renderer,
4172 size,
4173 usageFlags,
4174 type,
4175 dedicated);
4176
4177 if (buffer == NULL) {
4178 return NULL;
4179 }
4180
4181 bufferContainer = SDL_malloc(sizeof(VulkanBufferContainer));
4182
4183 bufferContainer->activeBuffer = buffer;
4184 buffer->container = bufferContainer;
4185 buffer->containerIndex = 0;
4186
4187 bufferContainer->bufferCapacity = 1;
4188 bufferContainer->bufferCount = 1;
4189 bufferContainer->buffers = SDL_malloc(
4190 bufferContainer->bufferCapacity * sizeof(VulkanBuffer *));
4191 bufferContainer->buffers[0] = bufferContainer->activeBuffer;
4192 bufferContainer->dedicated = dedicated;
4193 bufferContainer->debugName = NULL;
4194
4195 return bufferContainer;
4196}
4197
4198// Texture Subresource Utilities
4199
4200static Uint32 VULKAN_INTERNAL_GetTextureSubresourceIndex(
4201 Uint32 mipLevel,
4202 Uint32 layer,
4203 Uint32 numLevels)
4204{
4205 return mipLevel + (layer * numLevels);
4206}
4207
4208static VulkanTextureSubresource *VULKAN_INTERNAL_FetchTextureSubresource(
4209 VulkanTextureContainer *textureContainer,
4210 Uint32 layer,
4211 Uint32 level)
4212{
4213 Uint32 index = VULKAN_INTERNAL_GetTextureSubresourceIndex(
4214 level,
4215 layer,
4216 textureContainer->header.info.num_levels);
4217
4218 return &textureContainer->activeTexture->subresources[index];
4219}
4220
4221static bool VULKAN_INTERNAL_CreateRenderTargetView(
4222 VulkanRenderer *renderer,
4223 VulkanTexture *texture,
4224 Uint32 layerOrDepth,
4225 Uint32 level,
4226 VkFormat format,
4227 VkComponentMapping swizzle,
4228 VkImageView *pView)
4229{
4230 VkResult vulkanResult;
4231 VkImageViewCreateInfo imageViewCreateInfo;
4232
4233 // create framebuffer compatible views for RenderTarget
4234 imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
4235 imageViewCreateInfo.pNext = NULL;
4236 imageViewCreateInfo.flags = 0;
4237 imageViewCreateInfo.image = texture->image;
4238 imageViewCreateInfo.format = format;
4239 imageViewCreateInfo.components = swizzle;
4240 imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
4241 imageViewCreateInfo.subresourceRange.baseMipLevel = level;
4242 imageViewCreateInfo.subresourceRange.levelCount = 1;
4243 imageViewCreateInfo.subresourceRange.baseArrayLayer = layerOrDepth;
4244 imageViewCreateInfo.subresourceRange.layerCount = 1;
4245 imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
4246
4247 vulkanResult = renderer->vkCreateImageView(
4248 renderer->logicalDevice,
4249 &imageViewCreateInfo,
4250 NULL,
4251 pView);
4252
4253 if (vulkanResult != VK_SUCCESS) {
4254 *pView = (VkImageView)VK_NULL_HANDLE;
4255 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImageView, false);
4256 }
4257
4258 return true;
4259}
4260
4261static bool VULKAN_INTERNAL_CreateSubresourceView(
4262 VulkanRenderer *renderer,
4263 const SDL_GPUTextureCreateInfo *createinfo,
4264 VulkanTexture *texture,
4265 Uint32 layer,
4266 Uint32 level,
4267 VkComponentMapping swizzle,
4268 VkImageView *pView)
4269{
4270 VkResult vulkanResult;
4271 VkImageViewCreateInfo imageViewCreateInfo;
4272
4273 // create framebuffer compatible views for RenderTarget
4274 imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
4275 imageViewCreateInfo.pNext = NULL;
4276 imageViewCreateInfo.flags = 0;
4277 imageViewCreateInfo.image = texture->image;
4278 imageViewCreateInfo.format = SDLToVK_TextureFormat[createinfo->format];
4279 imageViewCreateInfo.components = swizzle;
4280 imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
4281 imageViewCreateInfo.subresourceRange.baseMipLevel = level;
4282 imageViewCreateInfo.subresourceRange.levelCount = 1;
4283 imageViewCreateInfo.subresourceRange.baseArrayLayer = layer;
4284 imageViewCreateInfo.subresourceRange.layerCount = 1;
4285 imageViewCreateInfo.viewType = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? VK_IMAGE_VIEW_TYPE_3D : VK_IMAGE_VIEW_TYPE_2D;
4286
4287 vulkanResult = renderer->vkCreateImageView(
4288 renderer->logicalDevice,
4289 &imageViewCreateInfo,
4290 NULL,
4291 pView);
4292
4293 if (vulkanResult != VK_SUCCESS) {
4294 *pView = (VkImageView)VK_NULL_HANDLE;
4295 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImageView, false);
4296 }
4297
4298 return true;
4299}
4300
4301// Swapchain
4302
4303static bool VULKAN_INTERNAL_QuerySwapchainSupport(
4304 VulkanRenderer *renderer,
4305 VkPhysicalDevice physicalDevice,
4306 VkSurfaceKHR surface,
4307 SwapchainSupportDetails *outputDetails)
4308{
4309 VkResult result;
4310 VkBool32 supportsPresent;
4311
4312 renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
4313 physicalDevice,
4314 renderer->queueFamilyIndex,
4315 surface,
4316 &supportsPresent);
4317
4318 // Initialize these in case anything fails
4319 outputDetails->formatsLength = 0;
4320 outputDetails->presentModesLength = 0;
4321
4322 if (!supportsPresent) {
4323 SET_STRING_ERROR_AND_RETURN("This surface does not support presenting!", false);
4324 }
4325
4326 // Run the device surface queries
4327 result = renderer->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
4328 physicalDevice,
4329 surface,
4330 &outputDetails->capabilities);
4331 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceCapabilitiesKHR, false);
4332
4333 if (!(outputDetails->capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
4334 SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Opaque presentation unsupported! Expect weird transparency bugs!");
4335 }
4336
4337 result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
4338 physicalDevice,
4339 surface,
4340 &outputDetails->formatsLength,
4341 NULL);
4342 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceFormatsKHR, false);
4343 result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
4344 physicalDevice,
4345 surface,
4346 &outputDetails->presentModesLength,
4347 NULL);
4348 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfacePresentModesKHR, false);
4349
4350 // Generate the arrays, if applicable
4351
4352 outputDetails->formats = NULL;
4353 if (outputDetails->formatsLength != 0) {
4354 outputDetails->formats = (VkSurfaceFormatKHR *)SDL_malloc(
4355 sizeof(VkSurfaceFormatKHR) * outputDetails->formatsLength);
4356
4357 if (!outputDetails->formats) { // OOM
4358 return false;
4359 }
4360
4361 result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
4362 physicalDevice,
4363 surface,
4364 &outputDetails->formatsLength,
4365 outputDetails->formats);
4366 if (result != VK_SUCCESS) {
4367 SDL_free(outputDetails->formats);
4368 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfaceFormatsKHR, false);
4369 }
4370 }
4371
4372 outputDetails->presentModes = NULL;
4373 if (outputDetails->presentModesLength != 0) {
4374 outputDetails->presentModes = (VkPresentModeKHR *)SDL_malloc(
4375 sizeof(VkPresentModeKHR) * outputDetails->presentModesLength);
4376
4377 if (!outputDetails->presentModes) { // OOM
4378 SDL_free(outputDetails->formats);
4379 return false;
4380 }
4381
4382 result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
4383 physicalDevice,
4384 surface,
4385 &outputDetails->presentModesLength,
4386 outputDetails->presentModes);
4387 if (result != VK_SUCCESS) {
4388 SDL_free(outputDetails->formats);
4389 SDL_free(outputDetails->presentModes);
4390 CHECK_VULKAN_ERROR_AND_RETURN(result, vkGetPhysicalDeviceSurfacePresentModesKHR, false);
4391 }
4392 }
4393
4394 /* If we made it here, all the queries were successful. This does NOT
4395 * necessarily mean there are any supported formats or present modes!
4396 */
4397 return true;
4398}
4399
4400static bool VULKAN_INTERNAL_VerifySwapSurfaceFormat(
4401 VkFormat desiredFormat,
4402 VkColorSpaceKHR desiredColorSpace,
4403 VkSurfaceFormatKHR *availableFormats,
4404 Uint32 availableFormatsLength)
4405{
4406 Uint32 i;
4407 for (i = 0; i < availableFormatsLength; i += 1) {
4408 if (availableFormats[i].format == desiredFormat &&
4409 availableFormats[i].colorSpace == desiredColorSpace) {
4410 return true;
4411 }
4412 }
4413 return false;
4414}
4415
4416static bool VULKAN_INTERNAL_VerifySwapPresentMode(
4417 VkPresentModeKHR presentMode,
4418 VkPresentModeKHR *availablePresentModes,
4419 Uint32 availablePresentModesLength)
4420{
4421 Uint32 i;
4422 for (i = 0; i < availablePresentModesLength; i += 1) {
4423 if (availablePresentModes[i] == presentMode) {
4424 return true;
4425 }
4426 }
4427 return false;
4428}
4429
4430/* It would be nice if VULKAN_INTERNAL_CreateSwapchain could return a bool.
4431 * Unfortunately, some Win32 NVIDIA drivers are stupid
4432 * and will return surface extents of (0, 0)
4433 * in certain edge cases, and the swapchain extents are not allowed to be 0.
4434 * In this case, the client probably still wants to claim the window
4435 * or recreate the swapchain, so we should return 2 to indicate retry.
4436 * -cosmonaut
4437 */
4438#define VULKAN_INTERNAL_TRY_AGAIN 2
4439
4440static Uint32 VULKAN_INTERNAL_CreateSwapchain(
4441 VulkanRenderer *renderer,
4442 WindowData *windowData)
4443{
4444 VkResult vulkanResult;
4445 VkSwapchainCreateInfoKHR swapchainCreateInfo;
4446 VkImage *swapchainImages;
4447 VkSemaphoreCreateInfo semaphoreCreateInfo;
4448 SwapchainSupportDetails swapchainSupportDetails;
4449 bool hasValidSwapchainComposition, hasValidPresentMode;
4450 Uint32 i;
4451
4452 windowData->frameCounter = 0;
4453
4454 SDL_VideoDevice *_this = SDL_GetVideoDevice();
4455 SDL_assert(_this && _this->Vulkan_CreateSurface);
4456
4457 // Each swapchain must have its own surface.
4458 if (!_this->Vulkan_CreateSurface(
4459 _this,
4460 windowData->window,
4461 renderer->instance,
4462 NULL, // FIXME: VAllocationCallbacks
4463 &windowData->surface)) {
4464 return false;
4465 }
4466
4467 if (!VULKAN_INTERNAL_QuerySwapchainSupport(
4468 renderer,
4469 renderer->physicalDevice,
4470 windowData->surface,
4471 &swapchainSupportDetails)) {
4472 renderer->vkDestroySurfaceKHR(
4473 renderer->instance,
4474 windowData->surface,
4475 NULL);
4476 windowData->surface = VK_NULL_HANDLE;
4477 if (swapchainSupportDetails.formatsLength > 0) {
4478 SDL_free(swapchainSupportDetails.formats);
4479 }
4480 if (swapchainSupportDetails.presentModesLength > 0) {
4481 SDL_free(swapchainSupportDetails.presentModes);
4482 }
4483 return false;
4484 }
4485
4486 // Verify that we can use the requested composition and present mode
4487 windowData->format = SwapchainCompositionToFormat[windowData->swapchainComposition];
4488 windowData->colorSpace = SwapchainCompositionToColorSpace[windowData->swapchainComposition];
4489 windowData->swapchainSwizzle = SwapchainCompositionSwizzle[windowData->swapchainComposition];
4490 windowData->usingFallbackFormat = false;
4491
4492 hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
4493 windowData->format,
4494 windowData->colorSpace,
4495 swapchainSupportDetails.formats,
4496 swapchainSupportDetails.formatsLength);
4497
4498 if (!hasValidSwapchainComposition) {
4499 // Let's try again with the fallback format...
4500 windowData->format = SwapchainCompositionToFallbackFormat[windowData->swapchainComposition];
4501 windowData->usingFallbackFormat = true;
4502 hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
4503 windowData->format,
4504 windowData->colorSpace,
4505 swapchainSupportDetails.formats,
4506 swapchainSupportDetails.formatsLength);
4507 }
4508
4509 hasValidPresentMode = VULKAN_INTERNAL_VerifySwapPresentMode(
4510 SDLToVK_PresentMode[windowData->presentMode],
4511 swapchainSupportDetails.presentModes,
4512 swapchainSupportDetails.presentModesLength);
4513
4514 if (!hasValidSwapchainComposition || !hasValidPresentMode) {
4515 renderer->vkDestroySurfaceKHR(
4516 renderer->instance,
4517 windowData->surface,
4518 NULL);
4519 windowData->surface = VK_NULL_HANDLE;
4520
4521 if (swapchainSupportDetails.formatsLength > 0) {
4522 SDL_free(swapchainSupportDetails.formats);
4523 }
4524
4525 if (swapchainSupportDetails.presentModesLength > 0) {
4526 SDL_free(swapchainSupportDetails.presentModes);
4527 }
4528
4529 if (!hasValidSwapchainComposition) {
4530 SET_STRING_ERROR_AND_RETURN("Device does not support requested swapchain composition!", false);
4531 }
4532 if (!hasValidPresentMode) {
4533 SET_STRING_ERROR_AND_RETURN("Device does not support requested present_mode!", false);
4534 }
4535 return false;
4536 }
4537
4538 // NVIDIA + Win32 can return 0 extent when the window is minimized. Try again!
4539 if (swapchainSupportDetails.capabilities.currentExtent.width == 0 ||
4540 swapchainSupportDetails.capabilities.currentExtent.height == 0) {
4541 renderer->vkDestroySurfaceKHR(
4542 renderer->instance,
4543 windowData->surface,
4544 NULL);
4545 windowData->surface = VK_NULL_HANDLE;
4546 if (swapchainSupportDetails.formatsLength > 0) {
4547 SDL_free(swapchainSupportDetails.formats);
4548 }
4549 if (swapchainSupportDetails.presentModesLength > 0) {
4550 SDL_free(swapchainSupportDetails.presentModes);
4551 }
4552 return VULKAN_INTERNAL_TRY_AGAIN;
4553 }
4554
4555 Uint32 requestedImageCount = renderer->allowedFramesInFlight;
4556
4557#ifdef SDL_PLATFORM_APPLE
4558 windowData->width = swapchainSupportDetails.capabilities.currentExtent.width;
4559 windowData->height = swapchainSupportDetails.capabilities.currentExtent.height;
4560#else
4561 windowData->width = SDL_clamp(
4562 windowData->swapchainCreateWidth,
4563 swapchainSupportDetails.capabilities.minImageExtent.width,
4564 swapchainSupportDetails.capabilities.maxImageExtent.width);
4565 windowData->height = SDL_clamp(windowData->swapchainCreateHeight,
4566 swapchainSupportDetails.capabilities.minImageExtent.height,
4567 swapchainSupportDetails.capabilities.maxImageExtent.height);
4568#endif
4569
4570 if (swapchainSupportDetails.capabilities.maxImageCount > 0 &&
4571 requestedImageCount > swapchainSupportDetails.capabilities.maxImageCount) {
4572 requestedImageCount = swapchainSupportDetails.capabilities.maxImageCount;
4573 }
4574
4575 if (requestedImageCount < swapchainSupportDetails.capabilities.minImageCount) {
4576 requestedImageCount = swapchainSupportDetails.capabilities.minImageCount;
4577 }
4578
4579 if (windowData->presentMode == SDL_GPU_PRESENTMODE_MAILBOX) {
4580 /* Required for proper triple-buffering.
4581 * Note that this is below the above maxImageCount check!
4582 * If the driver advertises MAILBOX but does not support 3 swap
4583 * images, it's not real mailbox support, so let it fail hard.
4584 * -flibit
4585 */
4586 requestedImageCount = SDL_max(requestedImageCount, 3);
4587 }
4588
4589 swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
4590 swapchainCreateInfo.pNext = NULL;
4591 swapchainCreateInfo.flags = 0;
4592 swapchainCreateInfo.surface = windowData->surface;
4593 swapchainCreateInfo.minImageCount = requestedImageCount;
4594 swapchainCreateInfo.imageFormat = windowData->format;
4595 swapchainCreateInfo.imageColorSpace = windowData->colorSpace;
4596 swapchainCreateInfo.imageExtent.width = windowData->width;
4597 swapchainCreateInfo.imageExtent.height = windowData->height;
4598 swapchainCreateInfo.imageArrayLayers = 1;
4599 swapchainCreateInfo.imageUsage =
4600 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
4601 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4602 swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
4603 swapchainCreateInfo.queueFamilyIndexCount = 0;
4604 swapchainCreateInfo.pQueueFamilyIndices = NULL;
4605 swapchainCreateInfo.preTransform = swapchainSupportDetails.capabilities.currentTransform;
4606 swapchainCreateInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
4607 swapchainCreateInfo.presentMode = SDLToVK_PresentMode[windowData->presentMode];
4608 swapchainCreateInfo.clipped = VK_TRUE;
4609 swapchainCreateInfo.oldSwapchain = VK_NULL_HANDLE;
4610
4611 vulkanResult = renderer->vkCreateSwapchainKHR(
4612 renderer->logicalDevice,
4613 &swapchainCreateInfo,
4614 NULL,
4615 &windowData->swapchain);
4616
4617 if (swapchainSupportDetails.formatsLength > 0) {
4618 SDL_free(swapchainSupportDetails.formats);
4619 }
4620 if (swapchainSupportDetails.presentModesLength > 0) {
4621 SDL_free(swapchainSupportDetails.presentModes);
4622 }
4623
4624 if (vulkanResult != VK_SUCCESS) {
4625 renderer->vkDestroySurfaceKHR(
4626 renderer->instance,
4627 windowData->surface,
4628 NULL);
4629 windowData->surface = VK_NULL_HANDLE;
4630 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSwapchainKHR, false);
4631 }
4632
4633 vulkanResult = renderer->vkGetSwapchainImagesKHR(
4634 renderer->logicalDevice,
4635 windowData->swapchain,
4636 &windowData->imageCount,
4637 NULL);
4638 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkGetSwapchainImagesKHR, false);
4639
4640 windowData->textureContainers = SDL_malloc(
4641 sizeof(VulkanTextureContainer) * windowData->imageCount);
4642
4643 if (!windowData->textureContainers) { // OOM
4644 renderer->vkDestroySurfaceKHR(
4645 renderer->instance,
4646 windowData->surface,
4647 NULL);
4648 renderer->vkDestroySwapchainKHR(
4649 renderer->logicalDevice,
4650 windowData->swapchain,
4651 NULL);
4652 windowData->surface = VK_NULL_HANDLE;
4653 windowData->swapchain = VK_NULL_HANDLE;
4654 return false;
4655 }
4656
4657 swapchainImages = SDL_stack_alloc(VkImage, windowData->imageCount);
4658
4659 vulkanResult = renderer->vkGetSwapchainImagesKHR(
4660 renderer->logicalDevice,
4661 windowData->swapchain,
4662 &windowData->imageCount,
4663 swapchainImages);
4664 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkGetSwapchainImagesKHR, false);
4665
4666 for (i = 0; i < windowData->imageCount; i += 1) {
4667
4668 // Initialize dummy container
4669 SDL_zero(windowData->textureContainers[i]);
4670 windowData->textureContainers[i].canBeCycled = false;
4671 windowData->textureContainers[i].header.info.width = windowData->width;
4672 windowData->textureContainers[i].header.info.height = windowData->height;
4673 windowData->textureContainers[i].header.info.layer_count_or_depth = 1;
4674 windowData->textureContainers[i].header.info.format = SwapchainCompositionToSDLFormat(
4675 windowData->swapchainComposition,
4676 windowData->usingFallbackFormat);
4677 windowData->textureContainers[i].header.info.type = SDL_GPU_TEXTURETYPE_2D;
4678 windowData->textureContainers[i].header.info.num_levels = 1;
4679 windowData->textureContainers[i].header.info.sample_count = SDL_GPU_SAMPLECOUNT_1;
4680 windowData->textureContainers[i].header.info.usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET;
4681
4682 windowData->textureContainers[i].activeTexture = SDL_malloc(sizeof(VulkanTexture));
4683 windowData->textureContainers[i].activeTexture->image = swapchainImages[i];
4684
4685 // Swapchain memory is managed by the driver
4686 windowData->textureContainers[i].activeTexture->usedRegion = NULL;
4687
4688 windowData->textureContainers[i].activeTexture->swizzle = windowData->swapchainSwizzle;
4689 windowData->textureContainers[i].activeTexture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
4690 windowData->textureContainers[i].activeTexture->depth = 1;
4691 windowData->textureContainers[i].activeTexture->usage = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET;
4692 windowData->textureContainers[i].activeTexture->container = &windowData->textureContainers[i];
4693 SDL_SetAtomicInt(&windowData->textureContainers[i].activeTexture->referenceCount, 0);
4694
4695 // Create slice
4696 windowData->textureContainers[i].activeTexture->subresourceCount = 1;
4697 windowData->textureContainers[i].activeTexture->subresources = SDL_malloc(sizeof(VulkanTextureSubresource));
4698 windowData->textureContainers[i].activeTexture->subresources[0].parent = windowData->textureContainers[i].activeTexture;
4699 windowData->textureContainers[i].activeTexture->subresources[0].layer = 0;
4700 windowData->textureContainers[i].activeTexture->subresources[0].level = 0;
4701 windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews = SDL_malloc(sizeof(VkImageView));
4702 if (!VULKAN_INTERNAL_CreateRenderTargetView(
4703 renderer,
4704 windowData->textureContainers[i].activeTexture,
4705 0,
4706 0,
4707 windowData->format,
4708 windowData->swapchainSwizzle,
4709 &windowData->textureContainers[i].activeTexture->subresources[0].renderTargetViews[0])) {
4710 renderer->vkDestroySurfaceKHR(
4711 renderer->instance,
4712 windowData->surface,
4713 NULL);
4714 renderer->vkDestroySwapchainKHR(
4715 renderer->logicalDevice,
4716 windowData->swapchain,
4717 NULL);
4718 windowData->surface = VK_NULL_HANDLE;
4719 windowData->swapchain = VK_NULL_HANDLE;
4720 return false;
4721 }
4722 }
4723
4724 SDL_stack_free(swapchainImages);
4725
4726 semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
4727 semaphoreCreateInfo.pNext = NULL;
4728 semaphoreCreateInfo.flags = 0;
4729
4730 for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
4731 vulkanResult = renderer->vkCreateSemaphore(
4732 renderer->logicalDevice,
4733 &semaphoreCreateInfo,
4734 NULL,
4735 &windowData->imageAvailableSemaphore[i]);
4736
4737 if (vulkanResult != VK_SUCCESS) {
4738 renderer->vkDestroySurfaceKHR(
4739 renderer->instance,
4740 windowData->surface,
4741 NULL);
4742 renderer->vkDestroySwapchainKHR(
4743 renderer->logicalDevice,
4744 windowData->swapchain,
4745 NULL);
4746 windowData->surface = VK_NULL_HANDLE;
4747 windowData->swapchain = VK_NULL_HANDLE;
4748 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSemaphore, false);
4749 }
4750
4751 renderer->vkCreateSemaphore(
4752 renderer->logicalDevice,
4753 &semaphoreCreateInfo,
4754 NULL,
4755 &windowData->renderFinishedSemaphore[i]);
4756
4757 if (vulkanResult != VK_SUCCESS) {
4758 renderer->vkDestroySurfaceKHR(
4759 renderer->instance,
4760 windowData->surface,
4761 NULL);
4762 renderer->vkDestroySwapchainKHR(
4763 renderer->logicalDevice,
4764 windowData->swapchain,
4765 NULL);
4766 windowData->surface = VK_NULL_HANDLE;
4767 windowData->swapchain = VK_NULL_HANDLE;
4768 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSemaphore, false);
4769 }
4770
4771 windowData->inFlightFences[i] = NULL;
4772 }
4773
4774 windowData->needsSwapchainRecreate = false;
4775 return true;
4776}
4777
4778// Command Buffers
4779
4780static bool VULKAN_INTERNAL_BeginCommandBuffer(
4781 VulkanRenderer *renderer,
4782 VulkanCommandBuffer *commandBuffer)
4783{
4784 VkCommandBufferBeginInfo beginInfo;
4785 VkResult result;
4786
4787 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
4788 beginInfo.pNext = NULL;
4789 beginInfo.flags = 0;
4790 beginInfo.pInheritanceInfo = NULL;
4791 beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
4792
4793 result = renderer->vkBeginCommandBuffer(
4794 commandBuffer->commandBuffer,
4795 &beginInfo);
4796
4797 CHECK_VULKAN_ERROR_AND_RETURN(result, vkBeginCommandBuffer, false);
4798
4799 return true;
4800}
4801
4802static bool VULKAN_INTERNAL_EndCommandBuffer(
4803 VulkanRenderer *renderer,
4804 VulkanCommandBuffer *commandBuffer)
4805{
4806 VkResult result = renderer->vkEndCommandBuffer(
4807 commandBuffer->commandBuffer);
4808
4809 CHECK_VULKAN_ERROR_AND_RETURN(result, vkEndCommandBuffer, false);
4810
4811 return true;
4812}
4813
// Tears down the whole Vulkan GPU device.
// The ordering below is deliberate: drain the GPU, release window swapchains,
// drain again, free pooled CPU-side objects, destroy hash tables, then walk
// the memory allocator to release every remaining VkDeviceMemory allocation,
// and only then destroy the VkDevice/VkInstance and unload the loader.
static void VULKAN_DestroyDevice(
    SDL_GPUDevice *device)
{
    VulkanRenderer *renderer = (VulkanRenderer *)device->driverData;
    VulkanMemorySubAllocator *allocator;

    // Make sure no work is in flight before touching anything.
    VULKAN_Wait(device->driverData);

    // Release windows in reverse claim order; this destroys their
    // surfaces/swapchains and shrinks claimedWindows as it goes.
    for (Sint32 i = renderer->claimedWindowCount - 1; i >= 0; i -= 1) {
        VULKAN_ReleaseWindow(device->driverData, renderer->claimedWindows[i]->window);
    }

    SDL_free(renderer->claimedWindows);

    // Releasing windows may have submitted work (e.g. final transitions);
    // wait again so everything below is safe to destroy.
    VULKAN_Wait(device->driverData);

    SDL_free(renderer->submittedCommandBuffers);

    for (Uint32 i = 0; i < renderer->uniformBufferPoolCount; i += 1) {
        VULKAN_INTERNAL_DestroyBuffer(
            renderer,
            renderer->uniformBufferPool[i]->buffer);
        SDL_free(renderer->uniformBufferPool[i]);
    }
    SDL_free(renderer->uniformBufferPool);

    for (Uint32 i = 0; i < renderer->descriptorSetCachePoolCount; i += 1) {
        VULKAN_INTERNAL_DestroyDescriptorSetCache(
            renderer,
            renderer->descriptorSetCachePool[i]);
    }
    SDL_free(renderer->descriptorSetCachePool);

    for (Uint32 i = 0; i < renderer->fencePool.availableFenceCount; i += 1) {
        renderer->vkDestroyFence(
            renderer->logicalDevice,
            renderer->fencePool.availableFences[i]->fence,
            NULL);

        SDL_free(renderer->fencePool.availableFences[i]);
    }

    SDL_free(renderer->fencePool.availableFences);
    SDL_DestroyMutex(renderer->fencePool.lock);

    // The hash tables own their values via destructor callbacks registered
    // at creation, so destroying the tables also destroys the Vulkan objects.
    SDL_DestroyHashTable(renderer->commandPoolHashTable);
    SDL_DestroyHashTable(renderer->renderPassHashTable);
    SDL_DestroyHashTable(renderer->framebufferHashTable);
    SDL_DestroyHashTable(renderer->graphicsPipelineResourceLayoutHashTable);
    SDL_DestroyHashTable(renderer->computePipelineResourceLayoutHashTable);
    SDL_DestroyHashTable(renderer->descriptorSetLayoutHashTable);

    // Free all remaining device memory, sub-allocator by sub-allocator.
    // Iterate backwards because deallocation removes entries from the arrays.
    for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
        allocator = &renderer->memoryAllocator->subAllocators[i];

        for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
            for (Sint32 k = allocator->allocations[j]->usedRegionCount - 1; k >= 0; k -= 1) {
                VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                    renderer,
                    allocator->allocations[j]->usedRegions[k]);
            }

            VULKAN_INTERNAL_DeallocateMemory(
                renderer,
                allocator,
                j);
        }

        if (renderer->memoryAllocator->subAllocators[i].allocations != NULL) {
            SDL_free(renderer->memoryAllocator->subAllocators[i].allocations);
        }

        SDL_free(renderer->memoryAllocator->subAllocators[i].sortedFreeRegions);
    }

    SDL_free(renderer->memoryAllocator);

    SDL_free(renderer->texturesToDestroy);
    SDL_free(renderer->buffersToDestroy);
    SDL_free(renderer->graphicsPipelinesToDestroy);
    SDL_free(renderer->computePipelinesToDestroy);
    SDL_free(renderer->shadersToDestroy);
    SDL_free(renderer->samplersToDestroy);
    SDL_free(renderer->framebuffersToDestroy);
    SDL_free(renderer->allocationsToDefrag);

    SDL_DestroyMutex(renderer->allocatorLock);
    SDL_DestroyMutex(renderer->disposeLock);
    SDL_DestroyMutex(renderer->submitLock);
    SDL_DestroyMutex(renderer->acquireCommandBufferLock);
    SDL_DestroyMutex(renderer->acquireUniformBufferLock);
    SDL_DestroyMutex(renderer->renderPassFetchLock);
    SDL_DestroyMutex(renderer->framebufferFetchLock);
    SDL_DestroyMutex(renderer->windowLock);

    renderer->vkDestroyDevice(renderer->logicalDevice, NULL);
    renderer->vkDestroyInstance(renderer->instance, NULL);

    SDL_free(renderer);
    SDL_free(device);
    SDL_Vulkan_UnloadLibrary();
}
4916
4917static DescriptorSetCache *VULKAN_INTERNAL_AcquireDescriptorSetCache(
4918 VulkanRenderer *renderer)
4919{
4920 DescriptorSetCache *cache;
4921
4922 if (renderer->descriptorSetCachePoolCount == 0) {
4923 cache = SDL_malloc(sizeof(DescriptorSetCache));
4924 cache->poolCount = 0;
4925 cache->pools = NULL;
4926 } else {
4927 cache = renderer->descriptorSetCachePool[renderer->descriptorSetCachePoolCount - 1];
4928 renderer->descriptorSetCachePoolCount -= 1;
4929 }
4930
4931 return cache;
4932}
4933
4934static void VULKAN_INTERNAL_ReturnDescriptorSetCacheToPool(
4935 VulkanRenderer *renderer,
4936 DescriptorSetCache *descriptorSetCache)
4937{
4938 EXPAND_ARRAY_IF_NEEDED(
4939 renderer->descriptorSetCachePool,
4940 DescriptorSetCache *,
4941 renderer->descriptorSetCachePoolCount + 1,
4942 renderer->descriptorSetCachePoolCapacity,
4943 renderer->descriptorSetCachePoolCapacity * 2);
4944
4945 renderer->descriptorSetCachePool[renderer->descriptorSetCachePoolCount] = descriptorSetCache;
4946 renderer->descriptorSetCachePoolCount += 1;
4947
4948 for (Uint32 i = 0; i < descriptorSetCache->poolCount; i += 1) {
4949 descriptorSetCache->pools[i].descriptorSetIndex = 0;
4950 }
4951}
4952
4953static VkDescriptorSet VULKAN_INTERNAL_FetchDescriptorSet(
4954 VulkanRenderer *renderer,
4955 VulkanCommandBuffer *vulkanCommandBuffer,
4956 DescriptorSetLayout *descriptorSetLayout)
4957{
4958 // Grow the pool to meet the descriptor set layout ID
4959 if (descriptorSetLayout->ID >= vulkanCommandBuffer->descriptorSetCache->poolCount) {
4960 vulkanCommandBuffer->descriptorSetCache->pools = SDL_realloc(
4961 vulkanCommandBuffer->descriptorSetCache->pools,
4962 sizeof(DescriptorSetPool) * (descriptorSetLayout->ID + 1));
4963
4964 for (Uint32 i = vulkanCommandBuffer->descriptorSetCache->poolCount; i < descriptorSetLayout->ID + 1; i += 1) {
4965 SDL_zero(vulkanCommandBuffer->descriptorSetCache->pools[i]);
4966 }
4967
4968 vulkanCommandBuffer->descriptorSetCache->poolCount = descriptorSetLayout->ID + 1;
4969 }
4970
4971 DescriptorSetPool *pool =
4972 &vulkanCommandBuffer->descriptorSetCache->pools[descriptorSetLayout->ID];
4973
4974 if (pool->descriptorSetIndex == pool->descriptorSetCount) {
4975 if (!VULKAN_INTERNAL_AllocateDescriptorsFromPool(
4976 renderer,
4977 descriptorSetLayout,
4978 pool)) {
4979 return VK_NULL_HANDLE;
4980 }
4981 }
4982
4983 VkDescriptorSet descriptorSet = pool->descriptorSets[pool->descriptorSetIndex];
4984 pool->descriptorSetIndex += 1;
4985
4986 return descriptorSet;
4987}
4988
// Flushes pending graphics resource bindings to the GPU.
// Builds the four descriptor sets the graphics pipeline layout expects --
// set 0: vertex resources, set 1: vertex uniforms, set 2: fragment
// resources, set 3: fragment uniforms -- writes them in one
// vkUpdateDescriptorSets call, then binds all four with the current dynamic
// uniform offsets. No-op if nothing changed since the last draw.
// NOTE: writeDescriptorSets entries hold pointers into bufferInfos /
// imageInfos, so those arrays must stay live until vkUpdateDescriptorSets.
static void VULKAN_INTERNAL_BindGraphicsDescriptorSets(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer)
{
    VulkanGraphicsPipelineResourceLayout *resourceLayout;
    DescriptorSetLayout *descriptorSetLayout;
    // Scratch arrays sized for the worst case across both stages (*2).
    VkWriteDescriptorSet writeDescriptorSets[
        (MAX_TEXTURE_SAMPLERS_PER_STAGE +
         MAX_STORAGE_TEXTURES_PER_STAGE +
         MAX_STORAGE_BUFFERS_PER_STAGE +
         MAX_UNIFORM_BUFFERS_PER_STAGE) * 2];
    VkDescriptorBufferInfo bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE * 2];
    VkDescriptorImageInfo imageInfos[(MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE) * 2];
    Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE * 2];
    Uint32 writeCount = 0;
    Uint32 bufferInfoCount = 0;
    Uint32 imageInfoCount = 0;
    Uint32 dynamicOffsetCount = 0;

    // Fast path: nothing dirty, keep the currently bound sets/offsets.
    if (
        !commandBuffer->needNewVertexResourceDescriptorSet &&
        !commandBuffer->needNewVertexUniformDescriptorSet &&
        !commandBuffer->needNewVertexUniformOffsets &&
        !commandBuffer->needNewFragmentResourceDescriptorSet &&
        !commandBuffer->needNewFragmentUniformDescriptorSet &&
        !commandBuffer->needNewFragmentUniformOffsets
    ) {
        return;
    }

    resourceLayout = commandBuffer->currentGraphicsPipeline->resourceLayout;

    // --- Set 0: vertex samplers, storage textures, storage buffers ---
    if (commandBuffer->needNewVertexResourceDescriptorSet) {
        descriptorSetLayout = resourceLayout->descriptorSetLayouts[0];

        commandBuffer->vertexResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetLayout);

        for (Uint32 i = 0; i < resourceLayout->vertexSamplerCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            imageInfos[imageInfoCount].sampler = commandBuffer->vertexSamplers[i]->sampler;
            imageInfos[imageInfoCount].imageView = commandBuffer->vertexSamplerTextures[i]->fullView;
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            writeCount += 1;
            imageInfoCount += 1;
        }

        // Storage textures are bound after the samplers in the same set.
        for (Uint32 i = 0; i < resourceLayout->vertexStorageTextureCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
            imageInfos[imageInfoCount].imageView = commandBuffer->vertexStorageTextures[i]->fullView;
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            writeCount += 1;
            imageInfoCount += 1;
        }

        // Storage buffers come after samplers + storage textures.
        for (Uint32 i = 0; i < resourceLayout->vertexStorageBufferCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + resourceLayout->vertexStorageTextureCount + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexStorageBuffers[i]->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            writeCount += 1;
            bufferInfoCount += 1;
        }

        commandBuffer->needNewVertexResourceDescriptorSet = false;
    }

    // --- Set 1: vertex uniform buffers (dynamic offsets) ---
    if (commandBuffer->needNewVertexUniformDescriptorSet) {
        descriptorSetLayout = resourceLayout->descriptorSetLayouts[1];

        commandBuffer->vertexUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetLayout);

        for (Uint32 i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->vertexUniformDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexUniformBuffers[i]->buffer->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            // The window is MAX_UBO_SECTION_SIZE; the actual location within
            // the buffer is selected via the dynamic offset at bind time.
            bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            writeCount += 1;
            bufferInfoCount += 1;
        }

        commandBuffer->needNewVertexUniformDescriptorSet = false;
    }

    // Dynamic offsets are always (re)collected, even when the sets themselves
    // were not rebuilt -- offsets can change independently of the sets.
    for (Uint32 i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) {
        dynamicOffsets[dynamicOffsetCount] = commandBuffer->vertexUniformBuffers[i]->drawOffset;
        dynamicOffsetCount += 1;
    }

    // --- Set 2: fragment samplers, storage textures, storage buffers ---
    if (commandBuffer->needNewFragmentResourceDescriptorSet) {
        descriptorSetLayout = resourceLayout->descriptorSetLayouts[2];

        commandBuffer->fragmentResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetLayout);

        for (Uint32 i = 0; i < resourceLayout->fragmentSamplerCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            imageInfos[imageInfoCount].sampler = commandBuffer->fragmentSamplers[i]->sampler;
            imageInfos[imageInfoCount].imageView = commandBuffer->fragmentSamplerTextures[i]->fullView;
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            writeCount += 1;
            imageInfoCount += 1;
        }

        for (Uint32 i = 0; i < resourceLayout->fragmentStorageTextureCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
            imageInfos[imageInfoCount].imageView = commandBuffer->fragmentStorageTextures[i]->fullView;
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            writeCount += 1;
            imageInfoCount += 1;
        }

        for (Uint32 i = 0; i < resourceLayout->fragmentStorageBufferCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + resourceLayout->fragmentStorageTextureCount + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentStorageBuffers[i]->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            writeCount += 1;
            bufferInfoCount += 1;
        }

        commandBuffer->needNewFragmentResourceDescriptorSet = false;
    }

    // --- Set 3: fragment uniform buffers (dynamic offsets) ---
    if (commandBuffer->needNewFragmentUniformDescriptorSet) {
        descriptorSetLayout = resourceLayout->descriptorSetLayouts[3];

        commandBuffer->fragmentUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetLayout);

        for (Uint32 i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) {
            VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];

            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->fragmentUniformDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentUniformBuffers[i]->buffer->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            writeCount += 1;
            bufferInfoCount += 1;
        }

        commandBuffer->needNewFragmentUniformDescriptorSet = false;
    }

    for (Uint32 i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) {
        dynamicOffsets[dynamicOffsetCount] = commandBuffer->fragmentUniformBuffers[i]->drawOffset;
        dynamicOffsetCount += 1;
    }

    // Push all accumulated writes in one call...
    renderer->vkUpdateDescriptorSets(
        renderer->logicalDevice,
        writeCount,
        writeDescriptorSets,
        0,
        NULL);

    // ...then bind all four sets with the current dynamic uniform offsets.
    VkDescriptorSet sets[4];
    sets[0] = commandBuffer->vertexResourceDescriptorSet;
    sets[1] = commandBuffer->vertexUniformDescriptorSet;
    sets[2] = commandBuffer->fragmentResourceDescriptorSet;
    sets[3] = commandBuffer->fragmentUniformDescriptorSet;

    renderer->vkCmdBindDescriptorSets(
        commandBuffer->commandBuffer,
        VK_PIPELINE_BIND_POINT_GRAPHICS,
        resourceLayout->pipelineLayout,
        0,
        4,
        sets,
        dynamicOffsetCount,
        dynamicOffsets);

    commandBuffer->needNewVertexUniformOffsets = false;
    commandBuffer->needNewFragmentUniformOffsets = false;
}
5285
5286static void VULKAN_DrawIndexedPrimitives(
5287 SDL_GPUCommandBuffer *commandBuffer,
5288 Uint32 numIndices,
5289 Uint32 numInstances,
5290 Uint32 firstIndex,
5291 Sint32 vertexOffset,
5292 Uint32 firstInstance)
5293{
5294 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
5295 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
5296
5297 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
5298
5299 renderer->vkCmdDrawIndexed(
5300 vulkanCommandBuffer->commandBuffer,
5301 numIndices,
5302 numInstances,
5303 firstIndex,
5304 vertexOffset,
5305 firstInstance);
5306}
5307
5308static void VULKAN_DrawPrimitives(
5309 SDL_GPUCommandBuffer *commandBuffer,
5310 Uint32 numVertices,
5311 Uint32 numInstances,
5312 Uint32 firstVertex,
5313 Uint32 firstInstance)
5314{
5315 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
5316 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
5317
5318 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
5319
5320 renderer->vkCmdDraw(
5321 vulkanCommandBuffer->commandBuffer,
5322 numVertices,
5323 numInstances,
5324 firstVertex,
5325 firstInstance);
5326}
5327
5328static void VULKAN_DrawPrimitivesIndirect(
5329 SDL_GPUCommandBuffer *commandBuffer,
5330 SDL_GPUBuffer *buffer,
5331 Uint32 offset,
5332 Uint32 drawCount)
5333{
5334 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
5335 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
5336 VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer;
5337 Uint32 pitch = sizeof(SDL_GPUIndirectDrawCommand);
5338 Uint32 i;
5339
5340 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
5341
5342 if (renderer->supportsMultiDrawIndirect) {
5343 // Real multi-draw!
5344 renderer->vkCmdDrawIndirect(
5345 vulkanCommandBuffer->commandBuffer,
5346 vulkanBuffer->buffer,
5347 offset,
5348 drawCount,
5349 pitch);
5350 } else {
5351 // Fake multi-draw...
5352 for (i = 0; i < drawCount; i += 1) {
5353 renderer->vkCmdDrawIndirect(
5354 vulkanCommandBuffer->commandBuffer,
5355 vulkanBuffer->buffer,
5356 offset + (pitch * i),
5357 1,
5358 pitch);
5359 }
5360 }
5361
5362 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
5363}
5364
5365static void VULKAN_DrawIndexedPrimitivesIndirect(
5366 SDL_GPUCommandBuffer *commandBuffer,
5367 SDL_GPUBuffer *buffer,
5368 Uint32 offset,
5369 Uint32 drawCount)
5370{
5371 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
5372 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
5373 VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer;
5374 Uint32 pitch = sizeof(SDL_GPUIndexedIndirectDrawCommand);
5375 Uint32 i;
5376
5377 VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
5378
5379 if (renderer->supportsMultiDrawIndirect) {
5380 // Real multi-draw!
5381 renderer->vkCmdDrawIndexedIndirect(
5382 vulkanCommandBuffer->commandBuffer,
5383 vulkanBuffer->buffer,
5384 offset,
5385 drawCount,
5386 pitch);
5387 } else {
5388 // Fake multi-draw...
5389 for (i = 0; i < drawCount; i += 1) {
5390 renderer->vkCmdDrawIndexedIndirect(
5391 vulkanCommandBuffer->commandBuffer,
5392 vulkanBuffer->buffer,
5393 offset + (pitch * i),
5394 1,
5395 pitch);
5396 }
5397 }
5398
5399 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
5400}
5401
5402// Debug Naming
5403
5404static void VULKAN_INTERNAL_SetBufferName(
5405 VulkanRenderer *renderer,
5406 VulkanBuffer *buffer,
5407 const char *text)
5408{
5409 VkDebugUtilsObjectNameInfoEXT nameInfo;
5410
5411 if (renderer->debugMode && renderer->supportsDebugUtils) {
5412 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
5413 nameInfo.pNext = NULL;
5414 nameInfo.pObjectName = text;
5415 nameInfo.objectType = VK_OBJECT_TYPE_BUFFER;
5416 nameInfo.objectHandle = (uint64_t)buffer->buffer;
5417
5418 renderer->vkSetDebugUtilsObjectNameEXT(
5419 renderer->logicalDevice,
5420 &nameInfo);
5421 }
5422}
5423
5424static void VULKAN_SetBufferName(
5425 SDL_GPURenderer *driverData,
5426 SDL_GPUBuffer *buffer,
5427 const char *text)
5428{
5429 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
5430 VulkanBufferContainer *container = (VulkanBufferContainer *)buffer;
5431 size_t textLength = SDL_strlen(text) + 1;
5432
5433 if (renderer->debugMode && renderer->supportsDebugUtils) {
5434 container->debugName = SDL_realloc(
5435 container->debugName,
5436 textLength);
5437
5438 SDL_utf8strlcpy(
5439 container->debugName,
5440 text,
5441 textLength);
5442
5443 for (Uint32 i = 0; i < container->bufferCount; i += 1) {
5444 VULKAN_INTERNAL_SetBufferName(
5445 renderer,
5446 container->buffers[i],
5447 text);
5448 }
5449 }
5450}
5451
5452static void VULKAN_INTERNAL_SetTextureName(
5453 VulkanRenderer *renderer,
5454 VulkanTexture *texture,
5455 const char *text)
5456{
5457 VkDebugUtilsObjectNameInfoEXT nameInfo;
5458
5459 if (renderer->debugMode && renderer->supportsDebugUtils) {
5460 nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
5461 nameInfo.pNext = NULL;
5462 nameInfo.pObjectName = text;
5463 nameInfo.objectType = VK_OBJECT_TYPE_IMAGE;
5464 nameInfo.objectHandle = (uint64_t)texture->image;
5465
5466 renderer->vkSetDebugUtilsObjectNameEXT(
5467 renderer->logicalDevice,
5468 &nameInfo);
5469 }
5470}
5471
5472static void VULKAN_SetTextureName(
5473 SDL_GPURenderer *driverData,
5474 SDL_GPUTexture *texture,
5475 const char *text)
5476{
5477 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
5478 VulkanTextureContainer *container = (VulkanTextureContainer *)texture;
5479 size_t textLength = SDL_strlen(text) + 1;
5480
5481 if (renderer->debugMode && renderer->supportsDebugUtils) {
5482 container->debugName = SDL_realloc(
5483 container->debugName,
5484 textLength);
5485
5486 SDL_utf8strlcpy(
5487 container->debugName,
5488 text,
5489 textLength);
5490
5491 for (Uint32 i = 0; i < container->textureCount; i += 1) {
5492 VULKAN_INTERNAL_SetTextureName(
5493 renderer,
5494 container->textures[i],
5495 text);
5496 }
5497 }
5498}
5499
5500static void VULKAN_InsertDebugLabel(
5501 SDL_GPUCommandBuffer *commandBuffer,
5502 const char *text)
5503{
5504 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
5505 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
5506 VkDebugUtilsLabelEXT labelInfo;
5507
5508 if (renderer->supportsDebugUtils) {
5509 labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
5510 labelInfo.pNext = NULL;
5511 labelInfo.pLabelName = text;
5512
5513 renderer->vkCmdInsertDebugUtilsLabelEXT(
5514 vulkanCommandBuffer->commandBuffer,
5515 &labelInfo);
5516 }
5517}
5518
5519static void VULKAN_PushDebugGroup(
5520 SDL_GPUCommandBuffer *commandBuffer,
5521 const char *name)
5522{
5523 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
5524 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
5525 VkDebugUtilsLabelEXT labelInfo;
5526
5527 if (renderer->supportsDebugUtils) {
5528 labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
5529 labelInfo.pNext = NULL;
5530 labelInfo.pLabelName = name;
5531
5532 renderer->vkCmdBeginDebugUtilsLabelEXT(
5533 vulkanCommandBuffer->commandBuffer,
5534 &labelInfo);
5535 }
5536}
5537
5538static void VULKAN_PopDebugGroup(
5539 SDL_GPUCommandBuffer *commandBuffer)
5540{
5541 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
5542 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
5543
5544 if (renderer->supportsDebugUtils) {
5545 renderer->vkCmdEndDebugUtilsLabelEXT(vulkanCommandBuffer->commandBuffer);
5546 }
5547}
5548
// Creates a VkImage with bound memory, an optional "full" image view for
// sampling/storage-read usage, and one VulkanTextureSubresource (with
// per-usage views) for every layer/level combination. Finally submits a
// one-off command buffer that transitions the image from UNDEFINED into
// its default usage state. Returns NULL on failure (the error string is
// set by the failing step); intermediate resources are torn down via
// VULKAN_INTERNAL_DestroyTexture on each error path.
static VulkanTexture *VULKAN_INTERNAL_CreateTexture(
    VulkanRenderer *renderer,
    const SDL_GPUTextureCreateInfo *createinfo)
{
    VkResult vulkanResult;
    VkImageCreateInfo imageCreateInfo;
    VkImageCreateFlags imageCreateFlags = 0;
    VkImageViewCreateInfo imageViewCreateInfo;
    Uint8 bindResult;
    // Every texture is transfer src/dst so upload/download/blit always work.
    VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    // layer_count_or_depth is overloaded: array layers for 2D/cube types,
    // extent depth for 3D textures (which always have exactly 1 layer).
    Uint32 layerCount = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? 1 : createinfo->layer_count_or_depth;
    Uint32 depth = (createinfo->type == SDL_GPU_TEXTURETYPE_3D) ? createinfo->layer_count_or_depth : 1;

    // NOTE(review): SDL_calloc result is used unchecked below — assumes
    // allocation failure aborts or is acceptable here; verify policy.
    VulkanTexture *texture = SDL_calloc(1, sizeof(VulkanTexture));
    texture->swizzle = SwizzleForSDLFormat(createinfo->format);
    texture->depth = depth;
    texture->usage = createinfo->usage;
    SDL_SetAtomicInt(&texture->referenceCount, 0);

    // Derive the aspect mask from the format: depth (+stencil) or color.
    if (IsDepthFormat(createinfo->format)) {
        texture->aspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;

        if (IsStencilFormat(createinfo->format)) {
            texture->aspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    } else {
        texture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
    }

    // Cube types need CUBE_COMPATIBLE; 3D gets 2D_ARRAY_COMPATIBLE so
    // individual slices can be rendered to as 2D attachments.
    if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE || createinfo->type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) {
        imageCreateFlags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
    } else if (createinfo->type == SDL_GPU_TEXTURETYPE_3D) {
        imageCreateFlags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
    }

    // Map SDL usage flags onto Vulkan image usage bits.
    if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_SAMPLER) {
        vkUsageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
    }
    if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
        vkUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }
    if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
        vkUsageFlags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    }
    if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ |
                             SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ |
                             SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE |
                             SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) {
        vkUsageFlags |= VK_IMAGE_USAGE_STORAGE_BIT;
    }

    imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    imageCreateInfo.pNext = NULL;
    imageCreateInfo.flags = imageCreateFlags;
    imageCreateInfo.imageType = createinfo->type == SDL_GPU_TEXTURETYPE_3D ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D;
    imageCreateInfo.format = SDLToVK_TextureFormat[createinfo->format];
    imageCreateInfo.extent.width = createinfo->width;
    imageCreateInfo.extent.height = createinfo->height;
    imageCreateInfo.extent.depth = depth;
    imageCreateInfo.mipLevels = createinfo->num_levels;
    imageCreateInfo.arrayLayers = layerCount;
    imageCreateInfo.samples = SDLToVK_SampleCount[createinfo->sample_count];
    imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageCreateInfo.usage = vkUsageFlags;
    imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    imageCreateInfo.queueFamilyIndexCount = 0;
    imageCreateInfo.pQueueFamilyIndices = NULL;
    imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    vulkanResult = renderer->vkCreateImage(
        renderer->logicalDevice,
        &imageCreateInfo,
        NULL,
        &texture->image);

    if (vulkanResult != VK_SUCCESS) {
        VULKAN_INTERNAL_DestroyTexture(renderer, texture);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateImage, NULL);
    }

    bindResult = VULKAN_INTERNAL_BindMemoryForImage(
        renderer,
        texture->image,
        &texture->usedRegion);

    if (bindResult != 1) {
        renderer->vkDestroyImage(
            renderer->logicalDevice,
            texture->image,
            NULL);

        VULKAN_INTERNAL_DestroyTexture(renderer, texture);
        SET_STRING_ERROR_AND_RETURN("Unable to bind memory for texture!", NULL);
    }

    // Back-pointer from the memory region to its owning texture.
    texture->usedRegion->vulkanTexture = texture; // lol

    // A "full" view spanning all layers/levels is only needed when the
    // texture can be read as a whole (sampled or storage-read).
    if (createinfo->usage & (SDL_GPU_TEXTUREUSAGE_SAMPLER | SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ | SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ)) {

        imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
        imageViewCreateInfo.pNext = NULL;
        imageViewCreateInfo.flags = 0;
        imageViewCreateInfo.image = texture->image;
        imageViewCreateInfo.format = SDLToVK_TextureFormat[createinfo->format];
        imageViewCreateInfo.components = texture->swizzle;
        imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
        imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
        imageViewCreateInfo.subresourceRange.levelCount = createinfo->num_levels;
        imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
        imageViewCreateInfo.subresourceRange.layerCount = layerCount;

        if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE) {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
        } else if (createinfo->type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
        } else if (createinfo->type == SDL_GPU_TEXTURETYPE_3D) {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_3D;
        } else if (createinfo->type == SDL_GPU_TEXTURETYPE_2D_ARRAY) {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        } else {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
        }

        vulkanResult = renderer->vkCreateImageView(
            renderer->logicalDevice,
            &imageViewCreateInfo,
            NULL,
            &texture->fullView);

        if (vulkanResult != VK_SUCCESS) {
            VULKAN_INTERNAL_DestroyTexture(renderer, texture);
            // NOTE(review): quoted function name is inconsistent with the
            // unquoted vkCreateImage usage above — verify what
            // CHECK_VULKAN_ERROR_AND_RETURN expects for this argument.
            CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, "vkCreateImageView", NULL);
        }
    }

    // Define slices
    // One subresource per (layer, level) pair; each gets per-usage views
    // created below as required by the texture's usage flags.
    texture->subresourceCount = layerCount * createinfo->num_levels;
    texture->subresources = SDL_calloc(
        texture->subresourceCount,
        sizeof(VulkanTextureSubresource));

    for (Uint32 i = 0; i < layerCount; i += 1) {
        for (Uint32 j = 0; j < createinfo->num_levels; j += 1) {
            Uint32 subresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
                j,
                i,
                createinfo->num_levels);

            if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
                // 3D textures get one render-target view per depth slice;
                // otherwise a single view for this layer/level.
                texture->subresources[subresourceIndex].renderTargetViews = SDL_malloc(
                    depth * sizeof(VkImageView));

                if (depth > 1) {
                    for (Uint32 k = 0; k < depth; k += 1) {
                        if (!VULKAN_INTERNAL_CreateRenderTargetView(
                                renderer,
                                texture,
                                k,
                                j,
                                SDLToVK_TextureFormat[createinfo->format],
                                texture->swizzle,
                                &texture->subresources[subresourceIndex].renderTargetViews[k])) {
                            VULKAN_INTERNAL_DestroyTexture(renderer, texture);
                            return NULL;
                        }
                    }
                } else {
                    if (!VULKAN_INTERNAL_CreateRenderTargetView(
                            renderer,
                            texture,
                            i,
                            j,
                            SDLToVK_TextureFormat[createinfo->format],
                            texture->swizzle,
                            &texture->subresources[subresourceIndex].renderTargetViews[0])) {
                        VULKAN_INTERNAL_DestroyTexture(renderer, texture);
                        return NULL;
                    }
                }
            }

            if ((createinfo->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE) || (createinfo->usage & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) {
                if (!VULKAN_INTERNAL_CreateSubresourceView(
                        renderer,
                        createinfo,
                        texture,
                        i,
                        j,
                        texture->swizzle,
                        &texture->subresources[subresourceIndex].computeWriteView)) {
                    VULKAN_INTERNAL_DestroyTexture(renderer, texture);
                    return NULL;
                }
            }

            if (createinfo->usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
                if (!VULKAN_INTERNAL_CreateSubresourceView(
                        renderer,
                        createinfo,
                        texture,
                        i,
                        j,
                        texture->swizzle,
                        &texture->subresources[subresourceIndex].depthStencilView)) {
                    VULKAN_INTERNAL_DestroyTexture(renderer, texture);
                    return NULL;
                }
            }

            texture->subresources[subresourceIndex].parent = texture;
            texture->subresources[subresourceIndex].layer = i;
            texture->subresources[subresourceIndex].level = j;
        }
    }

    // Let's transition to the default barrier state, because for some reason Vulkan doesn't let us do that with initialLayout.
    VulkanCommandBuffer *barrierCommandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
    VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
        renderer,
        barrierCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
        texture);
    VULKAN_INTERNAL_TrackTexture(barrierCommandBuffer, texture);
    VULKAN_Submit((SDL_GPUCommandBuffer *)barrierCommandBuffer);

    return texture;
}
5776
5777static void VULKAN_INTERNAL_CycleActiveBuffer(
5778 VulkanRenderer *renderer,
5779 VulkanBufferContainer *container)
5780{
5781 VulkanBuffer *buffer;
5782
5783 // If a previously-cycled buffer is available, we can use that.
5784 for (Uint32 i = 0; i < container->bufferCount; i += 1) {
5785 buffer = container->buffers[i];
5786 if (SDL_GetAtomicInt(&buffer->referenceCount) == 0) {
5787 container->activeBuffer = buffer;
5788 return;
5789 }
5790 }
5791
5792 // No buffer handle is available, create a new one.
5793 buffer = VULKAN_INTERNAL_CreateBuffer(
5794 renderer,
5795 container->activeBuffer->size,
5796 container->activeBuffer->usage,
5797 container->activeBuffer->type,
5798 container->dedicated);
5799
5800 if (!buffer) {
5801 return;
5802 }
5803
5804 EXPAND_ARRAY_IF_NEEDED(
5805 container->buffers,
5806 VulkanBuffer *,
5807 container->bufferCount + 1,
5808 container->bufferCapacity,
5809 container->bufferCapacity * 2);
5810
5811 container->buffers[container->bufferCount] = buffer;
5812 buffer->container = container;
5813 buffer->containerIndex = container->bufferCount;
5814 container->bufferCount += 1;
5815
5816 container->activeBuffer = buffer;
5817
5818 if (renderer->debugMode && renderer->supportsDebugUtils && container->debugName != NULL) {
5819 VULKAN_INTERNAL_SetBufferName(
5820 renderer,
5821 container->activeBuffer,
5822 container->debugName);
5823 }
5824}
5825
5826static void VULKAN_INTERNAL_CycleActiveTexture(
5827 VulkanRenderer *renderer,
5828 VulkanTextureContainer *container)
5829{
5830 VulkanTexture *texture;
5831
5832 // If a previously-cycled texture is available, we can use that.
5833 for (Uint32 i = 0; i < container->textureCount; i += 1) {
5834 texture = container->textures[i];
5835
5836 if (SDL_GetAtomicInt(&texture->referenceCount) == 0) {
5837 container->activeTexture = texture;
5838 return;
5839 }
5840 }
5841
5842 // No texture is available, generate a new one.
5843 texture = VULKAN_INTERNAL_CreateTexture(
5844 renderer,
5845 &container->header.info);
5846
5847 if (!texture) {
5848 return;
5849 }
5850
5851 EXPAND_ARRAY_IF_NEEDED(
5852 container->textures,
5853 VulkanTexture *,
5854 container->textureCount + 1,
5855 container->textureCapacity,
5856 container->textureCapacity * 2);
5857
5858 container->textures[container->textureCount] = texture;
5859 texture->container = container;
5860 texture->containerIndex = container->textureCount;
5861 container->textureCount += 1;
5862
5863 container->activeTexture = texture;
5864
5865 if (renderer->debugMode && renderer->supportsDebugUtils && container->debugName != NULL) {
5866 VULKAN_INTERNAL_SetTextureName(
5867 renderer,
5868 container->activeTexture,
5869 container->debugName);
5870 }
5871}
5872
5873static VulkanBuffer *VULKAN_INTERNAL_PrepareBufferForWrite(
5874 VulkanRenderer *renderer,
5875 VulkanCommandBuffer *commandBuffer,
5876 VulkanBufferContainer *bufferContainer,
5877 bool cycle,
5878 VulkanBufferUsageMode destinationUsageMode)
5879{
5880 if (
5881 cycle &&
5882 SDL_GetAtomicInt(&bufferContainer->activeBuffer->referenceCount) > 0) {
5883 VULKAN_INTERNAL_CycleActiveBuffer(
5884 renderer,
5885 bufferContainer);
5886 }
5887
5888 VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
5889 renderer,
5890 commandBuffer,
5891 destinationUsageMode,
5892 bufferContainer->activeBuffer);
5893
5894 return bufferContainer->activeBuffer;
5895}
5896
5897static VulkanTextureSubresource *VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
5898 VulkanRenderer *renderer,
5899 VulkanCommandBuffer *commandBuffer,
5900 VulkanTextureContainer *textureContainer,
5901 Uint32 layer,
5902 Uint32 level,
5903 bool cycle,
5904 VulkanTextureUsageMode destinationUsageMode)
5905{
5906 VulkanTextureSubresource *textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
5907 textureContainer,
5908 layer,
5909 level);
5910
5911 if (
5912 cycle &&
5913 textureContainer->canBeCycled &&
5914 SDL_GetAtomicInt(&textureContainer->activeTexture->referenceCount) > 0) {
5915 VULKAN_INTERNAL_CycleActiveTexture(
5916 renderer,
5917 textureContainer);
5918
5919 textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
5920 textureContainer,
5921 layer,
5922 level);
5923 }
5924
5925 // always do barrier because of layout transitions
5926 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
5927 renderer,
5928 commandBuffer,
5929 destinationUsageMode,
5930 textureSubresource);
5931
5932 return textureSubresource;
5933}
5934
// Builds a single-subpass VkRenderPass matching the given color/resolve/
// depth-stencil target configuration. Each color target contributes one
// attachment, plus a second (resolve) attachment when its store op is
// RESOLVE or RESOLVE_AND_STORE; the optional depth-stencil target is
// appended last. All attachments use their "attachment optimal" layout for
// both initial and final layout (the caller is responsible for the
// transitions). Returns VK_NULL_HANDLE on failure.
// Note: commandBuffer is currently unreferenced in this function.
static VkRenderPass VULKAN_INTERNAL_CreateRenderPass(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
{
    VkResult vulkanResult;
    // Worst case: every color target also has a resolve attachment, plus depth.
    VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
    VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
    VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS];
    VkAttachmentReference depthStencilAttachmentReference;
    VkRenderPassCreateInfo renderPassCreateInfo;
    VkSubpassDescription subpass;
    VkRenderPass renderPass;
    Uint32 i;

    Uint32 attachmentDescriptionCount = 0;
    Uint32 colorAttachmentReferenceCount = 0;
    Uint32 resolveReferenceCount = 0;

    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[container->header.info.format];
        attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[container->header.info.sample_count];
        attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[colorTargetInfos[i].load_op];
        attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[colorTargetInfos[i].store_op];
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        // Color reference indexes the attachment just described.
        colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
        colorAttachmentReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        attachmentDescriptionCount += 1;
        colorAttachmentReferenceCount += 1;

        // A resolve store op adds a second attachment immediately after
        // its color attachment.
        if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
            VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;

            attachmentDescriptions[attachmentDescriptionCount].flags = 0;
            attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[resolveContainer->header.info.format];
            attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[resolveContainer->header.info.sample_count];
            attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; // The texture will be overwritten anyway
            attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_STORE;   // Always store the resolve texture
            attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
            attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

            resolveReferences[resolveReferenceCount].attachment = attachmentDescriptionCount;
            resolveReferences[resolveReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

            attachmentDescriptionCount += 1;
            resolveReferenceCount += 1;
        }
    }

    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.flags = 0;
    subpass.inputAttachmentCount = 0;
    subpass.pInputAttachments = NULL;
    subpass.colorAttachmentCount = numColorTargets;
    subpass.pColorAttachments = colorAttachmentReferences;
    subpass.preserveAttachmentCount = 0;
    subpass.pPreserveAttachments = NULL;

    if (depthStencilTargetInfo == NULL) {
        subpass.pDepthStencilAttachment = NULL;
    } else {
        VulkanTextureContainer *container = (VulkanTextureContainer *)depthStencilTargetInfo->texture;

        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[container->header.info.format];
        attachmentDescriptions[attachmentDescriptionCount].samples = SDLToVK_SampleCount[container->header.info.sample_count];
        attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[depthStencilTargetInfo->load_op];
        attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[depthStencilTargetInfo->store_op];
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = SDLToVK_LoadOp[depthStencilTargetInfo->stencil_load_op];
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = SDLToVK_StoreOp[depthStencilTargetInfo->stencil_store_op];
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        depthStencilAttachmentReference.attachment = attachmentDescriptionCount;
        depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        subpass.pDepthStencilAttachment = &depthStencilAttachmentReference;

        attachmentDescriptionCount += 1;
    }

    // pResolveAttachments must be either NULL or a full-length array
    // parallel to pColorAttachments.
    if (resolveReferenceCount > 0) {
        subpass.pResolveAttachments = resolveReferences;
    } else {
        subpass.pResolveAttachments = NULL;
    }

    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    renderPassCreateInfo.pNext = NULL;
    renderPassCreateInfo.flags = 0;
    renderPassCreateInfo.pAttachments = attachmentDescriptions;
    renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
    renderPassCreateInfo.subpassCount = 1;
    renderPassCreateInfo.pSubpasses = &subpass;
    renderPassCreateInfo.dependencyCount = 0;
    renderPassCreateInfo.pDependencies = NULL;

    vulkanResult = renderer->vkCreateRenderPass(
        renderer->logicalDevice,
        &renderPassCreateInfo,
        NULL,
        &renderPass);

    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateRenderPass, VK_NULL_HANDLE);

    return renderPass;
}
6053
// Builds a throwaway single-subpass VkRenderPass used only as a
// "compatible" render pass for graphics pipeline creation: attachment
// formats and sample count match the pipeline's target info, but all
// load/store ops are DONT_CARE and there are no resolve attachments,
// since the pass is never actually executed. Returns VK_NULL_HANDLE on
// failure.
static VkRenderPass VULKAN_INTERNAL_CreateTransientRenderPass(
    VulkanRenderer *renderer,
    SDL_GPUGraphicsPipelineTargetInfo targetInfo,
    VkSampleCountFlagBits sampleCount)
{
    VkAttachmentDescription attachmentDescriptions[MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
    VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
    VkAttachmentReference depthStencilAttachmentReference;
    SDL_GPUColorTargetDescription attachmentDescription;
    VkSubpassDescription subpass;
    VkRenderPassCreateInfo renderPassCreateInfo;
    VkRenderPass renderPass;
    VkResult result;

    Uint32 attachmentDescriptionCount = 0;
    Uint32 colorAttachmentReferenceCount = 0;
    Uint32 i;

    // One DONT_CARE color attachment per pipeline color target.
    for (i = 0; i < targetInfo.num_color_targets; i += 1) {
        attachmentDescription = targetInfo.color_target_descriptions[i];

        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[attachmentDescription.format];
        attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
        attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
        colorAttachmentReferences[colorAttachmentReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        attachmentDescriptionCount += 1;
        colorAttachmentReferenceCount += 1;
    }

    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.flags = 0;
    subpass.inputAttachmentCount = 0;
    subpass.pInputAttachments = NULL;
    subpass.colorAttachmentCount = targetInfo.num_color_targets;
    subpass.pColorAttachments = colorAttachmentReferences;
    subpass.preserveAttachmentCount = 0;
    subpass.pPreserveAttachments = NULL;

    // Optional depth-stencil attachment, appended after the color targets.
    if (targetInfo.has_depth_stencil_target) {
        attachmentDescriptions[attachmentDescriptionCount].flags = 0;
        attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_TextureFormat[targetInfo.depth_stencil_format];
        attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
        attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        depthStencilAttachmentReference.attachment = attachmentDescriptionCount;
        depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        subpass.pDepthStencilAttachment = &depthStencilAttachmentReference;

        attachmentDescriptionCount += 1;
    } else {
        subpass.pDepthStencilAttachment = NULL;
    }

    // Resolve attachments aren't needed for transient passes
    subpass.pResolveAttachments = NULL;

    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    renderPassCreateInfo.pNext = NULL;
    renderPassCreateInfo.flags = 0;
    renderPassCreateInfo.pAttachments = attachmentDescriptions;
    renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
    renderPassCreateInfo.subpassCount = 1;
    renderPassCreateInfo.pSubpasses = &subpass;
    renderPassCreateInfo.dependencyCount = 0;
    renderPassCreateInfo.pDependencies = NULL;

    result = renderer->vkCreateRenderPass(
        renderer->logicalDevice,
        &renderPassCreateInfo,
        NULL,
        &renderPass);

    CHECK_VULKAN_ERROR_AND_RETURN(result, vkCreateRenderPass, VK_NULL_HANDLE);

    return renderPass;
}
6145
6146static SDL_GPUGraphicsPipeline *VULKAN_CreateGraphicsPipeline(
6147 SDL_GPURenderer *driverData,
6148 const SDL_GPUGraphicsPipelineCreateInfo *createinfo)
6149{
6150 VkResult vulkanResult;
6151 Uint32 i;
6152
6153 VulkanGraphicsPipeline *graphicsPipeline = (VulkanGraphicsPipeline *)SDL_malloc(sizeof(VulkanGraphicsPipeline));
6154 VkGraphicsPipelineCreateInfo vkPipelineCreateInfo;
6155
6156 VkPipelineShaderStageCreateInfo shaderStageCreateInfos[2];
6157
6158 VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo;
6159 VkPipelineVertexInputDivisorStateCreateInfoEXT divisorStateCreateInfo;
6160 VkVertexInputBindingDescription *vertexInputBindingDescriptions = SDL_stack_alloc(VkVertexInputBindingDescription, createinfo->vertex_input_state.num_vertex_buffers);
6161 VkVertexInputAttributeDescription *vertexInputAttributeDescriptions = SDL_stack_alloc(VkVertexInputAttributeDescription, createinfo->vertex_input_state.num_vertex_attributes);
6162 VkVertexInputBindingDivisorDescriptionEXT *divisorDescriptions = SDL_stack_alloc(VkVertexInputBindingDivisorDescriptionEXT, createinfo->vertex_input_state.num_vertex_buffers);
6163 Uint32 divisorDescriptionCount = 0;
6164
6165 VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo;
6166
6167 VkPipelineViewportStateCreateInfo viewportStateCreateInfo;
6168
6169 VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo;
6170
6171 VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo;
6172
6173 VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo;
6174 VkStencilOpState frontStencilState;
6175 VkStencilOpState backStencilState;
6176
6177 VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo;
6178 VkPipelineColorBlendAttachmentState *colorBlendAttachmentStates = SDL_stack_alloc(
6179 VkPipelineColorBlendAttachmentState,
6180 createinfo->target_info.num_color_targets);
6181
6182 static const VkDynamicState dynamicStates[] = {
6183 VK_DYNAMIC_STATE_VIEWPORT,
6184 VK_DYNAMIC_STATE_SCISSOR,
6185 VK_DYNAMIC_STATE_BLEND_CONSTANTS,
6186 VK_DYNAMIC_STATE_STENCIL_REFERENCE
6187 };
6188 VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo;
6189
6190 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6191
6192 // Create a "compatible" render pass
6193
6194 VkRenderPass transientRenderPass = VULKAN_INTERNAL_CreateTransientRenderPass(
6195 renderer,
6196 createinfo->target_info,
6197 SDLToVK_SampleCount[createinfo->multisample_state.sample_count]);
6198
6199 // Dynamic state
6200
6201 dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
6202 dynamicStateCreateInfo.pNext = NULL;
6203 dynamicStateCreateInfo.flags = 0;
6204 dynamicStateCreateInfo.dynamicStateCount = SDL_arraysize(dynamicStates);
6205 dynamicStateCreateInfo.pDynamicStates = dynamicStates;
6206
6207 // Shader stages
6208
6209 graphicsPipeline->vertexShader = (VulkanShader *)createinfo->vertex_shader;
6210 SDL_AtomicIncRef(&graphicsPipeline->vertexShader->referenceCount);
6211
6212 shaderStageCreateInfos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
6213 shaderStageCreateInfos[0].pNext = NULL;
6214 shaderStageCreateInfos[0].flags = 0;
6215 shaderStageCreateInfos[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
6216 shaderStageCreateInfos[0].module = graphicsPipeline->vertexShader->shaderModule;
6217 shaderStageCreateInfos[0].pName = graphicsPipeline->vertexShader->entrypointName;
6218 shaderStageCreateInfos[0].pSpecializationInfo = NULL;
6219
6220 graphicsPipeline->fragmentShader = (VulkanShader *)createinfo->fragment_shader;
6221 SDL_AtomicIncRef(&graphicsPipeline->fragmentShader->referenceCount);
6222
6223 shaderStageCreateInfos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
6224 shaderStageCreateInfos[1].pNext = NULL;
6225 shaderStageCreateInfos[1].flags = 0;
6226 shaderStageCreateInfos[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
6227 shaderStageCreateInfos[1].module = graphicsPipeline->fragmentShader->shaderModule;
6228 shaderStageCreateInfos[1].pName = graphicsPipeline->fragmentShader->entrypointName;
6229 shaderStageCreateInfos[1].pSpecializationInfo = NULL;
6230
6231 // Vertex input
6232
6233 for (i = 0; i < createinfo->vertex_input_state.num_vertex_buffers; i += 1) {
6234 vertexInputBindingDescriptions[i].binding = createinfo->vertex_input_state.vertex_buffer_descriptions[i].slot;
6235 vertexInputBindingDescriptions[i].inputRate = SDLToVK_VertexInputRate[createinfo->vertex_input_state.vertex_buffer_descriptions[i].input_rate];
6236 vertexInputBindingDescriptions[i].stride = createinfo->vertex_input_state.vertex_buffer_descriptions[i].pitch;
6237
6238 if (createinfo->vertex_input_state.vertex_buffer_descriptions[i].input_rate == SDL_GPU_VERTEXINPUTRATE_INSTANCE) {
6239 divisorDescriptionCount += 1;
6240 }
6241 }
6242
6243 for (i = 0; i < createinfo->vertex_input_state.num_vertex_attributes; i += 1) {
6244 vertexInputAttributeDescriptions[i].binding = createinfo->vertex_input_state.vertex_attributes[i].buffer_slot;
6245 vertexInputAttributeDescriptions[i].format = SDLToVK_VertexFormat[createinfo->vertex_input_state.vertex_attributes[i].format];
6246 vertexInputAttributeDescriptions[i].location = createinfo->vertex_input_state.vertex_attributes[i].location;
6247 vertexInputAttributeDescriptions[i].offset = createinfo->vertex_input_state.vertex_attributes[i].offset;
6248 }
6249
6250 vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
6251 vertexInputStateCreateInfo.pNext = NULL;
6252 vertexInputStateCreateInfo.flags = 0;
6253 vertexInputStateCreateInfo.vertexBindingDescriptionCount = createinfo->vertex_input_state.num_vertex_buffers;
6254 vertexInputStateCreateInfo.pVertexBindingDescriptions = vertexInputBindingDescriptions;
6255 vertexInputStateCreateInfo.vertexAttributeDescriptionCount = createinfo->vertex_input_state.num_vertex_attributes;
6256 vertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions;
6257
6258 if (divisorDescriptionCount > 0) {
6259 divisorDescriptionCount = 0;
6260
6261 for (i = 0; i < createinfo->vertex_input_state.num_vertex_buffers; i += 1) {
6262 if (createinfo->vertex_input_state.vertex_buffer_descriptions[i].input_rate == SDL_GPU_VERTEXINPUTRATE_INSTANCE) {
6263 divisorDescriptions[divisorDescriptionCount].binding = createinfo->vertex_input_state.vertex_buffer_descriptions[i].slot;
6264 divisorDescriptions[divisorDescriptionCount].divisor = createinfo->vertex_input_state.vertex_buffer_descriptions[i].instance_step_rate;
6265
6266 divisorDescriptionCount += 1;
6267 }
6268 }
6269
6270 divisorStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
6271 divisorStateCreateInfo.pNext = NULL;
6272 divisorStateCreateInfo.vertexBindingDivisorCount = divisorDescriptionCount;
6273 divisorStateCreateInfo.pVertexBindingDivisors = divisorDescriptions;
6274
6275 vertexInputStateCreateInfo.pNext = &divisorStateCreateInfo;
6276 }
6277
6278 // Topology
6279
6280 inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
6281 inputAssemblyStateCreateInfo.pNext = NULL;
6282 inputAssemblyStateCreateInfo.flags = 0;
6283 inputAssemblyStateCreateInfo.primitiveRestartEnable = VK_FALSE;
6284 inputAssemblyStateCreateInfo.topology = SDLToVK_PrimitiveType[createinfo->primitive_type];
6285
6286 graphicsPipeline->primitiveType = createinfo->primitive_type;
6287
6288 // Viewport
6289
6290 // NOTE: viewport and scissor are dynamic, and must be set using the command buffer
6291
6292 viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
6293 viewportStateCreateInfo.pNext = NULL;
6294 viewportStateCreateInfo.flags = 0;
6295 viewportStateCreateInfo.viewportCount = 1;
6296 viewportStateCreateInfo.pViewports = NULL;
6297 viewportStateCreateInfo.scissorCount = 1;
6298 viewportStateCreateInfo.pScissors = NULL;
6299
6300 // Rasterization
6301
6302 rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
6303 rasterizationStateCreateInfo.pNext = NULL;
6304 rasterizationStateCreateInfo.flags = 0;
6305 rasterizationStateCreateInfo.depthClampEnable = !createinfo->rasterizer_state.enable_depth_clip;
6306 rasterizationStateCreateInfo.rasterizerDiscardEnable = VK_FALSE;
6307 rasterizationStateCreateInfo.polygonMode = SDLToVK_PolygonMode(
6308 renderer,
6309 createinfo->rasterizer_state.fill_mode);
6310 rasterizationStateCreateInfo.cullMode = SDLToVK_CullMode[createinfo->rasterizer_state.cull_mode];
6311 rasterizationStateCreateInfo.frontFace = SDLToVK_FrontFace[createinfo->rasterizer_state.front_face];
6312 rasterizationStateCreateInfo.depthBiasEnable =
6313 createinfo->rasterizer_state.enable_depth_bias;
6314 rasterizationStateCreateInfo.depthBiasConstantFactor =
6315 createinfo->rasterizer_state.depth_bias_constant_factor;
6316 rasterizationStateCreateInfo.depthBiasClamp =
6317 createinfo->rasterizer_state.depth_bias_clamp;
6318 rasterizationStateCreateInfo.depthBiasSlopeFactor =
6319 createinfo->rasterizer_state.depth_bias_slope_factor;
6320 rasterizationStateCreateInfo.lineWidth = 1.0f;
6321
6322 // Multisample
6323
6324 Uint32 sampleMask = createinfo->multisample_state.enable_mask ?
6325 createinfo->multisample_state.sample_mask :
6326 0xFFFFFFFF;
6327
6328 multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
6329 multisampleStateCreateInfo.pNext = NULL;
6330 multisampleStateCreateInfo.flags = 0;
6331 multisampleStateCreateInfo.rasterizationSamples = SDLToVK_SampleCount[createinfo->multisample_state.sample_count];
6332 multisampleStateCreateInfo.sampleShadingEnable = VK_FALSE;
6333 multisampleStateCreateInfo.minSampleShading = 1.0f;
6334 multisampleStateCreateInfo.pSampleMask = &sampleMask;
6335 multisampleStateCreateInfo.alphaToCoverageEnable = VK_FALSE;
6336 multisampleStateCreateInfo.alphaToOneEnable = VK_FALSE;
6337
6338 // Depth Stencil State
6339
6340 frontStencilState.failOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.fail_op];
6341 frontStencilState.passOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.pass_op];
6342 frontStencilState.depthFailOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.front_stencil_state.depth_fail_op];
6343 frontStencilState.compareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.front_stencil_state.compare_op];
6344 frontStencilState.compareMask =
6345 createinfo->depth_stencil_state.compare_mask;
6346 frontStencilState.writeMask =
6347 createinfo->depth_stencil_state.write_mask;
6348 frontStencilState.reference = 0;
6349
6350 backStencilState.failOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.fail_op];
6351 backStencilState.passOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.pass_op];
6352 backStencilState.depthFailOp = SDLToVK_StencilOp[createinfo->depth_stencil_state.back_stencil_state.depth_fail_op];
6353 backStencilState.compareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.back_stencil_state.compare_op];
6354 backStencilState.compareMask =
6355 createinfo->depth_stencil_state.compare_mask;
6356 backStencilState.writeMask =
6357 createinfo->depth_stencil_state.write_mask;
6358 backStencilState.reference = 0;
6359
6360 depthStencilStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
6361 depthStencilStateCreateInfo.pNext = NULL;
6362 depthStencilStateCreateInfo.flags = 0;
6363 depthStencilStateCreateInfo.depthTestEnable =
6364 createinfo->depth_stencil_state.enable_depth_test;
6365 depthStencilStateCreateInfo.depthWriteEnable =
6366 createinfo->depth_stencil_state.enable_depth_write;
6367 depthStencilStateCreateInfo.depthCompareOp = SDLToVK_CompareOp[createinfo->depth_stencil_state.compare_op];
6368 depthStencilStateCreateInfo.depthBoundsTestEnable = VK_FALSE;
6369 depthStencilStateCreateInfo.stencilTestEnable =
6370 createinfo->depth_stencil_state.enable_stencil_test;
6371 depthStencilStateCreateInfo.front = frontStencilState;
6372 depthStencilStateCreateInfo.back = backStencilState;
6373 depthStencilStateCreateInfo.minDepthBounds = 0; // unused
6374 depthStencilStateCreateInfo.maxDepthBounds = 0; // unused
6375
6376 // Color Blend
6377
6378 for (i = 0; i < createinfo->target_info.num_color_targets; i += 1) {
6379 SDL_GPUColorTargetBlendState blendState = createinfo->target_info.color_target_descriptions[i].blend_state;
6380 SDL_GPUColorComponentFlags colorWriteMask = blendState.enable_color_write_mask ?
6381 blendState.color_write_mask :
6382 0xF;
6383
6384 colorBlendAttachmentStates[i].blendEnable =
6385 blendState.enable_blend;
6386 colorBlendAttachmentStates[i].srcColorBlendFactor = SDLToVK_BlendFactor[blendState.src_color_blendfactor];
6387 colorBlendAttachmentStates[i].dstColorBlendFactor = SDLToVK_BlendFactor[blendState.dst_color_blendfactor];
6388 colorBlendAttachmentStates[i].colorBlendOp = SDLToVK_BlendOp[blendState.color_blend_op];
6389 colorBlendAttachmentStates[i].srcAlphaBlendFactor = SDLToVK_BlendFactor[blendState.src_alpha_blendfactor];
6390 colorBlendAttachmentStates[i].dstAlphaBlendFactor = SDLToVK_BlendFactor[blendState.dst_alpha_blendfactor];
6391 colorBlendAttachmentStates[i].alphaBlendOp = SDLToVK_BlendOp[blendState.alpha_blend_op];
6392 colorBlendAttachmentStates[i].colorWriteMask =
6393 colorWriteMask;
6394 }
6395
6396 colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
6397 colorBlendStateCreateInfo.pNext = NULL;
6398 colorBlendStateCreateInfo.flags = 0;
6399 colorBlendStateCreateInfo.attachmentCount =
6400 createinfo->target_info.num_color_targets;
6401 colorBlendStateCreateInfo.pAttachments =
6402 colorBlendAttachmentStates;
6403 colorBlendStateCreateInfo.blendConstants[0] = 1.0f;
6404 colorBlendStateCreateInfo.blendConstants[1] = 1.0f;
6405 colorBlendStateCreateInfo.blendConstants[2] = 1.0f;
6406 colorBlendStateCreateInfo.blendConstants[3] = 1.0f;
6407
6408 // We don't support LogicOp, so this is easy.
6409 colorBlendStateCreateInfo.logicOpEnable = VK_FALSE;
6410 colorBlendStateCreateInfo.logicOp = 0;
6411
6412 // Pipeline Layout
6413
6414 graphicsPipeline->resourceLayout =
6415 VULKAN_INTERNAL_FetchGraphicsPipelineResourceLayout(
6416 renderer,
6417 graphicsPipeline->vertexShader,
6418 graphicsPipeline->fragmentShader);
6419
6420 if (graphicsPipeline->resourceLayout == NULL) {
6421 SDL_stack_free(vertexInputBindingDescriptions);
6422 SDL_stack_free(vertexInputAttributeDescriptions);
6423 SDL_stack_free(colorBlendAttachmentStates);
6424 SDL_stack_free(divisorDescriptions);
6425 SDL_free(graphicsPipeline);
6426 SET_STRING_ERROR_AND_RETURN("Failed to initialize pipeline resource layout!", NULL);
6427 }
6428
6429 // Pipeline
6430
6431 vkPipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
6432 vkPipelineCreateInfo.pNext = NULL;
6433 vkPipelineCreateInfo.flags = 0;
6434 vkPipelineCreateInfo.stageCount = 2;
6435 vkPipelineCreateInfo.pStages = shaderStageCreateInfos;
6436 vkPipelineCreateInfo.pVertexInputState = &vertexInputStateCreateInfo;
6437 vkPipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo;
6438 vkPipelineCreateInfo.pTessellationState = VK_NULL_HANDLE;
6439 vkPipelineCreateInfo.pViewportState = &viewportStateCreateInfo;
6440 vkPipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo;
6441 vkPipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo;
6442 vkPipelineCreateInfo.pDepthStencilState = &depthStencilStateCreateInfo;
6443 vkPipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo;
6444 vkPipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo;
6445 vkPipelineCreateInfo.layout = graphicsPipeline->resourceLayout->pipelineLayout;
6446 vkPipelineCreateInfo.renderPass = transientRenderPass;
6447 vkPipelineCreateInfo.subpass = 0;
6448 vkPipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
6449 vkPipelineCreateInfo.basePipelineIndex = 0;
6450
6451 // TODO: enable pipeline caching
6452 vulkanResult = renderer->vkCreateGraphicsPipelines(
6453 renderer->logicalDevice,
6454 VK_NULL_HANDLE,
6455 1,
6456 &vkPipelineCreateInfo,
6457 NULL,
6458 &graphicsPipeline->pipeline);
6459
6460 SDL_stack_free(vertexInputBindingDescriptions);
6461 SDL_stack_free(vertexInputAttributeDescriptions);
6462 SDL_stack_free(colorBlendAttachmentStates);
6463 SDL_stack_free(divisorDescriptions);
6464
6465 renderer->vkDestroyRenderPass(
6466 renderer->logicalDevice,
6467 transientRenderPass,
6468 NULL);
6469
6470 if (vulkanResult != VK_SUCCESS) {
6471 SDL_free(graphicsPipeline);
6472 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateGraphicsPipelines, NULL);
6473 }
6474
6475 SDL_SetAtomicInt(&graphicsPipeline->referenceCount, 0);
6476
6477 return (SDL_GPUGraphicsPipeline *)graphicsPipeline;
6478}
6479
6480static SDL_GPUComputePipeline *VULKAN_CreateComputePipeline(
6481 SDL_GPURenderer *driverData,
6482 const SDL_GPUComputePipelineCreateInfo *createinfo)
6483{
6484 VkShaderModuleCreateInfo shaderModuleCreateInfo;
6485 VkComputePipelineCreateInfo vkShaderCreateInfo;
6486 VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo;
6487 VkResult vulkanResult;
6488 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6489 VulkanComputePipeline *vulkanComputePipeline;
6490
6491 if (createinfo->format != SDL_GPU_SHADERFORMAT_SPIRV) {
6492 SET_STRING_ERROR_AND_RETURN("Incompatible shader format for Vulkan!", NULL);
6493 }
6494
6495 vulkanComputePipeline = SDL_malloc(sizeof(VulkanComputePipeline));
6496 shaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
6497 shaderModuleCreateInfo.pNext = NULL;
6498 shaderModuleCreateInfo.flags = 0;
6499 shaderModuleCreateInfo.codeSize = createinfo->code_size;
6500 shaderModuleCreateInfo.pCode = (Uint32 *)createinfo->code;
6501
6502 vulkanResult = renderer->vkCreateShaderModule(
6503 renderer->logicalDevice,
6504 &shaderModuleCreateInfo,
6505 NULL,
6506 &vulkanComputePipeline->shaderModule);
6507
6508 if (vulkanResult != VK_SUCCESS) {
6509 SDL_free(vulkanComputePipeline);
6510 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateShaderModule, NULL);
6511 }
6512
6513 pipelineShaderStageCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
6514 pipelineShaderStageCreateInfo.pNext = NULL;
6515 pipelineShaderStageCreateInfo.flags = 0;
6516 pipelineShaderStageCreateInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT;
6517 pipelineShaderStageCreateInfo.module = vulkanComputePipeline->shaderModule;
6518 pipelineShaderStageCreateInfo.pName = createinfo->entrypoint;
6519 pipelineShaderStageCreateInfo.pSpecializationInfo = NULL;
6520
6521 vulkanComputePipeline->resourceLayout = VULKAN_INTERNAL_FetchComputePipelineResourceLayout(
6522 renderer,
6523 createinfo);
6524
6525 if (vulkanComputePipeline->resourceLayout == NULL) {
6526 renderer->vkDestroyShaderModule(
6527 renderer->logicalDevice,
6528 vulkanComputePipeline->shaderModule,
6529 NULL);
6530 SDL_free(vulkanComputePipeline);
6531 return NULL;
6532 }
6533
6534 vkShaderCreateInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
6535 vkShaderCreateInfo.pNext = NULL;
6536 vkShaderCreateInfo.flags = 0;
6537 vkShaderCreateInfo.stage = pipelineShaderStageCreateInfo;
6538 vkShaderCreateInfo.layout = vulkanComputePipeline->resourceLayout->pipelineLayout;
6539 vkShaderCreateInfo.basePipelineHandle = (VkPipeline)VK_NULL_HANDLE;
6540 vkShaderCreateInfo.basePipelineIndex = 0;
6541
6542 vulkanResult = renderer->vkCreateComputePipelines(
6543 renderer->logicalDevice,
6544 (VkPipelineCache)VK_NULL_HANDLE,
6545 1,
6546 &vkShaderCreateInfo,
6547 NULL,
6548 &vulkanComputePipeline->pipeline);
6549
6550 if (vulkanResult != VK_SUCCESS) {
6551 VULKAN_INTERNAL_DestroyComputePipeline(renderer, vulkanComputePipeline);
6552 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateComputePipeline, NULL);
6553 return NULL;
6554 }
6555
6556 SDL_SetAtomicInt(&vulkanComputePipeline->referenceCount, 0);
6557
6558 return (SDL_GPUComputePipeline *)vulkanComputePipeline;
6559}
6560
6561static SDL_GPUSampler *VULKAN_CreateSampler(
6562 SDL_GPURenderer *driverData,
6563 const SDL_GPUSamplerCreateInfo *createinfo)
6564{
6565 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6566 VulkanSampler *vulkanSampler = SDL_malloc(sizeof(VulkanSampler));
6567 VkResult vulkanResult;
6568
6569 VkSamplerCreateInfo vkSamplerCreateInfo;
6570 vkSamplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
6571 vkSamplerCreateInfo.pNext = NULL;
6572 vkSamplerCreateInfo.flags = 0;
6573 vkSamplerCreateInfo.magFilter = SDLToVK_Filter[createinfo->mag_filter];
6574 vkSamplerCreateInfo.minFilter = SDLToVK_Filter[createinfo->min_filter];
6575 vkSamplerCreateInfo.mipmapMode = SDLToVK_SamplerMipmapMode[createinfo->mipmap_mode];
6576 vkSamplerCreateInfo.addressModeU = SDLToVK_SamplerAddressMode[createinfo->address_mode_u];
6577 vkSamplerCreateInfo.addressModeV = SDLToVK_SamplerAddressMode[createinfo->address_mode_v];
6578 vkSamplerCreateInfo.addressModeW = SDLToVK_SamplerAddressMode[createinfo->address_mode_w];
6579 vkSamplerCreateInfo.mipLodBias = createinfo->mip_lod_bias;
6580 vkSamplerCreateInfo.anisotropyEnable = createinfo->enable_anisotropy;
6581 vkSamplerCreateInfo.maxAnisotropy = createinfo->max_anisotropy;
6582 vkSamplerCreateInfo.compareEnable = createinfo->enable_compare;
6583 vkSamplerCreateInfo.compareOp = SDLToVK_CompareOp[createinfo->compare_op];
6584 vkSamplerCreateInfo.minLod = createinfo->min_lod;
6585 vkSamplerCreateInfo.maxLod = createinfo->max_lod;
6586 vkSamplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // arbitrary, unused
6587 vkSamplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
6588
6589 vulkanResult = renderer->vkCreateSampler(
6590 renderer->logicalDevice,
6591 &vkSamplerCreateInfo,
6592 NULL,
6593 &vulkanSampler->sampler);
6594
6595 if (vulkanResult != VK_SUCCESS) {
6596 SDL_free(vulkanSampler);
6597 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateSampler, NULL);
6598 }
6599
6600 SDL_SetAtomicInt(&vulkanSampler->referenceCount, 0);
6601
6602 return (SDL_GPUSampler *)vulkanSampler;
6603}
6604
6605static SDL_GPUShader *VULKAN_CreateShader(
6606 SDL_GPURenderer *driverData,
6607 const SDL_GPUShaderCreateInfo *createinfo)
6608{
6609 VulkanShader *vulkanShader;
6610 VkResult vulkanResult;
6611 VkShaderModuleCreateInfo vkShaderModuleCreateInfo;
6612 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6613 size_t entryPointNameLength;
6614
6615 vulkanShader = SDL_malloc(sizeof(VulkanShader));
6616 vkShaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
6617 vkShaderModuleCreateInfo.pNext = NULL;
6618 vkShaderModuleCreateInfo.flags = 0;
6619 vkShaderModuleCreateInfo.codeSize = createinfo->code_size;
6620 vkShaderModuleCreateInfo.pCode = (Uint32 *)createinfo->code;
6621
6622 vulkanResult = renderer->vkCreateShaderModule(
6623 renderer->logicalDevice,
6624 &vkShaderModuleCreateInfo,
6625 NULL,
6626 &vulkanShader->shaderModule);
6627
6628 if (vulkanResult != VK_SUCCESS) {
6629 SDL_free(vulkanShader);
6630 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateShaderModule, NULL);
6631 }
6632
6633 entryPointNameLength = SDL_strlen(createinfo->entrypoint) + 1;
6634 vulkanShader->entrypointName = SDL_malloc(entryPointNameLength);
6635 SDL_utf8strlcpy((char *)vulkanShader->entrypointName, createinfo->entrypoint, entryPointNameLength);
6636
6637 vulkanShader->numSamplers = createinfo->num_samplers;
6638 vulkanShader->numStorageTextures = createinfo->num_storage_textures;
6639 vulkanShader->numStorageBuffers = createinfo->num_storage_buffers;
6640 vulkanShader->numUniformBuffers = createinfo->num_uniform_buffers;
6641
6642 SDL_SetAtomicInt(&vulkanShader->referenceCount, 0);
6643
6644 return (SDL_GPUShader *)vulkanShader;
6645}
6646
6647static bool VULKAN_SupportsSampleCount(
6648 SDL_GPURenderer *driverData,
6649 SDL_GPUTextureFormat format,
6650 SDL_GPUSampleCount sampleCount)
6651{
6652 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6653 VkSampleCountFlags bits = IsDepthFormat(format) ? renderer->physicalDeviceProperties.properties.limits.framebufferDepthSampleCounts : renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts;
6654 VkSampleCountFlagBits vkSampleCount = SDLToVK_SampleCount[sampleCount];
6655 return !!(bits & vkSampleCount);
6656}
6657
6658static SDL_GPUTexture *VULKAN_CreateTexture(
6659 SDL_GPURenderer *driverData,
6660 const SDL_GPUTextureCreateInfo *createinfo)
6661{
6662 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6663 VulkanTexture *texture;
6664 VulkanTextureContainer *container;
6665
6666 texture = VULKAN_INTERNAL_CreateTexture(
6667 renderer,
6668 createinfo);
6669
6670 if (texture == NULL) {
6671 return NULL;
6672 }
6673
6674 container = SDL_malloc(sizeof(VulkanTextureContainer));
6675 container->header.info = *createinfo;
6676 container->canBeCycled = true;
6677 container->activeTexture = texture;
6678 container->textureCapacity = 1;
6679 container->textureCount = 1;
6680 container->textures = SDL_malloc(
6681 container->textureCapacity * sizeof(VulkanTexture *));
6682 container->textures[0] = container->activeTexture;
6683 container->debugName = NULL;
6684
6685 texture->container = container;
6686 texture->containerIndex = 0;
6687
6688 return (SDL_GPUTexture *)container;
6689}
6690
6691static SDL_GPUBuffer *VULKAN_CreateBuffer(
6692 SDL_GPURenderer *driverData,
6693 SDL_GPUBufferUsageFlags usageFlags,
6694 Uint32 size)
6695{
6696 return (SDL_GPUBuffer *)VULKAN_INTERNAL_CreateBufferContainer(
6697 (VulkanRenderer *)driverData,
6698 (VkDeviceSize)size,
6699 usageFlags,
6700 VULKAN_BUFFER_TYPE_GPU,
6701 false);
6702}
6703
6704static VulkanUniformBuffer *VULKAN_INTERNAL_CreateUniformBuffer(
6705 VulkanRenderer *renderer,
6706 Uint32 size)
6707{
6708 VulkanUniformBuffer *uniformBuffer = SDL_malloc(sizeof(VulkanUniformBuffer));
6709
6710 uniformBuffer->buffer = VULKAN_INTERNAL_CreateBuffer(
6711 renderer,
6712 (VkDeviceSize)size,
6713 0,
6714 VULKAN_BUFFER_TYPE_UNIFORM,
6715 false);
6716
6717 uniformBuffer->drawOffset = 0;
6718 uniformBuffer->writeOffset = 0;
6719 uniformBuffer->buffer->container = (VulkanBufferContainer *)uniformBuffer; // little hack for defrag
6720
6721 return uniformBuffer;
6722}
6723
6724static SDL_GPUTransferBuffer *VULKAN_CreateTransferBuffer(
6725 SDL_GPURenderer *driverData,
6726 SDL_GPUTransferBufferUsage usage,
6727 Uint32 size)
6728{
6729 // We use dedicated allocations for download buffers to avoid an issue
6730 // where a defrag is triggered after submitting a download but before
6731 // waiting on the fence.
6732 return (SDL_GPUTransferBuffer *)VULKAN_INTERNAL_CreateBufferContainer(
6733 (VulkanRenderer *)driverData,
6734 (VkDeviceSize)size,
6735 0,
6736 VULKAN_BUFFER_TYPE_TRANSFER,
6737 usage == SDL_GPU_TRANSFERBUFFERUSAGE_DOWNLOAD);
6738}
6739
6740static void VULKAN_INTERNAL_ReleaseTexture(
6741 VulkanRenderer *renderer,
6742 VulkanTexture *vulkanTexture)
6743{
6744 if (vulkanTexture->markedForDestroy) {
6745 return;
6746 }
6747
6748 SDL_LockMutex(renderer->disposeLock);
6749
6750 EXPAND_ARRAY_IF_NEEDED(
6751 renderer->texturesToDestroy,
6752 VulkanTexture *,
6753 renderer->texturesToDestroyCount + 1,
6754 renderer->texturesToDestroyCapacity,
6755 renderer->texturesToDestroyCapacity * 2);
6756
6757 renderer->texturesToDestroy[renderer->texturesToDestroyCount] = vulkanTexture;
6758 renderer->texturesToDestroyCount += 1;
6759
6760 vulkanTexture->markedForDestroy = true;
6761
6762 SDL_UnlockMutex(renderer->disposeLock);
6763}
6764
6765static void VULKAN_ReleaseTexture(
6766 SDL_GPURenderer *driverData,
6767 SDL_GPUTexture *texture)
6768{
6769 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6770 VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)texture;
6771 Uint32 i;
6772
6773 SDL_LockMutex(renderer->disposeLock);
6774
6775 for (i = 0; i < vulkanTextureContainer->textureCount; i += 1) {
6776 VULKAN_INTERNAL_ReleaseTexture(renderer, vulkanTextureContainer->textures[i]);
6777 }
6778
6779 // Containers are just client handles, so we can destroy immediately
6780 if (vulkanTextureContainer->debugName != NULL) {
6781 SDL_free(vulkanTextureContainer->debugName);
6782 }
6783 SDL_free(vulkanTextureContainer->textures);
6784 SDL_free(vulkanTextureContainer);
6785
6786 SDL_UnlockMutex(renderer->disposeLock);
6787}
6788
6789static void VULKAN_ReleaseSampler(
6790 SDL_GPURenderer *driverData,
6791 SDL_GPUSampler *sampler)
6792{
6793 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6794 VulkanSampler *vulkanSampler = (VulkanSampler *)sampler;
6795
6796 SDL_LockMutex(renderer->disposeLock);
6797
6798 EXPAND_ARRAY_IF_NEEDED(
6799 renderer->samplersToDestroy,
6800 VulkanSampler *,
6801 renderer->samplersToDestroyCount + 1,
6802 renderer->samplersToDestroyCapacity,
6803 renderer->samplersToDestroyCapacity * 2);
6804
6805 renderer->samplersToDestroy[renderer->samplersToDestroyCount] = vulkanSampler;
6806 renderer->samplersToDestroyCount += 1;
6807
6808 SDL_UnlockMutex(renderer->disposeLock);
6809}
6810
6811static void VULKAN_INTERNAL_ReleaseBuffer(
6812 VulkanRenderer *renderer,
6813 VulkanBuffer *vulkanBuffer)
6814{
6815 if (vulkanBuffer->markedForDestroy) {
6816 return;
6817 }
6818
6819 SDL_LockMutex(renderer->disposeLock);
6820
6821 EXPAND_ARRAY_IF_NEEDED(
6822 renderer->buffersToDestroy,
6823 VulkanBuffer *,
6824 renderer->buffersToDestroyCount + 1,
6825 renderer->buffersToDestroyCapacity,
6826 renderer->buffersToDestroyCapacity * 2);
6827
6828 renderer->buffersToDestroy[renderer->buffersToDestroyCount] = vulkanBuffer;
6829 renderer->buffersToDestroyCount += 1;
6830
6831 vulkanBuffer->markedForDestroy = 1;
6832
6833 SDL_UnlockMutex(renderer->disposeLock);
6834}
6835
6836static void VULKAN_INTERNAL_ReleaseBufferContainer(
6837 VulkanRenderer *renderer,
6838 VulkanBufferContainer *bufferContainer)
6839{
6840 Uint32 i;
6841
6842 SDL_LockMutex(renderer->disposeLock);
6843
6844 for (i = 0; i < bufferContainer->bufferCount; i += 1) {
6845 VULKAN_INTERNAL_ReleaseBuffer(renderer, bufferContainer->buffers[i]);
6846 }
6847
6848 // Containers are just client handles, so we can free immediately
6849 if (bufferContainer->debugName != NULL) {
6850 SDL_free(bufferContainer->debugName);
6851 }
6852 SDL_free(bufferContainer->buffers);
6853 SDL_free(bufferContainer);
6854
6855 SDL_UnlockMutex(renderer->disposeLock);
6856}
6857
6858static void VULKAN_ReleaseBuffer(
6859 SDL_GPURenderer *driverData,
6860 SDL_GPUBuffer *buffer)
6861{
6862 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6863 VulkanBufferContainer *vulkanBufferContainer = (VulkanBufferContainer *)buffer;
6864
6865 VULKAN_INTERNAL_ReleaseBufferContainer(
6866 renderer,
6867 vulkanBufferContainer);
6868}
6869
6870static void VULKAN_ReleaseTransferBuffer(
6871 SDL_GPURenderer *driverData,
6872 SDL_GPUTransferBuffer *transferBuffer)
6873{
6874 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6875 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer;
6876
6877 VULKAN_INTERNAL_ReleaseBufferContainer(
6878 renderer,
6879 transferBufferContainer);
6880}
6881
6882static void VULKAN_ReleaseShader(
6883 SDL_GPURenderer *driverData,
6884 SDL_GPUShader *shader)
6885{
6886 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6887 VulkanShader *vulkanShader = (VulkanShader *)shader;
6888
6889 SDL_LockMutex(renderer->disposeLock);
6890
6891 EXPAND_ARRAY_IF_NEEDED(
6892 renderer->shadersToDestroy,
6893 VulkanShader *,
6894 renderer->shadersToDestroyCount + 1,
6895 renderer->shadersToDestroyCapacity,
6896 renderer->shadersToDestroyCapacity * 2);
6897
6898 renderer->shadersToDestroy[renderer->shadersToDestroyCount] = vulkanShader;
6899 renderer->shadersToDestroyCount += 1;
6900
6901 SDL_UnlockMutex(renderer->disposeLock);
6902}
6903
6904static void VULKAN_ReleaseComputePipeline(
6905 SDL_GPURenderer *driverData,
6906 SDL_GPUComputePipeline *computePipeline)
6907{
6908 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6909 VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline;
6910
6911 SDL_LockMutex(renderer->disposeLock);
6912
6913 EXPAND_ARRAY_IF_NEEDED(
6914 renderer->computePipelinesToDestroy,
6915 VulkanComputePipeline *,
6916 renderer->computePipelinesToDestroyCount + 1,
6917 renderer->computePipelinesToDestroyCapacity,
6918 renderer->computePipelinesToDestroyCapacity * 2);
6919
6920 renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount] = vulkanComputePipeline;
6921 renderer->computePipelinesToDestroyCount += 1;
6922
6923 SDL_UnlockMutex(renderer->disposeLock);
6924}
6925
6926static void VULKAN_ReleaseGraphicsPipeline(
6927 SDL_GPURenderer *driverData,
6928 SDL_GPUGraphicsPipeline *graphicsPipeline)
6929{
6930 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
6931 VulkanGraphicsPipeline *vulkanGraphicsPipeline = (VulkanGraphicsPipeline *)graphicsPipeline;
6932
6933 SDL_LockMutex(renderer->disposeLock);
6934
6935 EXPAND_ARRAY_IF_NEEDED(
6936 renderer->graphicsPipelinesToDestroy,
6937 VulkanGraphicsPipeline *,
6938 renderer->graphicsPipelinesToDestroyCount + 1,
6939 renderer->graphicsPipelinesToDestroyCapacity,
6940 renderer->graphicsPipelinesToDestroyCapacity * 2);
6941
6942 renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount] = vulkanGraphicsPipeline;
6943 renderer->graphicsPipelinesToDestroyCount += 1;
6944
6945 SDL_UnlockMutex(renderer->disposeLock);
6946}
6947
6948// Command Buffer render state
6949
6950static VkRenderPass VULKAN_INTERNAL_FetchRenderPass(
6951 VulkanRenderer *renderer,
6952 VulkanCommandBuffer *commandBuffer,
6953 const SDL_GPUColorTargetInfo *colorTargetInfos,
6954 Uint32 numColorTargets,
6955 const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
6956{
6957 VulkanRenderPassHashTableValue *renderPassWrapper = NULL;
6958 VkRenderPass renderPassHandle;
6959 RenderPassHashTableKey key;
6960 Uint32 i;
6961
6962 SDL_zero(key);
6963
6964 for (i = 0; i < numColorTargets; i += 1) {
6965 key.colorTargetDescriptions[i].format = SDLToVK_TextureFormat[((VulkanTextureContainer *)colorTargetInfos[i].texture)->header.info.format];
6966 key.colorTargetDescriptions[i].loadOp = colorTargetInfos[i].load_op;
6967 key.colorTargetDescriptions[i].storeOp = colorTargetInfos[i].store_op;
6968
6969 if (colorTargetInfos[i].resolve_texture != NULL) {
6970 key.resolveTargetFormats[key.numResolveTargets] = SDLToVK_TextureFormat[((VulkanTextureContainer *)colorTargetInfos[i].resolve_texture)->header.info.format];
6971 key.numResolveTargets += 1;
6972 }
6973 }
6974
6975 key.sampleCount = VK_SAMPLE_COUNT_1_BIT;
6976 if (numColorTargets > 0) {
6977 key.sampleCount = SDLToVK_SampleCount[((VulkanTextureContainer *)colorTargetInfos[0].texture)->header.info.sample_count];
6978 }
6979
6980 key.numColorTargets = numColorTargets;
6981
6982 if (depthStencilTargetInfo == NULL) {
6983 key.depthStencilTargetDescription.format = 0;
6984 key.depthStencilTargetDescription.loadOp = SDL_GPU_LOADOP_DONT_CARE;
6985 key.depthStencilTargetDescription.storeOp = SDL_GPU_STOREOP_DONT_CARE;
6986 key.depthStencilTargetDescription.stencilLoadOp = SDL_GPU_LOADOP_DONT_CARE;
6987 key.depthStencilTargetDescription.stencilStoreOp = SDL_GPU_STOREOP_DONT_CARE;
6988 } else {
6989 key.depthStencilTargetDescription.format = SDLToVK_TextureFormat[((VulkanTextureContainer *)depthStencilTargetInfo->texture)->header.info.format];
6990 key.depthStencilTargetDescription.loadOp = depthStencilTargetInfo->load_op;
6991 key.depthStencilTargetDescription.storeOp = depthStencilTargetInfo->store_op;
6992 key.depthStencilTargetDescription.stencilLoadOp = depthStencilTargetInfo->stencil_load_op;
6993 key.depthStencilTargetDescription.stencilStoreOp = depthStencilTargetInfo->stencil_store_op;
6994 }
6995
6996 SDL_LockMutex(renderer->renderPassFetchLock);
6997
6998 bool result = SDL_FindInHashTable(
6999 renderer->renderPassHashTable,
7000 (const void *)&key,
7001 (const void **)&renderPassWrapper);
7002
7003 SDL_UnlockMutex(renderer->renderPassFetchLock);
7004
7005 if (result) {
7006 return renderPassWrapper->handle;
7007 }
7008
7009 renderPassHandle = VULKAN_INTERNAL_CreateRenderPass(
7010 renderer,
7011 commandBuffer,
7012 colorTargetInfos,
7013 numColorTargets,
7014 depthStencilTargetInfo);
7015
7016 if (renderPassHandle == VK_NULL_HANDLE) {
7017 return VK_NULL_HANDLE;
7018 }
7019
7020 // Have to malloc the key to store it in the hashtable
7021 RenderPassHashTableKey *allocedKey = SDL_malloc(sizeof(RenderPassHashTableKey));
7022 SDL_memcpy(allocedKey, &key, sizeof(RenderPassHashTableKey));
7023
7024 renderPassWrapper = SDL_malloc(sizeof(VulkanRenderPassHashTableValue));
7025 renderPassWrapper->handle = renderPassHandle;
7026
7027 SDL_LockMutex(renderer->renderPassFetchLock);
7028
7029 SDL_InsertIntoHashTable(
7030 renderer->renderPassHashTable,
7031 (const void *)allocedKey,
7032 (const void *)renderPassWrapper);
7033
7034 SDL_UnlockMutex(renderer->renderPassFetchLock);
7035 return renderPassHandle;
7036}
7037
// Returns a cached VulkanFramebuffer matching the given render pass and
// attachment views, creating and caching a new one on miss. Returns NULL
// if vkCreateFramebuffer fails.
//
// NOTE(review): the table is unlocked between the find and the insert, so two
// threads racing on the same key can each create a framebuffer and both insert
// it — functionally harmless but one entry is effectively duplicated; confirm
// whether framebufferFetchLock is meant to cover the whole fetch.
static VulkanFramebuffer *VULKAN_INTERNAL_FetchFramebuffer(
    VulkanRenderer *renderer,
    VkRenderPass renderPass,
    const SDL_GPUColorTargetInfo *colorTargetInfos,
    Uint32 numColorTargets,
    const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo,
    Uint32 width,
    Uint32 height)
{
    VulkanFramebuffer *vulkanFramebuffer = NULL;
    VkFramebufferCreateInfo framebufferInfo;
    VkResult result;
    // Worst case: one view per color target, one resolve view per color
    // target, plus one depth-stencil view.
    VkImageView imageViewAttachments[2 * MAX_COLOR_TARGET_BINDINGS + 1 /* depth */];
    FramebufferHashTableKey key;
    Uint32 attachmentCount = 0;
    Uint32 i;

    SDL_zero(imageViewAttachments);
    SDL_zero(key);

    // Build the lookup key from the exact image views that would back the
    // framebuffer, plus its dimensions.
    key.numColorTargets = numColorTargets;

    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            container,
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
            colorTargetInfos[i].mip_level);

        // For 3D textures the depth plane selects the render target view;
        // otherwise view 0 of the (layer, mip) subresource is used.
        Uint32 rtvIndex =
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? colorTargetInfos[i].layer_or_depth_plane : 0;
        key.colorAttachmentViews[i] = subresource->renderTargetViews[rtvIndex];

        // NOTE(review): the key includes a resolve view whenever
        // resolve_texture is non-NULL, but the attachment list below only
        // adds one when store_op is RESOLVE/RESOLVE_AND_STORE — confirm these
        // conditions can't diverge for valid input.
        if (colorTargetInfos[i].resolve_texture != NULL) {
            VulkanTextureContainer *resolveTextureContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
            VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
                resolveTextureContainer,
                colorTargetInfos[i].layer_or_depth_plane,
                colorTargetInfos[i].mip_level);

            key.resolveAttachmentViews[key.numResolveAttachments] = resolveSubresource->renderTargetViews[0];
            key.numResolveAttachments += 1;
        }
    }

    if (depthStencilTargetInfo == NULL) {
        key.depthStencilAttachmentView = VK_NULL_HANDLE;
    } else {
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            (VulkanTextureContainer *)depthStencilTargetInfo->texture,
            0,
            0);
        key.depthStencilAttachmentView = subresource->depthStencilView;
    }

    key.width = width;
    key.height = height;

    SDL_LockMutex(renderer->framebufferFetchLock);

    bool findResult = SDL_FindInHashTable(
        renderer->framebufferHashTable,
        (const void *)&key,
        (const void **)&vulkanFramebuffer);

    SDL_UnlockMutex(renderer->framebufferFetchLock);

    if (findResult) {
        return vulkanFramebuffer;
    }

    // Cache miss: build a new framebuffer.
    // NOTE(review): SDL_malloc result is used unchecked — confirm OOM policy.
    vulkanFramebuffer = SDL_malloc(sizeof(VulkanFramebuffer));

    SDL_SetAtomicInt(&vulkanFramebuffer->referenceCount, 0);

    // Create a new framebuffer

    // Second pass: gather the actual VkImageView list in attachment order
    // (color, then its resolve view if any, ..., then depth-stencil).
    for (i = 0; i < numColorTargets; i += 1) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)colorTargetInfos[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            container,
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
            colorTargetInfos[i].mip_level);

        Uint32 rtvIndex =
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? colorTargetInfos[i].layer_or_depth_plane : 0;

        imageViewAttachments[attachmentCount] = subresource->renderTargetViews[rtvIndex];

        attachmentCount += 1;

        if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
            VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
            VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
                resolveContainer,
                colorTargetInfos[i].resolve_layer,
                colorTargetInfos[i].resolve_mip_level);

            imageViewAttachments[attachmentCount] = resolveSubresource->renderTargetViews[0];

            attachmentCount += 1;
        }
    }

    if (depthStencilTargetInfo != NULL) {
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            (VulkanTextureContainer *)depthStencilTargetInfo->texture,
            0,
            0);
        imageViewAttachments[attachmentCount] = subresource->depthStencilView;

        attachmentCount += 1;
    }

    framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    framebufferInfo.pNext = NULL;
    framebufferInfo.flags = 0;
    framebufferInfo.renderPass = renderPass;
    framebufferInfo.attachmentCount = attachmentCount;
    framebufferInfo.pAttachments = imageViewAttachments;
    framebufferInfo.width = key.width;
    framebufferInfo.height = key.height;
    framebufferInfo.layers = 1;

    result = renderer->vkCreateFramebuffer(
        renderer->logicalDevice,
        &framebufferInfo,
        NULL,
        &vulkanFramebuffer->framebuffer);

    if (result == VK_SUCCESS) {
        // Have to malloc the key to store it in the hashtable
        FramebufferHashTableKey *allocedKey = SDL_malloc(sizeof(FramebufferHashTableKey));
        SDL_memcpy(allocedKey, &key, sizeof(FramebufferHashTableKey));

        SDL_LockMutex(renderer->framebufferFetchLock);

        SDL_InsertIntoHashTable(
            renderer->framebufferHashTable,
            (const void *)allocedKey,
            (const void *)vulkanFramebuffer);

        SDL_UnlockMutex(renderer->framebufferFetchLock);
    } else {
        SDL_free(vulkanFramebuffer);
        CHECK_VULKAN_ERROR_AND_RETURN(result, vkCreateFramebuffer, NULL);
    }

    return vulkanFramebuffer;
}
7188
7189static void VULKAN_INTERNAL_SetCurrentViewport(
7190 VulkanCommandBuffer *commandBuffer,
7191 const SDL_GPUViewport *viewport)
7192{
7193 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7194 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7195
7196 vulkanCommandBuffer->currentViewport.x = viewport->x;
7197 vulkanCommandBuffer->currentViewport.width = viewport->w;
7198 vulkanCommandBuffer->currentViewport.minDepth = viewport->min_depth;
7199 vulkanCommandBuffer->currentViewport.maxDepth = viewport->max_depth;
7200
7201 // Viewport flip for consistency with other backends
7202 vulkanCommandBuffer->currentViewport.y = viewport->y + viewport->h;
7203 vulkanCommandBuffer->currentViewport.height = -viewport->h;
7204
7205 renderer->vkCmdSetViewport(
7206 vulkanCommandBuffer->commandBuffer,
7207 0,
7208 1,
7209 &vulkanCommandBuffer->currentViewport);
7210}
7211
7212static void VULKAN_SetViewport(
7213 SDL_GPUCommandBuffer *commandBuffer,
7214 const SDL_GPUViewport *viewport)
7215{
7216 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7217
7218 VULKAN_INTERNAL_SetCurrentViewport(
7219 vulkanCommandBuffer,
7220 viewport);
7221}
7222
7223static void VULKAN_INTERNAL_SetCurrentScissor(
7224 VulkanCommandBuffer *vulkanCommandBuffer,
7225 const SDL_Rect *scissor)
7226{
7227 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7228
7229 vulkanCommandBuffer->currentScissor.offset.x = scissor->x;
7230 vulkanCommandBuffer->currentScissor.offset.y = scissor->y;
7231 vulkanCommandBuffer->currentScissor.extent.width = scissor->w;
7232 vulkanCommandBuffer->currentScissor.extent.height = scissor->h;
7233
7234 renderer->vkCmdSetScissor(
7235 vulkanCommandBuffer->commandBuffer,
7236 0,
7237 1,
7238 &vulkanCommandBuffer->currentScissor);
7239}
7240
7241static void VULKAN_SetScissor(
7242 SDL_GPUCommandBuffer *commandBuffer,
7243 const SDL_Rect *scissor)
7244{
7245 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7246
7247 VULKAN_INTERNAL_SetCurrentScissor(
7248 vulkanCommandBuffer,
7249 scissor);
7250}
7251
7252static void VULKAN_INTERNAL_SetCurrentBlendConstants(
7253 VulkanCommandBuffer *vulkanCommandBuffer,
7254 SDL_FColor blendConstants)
7255{
7256 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7257
7258 vulkanCommandBuffer->blendConstants[0] = blendConstants.r;
7259 vulkanCommandBuffer->blendConstants[1] = blendConstants.g;
7260 vulkanCommandBuffer->blendConstants[2] = blendConstants.b;
7261 vulkanCommandBuffer->blendConstants[3] = blendConstants.a;
7262
7263 renderer->vkCmdSetBlendConstants(
7264 vulkanCommandBuffer->commandBuffer,
7265 vulkanCommandBuffer->blendConstants);
7266}
7267
7268static void VULKAN_SetBlendConstants(
7269 SDL_GPUCommandBuffer *commandBuffer,
7270 SDL_FColor blendConstants)
7271{
7272 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7273
7274 VULKAN_INTERNAL_SetCurrentBlendConstants(
7275 vulkanCommandBuffer,
7276 blendConstants);
7277}
7278
7279static void VULKAN_INTERNAL_SetCurrentStencilReference(
7280 VulkanCommandBuffer *vulkanCommandBuffer,
7281 Uint8 reference)
7282{
7283 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7284
7285 vulkanCommandBuffer->stencilRef = reference;
7286
7287 renderer->vkCmdSetStencilReference(
7288 vulkanCommandBuffer->commandBuffer,
7289 VK_STENCIL_FACE_FRONT_AND_BACK,
7290 vulkanCommandBuffer->stencilRef);
7291}
7292
7293static void VULKAN_SetStencilReference(
7294 SDL_GPUCommandBuffer *commandBuffer,
7295 Uint8 reference)
7296{
7297 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7298
7299 VULKAN_INTERNAL_SetCurrentStencilReference(
7300 vulkanCommandBuffer,
7301 reference);
7302}
7303
7304static void VULKAN_BindVertexSamplers(
7305 SDL_GPUCommandBuffer *commandBuffer,
7306 Uint32 firstSlot,
7307 const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
7308 Uint32 numBindings)
7309{
7310 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7311
7312 for (Uint32 i = 0; i < numBindings; i += 1) {
7313 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
7314 vulkanCommandBuffer->vertexSamplerTextures[firstSlot + i] = textureContainer->activeTexture;
7315 vulkanCommandBuffer->vertexSamplers[firstSlot + i] = (VulkanSampler *)textureSamplerBindings[i].sampler;
7316
7317 VULKAN_INTERNAL_TrackSampler(
7318 vulkanCommandBuffer,
7319 (VulkanSampler *)textureSamplerBindings[i].sampler);
7320
7321 VULKAN_INTERNAL_TrackTexture(
7322 vulkanCommandBuffer,
7323 textureContainer->activeTexture);
7324 }
7325
7326 vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
7327}
7328
7329static void VULKAN_BindVertexStorageTextures(
7330 SDL_GPUCommandBuffer *commandBuffer,
7331 Uint32 firstSlot,
7332 SDL_GPUTexture *const *storageTextures,
7333 Uint32 numBindings)
7334{
7335 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7336
7337 for (Uint32 i = 0; i < numBindings; i += 1) {
7338 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
7339
7340 vulkanCommandBuffer->vertexStorageTextures[firstSlot + i] = textureContainer->activeTexture;
7341
7342 VULKAN_INTERNAL_TrackTexture(
7343 vulkanCommandBuffer,
7344 textureContainer->activeTexture);
7345 }
7346
7347 vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
7348}
7349
7350static void VULKAN_BindVertexStorageBuffers(
7351 SDL_GPUCommandBuffer *commandBuffer,
7352 Uint32 firstSlot,
7353 SDL_GPUBuffer *const *storageBuffers,
7354 Uint32 numBindings)
7355{
7356 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7357 VulkanBufferContainer *bufferContainer;
7358 Uint32 i;
7359
7360 for (i = 0; i < numBindings; i += 1) {
7361 bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
7362
7363 vulkanCommandBuffer->vertexStorageBuffers[firstSlot + i] = bufferContainer->activeBuffer;
7364
7365 VULKAN_INTERNAL_TrackBuffer(
7366 vulkanCommandBuffer,
7367 bufferContainer->activeBuffer);
7368 }
7369
7370 vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
7371}
7372
7373static void VULKAN_BindFragmentSamplers(
7374 SDL_GPUCommandBuffer *commandBuffer,
7375 Uint32 firstSlot,
7376 const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
7377 Uint32 numBindings)
7378{
7379 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7380
7381 for (Uint32 i = 0; i < numBindings; i += 1) {
7382 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
7383 vulkanCommandBuffer->fragmentSamplerTextures[firstSlot + i] = textureContainer->activeTexture;
7384 vulkanCommandBuffer->fragmentSamplers[firstSlot + i] = (VulkanSampler *)textureSamplerBindings[i].sampler;
7385
7386 VULKAN_INTERNAL_TrackSampler(
7387 vulkanCommandBuffer,
7388 (VulkanSampler *)textureSamplerBindings[i].sampler);
7389
7390 VULKAN_INTERNAL_TrackTexture(
7391 vulkanCommandBuffer,
7392 textureContainer->activeTexture);
7393 }
7394
7395 vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
7396}
7397
7398static void VULKAN_BindFragmentStorageTextures(
7399 SDL_GPUCommandBuffer *commandBuffer,
7400 Uint32 firstSlot,
7401 SDL_GPUTexture *const *storageTextures,
7402 Uint32 numBindings)
7403{
7404 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7405
7406 for (Uint32 i = 0; i < numBindings; i += 1) {
7407 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
7408
7409 vulkanCommandBuffer->fragmentStorageTextures[firstSlot + i] =
7410 textureContainer->activeTexture;
7411
7412 VULKAN_INTERNAL_TrackTexture(
7413 vulkanCommandBuffer,
7414 textureContainer->activeTexture);
7415 }
7416
7417 vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
7418}
7419
7420static void VULKAN_BindFragmentStorageBuffers(
7421 SDL_GPUCommandBuffer *commandBuffer,
7422 Uint32 firstSlot,
7423 SDL_GPUBuffer *const *storageBuffers,
7424 Uint32 numBindings)
7425{
7426 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7427 VulkanBufferContainer *bufferContainer;
7428 Uint32 i;
7429
7430 for (i = 0; i < numBindings; i += 1) {
7431 bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
7432
7433 vulkanCommandBuffer->fragmentStorageBuffers[firstSlot + i] = bufferContainer->activeBuffer;
7434
7435 VULKAN_INTERNAL_TrackBuffer(
7436 vulkanCommandBuffer,
7437 bufferContainer->activeBuffer);
7438 }
7439
7440 vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
7441}
7442
7443static VulkanUniformBuffer *VULKAN_INTERNAL_AcquireUniformBufferFromPool(
7444 VulkanCommandBuffer *commandBuffer)
7445{
7446 VulkanRenderer *renderer = commandBuffer->renderer;
7447 VulkanUniformBuffer *uniformBuffer;
7448
7449 SDL_LockMutex(renderer->acquireUniformBufferLock);
7450
7451 if (renderer->uniformBufferPoolCount > 0) {
7452 uniformBuffer = renderer->uniformBufferPool[renderer->uniformBufferPoolCount - 1];
7453 renderer->uniformBufferPoolCount -= 1;
7454 } else {
7455 uniformBuffer = VULKAN_INTERNAL_CreateUniformBuffer(
7456 renderer,
7457 UNIFORM_BUFFER_SIZE);
7458 }
7459
7460 SDL_UnlockMutex(renderer->acquireUniformBufferLock);
7461
7462 VULKAN_INTERNAL_TrackUniformBuffer(commandBuffer, uniformBuffer);
7463
7464 return uniformBuffer;
7465}
7466
7467static void VULKAN_INTERNAL_ReturnUniformBufferToPool(
7468 VulkanRenderer *renderer,
7469 VulkanUniformBuffer *uniformBuffer)
7470{
7471 if (renderer->uniformBufferPoolCount >= renderer->uniformBufferPoolCapacity) {
7472 renderer->uniformBufferPoolCapacity *= 2;
7473 renderer->uniformBufferPool = SDL_realloc(
7474 renderer->uniformBufferPool,
7475 renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *));
7476 }
7477
7478 renderer->uniformBufferPool[renderer->uniformBufferPoolCount] = uniformBuffer;
7479 renderer->uniformBufferPoolCount += 1;
7480
7481 uniformBuffer->writeOffset = 0;
7482 uniformBuffer->drawOffset = 0;
7483}
7484
// Appends `length` bytes of uniform data for the given stage/slot into that
// slot's current uniform buffer, acquiring a buffer (or rolling over to a
// fresh one when the current one is nearly full) as needed. Advances the
// buffer's write cursor by the UBO-alignment-rounded block size and flags the
// command buffer so the new dynamic offset (and, on rollover, a new
// descriptor set) is bound before the next draw/dispatch.
static void VULKAN_INTERNAL_PushUniformData(
    VulkanCommandBuffer *commandBuffer,
    VulkanUniformBufferStage uniformBufferStage,
    Uint32 slotIndex,
    const void *data,
    Uint32 length)
{
    // Round the write size up to the device's minimum UBO offset alignment
    // so consecutive pushes produce validly aligned dynamic offsets.
    Uint32 blockSize =
        VULKAN_INTERNAL_NextHighestAlignment32(
            length,
            commandBuffer->renderer->minUBOAlignment);

    VulkanUniformBuffer *uniformBuffer;

    // Select (and lazily acquire) the uniform buffer for this stage/slot.
    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        if (commandBuffer->vertexUniformBuffers[slotIndex] == NULL) {
            commandBuffer->vertexUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->vertexUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        if (commandBuffer->fragmentUniformBuffers[slotIndex] == NULL) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->fragmentUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        if (commandBuffer->computeUniformBuffers[slotIndex] == NULL) {
            commandBuffer->computeUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->computeUniformBuffers[slotIndex];
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }

    // If there is no more room, acquire a new uniform buffer
    // (the MAX_UBO_SECTION_SIZE margin keeps the shader-visible range that
    // the dynamic offset addresses inside the buffer).
    if (uniformBuffer->writeOffset + blockSize + MAX_UBO_SECTION_SIZE >= uniformBuffer->buffer->size) {
        uniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(commandBuffer);

        uniformBuffer->drawOffset = 0;
        uniformBuffer->writeOffset = 0;

        // A new buffer means the descriptor set for this stage must be
        // rewritten, not just re-offset.
        if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
            commandBuffer->vertexUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewVertexUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewFragmentUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
            commandBuffer->computeUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewComputeUniformDescriptorSet = true;
        } else {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
            return;
        }
    }

    // The next draw reads from where this push starts.
    uniformBuffer->drawOffset = uniformBuffer->writeOffset;

    // Write directly through the persistently mapped allocation.
    Uint8 *dst =
        uniformBuffer->buffer->usedRegion->allocation->mapPointer +
        uniformBuffer->buffer->usedRegion->resourceOffset +
        uniformBuffer->writeOffset;

    SDL_memcpy(
        dst,
        data,
        length);

    uniformBuffer->writeOffset += blockSize;

    // Dynamic offsets must be rebound before the next draw/dispatch.
    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        commandBuffer->needNewVertexUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        commandBuffer->needNewFragmentUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        commandBuffer->needNewComputeUniformOffsets = true;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }
}
7569
7570static void VULKAN_BeginRenderPass(
7571 SDL_GPUCommandBuffer *commandBuffer,
7572 const SDL_GPUColorTargetInfo *colorTargetInfos,
7573 Uint32 numColorTargets,
7574 const SDL_GPUDepthStencilTargetInfo *depthStencilTargetInfo)
7575{
7576 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7577 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7578 VkRenderPass renderPass;
7579 VulkanFramebuffer *framebuffer;
7580
7581 Uint32 w, h;
7582 VkClearValue *clearValues;
7583 Uint32 clearCount = 0;
7584 Uint32 totalColorAttachmentCount = 0;
7585 Uint32 i;
7586 SDL_GPUViewport defaultViewport;
7587 SDL_Rect defaultScissor;
7588 SDL_FColor defaultBlendConstants;
7589 Uint32 framebufferWidth = SDL_MAX_UINT32;
7590 Uint32 framebufferHeight = SDL_MAX_UINT32;
7591
7592 for (i = 0; i < numColorTargets; i += 1) {
7593 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorTargetInfos[i].texture;
7594
7595 w = textureContainer->header.info.width >> colorTargetInfos[i].mip_level;
7596 h = textureContainer->header.info.height >> colorTargetInfos[i].mip_level;
7597
7598 // The framebuffer cannot be larger than the smallest attachment.
7599
7600 if (w < framebufferWidth) {
7601 framebufferWidth = w;
7602 }
7603
7604 if (h < framebufferHeight) {
7605 framebufferHeight = h;
7606 }
7607 }
7608
7609 if (depthStencilTargetInfo != NULL) {
7610 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilTargetInfo->texture;
7611
7612 w = textureContainer->header.info.width;
7613 h = textureContainer->header.info.height;
7614
7615 // The framebuffer cannot be larger than the smallest attachment.
7616
7617 if (w < framebufferWidth) {
7618 framebufferWidth = w;
7619 }
7620
7621 if (h < framebufferHeight) {
7622 framebufferHeight = h;
7623 }
7624 }
7625
7626 for (i = 0; i < numColorTargets; i += 1) {
7627 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorTargetInfos[i].texture;
7628 VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
7629 renderer,
7630 vulkanCommandBuffer,
7631 textureContainer,
7632 textureContainer->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorTargetInfos[i].layer_or_depth_plane,
7633 colorTargetInfos[i].mip_level,
7634 colorTargetInfos[i].cycle,
7635 VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT);
7636
7637 vulkanCommandBuffer->colorAttachmentSubresources[vulkanCommandBuffer->colorAttachmentSubresourceCount] = subresource;
7638 vulkanCommandBuffer->colorAttachmentSubresourceCount += 1;
7639 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);
7640 totalColorAttachmentCount += 1;
7641 clearCount += 1;
7642
7643 if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
7644 VulkanTextureContainer *resolveContainer = (VulkanTextureContainer *)colorTargetInfos[i].resolve_texture;
7645 VulkanTextureSubresource *resolveSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
7646 renderer,
7647 vulkanCommandBuffer,
7648 resolveContainer,
7649 colorTargetInfos[i].resolve_layer,
7650 colorTargetInfos[i].resolve_mip_level,
7651 colorTargetInfos[i].cycle_resolve_texture,
7652 VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT);
7653
7654 vulkanCommandBuffer->resolveAttachmentSubresources[vulkanCommandBuffer->resolveAttachmentSubresourceCount] = resolveSubresource;
7655 vulkanCommandBuffer->resolveAttachmentSubresourceCount += 1;
7656 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, resolveSubresource->parent);
7657 totalColorAttachmentCount += 1;
7658 clearCount += 1;
7659 }
7660 }
7661
7662 if (depthStencilTargetInfo != NULL) {
7663 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilTargetInfo->texture;
7664 VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
7665 renderer,
7666 vulkanCommandBuffer,
7667 textureContainer,
7668 0,
7669 0,
7670 depthStencilTargetInfo->cycle,
7671 VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT);
7672
7673 vulkanCommandBuffer->depthStencilAttachmentSubresource = subresource;
7674 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);
7675 clearCount += 1;
7676 }
7677
7678 // Fetch required render objects
7679
7680 renderPass = VULKAN_INTERNAL_FetchRenderPass(
7681 renderer,
7682 vulkanCommandBuffer,
7683 colorTargetInfos,
7684 numColorTargets,
7685 depthStencilTargetInfo);
7686
7687 if (renderPass == VK_NULL_HANDLE) {
7688 return;
7689 }
7690
7691 framebuffer = VULKAN_INTERNAL_FetchFramebuffer(
7692 renderer,
7693 renderPass,
7694 colorTargetInfos,
7695 numColorTargets,
7696 depthStencilTargetInfo,
7697 framebufferWidth,
7698 framebufferHeight);
7699
7700 if (framebuffer == NULL) {
7701 return;
7702 }
7703
7704 VULKAN_INTERNAL_TrackFramebuffer(renderer, vulkanCommandBuffer, framebuffer);
7705
7706 // Set clear values
7707
7708 clearValues = SDL_stack_alloc(VkClearValue, clearCount);
7709
7710 for (i = 0; i < totalColorAttachmentCount; i += 1) {
7711 clearValues[i].color.float32[0] = colorTargetInfos[i].clear_color.r;
7712 clearValues[i].color.float32[1] = colorTargetInfos[i].clear_color.g;
7713 clearValues[i].color.float32[2] = colorTargetInfos[i].clear_color.b;
7714 clearValues[i].color.float32[3] = colorTargetInfos[i].clear_color.a;
7715
7716 if (colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE || colorTargetInfos[i].store_op == SDL_GPU_STOREOP_RESOLVE_AND_STORE) {
7717 // Skip over the resolve texture, we're not clearing it
7718 i += 1;
7719 }
7720 }
7721
7722 if (depthStencilTargetInfo != NULL) {
7723 clearValues[totalColorAttachmentCount].depthStencil.depth =
7724 depthStencilTargetInfo->clear_depth;
7725 clearValues[totalColorAttachmentCount].depthStencil.stencil =
7726 depthStencilTargetInfo->clear_stencil;
7727 }
7728
7729 VkRenderPassBeginInfo renderPassBeginInfo;
7730 renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
7731 renderPassBeginInfo.pNext = NULL;
7732 renderPassBeginInfo.renderPass = renderPass;
7733 renderPassBeginInfo.framebuffer = framebuffer->framebuffer;
7734 renderPassBeginInfo.pClearValues = clearValues;
7735 renderPassBeginInfo.clearValueCount = clearCount;
7736 renderPassBeginInfo.renderArea.extent.width = framebufferWidth;
7737 renderPassBeginInfo.renderArea.extent.height = framebufferHeight;
7738 renderPassBeginInfo.renderArea.offset.x = 0;
7739 renderPassBeginInfo.renderArea.offset.y = 0;
7740
7741 renderer->vkCmdBeginRenderPass(
7742 vulkanCommandBuffer->commandBuffer,
7743 &renderPassBeginInfo,
7744 VK_SUBPASS_CONTENTS_INLINE);
7745
7746 SDL_stack_free(clearValues);
7747
7748 // Set sensible default states
7749
7750 defaultViewport.x = 0;
7751 defaultViewport.y = 0;
7752 defaultViewport.w = (float)framebufferWidth;
7753 defaultViewport.h = (float)framebufferHeight;
7754 defaultViewport.min_depth = 0;
7755 defaultViewport.max_depth = 1;
7756
7757 VULKAN_INTERNAL_SetCurrentViewport(
7758 vulkanCommandBuffer,
7759 &defaultViewport);
7760
7761 defaultScissor.x = 0;
7762 defaultScissor.y = 0;
7763 defaultScissor.w = (Sint32)framebufferWidth;
7764 defaultScissor.h = (Sint32)framebufferHeight;
7765
7766 VULKAN_INTERNAL_SetCurrentScissor(
7767 vulkanCommandBuffer,
7768 &defaultScissor);
7769
7770 defaultBlendConstants.r = 1.0f;
7771 defaultBlendConstants.g = 1.0f;
7772 defaultBlendConstants.b = 1.0f;
7773 defaultBlendConstants.a = 1.0f;
7774
7775 VULKAN_INTERNAL_SetCurrentBlendConstants(
7776 vulkanCommandBuffer,
7777 defaultBlendConstants);
7778
7779 VULKAN_INTERNAL_SetCurrentStencilReference(
7780 vulkanCommandBuffer,
7781 0);
7782}
7783
7784static void VULKAN_BindGraphicsPipeline(
7785 SDL_GPUCommandBuffer *commandBuffer,
7786 SDL_GPUGraphicsPipeline *graphicsPipeline)
7787{
7788 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7789 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7790 VulkanGraphicsPipeline *pipeline = (VulkanGraphicsPipeline *)graphicsPipeline;
7791
7792 renderer->vkCmdBindPipeline(
7793 vulkanCommandBuffer->commandBuffer,
7794 VK_PIPELINE_BIND_POINT_GRAPHICS,
7795 pipeline->pipeline);
7796
7797 vulkanCommandBuffer->currentGraphicsPipeline = pipeline;
7798
7799 VULKAN_INTERNAL_TrackGraphicsPipeline(vulkanCommandBuffer, pipeline);
7800
7801 // Acquire uniform buffers if necessary
7802 for (Uint32 i = 0; i < pipeline->resourceLayout->vertexUniformBufferCount; i += 1) {
7803 if (vulkanCommandBuffer->vertexUniformBuffers[i] == NULL) {
7804 vulkanCommandBuffer->vertexUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
7805 vulkanCommandBuffer);
7806 }
7807 }
7808
7809 for (Uint32 i = 0; i < pipeline->resourceLayout->fragmentUniformBufferCount; i += 1) {
7810 if (vulkanCommandBuffer->fragmentUniformBuffers[i] == NULL) {
7811 vulkanCommandBuffer->fragmentUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
7812 vulkanCommandBuffer);
7813 }
7814 }
7815
7816 // Mark bindings as needed
7817 vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
7818 vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
7819 vulkanCommandBuffer->needNewVertexUniformDescriptorSet = true;
7820 vulkanCommandBuffer->needNewFragmentUniformDescriptorSet = true;
7821 vulkanCommandBuffer->needNewVertexUniformOffsets = true;
7822 vulkanCommandBuffer->needNewFragmentUniformOffsets = true;
7823}
7824
7825static void VULKAN_BindVertexBuffers(
7826 SDL_GPUCommandBuffer *commandBuffer,
7827 Uint32 firstSlot,
7828 const SDL_GPUBufferBinding *bindings,
7829 Uint32 numBindings)
7830{
7831 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7832 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7833 VulkanBuffer *currentVulkanBuffer;
7834 VkBuffer *buffers = SDL_stack_alloc(VkBuffer, numBindings);
7835 VkDeviceSize *offsets = SDL_stack_alloc(VkDeviceSize, numBindings);
7836 Uint32 i;
7837
7838 for (i = 0; i < numBindings; i += 1) {
7839 currentVulkanBuffer = ((VulkanBufferContainer *)bindings[i].buffer)->activeBuffer;
7840 buffers[i] = currentVulkanBuffer->buffer;
7841 offsets[i] = (VkDeviceSize)bindings[i].offset;
7842 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, currentVulkanBuffer);
7843 }
7844
7845 renderer->vkCmdBindVertexBuffers(
7846 vulkanCommandBuffer->commandBuffer,
7847 firstSlot,
7848 numBindings,
7849 buffers,
7850 offsets);
7851
7852 SDL_stack_free(buffers);
7853 SDL_stack_free(offsets);
7854}
7855
7856static void VULKAN_BindIndexBuffer(
7857 SDL_GPUCommandBuffer *commandBuffer,
7858 const SDL_GPUBufferBinding *binding,
7859 SDL_GPUIndexElementSize indexElementSize)
7860{
7861 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7862 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
7863 VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)binding->buffer)->activeBuffer;
7864
7865 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
7866
7867 renderer->vkCmdBindIndexBuffer(
7868 vulkanCommandBuffer->commandBuffer,
7869 vulkanBuffer->buffer,
7870 (VkDeviceSize)binding->offset,
7871 SDLToVK_IndexType[indexElementSize]);
7872}
7873
7874static void VULKAN_PushVertexUniformData(
7875 SDL_GPUCommandBuffer *commandBuffer,
7876 Uint32 slotIndex,
7877 const void *data,
7878 Uint32 length)
7879{
7880 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7881
7882 VULKAN_INTERNAL_PushUniformData(
7883 vulkanCommandBuffer,
7884 VULKAN_UNIFORM_BUFFER_STAGE_VERTEX,
7885 slotIndex,
7886 data,
7887 length);
7888}
7889
7890static void VULKAN_PushFragmentUniformData(
7891 SDL_GPUCommandBuffer *commandBuffer,
7892 Uint32 slotIndex,
7893 const void *data,
7894 Uint32 length)
7895{
7896 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7897
7898 VULKAN_INTERNAL_PushUniformData(
7899 vulkanCommandBuffer,
7900 VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT,
7901 slotIndex,
7902 data,
7903 length);
7904}
7905
// Ends the current render pass: closes the Vulkan render pass, transitions
// every attachment subresource back to its default usage layout, and clears
// all per-pass binding state on the command buffer. Statement order matters:
// vkCmdEndRenderPass must be recorded before the layout transitions.
static void VULKAN_EndRenderPass(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    Uint32 i;

    renderer->vkCmdEndRenderPass(
        vulkanCommandBuffer->commandBuffer);

    // Return color attachments to their default usage layout
    for (i = 0; i < vulkanCommandBuffer->colorAttachmentSubresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
            vulkanCommandBuffer->colorAttachmentSubresources[i]);
    }
    vulkanCommandBuffer->colorAttachmentSubresourceCount = 0;

    // Resolve targets were written as color attachments, so they transition
    // from the same usage mode
    for (i = 0; i < vulkanCommandBuffer->resolveAttachmentSubresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
            vulkanCommandBuffer->resolveAttachmentSubresources[i]);
    }
    vulkanCommandBuffer->resolveAttachmentSubresourceCount = 0;

    if (vulkanCommandBuffer->depthStencilAttachmentSubresource != NULL) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT,
            vulkanCommandBuffer->depthStencilAttachmentSubresource);
        vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;
    }

    vulkanCommandBuffer->currentGraphicsPipeline = NULL;

    // Invalidate cached graphics descriptor sets; they will be re-fetched on
    // the next draw after a new pipeline is bound
    vulkanCommandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;

    // Reset bind state
    SDL_zeroa(vulkanCommandBuffer->colorAttachmentSubresources);
    SDL_zeroa(vulkanCommandBuffer->resolveAttachmentSubresources);
    vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;

    SDL_zeroa(vulkanCommandBuffer->vertexSamplers);
    SDL_zeroa(vulkanCommandBuffer->vertexSamplerTextures);
    SDL_zeroa(vulkanCommandBuffer->vertexStorageTextures);
    SDL_zeroa(vulkanCommandBuffer->vertexStorageBuffers);

    SDL_zeroa(vulkanCommandBuffer->fragmentSamplers);
    SDL_zeroa(vulkanCommandBuffer->fragmentSamplerTextures);
    SDL_zeroa(vulkanCommandBuffer->fragmentStorageTextures);
    SDL_zeroa(vulkanCommandBuffer->fragmentStorageBuffers);
}
7965
7966static void VULKAN_BeginComputePass(
7967 SDL_GPUCommandBuffer *commandBuffer,
7968 const SDL_GPUStorageTextureReadWriteBinding *storageTextureBindings,
7969 Uint32 numStorageTextureBindings,
7970 const SDL_GPUStorageBufferReadWriteBinding *storageBufferBindings,
7971 Uint32 numStorageBufferBindings)
7972{
7973 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
7974 VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
7975 VulkanBufferContainer *bufferContainer;
7976 VulkanBuffer *buffer;
7977 Uint32 i;
7978
7979 vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount = numStorageTextureBindings;
7980
7981 for (i = 0; i < numStorageTextureBindings; i += 1) {
7982 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextureBindings[i].texture;
7983 VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
7984 renderer,
7985 vulkanCommandBuffer,
7986 textureContainer,
7987 storageTextureBindings[i].layer,
7988 storageTextureBindings[i].mip_level,
7989 storageTextureBindings[i].cycle,
7990 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE);
7991
7992 vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i] = subresource;
7993
7994 VULKAN_INTERNAL_TrackTexture(
7995 vulkanCommandBuffer,
7996 subresource->parent);
7997 }
7998
7999 for (i = 0; i < numStorageBufferBindings; i += 1) {
8000 bufferContainer = (VulkanBufferContainer *)storageBufferBindings[i].buffer;
8001 buffer = VULKAN_INTERNAL_PrepareBufferForWrite(
8002 renderer,
8003 vulkanCommandBuffer,
8004 bufferContainer,
8005 storageBufferBindings[i].cycle,
8006 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ);
8007
8008 vulkanCommandBuffer->readWriteComputeStorageBuffers[i] = buffer;
8009
8010 VULKAN_INTERNAL_TrackBuffer(
8011 vulkanCommandBuffer,
8012 buffer);
8013 }
8014}
8015
8016static void VULKAN_BindComputePipeline(
8017 SDL_GPUCommandBuffer *commandBuffer,
8018 SDL_GPUComputePipeline *computePipeline)
8019{
8020 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8021 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
8022 VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline;
8023
8024 renderer->vkCmdBindPipeline(
8025 vulkanCommandBuffer->commandBuffer,
8026 VK_PIPELINE_BIND_POINT_COMPUTE,
8027 vulkanComputePipeline->pipeline);
8028
8029 vulkanCommandBuffer->currentComputePipeline = vulkanComputePipeline;
8030
8031 VULKAN_INTERNAL_TrackComputePipeline(vulkanCommandBuffer, vulkanComputePipeline);
8032
8033 // Acquire uniform buffers if necessary
8034 for (Uint32 i = 0; i < vulkanComputePipeline->resourceLayout->numUniformBuffers; i += 1) {
8035 if (vulkanCommandBuffer->computeUniformBuffers[i] == NULL) {
8036 vulkanCommandBuffer->computeUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
8037 vulkanCommandBuffer);
8038 }
8039 }
8040
8041 // Mark binding as needed
8042 vulkanCommandBuffer->needNewComputeReadWriteDescriptorSet = true;
8043 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
8044 vulkanCommandBuffer->needNewComputeUniformDescriptorSet = true;
8045 vulkanCommandBuffer->needNewComputeUniformOffsets = true;
8046}
8047
8048static void VULKAN_BindComputeSamplers(
8049 SDL_GPUCommandBuffer *commandBuffer,
8050 Uint32 firstSlot,
8051 const SDL_GPUTextureSamplerBinding *textureSamplerBindings,
8052 Uint32 numBindings)
8053{
8054 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8055
8056 for (Uint32 i = 0; i < numBindings; i += 1) {
8057 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
8058 vulkanCommandBuffer->computeSamplerTextures[firstSlot + i] = textureContainer->activeTexture;
8059 vulkanCommandBuffer->computeSamplers[firstSlot + i] = (VulkanSampler *)textureSamplerBindings[i].sampler;
8060
8061 VULKAN_INTERNAL_TrackSampler(
8062 vulkanCommandBuffer,
8063 (VulkanSampler *)textureSamplerBindings[i].sampler);
8064
8065 VULKAN_INTERNAL_TrackTexture(
8066 vulkanCommandBuffer,
8067 textureContainer->activeTexture);
8068 }
8069
8070 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
8071}
8072
8073static void VULKAN_BindComputeStorageTextures(
8074 SDL_GPUCommandBuffer *commandBuffer,
8075 Uint32 firstSlot,
8076 SDL_GPUTexture *const *storageTextures,
8077 Uint32 numBindings)
8078{
8079 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8080 VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
8081
8082 for (Uint32 i = 0; i < numBindings; i += 1) {
8083 if (vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] != NULL) {
8084 VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
8085 renderer,
8086 vulkanCommandBuffer,
8087 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
8088 vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i]);
8089 }
8090
8091 VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
8092
8093 vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] =
8094 textureContainer->activeTexture;
8095
8096 VULKAN_INTERNAL_TextureTransitionFromDefaultUsage(
8097 renderer,
8098 vulkanCommandBuffer,
8099 VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
8100 textureContainer->activeTexture);
8101
8102 VULKAN_INTERNAL_TrackTexture(
8103 vulkanCommandBuffer,
8104 textureContainer->activeTexture);
8105 }
8106
8107 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
8108}
8109
8110static void VULKAN_BindComputeStorageBuffers(
8111 SDL_GPUCommandBuffer *commandBuffer,
8112 Uint32 firstSlot,
8113 SDL_GPUBuffer *const *storageBuffers,
8114 Uint32 numBindings)
8115{
8116 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8117 VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
8118 VulkanBufferContainer *bufferContainer;
8119 Uint32 i;
8120
8121 for (i = 0; i < numBindings; i += 1) {
8122 if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] != NULL) {
8123 VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
8124 renderer,
8125 vulkanCommandBuffer,
8126 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
8127 vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i]);
8128 }
8129
8130 bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
8131
8132 vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] = bufferContainer->activeBuffer;
8133
8134 VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
8135 renderer,
8136 vulkanCommandBuffer,
8137 VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
8138 bufferContainer->activeBuffer);
8139
8140 VULKAN_INTERNAL_TrackBuffer(
8141 vulkanCommandBuffer,
8142 bufferContainer->activeBuffer);
8143 }
8144
8145 vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
8146}
8147
8148static void VULKAN_PushComputeUniformData(
8149 SDL_GPUCommandBuffer *commandBuffer,
8150 Uint32 slotIndex,
8151 const void *data,
8152 Uint32 length)
8153{
8154 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8155
8156 VULKAN_INTERNAL_PushUniformData(
8157 vulkanCommandBuffer,
8158 VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE,
8159 slotIndex,
8160 data,
8161 length);
8162}
8163
8164static void VULKAN_INTERNAL_BindComputeDescriptorSets(
8165 VulkanRenderer *renderer,
8166 VulkanCommandBuffer *commandBuffer)
8167{
8168 VulkanComputePipelineResourceLayout *resourceLayout;
8169 DescriptorSetLayout *descriptorSetLayout;
8170 VkWriteDescriptorSet writeDescriptorSets[
8171 MAX_TEXTURE_SAMPLERS_PER_STAGE +
8172 MAX_STORAGE_TEXTURES_PER_STAGE +
8173 MAX_STORAGE_BUFFERS_PER_STAGE +
8174 MAX_COMPUTE_WRITE_TEXTURES +
8175 MAX_COMPUTE_WRITE_BUFFERS +
8176 MAX_UNIFORM_BUFFERS_PER_STAGE];
8177 VkDescriptorBufferInfo bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE + MAX_COMPUTE_WRITE_BUFFERS + MAX_UNIFORM_BUFFERS_PER_STAGE];
8178 VkDescriptorImageInfo imageInfos[MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE + MAX_COMPUTE_WRITE_TEXTURES];
8179 Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE];
8180 Uint32 writeCount = 0;
8181 Uint32 bufferInfoCount = 0;
8182 Uint32 imageInfoCount = 0;
8183 Uint32 dynamicOffsetCount = 0;
8184
8185 if (
8186 !commandBuffer->needNewComputeReadOnlyDescriptorSet &&
8187 !commandBuffer->needNewComputeReadWriteDescriptorSet &&
8188 !commandBuffer->needNewComputeUniformDescriptorSet &&
8189 !commandBuffer->needNewComputeUniformOffsets
8190 ) {
8191 return;
8192 }
8193
8194 resourceLayout = commandBuffer->currentComputePipeline->resourceLayout;
8195
8196 if (commandBuffer->needNewComputeReadOnlyDescriptorSet) {
8197 descriptorSetLayout = resourceLayout->descriptorSetLayouts[0];
8198
8199 commandBuffer->computeReadOnlyDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
8200 renderer,
8201 commandBuffer,
8202 descriptorSetLayout);
8203
8204 for (Uint32 i = 0; i < resourceLayout->numSamplers; i += 1) {
8205 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
8206
8207 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
8208 currentWriteDescriptorSet->pNext = NULL;
8209 currentWriteDescriptorSet->descriptorCount = 1;
8210 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
8211 currentWriteDescriptorSet->dstArrayElement = 0;
8212 currentWriteDescriptorSet->dstBinding = i;
8213 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
8214 currentWriteDescriptorSet->pTexelBufferView = NULL;
8215 currentWriteDescriptorSet->pBufferInfo = NULL;
8216
8217 imageInfos[imageInfoCount].sampler = commandBuffer->computeSamplers[i]->sampler;
8218 imageInfos[imageInfoCount].imageView = commandBuffer->computeSamplerTextures[i]->fullView;
8219 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
8220
8221 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
8222
8223 writeCount += 1;
8224 imageInfoCount += 1;
8225 }
8226
8227 for (Uint32 i = 0; i < resourceLayout->numReadonlyStorageTextures; i += 1) {
8228 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
8229
8230 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
8231 currentWriteDescriptorSet->pNext = NULL;
8232 currentWriteDescriptorSet->descriptorCount = 1;
8233 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
8234 currentWriteDescriptorSet->dstArrayElement = 0;
8235 currentWriteDescriptorSet->dstBinding = resourceLayout->numSamplers + i;
8236 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
8237 currentWriteDescriptorSet->pTexelBufferView = NULL;
8238 currentWriteDescriptorSet->pBufferInfo = NULL;
8239
8240 imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
8241 imageInfos[imageInfoCount].imageView = commandBuffer->readOnlyComputeStorageTextures[i]->fullView;
8242 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
8243
8244 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
8245
8246 writeCount += 1;
8247 imageInfoCount += 1;
8248 }
8249
8250 for (Uint32 i = 0; i < resourceLayout->numReadonlyStorageBuffers; i += 1) {
8251 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
8252
8253 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
8254 currentWriteDescriptorSet->pNext = NULL;
8255 currentWriteDescriptorSet->descriptorCount = 1;
8256 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
8257 currentWriteDescriptorSet->dstArrayElement = 0;
8258 currentWriteDescriptorSet->dstBinding = resourceLayout->numSamplers + resourceLayout->numReadonlyStorageTextures + i;
8259 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
8260 currentWriteDescriptorSet->pTexelBufferView = NULL;
8261 currentWriteDescriptorSet->pImageInfo = NULL;
8262
8263 bufferInfos[bufferInfoCount].buffer = commandBuffer->readOnlyComputeStorageBuffers[i]->buffer;
8264 bufferInfos[bufferInfoCount].offset = 0;
8265 bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;
8266
8267 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
8268
8269 writeCount += 1;
8270 bufferInfoCount += 1;
8271 }
8272
8273 commandBuffer->needNewComputeReadOnlyDescriptorSet = false;
8274 }
8275
8276 if (commandBuffer->needNewComputeReadWriteDescriptorSet) {
8277 descriptorSetLayout = resourceLayout->descriptorSetLayouts[1];
8278
8279 commandBuffer->computeReadWriteDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
8280 renderer,
8281 commandBuffer,
8282 descriptorSetLayout);
8283
8284 for (Uint32 i = 0; i < resourceLayout->numReadWriteStorageTextures; i += 1) {
8285 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
8286
8287 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
8288 currentWriteDescriptorSet->pNext = NULL;
8289 currentWriteDescriptorSet->descriptorCount = 1;
8290 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
8291 currentWriteDescriptorSet->dstArrayElement = 0;
8292 currentWriteDescriptorSet->dstBinding = i;
8293 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadWriteDescriptorSet;
8294 currentWriteDescriptorSet->pTexelBufferView = NULL;
8295 currentWriteDescriptorSet->pBufferInfo = NULL;
8296
8297 imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
8298 imageInfos[imageInfoCount].imageView = commandBuffer->readWriteComputeStorageTextureSubresources[i]->computeWriteView;
8299 imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
8300
8301 currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
8302
8303 writeCount += 1;
8304 imageInfoCount += 1;
8305 }
8306
8307 for (Uint32 i = 0; i < resourceLayout->numReadWriteStorageBuffers; i += 1) {
8308 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
8309
8310 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
8311 currentWriteDescriptorSet->pNext = NULL;
8312 currentWriteDescriptorSet->descriptorCount = 1;
8313 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
8314 currentWriteDescriptorSet->dstArrayElement = 0;
8315 currentWriteDescriptorSet->dstBinding = resourceLayout->numReadWriteStorageTextures + i;
8316 currentWriteDescriptorSet->dstSet = commandBuffer->computeReadWriteDescriptorSet;
8317 currentWriteDescriptorSet->pTexelBufferView = NULL;
8318 currentWriteDescriptorSet->pImageInfo = NULL;
8319
8320 bufferInfos[bufferInfoCount].buffer = commandBuffer->readWriteComputeStorageBuffers[i]->buffer;
8321 bufferInfos[bufferInfoCount].offset = 0;
8322 bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;
8323
8324 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
8325
8326 writeCount += 1;
8327 bufferInfoCount += 1;
8328 }
8329
8330 commandBuffer->needNewComputeReadWriteDescriptorSet = false;
8331 }
8332
8333 if (commandBuffer->needNewComputeUniformDescriptorSet) {
8334 descriptorSetLayout = resourceLayout->descriptorSetLayouts[2];
8335
8336 commandBuffer->computeUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
8337 renderer,
8338 commandBuffer,
8339 descriptorSetLayout);
8340
8341
8342 for (Uint32 i = 0; i < resourceLayout->numUniformBuffers; i += 1) {
8343 VkWriteDescriptorSet *currentWriteDescriptorSet = &writeDescriptorSets[writeCount];
8344
8345 currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
8346 currentWriteDescriptorSet->pNext = NULL;
8347 currentWriteDescriptorSet->descriptorCount = 1;
8348 currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
8349 currentWriteDescriptorSet->dstArrayElement = 0;
8350 currentWriteDescriptorSet->dstBinding = i;
8351 currentWriteDescriptorSet->dstSet = commandBuffer->computeUniformDescriptorSet;
8352 currentWriteDescriptorSet->pTexelBufferView = NULL;
8353 currentWriteDescriptorSet->pImageInfo = NULL;
8354
8355 bufferInfos[bufferInfoCount].buffer = commandBuffer->computeUniformBuffers[i]->buffer->buffer;
8356 bufferInfos[bufferInfoCount].offset = 0;
8357 bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;
8358
8359 currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
8360
8361 writeCount += 1;
8362 bufferInfoCount += 1;
8363 }
8364
8365 commandBuffer->needNewComputeUniformDescriptorSet = false;
8366 }
8367
8368 for (Uint32 i = 0; i < resourceLayout->numUniformBuffers; i += 1) {
8369 dynamicOffsets[i] = commandBuffer->computeUniformBuffers[i]->drawOffset;
8370 dynamicOffsetCount += 1;
8371 }
8372
8373 renderer->vkUpdateDescriptorSets(
8374 renderer->logicalDevice,
8375 writeCount,
8376 writeDescriptorSets,
8377 0,
8378 NULL);
8379
8380 VkDescriptorSet sets[3];
8381 sets[0] = commandBuffer->computeReadOnlyDescriptorSet;
8382 sets[1] = commandBuffer->computeReadWriteDescriptorSet;
8383 sets[2] = commandBuffer->computeUniformDescriptorSet;
8384
8385 renderer->vkCmdBindDescriptorSets(
8386 commandBuffer->commandBuffer,
8387 VK_PIPELINE_BIND_POINT_COMPUTE,
8388 resourceLayout->pipelineLayout,
8389 0,
8390 3,
8391 sets,
8392 dynamicOffsetCount,
8393 dynamicOffsets);
8394
8395 commandBuffer->needNewVertexUniformOffsets = false;
8396}
8397
8398static void VULKAN_DispatchCompute(
8399 SDL_GPUCommandBuffer *commandBuffer,
8400 Uint32 groupcountX,
8401 Uint32 groupcountY,
8402 Uint32 groupcountZ)
8403{
8404 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8405 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
8406
8407 VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer);
8408
8409 renderer->vkCmdDispatch(
8410 vulkanCommandBuffer->commandBuffer,
8411 groupcountX,
8412 groupcountY,
8413 groupcountZ);
8414}
8415
8416static void VULKAN_DispatchComputeIndirect(
8417 SDL_GPUCommandBuffer *commandBuffer,
8418 SDL_GPUBuffer *buffer,
8419 Uint32 offset)
8420{
8421 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8422 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
8423 VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBuffer;
8424
8425 VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer);
8426
8427 renderer->vkCmdDispatchIndirect(
8428 vulkanCommandBuffer->commandBuffer,
8429 vulkanBuffer->buffer,
8430 offset);
8431
8432 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
8433}
8434
// Ends the current compute pass: transitions every bound storage resource
// (read-write and read-only) back to its default usage and clears all
// compute bind state on the command buffer.
static void VULKAN_EndComputePass(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    Uint32 i;

    // Read-write storage textures were transitioned in at BeginComputePass
    for (i = 0; i < vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            vulkanCommandBuffer->renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
            vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i]);
        vulkanCommandBuffer->readWriteComputeStorageTextureSubresources[i] = NULL;
    }
    vulkanCommandBuffer->readWriteComputeStorageTextureSubresourceCount = 0;

    // Read-write storage buffers: scan every slot since they are tracked
    // sparsely rather than with a count
    for (i = 0; i < MAX_COMPUTE_WRITE_BUFFERS; i += 1) {
        if (vulkanCommandBuffer->readWriteComputeStorageBuffers[i] != NULL) {
            VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
                vulkanCommandBuffer->renderer,
                vulkanCommandBuffer,
                VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
                vulkanCommandBuffer->readWriteComputeStorageBuffers[i]);

            vulkanCommandBuffer->readWriteComputeStorageBuffers[i] = NULL;
        }
    }

    // Read-only storage textures bound via VULKAN_BindComputeStorageTextures
    for (i = 0; i < MAX_STORAGE_TEXTURES_PER_STAGE; i += 1) {
        if (vulkanCommandBuffer->readOnlyComputeStorageTextures[i] != NULL) {
            VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
                vulkanCommandBuffer->renderer,
                vulkanCommandBuffer,
                VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
                vulkanCommandBuffer->readOnlyComputeStorageTextures[i]);

            vulkanCommandBuffer->readOnlyComputeStorageTextures[i] = NULL;
        }
    }

    // Read-only storage buffers bound via VULKAN_BindComputeStorageBuffers
    for (i = 0; i < MAX_STORAGE_BUFFERS_PER_STAGE; i += 1) {
        if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] != NULL) {
            VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
                vulkanCommandBuffer->renderer,
                vulkanCommandBuffer,
                VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
                vulkanCommandBuffer->readOnlyComputeStorageBuffers[i]);

            vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] = NULL;
        }
    }

    // we don't need a barrier because sampler state is always the default if sampler bit is set
    SDL_zeroa(vulkanCommandBuffer->computeSamplerTextures);
    SDL_zeroa(vulkanCommandBuffer->computeSamplers);

    vulkanCommandBuffer->currentComputePipeline = NULL;

    // Invalidate cached descriptor sets; re-fetched after the next pipeline bind
    vulkanCommandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
    vulkanCommandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
}
8497
8498static void *VULKAN_MapTransferBuffer(
8499 SDL_GPURenderer *driverData,
8500 SDL_GPUTransferBuffer *transferBuffer,
8501 bool cycle)
8502{
8503 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
8504 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer;
8505
8506 if (
8507 cycle &&
8508 SDL_GetAtomicInt(&transferBufferContainer->activeBuffer->referenceCount) > 0) {
8509 VULKAN_INTERNAL_CycleActiveBuffer(
8510 renderer,
8511 transferBufferContainer);
8512 }
8513
8514 Uint8 *bufferPointer =
8515 transferBufferContainer->activeBuffer->usedRegion->allocation->mapPointer +
8516 transferBufferContainer->activeBuffer->usedRegion->resourceOffset;
8517
8518 return bufferPointer;
8519}
8520
8521static void VULKAN_UnmapTransferBuffer(
8522 SDL_GPURenderer *driverData,
8523 SDL_GPUTransferBuffer *transferBuffer)
8524{
8525 // no-op because transfer buffers are persistently mapped
8526 (void)driverData;
8527 (void)transferBuffer;
8528}
8529
8530static void VULKAN_BeginCopyPass(
8531 SDL_GPUCommandBuffer *commandBuffer)
8532{
8533 // no-op
8534 (void)commandBuffer;
8535}
8536
// Records a transfer-buffer -> texture copy. The destination subresource is
// transitioned to copy-destination usage (cycling if requested), the copy is
// recorded, and the subresource is returned to its default usage.
static void VULKAN_UploadToTexture(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUTextureTransferInfo *source,
    const SDL_GPUTextureRegion *destination,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transfer_buffer;
    VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)destination->texture;
    VulkanTextureSubresource *vulkanTextureSubresource;
    VkBufferImageCopy imageCopy;

    // Note that the transfer buffer does not need a barrier, as it is synced by the client

    vulkanTextureSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        vulkanTextureContainer,
        destination->layer,
        destination->mip_level,
        cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    // Map the SDL region/transfer info onto VkBufferImageCopy; layer/mip
    // select a single subresource (layerCount is always 1)
    imageCopy.imageExtent.width = destination->w;
    imageCopy.imageExtent.height = destination->h;
    imageCopy.imageExtent.depth = destination->d;
    imageCopy.imageOffset.x = destination->x;
    imageCopy.imageOffset.y = destination->y;
    imageCopy.imageOffset.z = destination->z;
    imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags;
    imageCopy.imageSubresource.baseArrayLayer = destination->layer;
    imageCopy.imageSubresource.layerCount = 1;
    imageCopy.imageSubresource.mipLevel = destination->mip_level;
    imageCopy.bufferOffset = source->offset;
    imageCopy.bufferRowLength = source->pixels_per_row;
    imageCopy.bufferImageHeight = source->rows_per_layer;

    renderer->vkCmdCopyBufferToImage(
        vulkanCommandBuffer->commandBuffer,
        transferBufferContainer->activeBuffer->buffer,
        vulkanTextureSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &imageCopy);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        vulkanTextureSubresource);

    // Keep both resources alive until the command buffer completes
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent);
}
8592
8593static void VULKAN_UploadToBuffer(
8594 SDL_GPUCommandBuffer *commandBuffer,
8595 const SDL_GPUTransferBufferLocation *source,
8596 const SDL_GPUBufferRegion *destination,
8597 bool cycle)
8598{
8599 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8600 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
8601 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transfer_buffer;
8602 VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)destination->buffer;
8603 VkBufferCopy bufferCopy;
8604
8605 // Note that the transfer buffer does not need a barrier, as it is synced by the client
8606
8607 VulkanBuffer *vulkanBuffer = VULKAN_INTERNAL_PrepareBufferForWrite(
8608 renderer,
8609 vulkanCommandBuffer,
8610 bufferContainer,
8611 cycle,
8612 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION);
8613
8614 bufferCopy.srcOffset = source->offset;
8615 bufferCopy.dstOffset = destination->offset;
8616 bufferCopy.size = destination->size;
8617
8618 renderer->vkCmdCopyBuffer(
8619 vulkanCommandBuffer->commandBuffer,
8620 transferBufferContainer->activeBuffer->buffer,
8621 vulkanBuffer->buffer,
8622 1,
8623 &bufferCopy);
8624
8625 VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
8626 renderer,
8627 vulkanCommandBuffer,
8628 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
8629 vulkanBuffer);
8630
8631 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
8632 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
8633}
8634
8635// Readback
8636
// Records a texture -> transfer-buffer copy (readback). The source
// subresource is transitioned to copy-source usage, the copy is recorded,
// and the subresource is returned to its default usage.
static void VULKAN_DownloadFromTexture(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUTextureRegion *source,
    const SDL_GPUTextureTransferInfo *destination)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)source->texture;
    VulkanTextureSubresource *vulkanTextureSubresource;
    VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transfer_buffer;
    VkBufferImageCopy imageCopy;
    vulkanTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        textureContainer,
        source->layer,
        source->mip_level);

    // Note that the transfer buffer does not need a barrier, as it is synced by the client

    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        vulkanTextureSubresource);

    // Map the SDL region/transfer info onto VkBufferImageCopy; layer/mip
    // select a single subresource (layerCount is always 1)
    imageCopy.imageExtent.width = source->w;
    imageCopy.imageExtent.height = source->h;
    imageCopy.imageExtent.depth = source->d;
    imageCopy.imageOffset.x = source->x;
    imageCopy.imageOffset.y = source->y;
    imageCopy.imageOffset.z = source->z;
    imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags;
    imageCopy.imageSubresource.baseArrayLayer = source->layer;
    imageCopy.imageSubresource.layerCount = 1;
    imageCopy.imageSubresource.mipLevel = source->mip_level;
    imageCopy.bufferOffset = destination->offset;
    imageCopy.bufferRowLength = destination->pixels_per_row;
    imageCopy.bufferImageHeight = destination->rows_per_layer;

    renderer->vkCmdCopyImageToBuffer(
        vulkanCommandBuffer->commandBuffer,
        vulkanTextureSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        transferBufferContainer->activeBuffer->buffer,
        1,
        &imageCopy);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        vulkanTextureSubresource);

    // Keep both resources alive until the command buffer completes
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent);
}
8692
8693static void VULKAN_DownloadFromBuffer(
8694 SDL_GPUCommandBuffer *commandBuffer,
8695 const SDL_GPUBufferRegion *source,
8696 const SDL_GPUTransferBufferLocation *destination)
8697{
8698 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8699 VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
8700 VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)source->buffer;
8701 VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transfer_buffer;
8702 VkBufferCopy bufferCopy;
8703
8704 // Note that transfer buffer does not need a barrier, as it is synced by the client
8705
8706 VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
8707 renderer,
8708 vulkanCommandBuffer,
8709 VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
8710 bufferContainer->activeBuffer);
8711
8712 bufferCopy.srcOffset = source->offset;
8713 bufferCopy.dstOffset = destination->offset;
8714 bufferCopy.size = source->size;
8715
8716 renderer->vkCmdCopyBuffer(
8717 vulkanCommandBuffer->commandBuffer,
8718 bufferContainer->activeBuffer->buffer,
8719 transferBufferContainer->activeBuffer->buffer,
8720 1,
8721 &bufferCopy);
8722
8723 VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
8724 renderer,
8725 vulkanCommandBuffer,
8726 VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
8727 bufferContainer->activeBuffer);
8728
8729 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBuffer);
8730 VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, bufferContainer->activeBuffer);
8731}
8732
// Records a texture-to-texture copy of a (w, h, d) region between two
// subresources. If `cycle` is set, the destination may be cycled to a fresh
// backing texture before the write.
static void VULKAN_CopyTextureToTexture(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUTextureLocation *source,
    const SDL_GPUTextureLocation *destination,
    Uint32 w,
    Uint32 h,
    Uint32 d,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VulkanTextureSubresource *srcSubresource;
    VulkanTextureSubresource *dstSubresource;
    VkImageCopy imageCopy;

    srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        (VulkanTextureContainer *)source->texture,
        source->layer,
        source->mip_level);

    // Handles cycling and transitions the destination into copy-dest usage
    dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        (VulkanTextureContainer *)destination->texture,
        destination->layer,
        destination->mip_level,
        cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    // Move the source into copy-source usage (TRANSFER_SRC layout)
    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    imageCopy.srcOffset.x = source->x;
    imageCopy.srcOffset.y = source->y;
    imageCopy.srcOffset.z = source->z;
    imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
    imageCopy.srcSubresource.baseArrayLayer = source->layer;
    imageCopy.srcSubresource.layerCount = 1;
    imageCopy.srcSubresource.mipLevel = source->mip_level;
    imageCopy.dstOffset.x = destination->x;
    imageCopy.dstOffset.y = destination->y;
    imageCopy.dstOffset.z = destination->z;
    imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
    imageCopy.dstSubresource.baseArrayLayer = destination->layer;
    imageCopy.dstSubresource.layerCount = 1;
    imageCopy.dstSubresource.mipLevel = destination->mip_level;
    imageCopy.extent.width = w;
    imageCopy.extent.height = h;
    imageCopy.extent.depth = d;

    renderer->vkCmdCopyImage(
        vulkanCommandBuffer->commandBuffer,
        srcSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        dstSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &imageCopy);

    // Restore both subresources to their default usage states
    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        dstSubresource);

    // Keep both textures alive until the command buffer completes
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
}
8810
// Records a buffer-to-buffer copy of `size` bytes. If `cycle` is set, the
// destination may be cycled to a fresh backing buffer before the write.
static void VULKAN_CopyBufferToBuffer(
    SDL_GPUCommandBuffer *commandBuffer,
    const SDL_GPUBufferLocation *source,
    const SDL_GPUBufferLocation *destination,
    Uint32 size,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VulkanBufferContainer *srcContainer = (VulkanBufferContainer *)source->buffer;
    VulkanBufferContainer *dstContainer = (VulkanBufferContainer *)destination->buffer;
    VkBufferCopy bufferCopy;

    // Handles cycling and transitions the destination into copy-dest usage
    VulkanBuffer *dstBuffer = VULKAN_INTERNAL_PrepareBufferForWrite(
        renderer,
        vulkanCommandBuffer,
        dstContainer,
        cycle,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION);

    // Make the source buffer usable as a copy source
    VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        srcContainer->activeBuffer);

    bufferCopy.srcOffset = source->offset;
    bufferCopy.dstOffset = destination->offset;
    bufferCopy.size = size;

    renderer->vkCmdCopyBuffer(
        vulkanCommandBuffer->commandBuffer,
        srcContainer->activeBuffer->buffer,
        dstBuffer->buffer,
        1,
        &bufferCopy);

    // Restore both buffers to their default usage states
    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        srcContainer->activeBuffer);

    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
        dstBuffer);

    // Keep both buffers alive until the command buffer completes
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, srcContainer->activeBuffer);
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, dstBuffer);
}
8863
8864static void VULKAN_GenerateMipmaps(
8865 SDL_GPUCommandBuffer *commandBuffer,
8866 SDL_GPUTexture *texture)
8867{
8868 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8869 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
8870 VulkanTextureContainer *container = (VulkanTextureContainer *)texture;
8871 VulkanTextureSubresource *srcTextureSubresource;
8872 VulkanTextureSubresource *dstTextureSubresource;
8873 VkImageBlit blit;
8874
8875 // Blit each slice sequentially. Barriers, barriers everywhere!
8876 for (Uint32 layerOrDepthIndex = 0; layerOrDepthIndex < container->header.info.layer_count_or_depth; layerOrDepthIndex += 1)
8877 for (Uint32 level = 1; level < container->header.info.num_levels; level += 1) {
8878 Uint32 layer = container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : layerOrDepthIndex;
8879 Uint32 depth = container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? layerOrDepthIndex : 0;
8880
8881 Uint32 srcSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
8882 level - 1,
8883 layer,
8884 container->header.info.num_levels);
8885 Uint32 dstSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
8886 level,
8887 layer,
8888 container->header.info.num_levels);
8889
8890 srcTextureSubresource = &container->activeTexture->subresources[srcSubresourceIndex];
8891 dstTextureSubresource = &container->activeTexture->subresources[dstSubresourceIndex];
8892
8893 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
8894 renderer,
8895 vulkanCommandBuffer,
8896 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
8897 srcTextureSubresource);
8898
8899 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
8900 renderer,
8901 vulkanCommandBuffer,
8902 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
8903 dstTextureSubresource);
8904
8905 blit.srcOffsets[0].x = 0;
8906 blit.srcOffsets[0].y = 0;
8907 blit.srcOffsets[0].z = depth;
8908
8909 blit.srcOffsets[1].x = container->header.info.width >> (level - 1);
8910 blit.srcOffsets[1].y = container->header.info.height >> (level - 1);
8911 blit.srcOffsets[1].z = depth + 1;
8912
8913 blit.dstOffsets[0].x = 0;
8914 blit.dstOffsets[0].y = 0;
8915 blit.dstOffsets[0].z = depth;
8916
8917 blit.dstOffsets[1].x = container->header.info.width >> level;
8918 blit.dstOffsets[1].y = container->header.info.height >> level;
8919 blit.dstOffsets[1].z = depth + 1;
8920
8921 blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
8922 blit.srcSubresource.baseArrayLayer = layer;
8923 blit.srcSubresource.layerCount = 1;
8924 blit.srcSubresource.mipLevel = level - 1;
8925
8926 blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
8927 blit.dstSubresource.baseArrayLayer = layer;
8928 blit.dstSubresource.layerCount = 1;
8929 blit.dstSubresource.mipLevel = level;
8930
8931 renderer->vkCmdBlitImage(
8932 vulkanCommandBuffer->commandBuffer,
8933 container->activeTexture->image,
8934 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
8935 container->activeTexture->image,
8936 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
8937 1,
8938 &blit,
8939 VK_FILTER_LINEAR);
8940
8941 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
8942 renderer,
8943 vulkanCommandBuffer,
8944 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
8945 srcTextureSubresource);
8946
8947 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
8948 renderer,
8949 vulkanCommandBuffer,
8950 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
8951 dstTextureSubresource);
8952
8953 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcTextureSubresource->parent);
8954 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstTextureSubresource->parent);
8955 }
8956}
8957
8958static void VULKAN_EndCopyPass(
8959 SDL_GPUCommandBuffer *commandBuffer)
8960{
8961 // no-op
8962 (void)commandBuffer;
8963}
8964
8965static void VULKAN_Blit(
8966 SDL_GPUCommandBuffer *commandBuffer,
8967 const SDL_GPUBlitInfo *info)
8968{
8969 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
8970 VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
8971 TextureCommonHeader *srcHeader = (TextureCommonHeader *)info->source.texture;
8972 TextureCommonHeader *dstHeader = (TextureCommonHeader *)info->destination.texture;
8973 VkImageBlit region;
8974 Uint32 srcLayer = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : info->source.layer_or_depth_plane;
8975 Uint32 srcDepth = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? info->source.layer_or_depth_plane : 0;
8976 Uint32 dstLayer = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : info->destination.layer_or_depth_plane;
8977 Uint32 dstDepth = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? info->destination.layer_or_depth_plane : 0;
8978 int32_t swap;
8979
8980 // Using BeginRenderPass to clear because vkCmdClearColorImage requires barriers anyway
8981 if (info->load_op == SDL_GPU_LOADOP_CLEAR) {
8982 SDL_GPUColorTargetInfo targetInfo;
8983 SDL_zero(targetInfo);
8984 targetInfo.texture = info->destination.texture;
8985 targetInfo.mip_level = info->destination.mip_level;
8986 targetInfo.layer_or_depth_plane = info->destination.layer_or_depth_plane;
8987 targetInfo.load_op = SDL_GPU_LOADOP_CLEAR;
8988 targetInfo.store_op = SDL_GPU_STOREOP_STORE;
8989 targetInfo.clear_color = info->clear_color;
8990 targetInfo.cycle = info->cycle;
8991 VULKAN_BeginRenderPass(
8992 commandBuffer,
8993 &targetInfo,
8994 1,
8995 NULL);
8996 VULKAN_EndRenderPass(commandBuffer);
8997 }
8998
8999 VulkanTextureSubresource *srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
9000 (VulkanTextureContainer *)info->source.texture,
9001 srcLayer,
9002 info->source.mip_level);
9003
9004 VulkanTextureSubresource *dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
9005 renderer,
9006 vulkanCommandBuffer,
9007 (VulkanTextureContainer *)info->destination.texture,
9008 dstLayer,
9009 info->destination.mip_level,
9010 info->cycle,
9011 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);
9012
9013 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
9014 renderer,
9015 vulkanCommandBuffer,
9016 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
9017 srcSubresource);
9018
9019 region.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
9020 region.srcSubresource.baseArrayLayer = srcSubresource->layer;
9021 region.srcSubresource.layerCount = 1;
9022 region.srcSubresource.mipLevel = srcSubresource->level;
9023 region.srcOffsets[0].x = info->source.x;
9024 region.srcOffsets[0].y = info->source.y;
9025 region.srcOffsets[0].z = srcDepth;
9026 region.srcOffsets[1].x = info->source.x + info->source.w;
9027 region.srcOffsets[1].y = info->source.y + info->source.h;
9028 region.srcOffsets[1].z = srcDepth + 1;
9029
9030 if (info->flip_mode & SDL_FLIP_HORIZONTAL) {
9031 // flip the x positions
9032 swap = region.srcOffsets[0].x;
9033 region.srcOffsets[0].x = region.srcOffsets[1].x;
9034 region.srcOffsets[1].x = swap;
9035 }
9036
9037 if (info->flip_mode & SDL_FLIP_VERTICAL) {
9038 // flip the y positions
9039 swap = region.srcOffsets[0].y;
9040 region.srcOffsets[0].y = region.srcOffsets[1].y;
9041 region.srcOffsets[1].y = swap;
9042 }
9043
9044 region.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
9045 region.dstSubresource.baseArrayLayer = dstSubresource->layer;
9046 region.dstSubresource.layerCount = 1;
9047 region.dstSubresource.mipLevel = dstSubresource->level;
9048 region.dstOffsets[0].x = info->destination.x;
9049 region.dstOffsets[0].y = info->destination.y;
9050 region.dstOffsets[0].z = dstDepth;
9051 region.dstOffsets[1].x = info->destination.x + info->destination.w;
9052 region.dstOffsets[1].y = info->destination.y + info->destination.h;
9053 region.dstOffsets[1].z = dstDepth + 1;
9054
9055 renderer->vkCmdBlitImage(
9056 vulkanCommandBuffer->commandBuffer,
9057 srcSubresource->parent->image,
9058 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
9059 dstSubresource->parent->image,
9060 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
9061 1,
9062 ®ion,
9063 SDLToVK_Filter[info->filter]);
9064
9065 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
9066 renderer,
9067 vulkanCommandBuffer,
9068 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
9069 srcSubresource);
9070
9071 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
9072 renderer,
9073 vulkanCommandBuffer,
9074 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
9075 dstSubresource);
9076
9077 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
9078 VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
9079}
9080
// Allocates one new VkCommandBuffer from the given pool, wraps it in a
// VulkanCommandBuffer with freshly initialized tracking state, and appends
// it to the pool's inactive list. Returns false on Vulkan allocation failure.
// NOTE(review): the SDL_realloc/SDL_malloc results below are not checked for
// NULL — presumably the project treats OOM as fatal; confirm against the
// file's allocation policy.
static bool VULKAN_INTERNAL_AllocateCommandBuffer(
    VulkanRenderer *renderer,
    VulkanCommandPool *vulkanCommandPool)
{
    VkCommandBufferAllocateInfo allocateInfo;
    VkResult vulkanResult;
    VkCommandBuffer commandBufferHandle;
    VulkanCommandBuffer *commandBuffer;

    // Grow the inactive list by exactly one slot for the new command buffer
    vulkanCommandPool->inactiveCommandBufferCapacity += 1;

    vulkanCommandPool->inactiveCommandBuffers = SDL_realloc(
        vulkanCommandPool->inactiveCommandBuffers,
        sizeof(VulkanCommandBuffer *) *
            vulkanCommandPool->inactiveCommandBufferCapacity);

    allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    allocateInfo.pNext = NULL;
    allocateInfo.commandPool = vulkanCommandPool->commandPool;
    allocateInfo.commandBufferCount = 1;
    allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;

    vulkanResult = renderer->vkAllocateCommandBuffers(
        renderer->logicalDevice,
        &allocateInfo,
        &commandBufferHandle);

    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkAllocateCommandBuffers, false);

    commandBuffer = SDL_malloc(sizeof(VulkanCommandBuffer));
    commandBuffer->renderer = renderer;
    commandBuffer->commandPool = vulkanCommandPool;
    commandBuffer->commandBuffer = commandBufferHandle;

    // No fence until the command buffer is submitted
    commandBuffer->inFlightFence = VK_NULL_HANDLE;

    // Presentation tracking

    commandBuffer->presentDataCapacity = 1;
    commandBuffer->presentDataCount = 0;
    commandBuffer->presentDatas = SDL_malloc(
        commandBuffer->presentDataCapacity * sizeof(VulkanPresentData));

    commandBuffer->waitSemaphoreCapacity = 1;
    commandBuffer->waitSemaphoreCount = 0;
    commandBuffer->waitSemaphores = SDL_malloc(
        commandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));

    commandBuffer->signalSemaphoreCapacity = 1;
    commandBuffer->signalSemaphoreCount = 0;
    commandBuffer->signalSemaphores = SDL_malloc(
        commandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));

    // Resource bind tracking: all descriptor state starts dirty so the first
    // bind after acquisition always allocates fresh descriptor sets

    commandBuffer->needNewVertexResourceDescriptorSet = true;
    commandBuffer->needNewVertexUniformDescriptorSet = true;
    commandBuffer->needNewVertexUniformOffsets = true;
    commandBuffer->needNewFragmentResourceDescriptorSet = true;
    commandBuffer->needNewFragmentUniformDescriptorSet = true;
    commandBuffer->needNewFragmentUniformOffsets = true;

    commandBuffer->needNewComputeReadWriteDescriptorSet = true;
    commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
    commandBuffer->needNewComputeUniformDescriptorSet = true;
    commandBuffer->needNewComputeUniformOffsets = true;

    commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;

    commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
    commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;

    // Resource tracking: arrays of resources referenced by this command
    // buffer, kept alive until it finishes executing

    commandBuffer->usedBufferCapacity = 4;
    commandBuffer->usedBufferCount = 0;
    commandBuffer->usedBuffers = SDL_malloc(
        commandBuffer->usedBufferCapacity * sizeof(VulkanBuffer *));

    commandBuffer->usedTextureCapacity = 4;
    commandBuffer->usedTextureCount = 0;
    commandBuffer->usedTextures = SDL_malloc(
        commandBuffer->usedTextureCapacity * sizeof(VulkanTexture *));

    commandBuffer->usedSamplerCapacity = 4;
    commandBuffer->usedSamplerCount = 0;
    commandBuffer->usedSamplers = SDL_malloc(
        commandBuffer->usedSamplerCapacity * sizeof(VulkanSampler *));

    commandBuffer->usedGraphicsPipelineCapacity = 4;
    commandBuffer->usedGraphicsPipelineCount = 0;
    commandBuffer->usedGraphicsPipelines = SDL_malloc(
        commandBuffer->usedGraphicsPipelineCapacity * sizeof(VulkanGraphicsPipeline *));

    commandBuffer->usedComputePipelineCapacity = 4;
    commandBuffer->usedComputePipelineCount = 0;
    commandBuffer->usedComputePipelines = SDL_malloc(
        commandBuffer->usedComputePipelineCapacity * sizeof(VulkanComputePipeline *));

    commandBuffer->usedFramebufferCapacity = 4;
    commandBuffer->usedFramebufferCount = 0;
    commandBuffer->usedFramebuffers = SDL_malloc(
        commandBuffer->usedFramebufferCapacity * sizeof(VulkanFramebuffer *));

    commandBuffer->usedUniformBufferCapacity = 4;
    commandBuffer->usedUniformBufferCount = 0;
    commandBuffer->usedUniformBuffers = SDL_malloc(
        commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *));

    // Pool it!

    vulkanCommandPool->inactiveCommandBuffers[vulkanCommandPool->inactiveCommandBufferCount] = commandBuffer;
    vulkanCommandPool->inactiveCommandBufferCount += 1;

    return true;
}
9201
// Returns the per-thread command pool for threadID, creating it (with one
// pre-allocated command buffer) and caching it in the hash table on first
// use. Returns NULL on failure.
static VulkanCommandPool *VULKAN_INTERNAL_FetchCommandPool(
    VulkanRenderer *renderer,
    SDL_ThreadID threadID)
{
    VulkanCommandPool *vulkanCommandPool = NULL;
    VkCommandPoolCreateInfo commandPoolCreateInfo;
    VkResult vulkanResult;
    CommandPoolHashTableKey key;
    key.threadID = threadID;

    bool result = SDL_FindInHashTable(
        renderer->commandPoolHashTable,
        (const void *)&key,
        (const void **)&vulkanCommandPool);

    // Fast path: this thread already has a pool
    if (result) {
        return vulkanCommandPool;
    }

    vulkanCommandPool = (VulkanCommandPool *)SDL_malloc(sizeof(VulkanCommandPool));

    commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolCreateInfo.pNext = NULL;
    commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    commandPoolCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;

    vulkanResult = renderer->vkCreateCommandPool(
        renderer->logicalDevice,
        &commandPoolCreateInfo,
        NULL,
        &vulkanCommandPool->commandPool);

    if (vulkanResult != VK_SUCCESS) {
        SDL_free(vulkanCommandPool);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateCommandPool, NULL);
        return NULL; // defensive: reached only if the macro does not return
    }

    vulkanCommandPool->threadID = threadID;

    vulkanCommandPool->inactiveCommandBufferCapacity = 0;
    vulkanCommandPool->inactiveCommandBufferCount = 0;
    vulkanCommandPool->inactiveCommandBuffers = NULL;

    // Pre-allocate one command buffer so the first acquire never fails late
    if (!VULKAN_INTERNAL_AllocateCommandBuffer(
            renderer,
            vulkanCommandPool)) {
        VULKAN_INTERNAL_DestroyCommandPool(renderer, vulkanCommandPool);
        return NULL;
    }

    // The hash table takes ownership of a heap-allocated copy of the key
    CommandPoolHashTableKey *allocedKey = SDL_malloc(sizeof(CommandPoolHashTableKey));
    allocedKey->threadID = threadID;

    SDL_InsertIntoHashTable(
        renderer->commandPoolHashTable,
        (const void *)allocedKey,
        (const void *)vulkanCommandPool);

    return vulkanCommandPool;
}
9263
9264static VulkanCommandBuffer *VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(
9265 VulkanRenderer *renderer,
9266 SDL_ThreadID threadID)
9267{
9268 VulkanCommandPool *commandPool =
9269 VULKAN_INTERNAL_FetchCommandPool(renderer, threadID);
9270 VulkanCommandBuffer *commandBuffer;
9271
9272 if (commandPool == NULL) {
9273 return NULL;
9274 }
9275
9276 if (commandPool->inactiveCommandBufferCount == 0) {
9277 if (!VULKAN_INTERNAL_AllocateCommandBuffer(
9278 renderer,
9279 commandPool)) {
9280 return NULL;
9281 }
9282 }
9283
9284 commandBuffer = commandPool->inactiveCommandBuffers[commandPool->inactiveCommandBufferCount - 1];
9285 commandPool->inactiveCommandBufferCount -= 1;
9286
9287 return commandBuffer;
9288}
9289
9290static SDL_GPUCommandBuffer *VULKAN_AcquireCommandBuffer(
9291 SDL_GPURenderer *driverData)
9292{
9293 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
9294 VkResult result;
9295 Uint32 i;
9296
9297 SDL_ThreadID threadID = SDL_GetCurrentThreadID();
9298
9299 SDL_LockMutex(renderer->acquireCommandBufferLock);
9300
9301 VulkanCommandBuffer *commandBuffer =
9302 VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(renderer, threadID);
9303
9304 commandBuffer->descriptorSetCache = VULKAN_INTERNAL_AcquireDescriptorSetCache(renderer);
9305
9306 SDL_UnlockMutex(renderer->acquireCommandBufferLock);
9307
9308 if (commandBuffer == NULL) {
9309 return NULL;
9310 }
9311
9312 // Reset state
9313
9314 commandBuffer->currentComputePipeline = NULL;
9315 commandBuffer->currentGraphicsPipeline = NULL;
9316
9317 SDL_zeroa(commandBuffer->colorAttachmentSubresources);
9318 SDL_zeroa(commandBuffer->resolveAttachmentSubresources);
9319 commandBuffer->depthStencilAttachmentSubresource = NULL;
9320 commandBuffer->colorAttachmentSubresourceCount = 0;
9321 commandBuffer->resolveAttachmentSubresourceCount = 0;
9322
9323 for (i = 0; i < MAX_UNIFORM_BUFFERS_PER_STAGE; i += 1) {
9324 commandBuffer->vertexUniformBuffers[i] = NULL;
9325 commandBuffer->fragmentUniformBuffers[i] = NULL;
9326 commandBuffer->computeUniformBuffers[i] = NULL;
9327 }
9328
9329 commandBuffer->needNewVertexResourceDescriptorSet = true;
9330 commandBuffer->needNewVertexUniformDescriptorSet = true;
9331 commandBuffer->needNewVertexUniformOffsets = true;
9332 commandBuffer->needNewFragmentResourceDescriptorSet = true;
9333 commandBuffer->needNewFragmentUniformDescriptorSet = true;
9334 commandBuffer->needNewFragmentUniformOffsets = true;
9335
9336 commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
9337 commandBuffer->needNewComputeUniformDescriptorSet = true;
9338 commandBuffer->needNewComputeUniformOffsets = true;
9339
9340 commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
9341 commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
9342 commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
9343 commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;
9344
9345 commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
9346 commandBuffer->computeReadWriteDescriptorSet = VK_NULL_HANDLE;
9347 commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
9348
9349 SDL_zeroa(commandBuffer->vertexSamplerTextures);
9350 SDL_zeroa(commandBuffer->vertexSamplers);
9351 SDL_zeroa(commandBuffer->vertexStorageTextures);
9352 SDL_zeroa(commandBuffer->vertexStorageBuffers);
9353
9354 SDL_zeroa(commandBuffer->fragmentSamplerTextures);
9355 SDL_zeroa(commandBuffer->fragmentSamplers);
9356 SDL_zeroa(commandBuffer->fragmentStorageTextures);
9357 SDL_zeroa(commandBuffer->fragmentStorageBuffers);
9358
9359 SDL_zeroa(commandBuffer->readWriteComputeStorageTextureSubresources);
9360 commandBuffer->readWriteComputeStorageTextureSubresourceCount = 0;
9361 SDL_zeroa(commandBuffer->readWriteComputeStorageBuffers);
9362 SDL_zeroa(commandBuffer->computeSamplerTextures);
9363 SDL_zeroa(commandBuffer->computeSamplers);
9364 SDL_zeroa(commandBuffer->readOnlyComputeStorageTextures);
9365 SDL_zeroa(commandBuffer->readOnlyComputeStorageBuffers);
9366
9367 commandBuffer->autoReleaseFence = true;
9368
9369 commandBuffer->isDefrag = 0;
9370
9371 /* Reset the command buffer here to avoid resets being called
9372 * from a separate thread than where the command buffer was acquired
9373 */
9374 result = renderer->vkResetCommandBuffer(
9375 commandBuffer->commandBuffer,
9376 VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
9377
9378 CHECK_VULKAN_ERROR_AND_RETURN(result, vkResetCommandBuffer, NULL);
9379
9380 if (!VULKAN_INTERNAL_BeginCommandBuffer(renderer, commandBuffer)) {
9381 return NULL;
9382 }
9383
9384 return (SDL_GPUCommandBuffer *)commandBuffer;
9385}
9386
9387static bool VULKAN_QueryFence(
9388 SDL_GPURenderer *driverData,
9389 SDL_GPUFence *fence)
9390{
9391 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
9392 VkResult result;
9393
9394 result = renderer->vkGetFenceStatus(
9395 renderer->logicalDevice,
9396 ((VulkanFenceHandle *)fence)->fence);
9397
9398 if (result == VK_SUCCESS) {
9399 return true;
9400 } else if (result == VK_NOT_READY) {
9401 return false;
9402 } else {
9403 SET_ERROR_AND_RETURN("vkGetFenceStatus: %s", VkErrorMessages(result), false);
9404 }
9405}
9406
// Appends a fence handle back onto the renderer's available-fence pool,
// growing the pool array (doubling capacity) if it is full. Thread-safe via
// the fence pool lock.
static void VULKAN_INTERNAL_ReturnFenceToPool(
    VulkanRenderer *renderer,
    VulkanFenceHandle *fenceHandle)
{
    SDL_LockMutex(renderer->fencePool.lock);

    // Ensure room for one more entry, doubling capacity when needed
    EXPAND_ARRAY_IF_NEEDED(
        renderer->fencePool.availableFences,
        VulkanFenceHandle *,
        renderer->fencePool.availableFenceCount + 1,
        renderer->fencePool.availableFenceCapacity,
        renderer->fencePool.availableFenceCapacity * 2);

    renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount] = fenceHandle;
    renderer->fencePool.availableFenceCount += 1;

    SDL_UnlockMutex(renderer->fencePool.lock);
}
9425
9426static void VULKAN_ReleaseFence(
9427 SDL_GPURenderer *driverData,
9428 SDL_GPUFence *fence)
9429{
9430 VulkanFenceHandle *handle = (VulkanFenceHandle *)fence;
9431
9432 if (SDL_AtomicDecRef(&handle->referenceCount)) {
9433 VULKAN_INTERNAL_ReturnFenceToPool((VulkanRenderer *)driverData, handle);
9434 }
9435}
9436
9437static WindowData *VULKAN_INTERNAL_FetchWindowData(
9438 SDL_Window *window)
9439{
9440 SDL_PropertiesID properties = SDL_GetWindowProperties(window);
9441 return (WindowData *)SDL_GetPointerProperty(properties, WINDOW_PROPERTY_DATA, NULL);
9442}
9443
9444static bool VULKAN_INTERNAL_OnWindowResize(void *userdata, SDL_Event *e)
9445{
9446 SDL_Window *w = (SDL_Window *)userdata;
9447 WindowData *data;
9448 if (e->type == SDL_EVENT_WINDOW_PIXEL_SIZE_CHANGED && e->window.windowID == SDL_GetWindowID(w)) {
9449 data = VULKAN_INTERNAL_FetchWindowData(w);
9450 data->needsSwapchainRecreate = true;
9451 data->swapchainCreateWidth = e->window.data1;
9452 data->swapchainCreateHeight = e->window.data2;
9453 }
9454
9455 return true;
9456}
9457
// Returns true if the surface of a claimed window supports the requested
// swapchain composition, checking the preferred surface format first and
// then the fallback format. The window must already be claimed.
static bool VULKAN_SupportsSwapchainComposition(
    SDL_GPURenderer *driverData,
    SDL_Window *window,
    SDL_GPUSwapchainComposition swapchainComposition)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
    VkSurfaceKHR surface;
    SwapchainSupportDetails supportDetails;
    bool result = false;

    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Must claim window before querying swapchain composition support!", false);
    }

    surface = windowData->surface;

    if (VULKAN_INTERNAL_QuerySwapchainSupport(
            renderer,
            renderer->physicalDevice,
            surface,
            &supportDetails)) {

        // Preferred format/colorspace pair for this composition mode
        result = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
            SwapchainCompositionToFormat[swapchainComposition],
            SwapchainCompositionToColorSpace[swapchainComposition],
            supportDetails.formats,
            supportDetails.formatsLength);

        if (!result) {
            // Let's try again with the fallback format...
            result = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
                SwapchainCompositionToFallbackFormat[swapchainComposition],
                SwapchainCompositionToColorSpace[swapchainComposition],
                supportDetails.formats,
                supportDetails.formatsLength);
        }

        // QuerySwapchainSupport allocated these arrays on success
        SDL_free(supportDetails.formats);
        SDL_free(supportDetails.presentModes);
    }

    return result;
}
9502
// Returns true if the surface of a claimed window supports the requested
// present mode. The window must already be claimed.
static bool VULKAN_SupportsPresentMode(
    SDL_GPURenderer *driverData,
    SDL_Window *window,
    SDL_GPUPresentMode presentMode)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
    VkSurfaceKHR surface;
    SwapchainSupportDetails supportDetails;
    bool result = false;

    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Must claim window before querying present mode support!", false);
    }

    surface = windowData->surface;

    if (VULKAN_INTERNAL_QuerySwapchainSupport(
            renderer,
            renderer->physicalDevice,
            surface,
            &supportDetails)) {

        result = VULKAN_INTERNAL_VerifySwapPresentMode(
            SDLToVK_PresentMode[presentMode],
            supportDetails.presentModes,
            supportDetails.presentModesLength);

        // QuerySwapchainSupport allocated these arrays on success
        SDL_free(supportDetails.formats);
        SDL_free(supportDetails.presentModes);
    }

    return result;
}
9537
9538static bool VULKAN_ClaimWindow(
9539 SDL_GPURenderer *driverData,
9540 SDL_Window *window)
9541{
9542 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
9543 WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
9544
9545 if (windowData == NULL) {
9546 windowData = SDL_calloc(1, sizeof(WindowData));
9547 windowData->window = window;
9548 windowData->presentMode = SDL_GPU_PRESENTMODE_VSYNC;
9549 windowData->swapchainComposition = SDL_GPU_SWAPCHAINCOMPOSITION_SDR;
9550
9551 // On non-Apple platforms the swapchain capability currentExtent can be different from the window,
9552 // so we have to query the window size.
9553#ifndef SDL_PLATFORM_APPLE
9554 int w, h;
9555 SDL_SyncWindow(window);
9556 SDL_GetWindowSizeInPixels(window, &w, &h);
9557 windowData->swapchainCreateWidth = w;
9558 windowData->swapchainCreateHeight = h;
9559#endif
9560
9561 Uint32 createSwapchainResult = VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
9562 if (createSwapchainResult == 1) {
9563 SDL_SetPointerProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA, windowData);
9564
9565 SDL_LockMutex(renderer->windowLock);
9566 if (renderer->claimedWindowCount >= renderer->claimedWindowCapacity) {
9567 renderer->claimedWindowCapacity *= 2;
9568 renderer->claimedWindows = SDL_realloc(
9569 renderer->claimedWindows,
9570 renderer->claimedWindowCapacity * sizeof(WindowData *));
9571 }
9572
9573 renderer->claimedWindows[renderer->claimedWindowCount] = windowData;
9574 renderer->claimedWindowCount += 1;
9575 SDL_UnlockMutex(renderer->windowLock);
9576
9577 SDL_AddEventWatch(VULKAN_INTERNAL_OnWindowResize, window);
9578
9579 return true;
9580 } else if (createSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
9581 windowData->needsSwapchainRecreate = true;
9582 return true;
9583 } else {
9584 SDL_free(windowData);
9585 return false;
9586 }
9587 } else {
9588 SET_STRING_ERROR_AND_RETURN("Window already claimed!", false);
9589 }
9590}
9591
// Releases a previously claimed window: waits for the GPU to go idle,
// releases any swapchain fences, destroys the swapchain, and removes the
// window from the renderer's claimed list. No-op if the window was never
// claimed.
static void VULKAN_ReleaseWindow(
    SDL_GPURenderer *driverData,
    SDL_Window *window)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
    Uint32 i;

    if (windowData == NULL) {
        return;
    }

    // The swapchain cannot be destroyed while the GPU may still be using it.
    VULKAN_Wait(driverData);

    // Drop the fences each in-flight frame was holding onto.
    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        if (windowData->inFlightFences[i] != NULL) {
            VULKAN_ReleaseFence(
                driverData,
                windowData->inFlightFences[i]);
        }
    }

    VULKAN_INTERNAL_DestroySwapchain(
        (VulkanRenderer *)driverData,
        windowData);


    // Swap-remove the window from the claimed list (order is not preserved).
    SDL_LockMutex(renderer->windowLock);
    for (i = 0; i < renderer->claimedWindowCount; i += 1) {
        if (renderer->claimedWindows[i]->window == window) {
            renderer->claimedWindows[i] = renderer->claimedWindows[renderer->claimedWindowCount - 1];
            renderer->claimedWindowCount -= 1;
            break;
        }
    }
    SDL_UnlockMutex(renderer->windowLock);

    SDL_free(windowData);

    // NOTE(review): windowData is freed before the property pointing at it is
    // cleared; there is a brief window where the property dangles — confirm
    // no concurrent FetchWindowData can run here.
    SDL_ClearProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA);
    SDL_RemoveEventWatch(VULKAN_INTERNAL_OnWindowResize, window);
}
9634
9635static Uint32 VULKAN_INTERNAL_RecreateSwapchain(
9636 VulkanRenderer *renderer,
9637 WindowData *windowData)
9638{
9639 Uint32 i;
9640
9641 if (!VULKAN_Wait((SDL_GPURenderer *)renderer)) {
9642 return false;
9643 }
9644
9645 for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
9646 if (windowData->inFlightFences[i] != NULL) {
9647 VULKAN_ReleaseFence(
9648 (SDL_GPURenderer *)renderer,
9649 windowData->inFlightFences[i]);
9650 windowData->inFlightFences[i] = NULL;
9651 }
9652 }
9653
9654 VULKAN_INTERNAL_DestroySwapchain(renderer, windowData);
9655 return VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
9656}
9657
9658static bool VULKAN_WaitForSwapchain(
9659 SDL_GPURenderer *driverData,
9660 SDL_Window *window)
9661{
9662 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
9663 WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
9664
9665 if (windowData == NULL) {
9666 SET_STRING_ERROR_AND_RETURN("Cannot wait for a swapchain from an unclaimed window!", false);
9667 }
9668
9669 if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
9670 if (!VULKAN_WaitForFences(
9671 driverData,
9672 true,
9673 &windowData->inFlightFences[windowData->frameCounter],
9674 1)) {
9675 return false;
9676 }
9677 }
9678
9679 return true;
9680}
9681
// Acquires the next swapchain image for `window` and wires it into the
// command buffer's present bookkeeping (wait/signal semaphores, present data).
// On success *swapchainTexture holds the texture container. A true return
// with *swapchainTexture left NULL means "no image available this frame,
// skip rendering" — it is not an error. `block` selects whether we wait on
// the frame's in-flight fence or bail out early when it is unsignaled.
static bool VULKAN_INTERNAL_AcquireSwapchainTexture(
    bool block,
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_Window *window,
    SDL_GPUTexture **swapchainTexture,
    Uint32 *swapchainTextureWidth,
    Uint32 *swapchainTextureHeight)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    Uint32 swapchainImageIndex;
    WindowData *windowData;
    VkResult acquireResult = VK_SUCCESS;
    VulkanTextureContainer *swapchainTextureContainer = NULL;
    VulkanPresentData *presentData;

    // Default all outputs so early returns leave them in a defined state.
    *swapchainTexture = NULL;
    if (swapchainTextureWidth) {
        *swapchainTextureWidth = 0;
    }
    if (swapchainTextureHeight) {
        *swapchainTextureHeight = 0;
    }

    windowData = VULKAN_INTERNAL_FetchWindowData(window);
    if (windowData == NULL) {
        SET_STRING_ERROR_AND_RETURN("Cannot acquire a swapchain texture from an unclaimed window!", false);
    }

    // If window data marked as needing swapchain recreate, try to recreate
    if (windowData->needsSwapchainRecreate) {
        Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
        if (!recreateSwapchainResult) {
            return false;
        } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Edge case, texture is filled in with NULL but not an error
            if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
                VULKAN_ReleaseFence(
                    (SDL_GPURenderer *)renderer,
                    windowData->inFlightFences[windowData->frameCounter]);
                windowData->inFlightFences[windowData->frameCounter] = NULL;
            }
            return true;
        }
    }

    // Report the swapchain dimensions now that they are known to be current.
    if (swapchainTextureWidth) {
        *swapchainTextureWidth = windowData->width;
    }
    if (swapchainTextureHeight) {
        *swapchainTextureHeight = windowData->height;
    }

    if (windowData->inFlightFences[windowData->frameCounter] != NULL) {
        if (block) {
            // If we are blocking, just wait for the fence!
            if (!VULKAN_WaitForFences(
                    (SDL_GPURenderer *)renderer,
                    true,
                    &windowData->inFlightFences[windowData->frameCounter],
                    1)) {
                return false;
            }
        } else {
            // If we are not blocking and the least recent fence is not signaled,
            // return true to indicate that there is no error but rendering should be skipped.
            if (!VULKAN_QueryFence(
                    (SDL_GPURenderer *)renderer,
                    windowData->inFlightFences[windowData->frameCounter])) {
                return true;
            }
        }

        // Fence signaled: this frame slot is free again; drop our reference.
        VULKAN_ReleaseFence(
            (SDL_GPURenderer *)renderer,
            windowData->inFlightFences[windowData->frameCounter]);

        windowData->inFlightFences[windowData->frameCounter] = NULL;
    }

    // Finally, try to acquire!
    acquireResult = renderer->vkAcquireNextImageKHR(
        renderer->logicalDevice,
        windowData->swapchain,
        SDL_MAX_UINT64,
        windowData->imageAvailableSemaphore[windowData->frameCounter],
        VK_NULL_HANDLE,
        &swapchainImageIndex);

    // Acquisition is invalid, let's try to recreate
    // (VK_SUBOPTIMAL_KHR still delivers a usable image, so it passes through.)
    if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR) {
        Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
        if (!recreateSwapchainResult) {
            return false;
        } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
            // Edge case, texture is filled in with NULL but not an error
            return true;
        }

        // Retry the acquire exactly once against the fresh swapchain.
        acquireResult = renderer->vkAcquireNextImageKHR(
            renderer->logicalDevice,
            windowData->swapchain,
            SDL_MAX_UINT64,
            windowData->imageAvailableSemaphore[windowData->frameCounter],
            VK_NULL_HANDLE,
            &swapchainImageIndex);

        if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR) {
            return false;
        }
    }

    swapchainTextureContainer = &windowData->textureContainers[swapchainImageIndex];

    // We need a special execution dependency with pWaitDstStageMask or image transition can start before acquire finishes

    VkImageMemoryBarrier imageBarrier;
    imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    imageBarrier.pNext = NULL;
    imageBarrier.srcAccessMask = 0;
    imageBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    imageBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.image = swapchainTextureContainer->activeTexture->image;
    imageBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imageBarrier.subresourceRange.baseMipLevel = 0;
    imageBarrier.subresourceRange.levelCount = 1;
    imageBarrier.subresourceRange.baseArrayLayer = 0;
    imageBarrier.subresourceRange.layerCount = 1;

    renderer->vkCmdPipelineBarrier(
        vulkanCommandBuffer->commandBuffer,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
        0,
        0,
        NULL,
        0,
        NULL,
        1,
        &imageBarrier);

    // Set up present struct

    if (vulkanCommandBuffer->presentDataCount == vulkanCommandBuffer->presentDataCapacity) {
        vulkanCommandBuffer->presentDataCapacity += 1;
        vulkanCommandBuffer->presentDatas = SDL_realloc(
            vulkanCommandBuffer->presentDatas,
            vulkanCommandBuffer->presentDataCapacity * sizeof(VulkanPresentData));
    }

    presentData = &vulkanCommandBuffer->presentDatas[vulkanCommandBuffer->presentDataCount];
    vulkanCommandBuffer->presentDataCount += 1;

    presentData->windowData = windowData;
    presentData->swapchainImageIndex = swapchainImageIndex;

    // Set up present semaphores

    // The submit must wait on the acquire's imageAvailable semaphore...
    if (vulkanCommandBuffer->waitSemaphoreCount == vulkanCommandBuffer->waitSemaphoreCapacity) {
        vulkanCommandBuffer->waitSemaphoreCapacity += 1;
        vulkanCommandBuffer->waitSemaphores = SDL_realloc(
            vulkanCommandBuffer->waitSemaphores,
            vulkanCommandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));
    }

    vulkanCommandBuffer->waitSemaphores[vulkanCommandBuffer->waitSemaphoreCount] =
        windowData->imageAvailableSemaphore[windowData->frameCounter];
    vulkanCommandBuffer->waitSemaphoreCount += 1;

    // ...and signal renderFinished so the later present can wait on it.
    if (vulkanCommandBuffer->signalSemaphoreCount == vulkanCommandBuffer->signalSemaphoreCapacity) {
        vulkanCommandBuffer->signalSemaphoreCapacity += 1;
        vulkanCommandBuffer->signalSemaphores = SDL_realloc(
            vulkanCommandBuffer->signalSemaphores,
            vulkanCommandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));
    }

    vulkanCommandBuffer->signalSemaphores[vulkanCommandBuffer->signalSemaphoreCount] =
        windowData->renderFinishedSemaphore[windowData->frameCounter];
    vulkanCommandBuffer->signalSemaphoreCount += 1;

    *swapchainTexture = (SDL_GPUTexture *)swapchainTextureContainer;
    return true;
}
9868
9869static bool VULKAN_AcquireSwapchainTexture(
9870 SDL_GPUCommandBuffer *command_buffer,
9871 SDL_Window *window,
9872 SDL_GPUTexture **swapchain_texture,
9873 Uint32 *swapchain_texture_width,
9874 Uint32 *swapchain_texture_height
9875) {
9876 return VULKAN_INTERNAL_AcquireSwapchainTexture(
9877 false,
9878 command_buffer,
9879 window,
9880 swapchain_texture,
9881 swapchain_texture_width,
9882 swapchain_texture_height);
9883}
9884
9885static bool VULKAN_WaitAndAcquireSwapchainTexture(
9886 SDL_GPUCommandBuffer *command_buffer,
9887 SDL_Window *window,
9888 SDL_GPUTexture **swapchain_texture,
9889 Uint32 *swapchain_texture_width,
9890 Uint32 *swapchain_texture_height
9891) {
9892 return VULKAN_INTERNAL_AcquireSwapchainTexture(
9893 true,
9894 command_buffer,
9895 window,
9896 swapchain_texture,
9897 swapchain_texture_width,
9898 swapchain_texture_height);
9899}
9900
9901static SDL_GPUTextureFormat VULKAN_GetSwapchainTextureFormat(
9902 SDL_GPURenderer *driverData,
9903 SDL_Window *window)
9904{
9905 VulkanRenderer *renderer = (VulkanRenderer*)driverData;
9906 WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
9907
9908 if (windowData == NULL) {
9909 SET_STRING_ERROR_AND_RETURN("Cannot get swapchain format, window has not been claimed!", SDL_GPU_TEXTUREFORMAT_INVALID);
9910 }
9911
9912 return SwapchainCompositionToSDLFormat(
9913 windowData->swapchainComposition,
9914 windowData->usingFallbackFormat);
9915}
9916
9917static bool VULKAN_SetSwapchainParameters(
9918 SDL_GPURenderer *driverData,
9919 SDL_Window *window,
9920 SDL_GPUSwapchainComposition swapchainComposition,
9921 SDL_GPUPresentMode presentMode)
9922{
9923 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
9924 WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
9925
9926 if (windowData == NULL) {
9927 SET_STRING_ERROR_AND_RETURN("Cannot set swapchain parameters on unclaimed window!", false);
9928 }
9929
9930 if (!VULKAN_SupportsSwapchainComposition(driverData, window, swapchainComposition)) {
9931 SET_STRING_ERROR_AND_RETURN("Swapchain composition not supported!", false);
9932 }
9933
9934 if (!VULKAN_SupportsPresentMode(driverData, window, presentMode)) {
9935 SET_STRING_ERROR_AND_RETURN("Present mode not supported!", false);
9936 }
9937
9938 windowData->presentMode = presentMode;
9939 windowData->swapchainComposition = swapchainComposition;
9940
9941 Uint32 recreateSwapchainResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
9942 if (!recreateSwapchainResult) {
9943 return false;
9944 } else if (recreateSwapchainResult == VULKAN_INTERNAL_TRY_AGAIN) {
9945 // Edge case, swapchain extent is (0, 0) but this is not an error
9946 windowData->needsSwapchainRecreate = true;
9947 return true;
9948 }
9949
9950 return true;
9951}
9952
9953static bool VULKAN_SetAllowedFramesInFlight(
9954 SDL_GPURenderer *driverData,
9955 Uint32 allowedFramesInFlight)
9956{
9957 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
9958
9959 renderer->allowedFramesInFlight = allowedFramesInFlight;
9960
9961 for (Uint32 i = 0; i < renderer->claimedWindowCount; i += 1) {
9962 WindowData *windowData = renderer->claimedWindows[i];
9963
9964 Uint32 recreateResult = VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
9965 if (!recreateResult) {
9966 return false;
9967 } else if (recreateResult == VULKAN_INTERNAL_TRY_AGAIN) {
9968 // Edge case, swapchain extent is (0, 0) but this is not an error
9969 windowData->needsSwapchainRecreate = true;
9970 }
9971 }
9972
9973 return true;
9974}
9975
9976// Submission structure
9977
9978static VulkanFenceHandle *VULKAN_INTERNAL_AcquireFenceFromPool(
9979 VulkanRenderer *renderer)
9980{
9981 VulkanFenceHandle *handle;
9982 VkFenceCreateInfo fenceCreateInfo;
9983 VkFence fence;
9984 VkResult vulkanResult;
9985
9986 if (renderer->fencePool.availableFenceCount == 0) {
9987 // Create fence
9988 fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
9989 fenceCreateInfo.pNext = NULL;
9990 fenceCreateInfo.flags = 0;
9991
9992 vulkanResult = renderer->vkCreateFence(
9993 renderer->logicalDevice,
9994 &fenceCreateInfo,
9995 NULL,
9996 &fence);
9997
9998 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateFence, NULL);
9999
10000 handle = SDL_malloc(sizeof(VulkanFenceHandle));
10001 handle->fence = fence;
10002 SDL_SetAtomicInt(&handle->referenceCount, 0);
10003 return handle;
10004 }
10005
10006 SDL_LockMutex(renderer->fencePool.lock);
10007
10008 handle = renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount - 1];
10009 renderer->fencePool.availableFenceCount -= 1;
10010
10011 vulkanResult = renderer->vkResetFences(
10012 renderer->logicalDevice,
10013 1,
10014 &handle->fence);
10015
10016 SDL_UnlockMutex(renderer->fencePool.lock);
10017
10018 CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkResetFences, NULL);
10019
10020 return handle;
10021}
10022
// Destroys every deferred-release resource whose reference count has dropped
// to zero. Each list uses the same swap-remove pattern: the destroyed entry
// is overwritten by the last entry and the count shrinks, which is why the
// loops iterate backwards. Entries still referenced stay queued for a later
// pass. Runs entirely under disposeLock.
static void VULKAN_INTERNAL_PerformPendingDestroys(
    VulkanRenderer *renderer)
{
    SDL_LockMutex(renderer->disposeLock);

    for (Sint32 i = renderer->texturesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->texturesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyTexture(
                renderer,
                renderer->texturesToDestroy[i]);

            renderer->texturesToDestroy[i] = renderer->texturesToDestroy[renderer->texturesToDestroyCount - 1];
            renderer->texturesToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->buffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->buffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyBuffer(
                renderer,
                renderer->buffersToDestroy[i]);

            renderer->buffersToDestroy[i] = renderer->buffersToDestroy[renderer->buffersToDestroyCount - 1];
            renderer->buffersToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->graphicsPipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->graphicsPipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyGraphicsPipeline(
                renderer,
                renderer->graphicsPipelinesToDestroy[i]);

            renderer->graphicsPipelinesToDestroy[i] = renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount - 1];
            renderer->graphicsPipelinesToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->computePipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->computePipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyComputePipeline(
                renderer,
                renderer->computePipelinesToDestroy[i]);

            renderer->computePipelinesToDestroy[i] = renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount - 1];
            renderer->computePipelinesToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->shadersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->shadersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyShader(
                renderer,
                renderer->shadersToDestroy[i]);

            renderer->shadersToDestroy[i] = renderer->shadersToDestroy[renderer->shadersToDestroyCount - 1];
            renderer->shadersToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->samplersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->samplersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroySampler(
                renderer,
                renderer->samplersToDestroy[i]);

            renderer->samplersToDestroy[i] = renderer->samplersToDestroy[renderer->samplersToDestroyCount - 1];
            renderer->samplersToDestroyCount -= 1;
        }
    }

    for (Sint32 i = renderer->framebuffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_GetAtomicInt(&renderer->framebuffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyFramebuffer(
                renderer,
                renderer->framebuffersToDestroy[i]);

            renderer->framebuffersToDestroy[i] = renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount - 1];
            renderer->framebuffersToDestroyCount -= 1;
        }
    }

    SDL_UnlockMutex(renderer->disposeLock);
}
10107
// Recycles a command buffer after its work has completed (or after a cancel):
// returns its fence and uniform buffers, drops every resource reference it
// held, and puts it back in its pool's inactive list. `cancel` is true when
// the buffer was never submitted, in which case it is not removed from the
// submitted list (it was never added). Caller is expected to hold submitLock
// — TODO confirm; all call sites in this file do.
static void VULKAN_INTERNAL_CleanCommandBuffer(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    bool cancel)
{
    // Release the in-flight fence unless the caller kept ownership of it
    // (see VULKAN_SubmitAndAcquireFence, which clears autoReleaseFence).
    if (commandBuffer->autoReleaseFence) {
        VULKAN_ReleaseFence(
            (SDL_GPURenderer *)renderer,
            (SDL_GPUFence *)commandBuffer->inFlightFence);

        commandBuffer->inFlightFence = NULL;
    }

    // Uniform buffers are now available

    SDL_LockMutex(renderer->acquireUniformBufferLock);

    for (Sint32 i = 0; i < commandBuffer->usedUniformBufferCount; i += 1) {
        VULKAN_INTERNAL_ReturnUniformBufferToPool(
            renderer,
            commandBuffer->usedUniformBuffers[i]);
    }
    commandBuffer->usedUniformBufferCount = 0;

    SDL_UnlockMutex(renderer->acquireUniformBufferLock);

    // Decrement reference counts

    for (Sint32 i = 0; i < commandBuffer->usedBufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedBuffers[i]->referenceCount);
    }
    commandBuffer->usedBufferCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedTextureCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedTextures[i]->referenceCount);
    }
    commandBuffer->usedTextureCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedSamplerCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedSamplers[i]->referenceCount);
    }
    commandBuffer->usedSamplerCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedGraphicsPipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedGraphicsPipelines[i]->referenceCount);
    }
    commandBuffer->usedGraphicsPipelineCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedComputePipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedComputePipelines[i]->referenceCount);
    }
    commandBuffer->usedComputePipelineCount = 0;

    for (Sint32 i = 0; i < commandBuffer->usedFramebufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedFramebuffers[i]->referenceCount);
    }
    commandBuffer->usedFramebufferCount = 0;

    // Reset presentation data

    commandBuffer->presentDataCount = 0;
    commandBuffer->waitSemaphoreCount = 0;
    commandBuffer->signalSemaphoreCount = 0;

    // Reset defrag state

    if (commandBuffer->isDefrag) {
        renderer->defragInProgress = 0;
    }

    // Return command buffer to pool

    SDL_LockMutex(renderer->acquireCommandBufferLock);

    if (commandBuffer->commandPool->inactiveCommandBufferCount == commandBuffer->commandPool->inactiveCommandBufferCapacity) {
        commandBuffer->commandPool->inactiveCommandBufferCapacity += 1;
        commandBuffer->commandPool->inactiveCommandBuffers = SDL_realloc(
            commandBuffer->commandPool->inactiveCommandBuffers,
            commandBuffer->commandPool->inactiveCommandBufferCapacity * sizeof(VulkanCommandBuffer *));
    }

    commandBuffer->commandPool->inactiveCommandBuffers[commandBuffer->commandPool->inactiveCommandBufferCount] = commandBuffer;
    commandBuffer->commandPool->inactiveCommandBufferCount += 1;

    // Release descriptor set cache

    VULKAN_INTERNAL_ReturnDescriptorSetCacheToPool(
        renderer,
        commandBuffer->descriptorSetCache);

    commandBuffer->descriptorSetCache = NULL;

    SDL_UnlockMutex(renderer->acquireCommandBufferLock);

    // Remove this command buffer from the submitted list
    // (swap-remove: the last entry takes the vacated slot).
    if (!cancel) {
        for (Uint32 i = 0; i < renderer->submittedCommandBufferCount; i += 1) {
            if (renderer->submittedCommandBuffers[i] == commandBuffer) {
                renderer->submittedCommandBuffers[i] = renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount - 1];
                renderer->submittedCommandBufferCount -= 1;
            }
        }
    }
}
10212
10213static bool VULKAN_WaitForFences(
10214 SDL_GPURenderer *driverData,
10215 bool waitAll,
10216 SDL_GPUFence *const *fences,
10217 Uint32 numFences)
10218{
10219 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
10220 VkFence *vkFences = SDL_stack_alloc(VkFence, numFences);
10221 VkResult result;
10222
10223 for (Uint32 i = 0; i < numFences; i += 1) {
10224 vkFences[i] = ((VulkanFenceHandle *)fences[i])->fence;
10225 }
10226
10227 result = renderer->vkWaitForFences(
10228 renderer->logicalDevice,
10229 numFences,
10230 vkFences,
10231 waitAll,
10232 SDL_MAX_UINT64);
10233
10234 CHECK_VULKAN_ERROR_AND_RETURN(result, vkWaitForFences, false);
10235
10236 SDL_stack_free(vkFences);
10237
10238 SDL_LockMutex(renderer->submitLock);
10239
10240 for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
10241 result = renderer->vkGetFenceStatus(
10242 renderer->logicalDevice,
10243 renderer->submittedCommandBuffers[i]->inFlightFence->fence);
10244
10245 if (result == VK_SUCCESS) {
10246 VULKAN_INTERNAL_CleanCommandBuffer(
10247 renderer,
10248 renderer->submittedCommandBuffers[i],
10249 false);
10250 }
10251 }
10252
10253 VULKAN_INTERNAL_PerformPendingDestroys(renderer);
10254
10255 SDL_UnlockMutex(renderer->submitLock);
10256
10257 return true;
10258}
10259
10260static bool VULKAN_Wait(
10261 SDL_GPURenderer *driverData)
10262{
10263 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
10264 VulkanCommandBuffer *commandBuffer;
10265 VkResult result;
10266 Sint32 i;
10267
10268 result = renderer->vkDeviceWaitIdle(renderer->logicalDevice);
10269
10270 CHECK_VULKAN_ERROR_AND_RETURN(result, vkDeviceWaitIdle, false);
10271
10272 SDL_LockMutex(renderer->submitLock);
10273
10274 for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
10275 commandBuffer = renderer->submittedCommandBuffers[i];
10276 VULKAN_INTERNAL_CleanCommandBuffer(renderer, commandBuffer, false);
10277 }
10278
10279 VULKAN_INTERNAL_PerformPendingDestroys(renderer);
10280
10281 SDL_UnlockMutex(renderer->submitLock);
10282
10283 return true;
10284}
10285
10286static SDL_GPUFence *VULKAN_SubmitAndAcquireFence(
10287 SDL_GPUCommandBuffer *commandBuffer)
10288{
10289 VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
10290 vulkanCommandBuffer->autoReleaseFence = false;
10291 if (!VULKAN_Submit(commandBuffer)) {
10292 return NULL;
10293 }
10294 return (SDL_GPUFence *)vulkanCommandBuffer->inFlightFence;
10295}
10296
10297static void VULKAN_INTERNAL_ReleaseCommandBuffer(VulkanCommandBuffer *vulkanCommandBuffer)
10298{
10299 VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
10300
10301 if (renderer->submittedCommandBufferCount + 1 >= renderer->submittedCommandBufferCapacity) {
10302 renderer->submittedCommandBufferCapacity = renderer->submittedCommandBufferCount + 1;
10303
10304 renderer->submittedCommandBuffers = SDL_realloc(
10305 renderer->submittedCommandBuffers,
10306 sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity);
10307 }
10308
10309 renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount] = vulkanCommandBuffer;
10310 renderer->submittedCommandBufferCount += 1;
10311}
10312
// Submits the command buffer to the unified queue and, if it acquired any
// swapchain images, presents them. Also performs opportunistic cleanup of
// finished command buffers, empty memory allocations, pending destroys, and
// may kick off a defrag pass. Everything runs under submitLock.
static bool VULKAN_Submit(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VkSubmitInfo submitInfo;
    VkPresentInfoKHR presentInfo;
    VulkanPresentData *presentData;
    VkResult vulkanResult, presentResult = VK_SUCCESS;
    VkPipelineStageFlags waitStages[MAX_PRESENT_COUNT];
    Uint32 swapchainImageIndex;
    VulkanTextureSubresource *swapchainTextureSubresource;
    Uint8 commandBufferCleaned = 0;
    VulkanMemorySubAllocator *allocator;
    bool presenting = false;

    SDL_LockMutex(renderer->submitLock);

    // FIXME: Can this just be permanent?
    for (Uint32 i = 0; i < MAX_PRESENT_COUNT; i += 1) {
        waitStages[i] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    }

    // Transition every acquired swapchain image into present usage before the
    // command buffer is closed.
    for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) {
        swapchainImageIndex = vulkanCommandBuffer->presentDatas[j].swapchainImageIndex;
        swapchainTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
            &vulkanCommandBuffer->presentDatas[j].windowData->textureContainers[swapchainImageIndex],
            0,
            0);

        VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_PRESENT,
            swapchainTextureSubresource);
    }

    if (!VULKAN_INTERNAL_EndCommandBuffer(renderer, vulkanCommandBuffer)) {
        SDL_UnlockMutex(renderer->submitLock);
        return false;
    }

    vulkanCommandBuffer->inFlightFence = VULKAN_INTERNAL_AcquireFenceFromPool(renderer);
    if (vulkanCommandBuffer->inFlightFence == NULL) {
        SDL_UnlockMutex(renderer->submitLock);
        return false;
    }

    // Command buffer has a reference to the in-flight fence
    (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount);

    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = NULL;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &vulkanCommandBuffer->commandBuffer;

    // Wait/signal semaphores were accumulated during swapchain acquisition.
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.pWaitSemaphores = vulkanCommandBuffer->waitSemaphores;
    submitInfo.waitSemaphoreCount = vulkanCommandBuffer->waitSemaphoreCount;
    submitInfo.pSignalSemaphores = vulkanCommandBuffer->signalSemaphores;
    submitInfo.signalSemaphoreCount = vulkanCommandBuffer->signalSemaphoreCount;

    vulkanResult = renderer->vkQueueSubmit(
        renderer->unifiedQueue,
        1,
        &submitInfo,
        vulkanCommandBuffer->inFlightFence->fence);

    if (vulkanResult != VK_SUCCESS) {
        SDL_UnlockMutex(renderer->submitLock);
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkQueueSubmit, false);
    }

    // Present, if applicable
    bool result = true;

    for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) {
        presenting = true;

        presentData = &vulkanCommandBuffer->presentDatas[j];

        presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
        presentInfo.pNext = NULL;
        presentInfo.pWaitSemaphores =
            &presentData->windowData->renderFinishedSemaphore[presentData->windowData->frameCounter];
        presentInfo.waitSemaphoreCount = 1;
        presentInfo.pSwapchains = &presentData->windowData->swapchain;
        presentInfo.swapchainCount = 1;
        presentInfo.pImageIndices = &presentData->swapchainImageIndex;
        presentInfo.pResults = NULL;

        presentResult = renderer->vkQueuePresentKHR(
            renderer->unifiedQueue,
            &presentInfo);

        // SUBOPTIMAL and OUT_OF_DATE still presented (or consumed the
        // semaphore), so they are handled as soft failures that flag a
        // swapchain rebuild rather than hard errors.
        if (presentResult == VK_SUCCESS || presentResult == VK_SUBOPTIMAL_KHR || presentResult == VK_ERROR_OUT_OF_DATE_KHR) {
            // If presenting, the swapchain is using the in-flight fence
            presentData->windowData->inFlightFences[presentData->windowData->frameCounter] = (SDL_GPUFence*)vulkanCommandBuffer->inFlightFence;
            (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount);

            if (presentResult == VK_SUBOPTIMAL_KHR || presentResult == VK_ERROR_OUT_OF_DATE_KHR) {
                presentData->windowData->needsSwapchainRecreate = true;
            }
        } else {
            // NOTE(review): presentResult != VK_SUCCESS is always true in this
            // branch (success was handled above), so the inner check is
            // redundant; were it ever false, the macro below would return
            // without unlocking submitLock — confirm intended.
            if (presentResult != VK_SUCCESS) {
                VULKAN_INTERNAL_ReleaseCommandBuffer(vulkanCommandBuffer);
                SDL_UnlockMutex(renderer->submitLock);
            }

            CHECK_VULKAN_ERROR_AND_RETURN(presentResult, vkQueuePresentKHR, false);
        }

        // Advance this window's frame slot ring.
        presentData->windowData->frameCounter =
            (presentData->windowData->frameCounter + 1) % renderer->allowedFramesInFlight;
    }

    // Check if we can perform any cleanups
    // (backwards: cleanup swap-removes entries from the submitted list)

    for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
        vulkanResult = renderer->vkGetFenceStatus(
            renderer->logicalDevice,
            renderer->submittedCommandBuffers[i]->inFlightFence->fence);

        if (vulkanResult == VK_SUCCESS) {
            VULKAN_INTERNAL_CleanCommandBuffer(
                renderer,
                renderer->submittedCommandBuffers[i],
                false);

            commandBufferCleaned = 1;
        }
    }

    if (commandBufferCleaned) {
        // Something was recycled; release any memory allocations that now
        // have zero used regions.
        SDL_LockMutex(renderer->allocatorLock);

        for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
            allocator = &renderer->memoryAllocator->subAllocators[i];

            for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
                if (allocator->allocations[j]->usedRegionCount == 0) {
                    VULKAN_INTERNAL_DeallocateMemory(
                        renderer,
                        allocator,
                        j);
                }
            }
        }

        SDL_UnlockMutex(renderer->allocatorLock);
    }

    // Check pending destroys
    VULKAN_INTERNAL_PerformPendingDestroys(renderer);

    // Defrag!
    if (
        presenting &&
        renderer->allocationsToDefragCount > 0 &&
        !renderer->defragInProgress) {
        result = VULKAN_INTERNAL_DefragmentMemory(renderer);
    }

    // Mark command buffer as submitted
    // This must happen after defrag, because it will try to acquire new command buffers.
    VULKAN_INTERNAL_ReleaseCommandBuffer(vulkanCommandBuffer);

    SDL_UnlockMutex(renderer->submitLock);

    return result;
}
10484
10485static bool VULKAN_Cancel(
10486 SDL_GPUCommandBuffer *commandBuffer)
10487{
10488 VulkanRenderer *renderer;
10489 VulkanCommandBuffer *vulkanCommandBuffer;
10490 VkResult result;
10491
10492 vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
10493 renderer = vulkanCommandBuffer->renderer;
10494
10495 result = renderer->vkResetCommandBuffer(
10496 vulkanCommandBuffer->commandBuffer,
10497 VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
10498 CHECK_VULKAN_ERROR_AND_RETURN(result, vkResetCommandBuffer, false);
10499
10500 vulkanCommandBuffer->autoReleaseFence = false;
10501 SDL_LockMutex(renderer->submitLock);
10502 VULKAN_INTERNAL_CleanCommandBuffer(renderer, vulkanCommandBuffer, true);
10503 SDL_UnlockMutex(renderer->submitLock);
10504
10505 return true;
10506}
10507
10508static bool VULKAN_INTERNAL_DefragmentMemory(
10509 VulkanRenderer *renderer)
10510{
10511 VulkanMemoryAllocation *allocation;
10512 VulkanMemoryUsedRegion *currentRegion;
10513 VulkanBuffer *newBuffer;
10514 VulkanTexture *newTexture;
10515 VkBufferCopy bufferCopy;
10516 VkImageCopy imageCopy;
10517 VulkanCommandBuffer *commandBuffer;
10518 VulkanTextureSubresource *srcSubresource;
10519 VulkanTextureSubresource *dstSubresource;
10520 Uint32 i, subresourceIndex;
10521
10522 renderer->defragInProgress = 1;
10523
10524 commandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
10525 if (commandBuffer == NULL) {
10526 return false;
10527 }
10528 commandBuffer->isDefrag = 1;
10529
10530 SDL_LockMutex(renderer->allocatorLock);
10531
10532 allocation = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
10533 renderer->allocationsToDefragCount -= 1;
10534
10535 /* For each used region in the allocation
10536 * create a new resource, copy the data
10537 * and re-point the resource containers
10538 */
10539 for (i = 0; i < allocation->usedRegionCount; i += 1) {
10540 currentRegion = allocation->usedRegions[i];
10541
10542 if (currentRegion->isBuffer && !currentRegion->vulkanBuffer->markedForDestroy) {
10543 currentRegion->vulkanBuffer->usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
10544
10545 newBuffer = VULKAN_INTERNAL_CreateBuffer(
10546 renderer,
10547 currentRegion->vulkanBuffer->size,
10548 currentRegion->vulkanBuffer->usage,
10549 currentRegion->vulkanBuffer->type,
10550 false);
10551
10552 if (newBuffer == NULL) {
10553 SDL_UnlockMutex(renderer->allocatorLock);
10554 return false;
10555 }
10556
10557 if (
10558 renderer->debugMode &&
10559 renderer->supportsDebugUtils &&
10560 currentRegion->vulkanBuffer->container != NULL &&
10561 currentRegion->vulkanBuffer->container->debugName != NULL) {
10562 VULKAN_INTERNAL_SetBufferName(
10563 renderer,
10564 newBuffer,
10565 currentRegion->vulkanBuffer->container->debugName);
10566 }
10567
10568 // Copy buffer contents if necessary
10569 if (
10570 currentRegion->vulkanBuffer->type == VULKAN_BUFFER_TYPE_GPU && currentRegion->vulkanBuffer->transitioned) {
10571 VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
10572 renderer,
10573 commandBuffer,
10574 VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
10575 currentRegion->vulkanBuffer);
10576
10577 VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
10578 renderer,
10579 commandBuffer,
10580 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
10581 newBuffer);
10582
10583 bufferCopy.srcOffset = 0;
10584 bufferCopy.dstOffset = 0;
10585 bufferCopy.size = currentRegion->resourceSize;
10586
10587 renderer->vkCmdCopyBuffer(
10588 commandBuffer->commandBuffer,
10589 currentRegion->vulkanBuffer->buffer,
10590 newBuffer->buffer,
10591 1,
10592 &bufferCopy);
10593
10594 VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
10595 renderer,
10596 commandBuffer,
10597 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
10598 newBuffer);
10599
10600 VULKAN_INTERNAL_TrackBuffer(commandBuffer, currentRegion->vulkanBuffer);
10601 VULKAN_INTERNAL_TrackBuffer(commandBuffer, newBuffer);
10602 }
10603
10604 // re-point original container to new buffer
10605 newBuffer->container = currentRegion->vulkanBuffer->container;
10606 newBuffer->containerIndex = currentRegion->vulkanBuffer->containerIndex;
10607 if (newBuffer->type == VULKAN_BUFFER_TYPE_UNIFORM) {
10608 ((VulkanUniformBuffer *)newBuffer->container)->buffer = newBuffer;
10609 } else {
10610 newBuffer->container->buffers[newBuffer->containerIndex] = newBuffer;
10611 if (newBuffer->container->activeBuffer == currentRegion->vulkanBuffer) {
10612 newBuffer->container->activeBuffer = newBuffer;
10613 }
10614 }
10615
10616 VULKAN_INTERNAL_ReleaseBuffer(renderer, currentRegion->vulkanBuffer);
10617 } else if (!currentRegion->isBuffer && !currentRegion->vulkanTexture->markedForDestroy) {
10618 newTexture = VULKAN_INTERNAL_CreateTexture(
10619 renderer,
10620 ¤tRegion->vulkanTexture->container->header.info);
10621
10622 if (newTexture == NULL) {
10623 SDL_UnlockMutex(renderer->allocatorLock);
10624 return false;
10625 }
10626
10627 SDL_GPUTextureCreateInfo info = currentRegion->vulkanTexture->container->header.info;
10628 for (subresourceIndex = 0; subresourceIndex < currentRegion->vulkanTexture->subresourceCount; subresourceIndex += 1) {
10629 // copy subresource if necessary
10630 srcSubresource = ¤tRegion->vulkanTexture->subresources[subresourceIndex];
10631 dstSubresource = &newTexture->subresources[subresourceIndex];
10632
10633 // Set debug name if it exists
10634 if (
10635 renderer->debugMode &&
10636 renderer->supportsDebugUtils &&
10637 srcSubresource->parent->container != NULL &&
10638 srcSubresource->parent->container->debugName != NULL) {
10639 VULKAN_INTERNAL_SetTextureName(
10640 renderer,
10641 currentRegion->vulkanTexture,
10642 srcSubresource->parent->container->debugName);
10643 }
10644
10645 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
10646 renderer,
10647 commandBuffer,
10648 VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
10649 srcSubresource);
10650
10651 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
10652 renderer,
10653 commandBuffer,
10654 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
10655 dstSubresource);
10656
10657 imageCopy.srcOffset.x = 0;
10658 imageCopy.srcOffset.y = 0;
10659 imageCopy.srcOffset.z = 0;
10660 imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
10661 imageCopy.srcSubresource.baseArrayLayer = srcSubresource->layer;
10662 imageCopy.srcSubresource.layerCount = 1;
10663 imageCopy.srcSubresource.mipLevel = srcSubresource->level;
10664 imageCopy.extent.width = SDL_max(1, info.width >> srcSubresource->level);
10665 imageCopy.extent.height = SDL_max(1, info.height >> srcSubresource->level);
10666 imageCopy.extent.depth = info.type == SDL_GPU_TEXTURETYPE_3D ? info.layer_count_or_depth : 1;
10667 imageCopy.dstOffset.x = 0;
10668 imageCopy.dstOffset.y = 0;
10669 imageCopy.dstOffset.z = 0;
10670 imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
10671 imageCopy.dstSubresource.baseArrayLayer = dstSubresource->layer;
10672 imageCopy.dstSubresource.layerCount = 1;
10673 imageCopy.dstSubresource.mipLevel = dstSubresource->level;
10674
10675 renderer->vkCmdCopyImage(
10676 commandBuffer->commandBuffer,
10677 currentRegion->vulkanTexture->image,
10678 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
10679 newTexture->image,
10680 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
10681 1,
10682 &imageCopy);
10683
10684 VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
10685 renderer,
10686 commandBuffer,
10687 VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
10688 dstSubresource);
10689
10690 VULKAN_INTERNAL_TrackTexture(commandBuffer, srcSubresource->parent);
10691 VULKAN_INTERNAL_TrackTexture(commandBuffer, dstSubresource->parent);
10692 }
10693
10694 // re-point original container to new texture
10695 newTexture->container = currentRegion->vulkanTexture->container;
10696 newTexture->containerIndex = currentRegion->vulkanTexture->containerIndex;
10697 newTexture->container->textures[currentRegion->vulkanTexture->containerIndex] = newTexture;
10698 if (currentRegion->vulkanTexture == currentRegion->vulkanTexture->container->activeTexture) {
10699 newTexture->container->activeTexture = newTexture;
10700 }
10701
10702 VULKAN_INTERNAL_ReleaseTexture(renderer, currentRegion->vulkanTexture);
10703 }
10704 }
10705
10706 SDL_UnlockMutex(renderer->allocatorLock);
10707
10708 return VULKAN_Submit(
10709 (SDL_GPUCommandBuffer *)commandBuffer);
10710}
10711
10712// Format Info
10713
10714static bool VULKAN_SupportsTextureFormat(
10715 SDL_GPURenderer *driverData,
10716 SDL_GPUTextureFormat format,
10717 SDL_GPUTextureType type,
10718 SDL_GPUTextureUsageFlags usage)
10719{
10720 VulkanRenderer *renderer = (VulkanRenderer *)driverData;
10721 VkFormat vulkanFormat = SDLToVK_TextureFormat[format];
10722 VkImageUsageFlags vulkanUsage = 0;
10723 VkImageCreateFlags createFlags = 0;
10724 VkImageFormatProperties properties;
10725 VkResult vulkanResult;
10726
10727 if (usage & SDL_GPU_TEXTUREUSAGE_SAMPLER) {
10728 vulkanUsage |= VK_IMAGE_USAGE_SAMPLED_BIT;
10729 }
10730 if (usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET) {
10731 vulkanUsage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
10732 }
10733 if (usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET) {
10734 vulkanUsage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
10735 }
10736 if (usage & (SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ |
10737 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ |
10738 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE |
10739 SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_SIMULTANEOUS_READ_WRITE)) {
10740 vulkanUsage |= VK_IMAGE_USAGE_STORAGE_BIT;
10741 }
10742
10743 if (type == SDL_GPU_TEXTURETYPE_CUBE || type == SDL_GPU_TEXTURETYPE_CUBE_ARRAY) {
10744 createFlags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
10745 }
10746
10747 vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
10748 renderer->physicalDevice,
10749 vulkanFormat,
10750 (type == SDL_GPU_TEXTURETYPE_3D) ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D,
10751 VK_IMAGE_TILING_OPTIMAL,
10752 vulkanUsage,
10753 createFlags,
10754 &properties);
10755
10756 return vulkanResult == VK_SUCCESS;
10757}
10758
10759// Device instantiation
10760
10761static inline Uint8 CheckDeviceExtensions(
10762 VkExtensionProperties *extensions,
10763 Uint32 numExtensions,
10764 VulkanExtensions *supports)
10765{
10766 Uint32 i;
10767
10768 SDL_memset(supports, '\0', sizeof(VulkanExtensions));
10769 for (i = 0; i < numExtensions; i += 1) {
10770 const char *name = extensions[i].extensionName;
10771#define CHECK(ext) \
10772 if (SDL_strcmp(name, "VK_" #ext) == 0) { \
10773 supports->ext = 1; \
10774 }
10775 CHECK(KHR_swapchain)
10776 else CHECK(KHR_maintenance1) else CHECK(KHR_driver_properties) else CHECK(EXT_vertex_attribute_divisor) else CHECK(KHR_portability_subset) else CHECK(EXT_texture_compression_astc_hdr)
10777#undef CHECK
10778 }
10779
10780 return (supports->KHR_swapchain &&
10781 supports->KHR_maintenance1);
10782}
10783
10784static inline Uint32 GetDeviceExtensionCount(VulkanExtensions *supports)
10785{
10786 return (
10787 supports->KHR_swapchain +
10788 supports->KHR_maintenance1 +
10789 supports->KHR_driver_properties +
10790 supports->EXT_vertex_attribute_divisor +
10791 supports->KHR_portability_subset +
10792 supports->EXT_texture_compression_astc_hdr);
10793}
10794
10795static inline void CreateDeviceExtensionArray(
10796 VulkanExtensions *supports,
10797 const char **extensions)
10798{
10799 Uint8 cur = 0;
10800#define CHECK(ext) \
10801 if (supports->ext) { \
10802 extensions[cur++] = "VK_" #ext; \
10803 }
10804 CHECK(KHR_swapchain)
10805 CHECK(KHR_maintenance1)
10806 CHECK(KHR_driver_properties)
10807 CHECK(EXT_vertex_attribute_divisor)
10808 CHECK(KHR_portability_subset)
10809 CHECK(EXT_texture_compression_astc_hdr)
10810#undef CHECK
10811}
10812
10813static inline Uint8 SupportsInstanceExtension(
10814 const char *ext,
10815 VkExtensionProperties *availableExtensions,
10816 Uint32 numAvailableExtensions)
10817{
10818 Uint32 i;
10819 for (i = 0; i < numAvailableExtensions; i += 1) {
10820 if (SDL_strcmp(ext, availableExtensions[i].extensionName) == 0) {
10821 return 1;
10822 }
10823 }
10824 return 0;
10825}
10826
10827static Uint8 VULKAN_INTERNAL_CheckInstanceExtensions(
10828 const char **requiredExtensions,
10829 Uint32 requiredExtensionsLength,
10830 bool *supportsDebugUtils,
10831 bool *supportsColorspace)
10832{
10833 Uint32 extensionCount, i;
10834 VkExtensionProperties *availableExtensions;
10835 Uint8 allExtensionsSupported = 1;
10836
10837 vkEnumerateInstanceExtensionProperties(
10838 NULL,
10839 &extensionCount,
10840 NULL);
10841 availableExtensions = SDL_malloc(
10842 extensionCount * sizeof(VkExtensionProperties));
10843 vkEnumerateInstanceExtensionProperties(
10844 NULL,
10845 &extensionCount,
10846 availableExtensions);
10847
10848 for (i = 0; i < requiredExtensionsLength; i += 1) {
10849 if (!SupportsInstanceExtension(
10850 requiredExtensions[i],
10851 availableExtensions,
10852 extensionCount)) {
10853 allExtensionsSupported = 0;
10854 break;
10855 }
10856 }
10857
10858 // This is optional, but nice to have!
10859 *supportsDebugUtils = SupportsInstanceExtension(
10860 VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
10861 availableExtensions,
10862 extensionCount);
10863
10864 // Also optional and nice to have!
10865 *supportsColorspace = SupportsInstanceExtension(
10866 VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME,
10867 availableExtensions,
10868 extensionCount);
10869
10870 SDL_free(availableExtensions);
10871 return allExtensionsSupported;
10872}
10873
10874static Uint8 VULKAN_INTERNAL_CheckDeviceExtensions(
10875 VulkanRenderer *renderer,
10876 VkPhysicalDevice physicalDevice,
10877 VulkanExtensions *physicalDeviceExtensions)
10878{
10879 Uint32 extensionCount;
10880 VkExtensionProperties *availableExtensions;
10881 Uint8 allExtensionsSupported;
10882
10883 renderer->vkEnumerateDeviceExtensionProperties(
10884 physicalDevice,
10885 NULL,
10886 &extensionCount,
10887 NULL);
10888 availableExtensions = (VkExtensionProperties *)SDL_malloc(
10889 extensionCount * sizeof(VkExtensionProperties));
10890 renderer->vkEnumerateDeviceExtensionProperties(
10891 physicalDevice,
10892 NULL,
10893 &extensionCount,
10894 availableExtensions);
10895
10896 allExtensionsSupported = CheckDeviceExtensions(
10897 availableExtensions,
10898 extensionCount,
10899 physicalDeviceExtensions);
10900
10901 SDL_free(availableExtensions);
10902 return allExtensionsSupported;
10903}
10904
10905static Uint8 VULKAN_INTERNAL_CheckValidationLayers(
10906 const char **validationLayers,
10907 Uint32 validationLayersLength)
10908{
10909 Uint32 layerCount;
10910 VkLayerProperties *availableLayers;
10911 Uint32 i, j;
10912 Uint8 layerFound = 0;
10913
10914 vkEnumerateInstanceLayerProperties(&layerCount, NULL);
10915 availableLayers = (VkLayerProperties *)SDL_malloc(
10916 layerCount * sizeof(VkLayerProperties));
10917 vkEnumerateInstanceLayerProperties(&layerCount, availableLayers);
10918
10919 for (i = 0; i < validationLayersLength; i += 1) {
10920 layerFound = 0;
10921
10922 for (j = 0; j < layerCount; j += 1) {
10923 if (SDL_strcmp(validationLayers[i], availableLayers[j].layerName) == 0) {
10924 layerFound = 1;
10925 break;
10926 }
10927 }
10928
10929 if (!layerFound) {
10930 break;
10931 }
10932 }
10933
10934 SDL_free(availableLayers);
10935 return layerFound;
10936}
10937
/* Creates the VkInstance: gathers the window system's required instance
 * extensions, appends the extensions this backend wants, optionally enables
 * the Khronos validation layer in debug mode, and calls vkCreateInstance.
 * Returns 1 on success, 0 on failure (error set via the CHECK/SET macros).
 */
static Uint8 VULKAN_INTERNAL_CreateInstance(VulkanRenderer *renderer)
{
    VkResult vulkanResult;
    VkApplicationInfo appInfo;
    VkInstanceCreateFlags createFlags;
    const char *const *originalInstanceExtensionNames;
    const char **instanceExtensionNames;
    Uint32 instanceExtensionCount;
    VkInstanceCreateInfo createInfo;
    static const char *layerNames[] = { "VK_LAYER_KHRONOS_validation" };

    appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    appInfo.pNext = NULL;
    appInfo.pApplicationName = NULL;
    appInfo.applicationVersion = 0;
    appInfo.pEngineName = "SDLGPU";
    appInfo.engineVersion = SDL_VERSION;
    // Targets Vulkan 1.0 core; newer functionality comes in via extensions.
    appInfo.apiVersion = VK_MAKE_VERSION(1, 0, 0);

    createFlags = 0;

    // Extensions the windowing system needs for surface creation.
    originalInstanceExtensionNames = SDL_Vulkan_GetInstanceExtensions(&instanceExtensionCount);
    if (!originalInstanceExtensionNames) {
        SDL_LogError(
            SDL_LOG_CATEGORY_GPU,
            "SDL_Vulkan_GetInstanceExtensions(): getExtensionCount: %s",
            SDL_GetError());

        return 0;
    }

    /* Extra space for the following extensions:
     * VK_KHR_get_physical_device_properties2
     * VK_EXT_swapchain_colorspace
     * VK_EXT_debug_utils
     * VK_KHR_portability_enumeration
     */
    instanceExtensionNames = SDL_stack_alloc(
        const char *,
        instanceExtensionCount + 4);
    SDL_memcpy((void *)instanceExtensionNames, originalInstanceExtensionNames, instanceExtensionCount * sizeof(const char *));

    // Core since 1.1
    instanceExtensionNames[instanceExtensionCount++] =
        VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;

#ifdef SDL_PLATFORM_APPLE
    // MoltenVK devices are only enumerated with portability enabled.
    instanceExtensionNames[instanceExtensionCount++] =
        VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME;
    createFlags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif

    // Also detects the optional debug-utils/colorspace extensions.
    if (!VULKAN_INTERNAL_CheckInstanceExtensions(
            instanceExtensionNames,
            instanceExtensionCount,
            &renderer->supportsDebugUtils,
            &renderer->supportsColorspace)) {
        SDL_stack_free((char *)instanceExtensionNames);
        SET_STRING_ERROR_AND_RETURN("Required Vulkan instance extensions not supported", false);
    }

    if (renderer->supportsDebugUtils) {
        // Append the debug extension
        instanceExtensionNames[instanceExtensionCount++] =
            VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
    } else {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "%s is not supported!",
            VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }

    if (renderer->supportsColorspace) {
        // Append colorspace extension
        instanceExtensionNames[instanceExtensionCount++] =
            VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME;
    }

    // Note: enabledExtensionCount is read AFTER all appends above.
    createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    createInfo.pNext = NULL;
    createInfo.flags = createFlags;
    createInfo.pApplicationInfo = &appInfo;
    createInfo.ppEnabledLayerNames = layerNames;
    createInfo.enabledExtensionCount = instanceExtensionCount;
    createInfo.ppEnabledExtensionNames = instanceExtensionNames;
    if (renderer->debugMode) {
        // Validation is best-effort: fall back to no layers if missing.
        createInfo.enabledLayerCount = SDL_arraysize(layerNames);
        if (!VULKAN_INTERNAL_CheckValidationLayers(
                layerNames,
                createInfo.enabledLayerCount)) {
            SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Validation layers not found, continuing without validation");
            createInfo.enabledLayerCount = 0;
        } else {
            SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Validation layers enabled, expect debug level performance!");
        }
    } else {
        createInfo.enabledLayerCount = 0;
    }

    vulkanResult = vkCreateInstance(&createInfo, NULL, &renderer->instance);
    SDL_stack_free((char *)instanceExtensionNames);

    if (vulkanResult != VK_SUCCESS) {
        CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateInstance, 0);
    }

    return 1;
}
11046
/* Decides whether a physical device can be used by this backend, and ranks
 * it against the best device seen so far.
 *
 * On input, *deviceRank holds the best rank found so far; on output it holds
 * this device's rank (or 0 if it is outranked). *queueFamilyIndex receives
 * the chosen graphics/present queue family. physicalDeviceExtensions is
 * filled with the device's extension support flags.
 * Returns 1 if the device is usable, 0 otherwise.
 */
static Uint8 VULKAN_INTERNAL_IsDeviceSuitable(
    VulkanRenderer *renderer,
    VkPhysicalDevice physicalDevice,
    VulkanExtensions *physicalDeviceExtensions,
    Uint32 *queueFamilyIndex,
    Uint8 *deviceRank)
{
    Uint32 queueFamilyCount, queueFamilyRank, queueFamilyBest;
    VkQueueFamilyProperties *queueProps;
    bool supportsPresent;
    VkPhysicalDeviceProperties deviceProperties;
    VkPhysicalDeviceFeatures deviceFeatures;
    Uint32 i;

    // Rank table indexed by VkPhysicalDeviceType, chosen by power preference.
    const Uint8 *devicePriority = renderer->preferLowPower ? DEVICE_PRIORITY_LOWPOWER : DEVICE_PRIORITY_HIGHPERFORMANCE;

    /* Get the device rank before doing any checks, in case one fails.
     * Note: If no dedicated device exists, one that supports our features
     * would be fine
     */
    renderer->vkGetPhysicalDeviceProperties(
        physicalDevice,
        &deviceProperties);
    if (*deviceRank < devicePriority[deviceProperties.deviceType]) {
        /* This device outranks the best device we've found so far!
         * This includes a dedicated GPU that has less features than an
         * integrated GPU, because this is a freak case that is almost
         * never intentionally desired by the end user
         */
        *deviceRank = devicePriority[deviceProperties.deviceType];
    } else if (*deviceRank > devicePriority[deviceProperties.deviceType]) {
        /* Device is outranked by a previous device, don't even try to
         * run a query and reset the rank to avoid overwrites
         */
        *deviceRank = 0;
        return 0;
    }

    // Reject devices missing any feature this backend unconditionally uses.
    renderer->vkGetPhysicalDeviceFeatures(
        physicalDevice,
        &deviceFeatures);
    if (!deviceFeatures.independentBlend ||
        !deviceFeatures.imageCubeArray ||
        !deviceFeatures.depthClamp ||
        !deviceFeatures.shaderClipDistance ||
        !deviceFeatures.drawIndirectFirstInstance) {
        return 0;
    }

    // Reject devices missing required extensions (swapchain, maintenance1).
    if (!VULKAN_INTERNAL_CheckDeviceExtensions(
            renderer,
            physicalDevice,
            physicalDeviceExtensions)) {
        return 0;
    }

    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        NULL);

    queueProps = (VkQueueFamilyProperties *)SDL_stack_alloc(
        VkQueueFamilyProperties,
        queueFamilyCount);
    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        queueProps);

    // Pick the best graphics+present queue family by capability rank.
    queueFamilyBest = 0;
    *queueFamilyIndex = SDL_MAX_UINT32;
    for (i = 0; i < queueFamilyCount; i += 1) {
        supportsPresent = SDL_Vulkan_GetPresentationSupport(
            renderer->instance,
            physicalDevice,
            i);
        if (!supportsPresent ||
            !(queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)) {
            // Not a graphics family, ignore.
            continue;
        }

        /* The queue family bitflags are kind of annoying.
         *
         * We of course need a graphics family, but we ideally want the
         * _primary_ graphics family. The spec states that at least one
         * graphics family must also be a compute family, so generally
         * drivers make that the first one. But hey, maybe something
         * genuinely can't do compute or something, and FNA doesn't
         * need it, so we'll be open to a non-compute queue family.
         *
         * Additionally, it's common to see the primary queue family
         * have the transfer bit set, which is great! But this is
         * actually optional; it's impossible to NOT have transfers in
         * graphics/compute but it _is_ possible for a graphics/compute
         * family, even the primary one, to just decide not to set the
         * bitflag. Admittedly, a driver may want to isolate transfer
         * queues to a dedicated family so that queues made solely for
         * transfers can have an optimized DMA queue.
         *
         * That, or the driver author got lazy and decided not to set
         * the bit. Looking at you, Android.
         *
         * -flibit
         */
        if (queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) {
            if (queueProps[i].queueFlags & VK_QUEUE_TRANSFER_BIT) {
                // Has all attribs!
                queueFamilyRank = 3;
            } else {
                // Probably has a DMA transfer queue family
                queueFamilyRank = 2;
            }
        } else {
            // Just a graphics family, probably has something better
            queueFamilyRank = 1;
        }
        if (queueFamilyRank > queueFamilyBest) {
            *queueFamilyIndex = i;
            queueFamilyBest = queueFamilyRank;
        }
    }

    SDL_stack_free(queueProps);

    if (*queueFamilyIndex == SDL_MAX_UINT32) {
        // Somehow no graphics queues existed. Compute-only device?
        return 0;
    }

    // FIXME: Need better structure for checking vs storing swapchain support details
    return 1;
}
11180
/* Enumerates all physical devices and selects the highest-ranked suitable
 * one, storing the chosen device, its extension support, and its queue
 * family index on the renderer, then caches its properties.
 * Returns 1 on success, 0 if no suitable device was found.
 */
static Uint8 VULKAN_INTERNAL_DeterminePhysicalDevice(VulkanRenderer *renderer)
{
    VkResult vulkanResult;
    VkPhysicalDevice *physicalDevices;
    VulkanExtensions *physicalDeviceExtensions;
    Uint32 i, physicalDeviceCount;
    Sint32 suitableIndex;
    Uint32 queueFamilyIndex, suitableQueueFamilyIndex;
    Uint8 deviceRank, highestRank;

    vulkanResult = renderer->vkEnumeratePhysicalDevices(
        renderer->instance,
        &physicalDeviceCount,
        NULL);
    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkEnumeratePhysicalDevices, 0);

    if (physicalDeviceCount == 0) {
        SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Failed to find any GPUs with Vulkan support");
        return 0;
    }

    // Parallel arrays: extension support for physicalDevices[i] lands in
    // physicalDeviceExtensions[i].
    physicalDevices = SDL_stack_alloc(VkPhysicalDevice, physicalDeviceCount);
    physicalDeviceExtensions = SDL_stack_alloc(VulkanExtensions, physicalDeviceCount);

    vulkanResult = renderer->vkEnumeratePhysicalDevices(
        renderer->instance,
        &physicalDeviceCount,
        physicalDevices);

    /* This should be impossible to hit, but from what I can tell this can
     * be triggered not because the array is too small, but because there
     * were drivers that turned out to be bogus, so this is the loader's way
     * of telling us that the list is now smaller than expected :shrug:
     */
    if (vulkanResult == VK_INCOMPLETE) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkEnumeratePhysicalDevices returned VK_INCOMPLETE, will keep trying anyway...");
        vulkanResult = VK_SUCCESS;
    }

    if (vulkanResult != VK_SUCCESS) {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "vkEnumeratePhysicalDevices failed: %s",
            VkErrorMessages(vulkanResult));
        SDL_stack_free(physicalDevices);
        SDL_stack_free(physicalDeviceExtensions);
        return 0;
    }

    // Any suitable device will do, but we'd like the best
    suitableIndex = -1;
    suitableQueueFamilyIndex = 0;
    highestRank = 0;
    for (i = 0; i < physicalDeviceCount; i += 1) {
        // Seed with the current best so lower-ranked devices are skipped.
        deviceRank = highestRank;
        if (VULKAN_INTERNAL_IsDeviceSuitable(
                renderer,
                physicalDevices[i],
                &physicalDeviceExtensions[i],
                &queueFamilyIndex,
                &deviceRank)) {
            /* Use this for rendering.
             * Note that this may override a previous device that
             * supports rendering, but shares the same device rank.
             */
            suitableIndex = i;
            suitableQueueFamilyIndex = queueFamilyIndex;
            highestRank = deviceRank;
        } else if (deviceRank > highestRank) {
            /* In this case, we found a... "realer?" GPU,
             * but it doesn't actually support our Vulkan.
             * We should disqualify all devices below as a
             * result, because if we don't we end up
             * ignoring real hardware and risk using
             * something like LLVMpipe instead!
             * -flibit
             */
            suitableIndex = -1;
            highestRank = deviceRank;
        }
    }

    if (suitableIndex != -1) {
        renderer->supports = physicalDeviceExtensions[suitableIndex];
        renderer->physicalDevice = physicalDevices[suitableIndex];
        renderer->queueFamilyIndex = suitableQueueFamilyIndex;
    } else {
        SDL_stack_free(physicalDevices);
        SDL_stack_free(physicalDeviceExtensions);
        return 0;
    }

    // Query properties, chaining in driver properties when the extension
    // is available (otherwise fall back to the core 1.0 query).
    renderer->physicalDeviceProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    if (renderer->supports.KHR_driver_properties) {
        renderer->physicalDeviceDriverProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
        renderer->physicalDeviceDriverProperties.pNext = NULL;

        renderer->physicalDeviceProperties.pNext =
            &renderer->physicalDeviceDriverProperties;

        renderer->vkGetPhysicalDeviceProperties2KHR(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties);
    } else {
        renderer->physicalDeviceProperties.pNext = NULL;

        renderer->vkGetPhysicalDeviceProperties(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties.properties);
    }

    renderer->vkGetPhysicalDeviceMemoryProperties(
        renderer->physicalDevice,
        &renderer->memoryProperties);

    SDL_stack_free(physicalDevices);
    SDL_stack_free(physicalDeviceExtensions);
    return 1;
}
11302
/* Creates the VkDevice on the previously selected physical device with a
 * single unified graphics/present queue, enabling the required and detected
 * optional features/extensions, then loads all device-level entry points.
 * Returns 1 on success, 0 on failure.
 */
static Uint8 VULKAN_INTERNAL_CreateLogicalDevice(
    VulkanRenderer *renderer)
{
    VkResult vulkanResult;
    VkDeviceCreateInfo deviceCreateInfo;
    VkPhysicalDeviceFeatures desiredDeviceFeatures;
    VkPhysicalDeviceFeatures haveDeviceFeatures;
    // Must stay in scope until vkCreateDevice: referenced via pNext below.
    VkPhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
    const char **deviceExtensions;

    VkDeviceQueueCreateInfo queueCreateInfo;
    float queuePriority = 1.0f;

    // One queue from the graphics/present family chosen earlier.
    queueCreateInfo.sType =
        VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext = NULL;
    queueCreateInfo.flags = 0;
    queueCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    queueCreateInfo.pQueuePriorities = &queuePriority;

    // check feature support

    renderer->vkGetPhysicalDeviceFeatures(
        renderer->physicalDevice,
        &haveDeviceFeatures);

    // specifying used device features

    SDL_zero(desiredDeviceFeatures);
    desiredDeviceFeatures.independentBlend = VK_TRUE;
    desiredDeviceFeatures.samplerAnisotropy = VK_TRUE;
    desiredDeviceFeatures.imageCubeArray = VK_TRUE;
    desiredDeviceFeatures.depthClamp = VK_TRUE;
    desiredDeviceFeatures.shaderClipDistance = VK_TRUE;
    desiredDeviceFeatures.drawIndirectFirstInstance = VK_TRUE;

    // Optional features: only enable what the device actually reports.
    if (haveDeviceFeatures.fillModeNonSolid) {
        desiredDeviceFeatures.fillModeNonSolid = VK_TRUE;
        renderer->supportsFillModeNonSolid = true;
    }

    if (haveDeviceFeatures.multiDrawIndirect) {
        desiredDeviceFeatures.multiDrawIndirect = VK_TRUE;
        renderer->supportsMultiDrawIndirect = true;
    }

    // creating the logical device

    deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    if (renderer->supports.KHR_portability_subset) {
        // Portability (e.g. MoltenVK): declare which subset features we use.
        portabilityFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR;
        portabilityFeatures.pNext = NULL;
        portabilityFeatures.constantAlphaColorBlendFactors = VK_FALSE;
        portabilityFeatures.events = VK_FALSE;
        portabilityFeatures.imageViewFormatReinterpretation = VK_FALSE;
        portabilityFeatures.imageViewFormatSwizzle = VK_TRUE;
        portabilityFeatures.imageView2DOn3DImage = VK_FALSE;
        portabilityFeatures.multisampleArrayImage = VK_FALSE;
        portabilityFeatures.mutableComparisonSamplers = VK_FALSE;
        portabilityFeatures.pointPolygons = VK_FALSE;
        portabilityFeatures.samplerMipLodBias = VK_FALSE; // Technically should be true, but eh
        portabilityFeatures.separateStencilMaskRef = VK_FALSE;
        portabilityFeatures.shaderSampleRateInterpolationFunctions = VK_FALSE;
        portabilityFeatures.tessellationIsolines = VK_FALSE;
        portabilityFeatures.tessellationPointMode = VK_FALSE;
        portabilityFeatures.triangleFans = VK_FALSE;
        portabilityFeatures.vertexAttributeAccessBeyondStride = VK_FALSE;
        deviceCreateInfo.pNext = &portabilityFeatures;
    } else {
        deviceCreateInfo.pNext = NULL;
    }
    deviceCreateInfo.flags = 0;
    deviceCreateInfo.queueCreateInfoCount = 1;
    deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo;
    deviceCreateInfo.enabledLayerCount = 0;
    deviceCreateInfo.ppEnabledLayerNames = NULL;
    deviceCreateInfo.enabledExtensionCount = GetDeviceExtensionCount(
        &renderer->supports);
    deviceExtensions = SDL_stack_alloc(
        const char *,
        deviceCreateInfo.enabledExtensionCount);
    CreateDeviceExtensionArray(&renderer->supports, deviceExtensions);
    deviceCreateInfo.ppEnabledExtensionNames = deviceExtensions;
    deviceCreateInfo.pEnabledFeatures = &desiredDeviceFeatures;

    vulkanResult = renderer->vkCreateDevice(
        renderer->physicalDevice,
        &deviceCreateInfo,
        NULL,
        &renderer->logicalDevice);
    SDL_stack_free((void *)deviceExtensions);
    CHECK_VULKAN_ERROR_AND_RETURN(vulkanResult, vkCreateDevice, 0);

    // Load vkDevice entry points

#define VULKAN_DEVICE_FUNCTION(func) \
    renderer->func = (PFN_##func)    \
        renderer->vkGetDeviceProcAddr( \
            renderer->logicalDevice, \
            #func);
#include "SDL_gpu_vulkan_vkfuncs.h"

    renderer->vkGetDeviceQueue(
        renderer->logicalDevice,
        renderer->queueFamilyIndex,
        0,
        &renderer->unifiedQueue);

    return 1;
}
11414
// Loads the Vulkan loader library and resolves the global (instance-less)
// entry points. On any failure this logs a warning and returns early,
// leaving vkGetInstanceProcAddr and/or the global function pointers NULL;
// callers detect failure later when instance creation fails.
static void VULKAN_INTERNAL_LoadEntryPoints(void)
{
    // Required for MoltenVK support
    SDL_setenv_unsafe("MVK_CONFIG_FULL_IMAGE_VIEW_SWIZZLE", "1", 1);

    // Load Vulkan entry points
    if (!SDL_Vulkan_LoadLibrary(NULL)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: SDL_Vulkan_LoadLibrary failed!");
        return;
    }

    // Casting a data pointer to a function pointer is technically not
    // ISO C; silence -Wpedantic around the conversion.
#ifdef HAVE_GCC_DIAGNOSTIC_PRAGMA
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
#endif
    vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
#ifdef HAVE_GCC_DIAGNOSTIC_PRAGMA
#pragma GCC diagnostic pop
#endif
    if (vkGetInstanceProcAddr == NULL) {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "SDL_Vulkan_GetVkGetInstanceProcAddr(): %s",
            SDL_GetError());
        return;
    }

    // X-macro pattern: re-including SDL_gpu_vulkan_vkfuncs.h with
    // VULKAN_GLOBAL_FUNCTION defined expands one resolve-and-check stanza
    // per global Vulkan function. A NULL result aborts loading entirely.
#define VULKAN_GLOBAL_FUNCTION(name)                                                                  \
    name = (PFN_##name)vkGetInstanceProcAddr(VK_NULL_HANDLE, #name);                                  \
    if (name == NULL) {                                                                               \
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkGetInstanceProcAddr(VK_NULL_HANDLE, \"" #name "\") failed"); \
        return;                                                                                       \
    }
#include "SDL_gpu_vulkan_vkfuncs.h"
}
11450
// Performs the shared front half of Vulkan initialization: loads entry
// points, creates a VkInstance on `renderer`, resolves instance-level
// function pointers, and selects a physical device.
//
// Returns true on success. NOTE(review): if physical-device selection
// fails, the already-created VkInstance is not destroyed here; the caller
// frees the renderer — verify whether the instance handle leaks on this
// path (VULKAN_PrepareDriver only destroys it on success).
static bool VULKAN_INTERNAL_PrepareVulkan(
    VulkanRenderer *renderer)
{
    VULKAN_INTERNAL_LoadEntryPoints();

    if (!VULKAN_INTERNAL_CreateInstance(renderer)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Could not create Vulkan instance");
        return false;
    }

    // X-macro: resolve every instance-level function into the renderer
    // struct using the freshly created instance.
#define VULKAN_INSTANCE_FUNCTION(func) \
    renderer->func = (PFN_##func)vkGetInstanceProcAddr(renderer->instance, #func);
#include "SDL_gpu_vulkan_vkfuncs.h"

    if (!VULKAN_INTERNAL_DeterminePhysicalDevice(renderer)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Failed to determine a suitable physical device");
        return false;
    }
    return true;
}
11471
11472static bool VULKAN_PrepareDriver(SDL_VideoDevice *_this)
11473{
11474 // Set up dummy VulkanRenderer
11475 VulkanRenderer *renderer;
11476 Uint8 result;
11477
11478 if (_this->Vulkan_CreateSurface == NULL) {
11479 return false;
11480 }
11481
11482 if (!SDL_Vulkan_LoadLibrary(NULL)) {
11483 return false;
11484 }
11485
11486 renderer = (VulkanRenderer *)SDL_malloc(sizeof(VulkanRenderer));
11487 SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
11488
11489 result = VULKAN_INTERNAL_PrepareVulkan(renderer);
11490
11491 if (result) {
11492 renderer->vkDestroyInstance(renderer->instance, NULL);
11493 }
11494 SDL_free(renderer);
11495 SDL_Vulkan_UnloadLibrary();
11496 return result;
11497}
11498
// Creates the Vulkan SDL_GPUDevice: loads the loader, prepares instance and
// physical device, creates the logical device, then initializes all
// renderer-owned state (locks, pools, caches, deferred-destroy lists).
// Returns NULL on failure with the error set; on success returns a device
// whose driverData is the VulkanRenderer.
static SDL_GPUDevice *VULKAN_CreateDevice(bool debugMode, bool preferLowPower, SDL_PropertiesID props)
{
    VulkanRenderer *renderer;

    SDL_GPUDevice *result;
    Uint32 i;

    if (!SDL_Vulkan_LoadLibrary(NULL)) {
        // NOTE(review): message says "PrepareDevice" but the probe function
        // is VULKAN_PrepareDriver — consider updating the assert text.
        SDL_assert(!"This should have failed in PrepareDevice first!");
        return NULL;
    }

    // NOTE(review): SDL_malloc result is used unchecked here (and for the
    // allocations below) — crashes on OOM rather than reporting an error.
    renderer = (VulkanRenderer *)SDL_malloc(sizeof(VulkanRenderer));
    SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
    renderer->debugMode = debugMode;
    renderer->preferLowPower = preferLowPower;
    renderer->allowedFramesInFlight = 2;

    if (!VULKAN_INTERNAL_PrepareVulkan(renderer)) {
        SDL_free(renderer);
        SDL_Vulkan_UnloadLibrary();
        SET_STRING_ERROR_AND_RETURN("Failed to initialize Vulkan!", NULL);
    }

    // Log device/driver identification for bug reports.
    SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "SDL_GPU Driver: Vulkan");
    SDL_LogInfo(
        SDL_LOG_CATEGORY_GPU,
        "Vulkan Device: %s",
        renderer->physicalDeviceProperties.properties.deviceName);
    if (renderer->supports.KHR_driver_properties) {
        SDL_LogInfo(
            SDL_LOG_CATEGORY_GPU,
            "Vulkan Driver: %s %s",
            renderer->physicalDeviceDriverProperties.driverName,
            renderer->physicalDeviceDriverProperties.driverInfo);
        SDL_LogInfo(
            SDL_LOG_CATEGORY_GPU,
            "Vulkan Conformance: %u.%u.%u",
            renderer->physicalDeviceDriverProperties.conformanceVersion.major,
            renderer->physicalDeviceDriverProperties.conformanceVersion.minor,
            renderer->physicalDeviceDriverProperties.conformanceVersion.patch);
    } else {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "KHR_driver_properties unsupported! Bother your vendor about this!");
    }

    if (!VULKAN_INTERNAL_CreateLogicalDevice(
            renderer)) {
        SDL_free(renderer);
        SDL_Vulkan_UnloadLibrary();
        SET_STRING_ERROR_AND_RETURN("Failed to create logical device!", NULL);
    }

    // FIXME: just move this into this function
    result = (SDL_GPUDevice *)SDL_malloc(sizeof(SDL_GPUDevice));
    ASSIGN_DRIVER(VULKAN)

    result->driverData = (SDL_GPURenderer *)renderer;

    /*
     * Create initial swapchain array
     */

    renderer->claimedWindowCapacity = 1;
    renderer->claimedWindowCount = 0;
    renderer->claimedWindows = SDL_malloc(
        renderer->claimedWindowCapacity * sizeof(WindowData *));

    // Threading
    // One mutex per independently-contended resource to minimize lock scope.

    renderer->allocatorLock = SDL_CreateMutex();
    renderer->disposeLock = SDL_CreateMutex();
    renderer->submitLock = SDL_CreateMutex();
    renderer->acquireCommandBufferLock = SDL_CreateMutex();
    renderer->acquireUniformBufferLock = SDL_CreateMutex();
    renderer->renderPassFetchLock = SDL_CreateMutex();
    renderer->framebufferFetchLock = SDL_CreateMutex();
    renderer->windowLock = SDL_CreateMutex();

    /*
     * Create submitted command buffer list
     */

    renderer->submittedCommandBufferCapacity = 16;
    renderer->submittedCommandBufferCount = 0;
    renderer->submittedCommandBuffers = SDL_malloc(sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity);

    // Memory Allocator
    // One sub-allocator per possible Vulkan memory type index.

    renderer->memoryAllocator = (VulkanMemoryAllocator *)SDL_malloc(
        sizeof(VulkanMemoryAllocator));

    for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
        renderer->memoryAllocator->subAllocators[i].memoryTypeIndex = i;
        renderer->memoryAllocator->subAllocators[i].allocations = NULL;
        renderer->memoryAllocator->subAllocators[i].allocationCount = 0;
        renderer->memoryAllocator->subAllocators[i].sortedFreeRegions = SDL_malloc(
            sizeof(VulkanMemoryFreeRegion *) * 4);
        renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCount = 0;
        renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCapacity = 4;
    }

    // Create uniform buffer pool
    // Pre-populate the pool so common workloads don't allocate at draw time.

    renderer->uniformBufferPoolCount = 32;
    renderer->uniformBufferPoolCapacity = 32;
    renderer->uniformBufferPool = SDL_malloc(
        renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *));

    for (i = 0; i < renderer->uniformBufferPoolCount; i += 1) {
        renderer->uniformBufferPool[i] = VULKAN_INTERNAL_CreateUniformBuffer(
            renderer,
            UNIFORM_BUFFER_SIZE);
    }

    renderer->descriptorSetCachePoolCapacity = 8;
    renderer->descriptorSetCachePoolCount = 0;
    renderer->descriptorSetCachePool = SDL_calloc(renderer->descriptorSetCachePoolCapacity, sizeof(DescriptorSetCache *));

    SDL_SetAtomicInt(&renderer->layoutResourceID, 0);

    // Device limits

    renderer->minUBOAlignment = (Uint32)renderer->physicalDeviceProperties.properties.limits.minUniformBufferOffsetAlignment;

    // Initialize caches
    // Hash tables deduplicate command pools, render passes, framebuffers,
    // pipeline resource layouts, and descriptor set layouts.

    renderer->commandPoolHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_CommandPoolHashFunction,
        VULKAN_INTERNAL_CommandPoolHashKeyMatch,
        VULKAN_INTERNAL_CommandPoolHashNuke,
        false, false);

    renderer->renderPassHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_RenderPassHashFunction,
        VULKAN_INTERNAL_RenderPassHashKeyMatch,
        VULKAN_INTERNAL_RenderPassHashNuke,
        false, false);

    renderer->framebufferHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_FramebufferHashFunction,
        VULKAN_INTERNAL_FramebufferHashKeyMatch,
        VULKAN_INTERNAL_FramebufferHashNuke,
        false, false);

    renderer->graphicsPipelineResourceLayoutHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashFunction,
        VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashKeyMatch,
        VULKAN_INTERNAL_GraphicsPipelineResourceLayoutHashNuke,
        false, false);

    renderer->computePipelineResourceLayoutHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_ComputePipelineResourceLayoutHashFunction,
        VULKAN_INTERNAL_ComputePipelineResourceLayoutHashKeyMatch,
        VULKAN_INTERNAL_ComputePipelineResourceLayoutHashNuke,
        false, false);

    renderer->descriptorSetLayoutHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_DescriptorSetLayoutHashFunction,
        VULKAN_INTERNAL_DescriptorSetLayoutHashKeyMatch,
        VULKAN_INTERNAL_DescriptorSetLayoutHashNuke,
        false, false);

    // Initialize fence pool

    renderer->fencePool.lock = SDL_CreateMutex();

    renderer->fencePool.availableFenceCapacity = 4;
    renderer->fencePool.availableFenceCount = 0;
    renderer->fencePool.availableFences = SDL_malloc(
        renderer->fencePool.availableFenceCapacity * sizeof(VulkanFenceHandle *));

    // Deferred destroy storage
    // Resources are queued here and destroyed once the GPU is done with them.

    renderer->texturesToDestroyCapacity = 16;
    renderer->texturesToDestroyCount = 0;

    renderer->texturesToDestroy = (VulkanTexture **)SDL_malloc(
        sizeof(VulkanTexture *) *
        renderer->texturesToDestroyCapacity);

    renderer->buffersToDestroyCapacity = 16;
    renderer->buffersToDestroyCount = 0;

    renderer->buffersToDestroy = SDL_malloc(
        sizeof(VulkanBuffer *) *
        renderer->buffersToDestroyCapacity);

    renderer->samplersToDestroyCapacity = 16;
    renderer->samplersToDestroyCount = 0;

    renderer->samplersToDestroy = SDL_malloc(
        sizeof(VulkanSampler *) *
        renderer->samplersToDestroyCapacity);

    renderer->graphicsPipelinesToDestroyCapacity = 16;
    renderer->graphicsPipelinesToDestroyCount = 0;

    renderer->graphicsPipelinesToDestroy = SDL_malloc(
        sizeof(VulkanGraphicsPipeline *) *
        renderer->graphicsPipelinesToDestroyCapacity);

    renderer->computePipelinesToDestroyCapacity = 16;
    renderer->computePipelinesToDestroyCount = 0;

    renderer->computePipelinesToDestroy = SDL_malloc(
        sizeof(VulkanComputePipeline *) *
        renderer->computePipelinesToDestroyCapacity);

    renderer->shadersToDestroyCapacity = 16;
    renderer->shadersToDestroyCount = 0;

    renderer->shadersToDestroy = SDL_malloc(
        sizeof(VulkanShader *) *
        renderer->shadersToDestroyCapacity);

    renderer->framebuffersToDestroyCapacity = 16;
    renderer->framebuffersToDestroyCount = 0;
    renderer->framebuffersToDestroy = SDL_malloc(
        sizeof(VulkanFramebuffer *) *
        renderer->framebuffersToDestroyCapacity);

    // Defrag state

    renderer->defragInProgress = 0;

    renderer->allocationsToDefragCount = 0;
    renderer->allocationsToDefragCapacity = 4;
    renderer->allocationsToDefrag = SDL_malloc(
        renderer->allocationsToDefragCapacity * sizeof(VulkanMemoryAllocation *));

    return result;
}
11743
// Bootstrap descriptor registering the Vulkan backend with the SDL_GPU core.
SDL_GPUBootstrap VulkanDriver = {
    "vulkan",                   // driver name, matched against SDL_HINT_GPU_DRIVER
    SDL_GPU_SHADERFORMAT_SPIRV, // shader format(s) this backend consumes
    VULKAN_PrepareDriver,       // lightweight probe: can Vulkan initialize here?
    VULKAN_CreateDevice         // full device creation entry point
};
11750
11751#endif // SDL_GPU_VULKAN