The open source OpenXR runtime
1// Copyright 2019-2024, Collabora, Ltd.
2// Copyright 2024-2025, NVIDIA CORPORATION.
3// SPDX-License-Identifier: BSL-1.0
4/*!
5 * @file
6 * @brief Helper implementation for native compositors.
7 * @author Jakob Bornecrantz <jakob@collabora.com>
8 * @author Lubosz Sarnecki <lubosz.sarnecki@collabora.com>
9 * @ingroup comp_util
10 */
11
12
13#include "comp_scratch.h"
14
15#include "util/u_handles.h"
16#include "util/u_limited_unique_id.h"
17
18#include "vk/vk_mini_helpers.h"
19#include "vk/vk_image_allocator.h"
20
21
22/*
23 *
24 * Helpers.
25 *
26 */
27
28static inline void
29fill_info(VkExtent2D extent, VkFormat srgb_format, VkFormat unorm_format, struct xrt_swapchain_create_info *out_info)
30{
31 // Must be true.
32 assert(unorm_format != VK_FORMAT_UNDEFINED);
33
34 enum xrt_swapchain_create_flags create = 0;
35
36 enum xrt_swapchain_usage_bits bits = //
37 XRT_SWAPCHAIN_USAGE_COLOR | //
38 XRT_SWAPCHAIN_USAGE_SAMPLED | //
39 XRT_SWAPCHAIN_USAGE_TRANSFER_SRC | //
40 XRT_SWAPCHAIN_USAGE_TRANSFER_DST | //
41 XRT_SWAPCHAIN_USAGE_UNORDERED_ACCESS; //
42
43 struct xrt_swapchain_create_info info = {
44 .create = create,
45 .bits = bits,
46 .format = unorm_format,
47 .sample_count = 1,
48 .width = extent.width,
49 .height = extent.height,
50 .face_count = 1,
51 .array_size = 1,
52 .mip_count = 1,
53 };
54
55 if (srgb_format != VK_FORMAT_UNDEFINED) {
56 // Use format list to get good performance everywhere.
57 info.bits |= XRT_SWAPCHAIN_USAGE_MUTABLE_FORMAT;
58 info.formats[info.format_count++] = unorm_format;
59 info.formats[info.format_count++] = srgb_format;
60 } else {
61 assert(info.format_count == 0);
62 }
63
64 *out_info = info;
65}
66
67
68/*
69 *
70 * Indices functions
71 *
72 */
73
74#define INVALID_INDEX ((uint32_t)(-1))
75
76static inline void
77indices_init(struct comp_scratch_indices *i)
78{
79 i->current = INVALID_INDEX;
80 i->last = INVALID_INDEX;
81}
82
83static inline void
84indices_get(struct comp_scratch_indices *i, uint32_t *out_index)
85{
86 assert(i->current == INVALID_INDEX);
87
88 uint32_t current = i->last;
89 if (current == INVALID_INDEX) {
90 current = 0;
91 } else {
92 if (++current >= COMP_SCRATCH_NUM_IMAGES) {
93 current = 0;
94 }
95 }
96
97 i->current = current;
98 *out_index = current;
99}
100
101static inline uint32_t
102indices_done(struct comp_scratch_indices *i)
103{
104 assert(i->current != INVALID_INDEX);
105 assert(i->current < COMP_SCRATCH_NUM_IMAGES);
106
107 i->last = i->current;
108 i->current = INVALID_INDEX;
109
110 return i->last;
111}
112
113static inline void
114indices_discard(struct comp_scratch_indices *i)
115{
116 assert(i->current != INVALID_INDEX);
117 assert(i->current < COMP_SCRATCH_NUM_IMAGES);
118
119 i->current = INVALID_INDEX;
120}
121
122
123/*
124 *
125 * Temp struct helpers.
126 *
127 */
128
129struct tmp
130{
131 //! Images created.
132 struct vk_image_collection vkic;
133
134 //! Handles retrieved.
135 xrt_graphics_buffer_handle_t handles[COMP_SCRATCH_NUM_IMAGES];
136
137 //! For automatic conversion to linear, only populated on mutable.
138 VkImageView srgb_views[COMP_SCRATCH_NUM_IMAGES];
139
140 //! For storage operations in compute shaders.
141 VkImageView unorm_views[COMP_SCRATCH_NUM_IMAGES];
142};
143
144static inline bool
145tmp_init_and_create(struct tmp *t,
146 struct vk_bundle *vk,
147 const struct xrt_swapchain_create_info *info,
148 const VkFormat srgb_format,
149 const VkFormat unorm_format)
150{
151 VkResult ret;
152
153 // Completely init the struct so it's safe to destroy on failure.
154 U_ZERO(t);
155 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
156 t->handles[i] = XRT_GRAPHICS_BUFFER_HANDLE_INVALID;
157 }
158
159 // Do the allocation.
160 ret = vk_ic_allocate(vk, info, COMP_SCRATCH_NUM_IMAGES, &t->vkic);
161 VK_CHK_WITH_RET(ret, "vk_ic_allocate", false);
162
163 ret = vk_ic_get_handles(vk, &t->vkic, COMP_SCRATCH_NUM_IMAGES, t->handles);
164 VK_CHK_WITH_GOTO(ret, "vk_ic_get_handles", err_destroy_vkic);
165
166
167 /*
168 * Create the image views.
169 */
170
171 // Base info.
172 const VkImageViewType view_type = VK_IMAGE_VIEW_TYPE_2D;
173
174 // Both usages are common.
175 const VkImageUsageFlags unorm_usage = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
176
177 // Very few cards support SRGB storage.
178 const VkImageUsageFlags srgb_usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
179
180 const VkImageSubresourceRange subresource_range = {
181 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
182 .baseMipLevel = 0,
183 .levelCount = VK_REMAINING_MIP_LEVELS,
184 .baseArrayLayer = 0,
185 .layerCount = VK_REMAINING_ARRAY_LAYERS,
186 };
187
188 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
189 VkImage image = t->vkic.images[i].handle;
190
191 if (srgb_format != VK_FORMAT_UNDEFINED) {
192 ret = vk_create_view_usage( //
193 vk, // vk_bundle
194 image, // image
195 view_type, // type
196 srgb_format, // format
197 srgb_usage, // image_usage
198 subresource_range, // subresource_range
199 &t->srgb_views[i]); // out_image_view
200 VK_CHK_WITH_GOTO(ret, "vk_create_view_usage(srgb)", err_destroy_views);
201
202 VK_NAME_IMAGE_VIEW(vk, t->srgb_views[i], "comp_scratch_image_view(srgb)");
203 }
204
205 ret = vk_create_view_usage( //
206 vk, // vk_bundle
207 image, // image
208 view_type, // type
209 unorm_format, // format
210 unorm_usage, // image_usage
211 subresource_range, // subresource_range
212 &t->unorm_views[i]); // out_image_view
213 VK_CHK_WITH_GOTO(ret, "vk_create_view_usage(unorm)", err_destroy_views);
214
215 VK_NAME_IMAGE_VIEW(vk, t->unorm_views[i], "comp_scratch_image_view(unorm)");
216 }
217
218 return true;
219
220err_destroy_views:
221 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
222 D(ImageView, t->srgb_views[i]);
223 D(ImageView, t->unorm_views[i]);
224 }
225
226err_destroy_vkic:
227 vk_ic_destroy(vk, &t->vkic);
228
229 return false;
230}
231
232static inline void
233tmp_take(struct tmp *t,
234 struct xrt_image_native native_images[COMP_SCRATCH_NUM_IMAGES],
235 struct render_scratch_color_image images[COMP_SCRATCH_NUM_IMAGES])
236{
237 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
238 images[i].image = t->vkic.images[i].handle;
239 images[i].device_memory = t->vkic.images[i].memory;
240 images[i].srgb_view = t->srgb_views[i];
241 images[i].unorm_view = t->unorm_views[i];
242
243 native_images[i].size = t->vkic.images[i].size;
244 native_images[i].use_dedicated_allocation = t->vkic.images[i].use_dedicated_allocation;
245 native_images[i].handle = t->handles[i]; // Move
246
247 t->srgb_views[i] = VK_NULL_HANDLE;
248 t->unorm_views[i] = VK_NULL_HANDLE;
249 t->handles[i] = XRT_GRAPHICS_BUFFER_HANDLE_INVALID;
250 }
251
252 // We now own everything.
253 U_ZERO(&t->vkic);
254}
255
256static inline void
257tmp_destroy(struct tmp *t, struct vk_bundle *vk)
258{
259 vk_ic_destroy(vk, &t->vkic);
260 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
261 u_graphics_buffer_unref(&t->handles[i]);
262 D(ImageView, t->srgb_views[i]);
263 D(ImageView, t->unorm_views[i]);
264 }
265}
266
267
268/*
269 *
270 * Helper single functions.
271 *
272 */
273
274bool
275ensure(struct comp_scratch_single_images *cssi,
276 struct vk_bundle *vk,
277 VkExtent2D extent,
278 const VkFormat srgb_format,
279 const VkFormat unorm_format)
280{
281 if (cssi->info.width == extent.width && //
282 cssi->info.height == extent.height && //
283 cssi->info.formats[0] == unorm_format && //
284 cssi->info.formats[1] == srgb_format) { //
285 // Our work here is done!
286 return true;
287 }
288
289 struct xrt_swapchain_create_info info = XRT_STRUCT_INIT;
290 fill_info(extent, srgb_format, unorm_format, &info);
291
292 struct tmp t; // Is initialized in function.
293 if (!tmp_init_and_create(&t, vk, &info, srgb_format, unorm_format)) {
294 VK_ERROR(vk, "Failed to allocate images");
295 return false;
296 }
297
298 // Clear old information, we haven't touched this struct yet.
299 comp_scratch_single_images_free(cssi, vk);
300
301 // Copy out images and information.
302 tmp_take(&t, cssi->native_images, cssi->images);
303
304 // Generate new unique id for caching and set info.
305 cssi->limited_unique_id = u_limited_unique_id_get();
306 cssi->info = info;
307
308 return true;
309}
310
311
312/*
313 *
314 * 'Exported' single functions.
315 *
316 */
317
318void
319comp_scratch_single_images_init(struct comp_scratch_single_images *cssi)
320{
321 // Just to be sure.
322 U_ZERO(cssi);
323
324 indices_init(&cssi->indices);
325
326 u_native_images_debug_init(&cssi->unid);
327
328 // Invalid handle may be different to zero.
329 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
330 cssi->native_images[i].handle = XRT_GRAPHICS_BUFFER_HANDLE_INVALID;
331 }
332}
333
334bool
335comp_scratch_single_images_ensure(struct comp_scratch_single_images *cssi,
336 struct vk_bundle *vk,
337 VkExtent2D extent,
338 const VkFormat format)
339{
340 return ensure(cssi, vk, extent, VK_FORMAT_UNDEFINED, format);
341}
342
343/*!
344 * Ensure that the scratch images are allocated and match @p extent size, and @p srgb_format @p unorm_format formats.
345 *
346 * @public @memberof comp_scratch_single_images
347 *
348 * @ingroup comp_util
349 */
350bool
351comp_scratch_single_images_ensure_mutable(struct comp_scratch_single_images *cssi,
352 struct vk_bundle *vk,
353 VkExtent2D extent)
354{
355 return ensure(cssi, vk, extent, VK_FORMAT_R8G8B8A8_SRGB, VK_FORMAT_R8G8B8A8_UNORM);
356}
357
358void
359comp_scratch_single_images_free(struct comp_scratch_single_images *cssi, struct vk_bundle *vk)
360{
361 // Make sure nothing refers to the images.
362 u_native_images_debug_clear(&cssi->unid);
363
364 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
365 u_graphics_buffer_unref(&cssi->native_images[i].handle);
366
367 D(ImageView, cssi->images[i].srgb_view);
368 D(ImageView, cssi->images[i].unorm_view);
369 D(Image, cssi->images[i].image);
370 DF(Memory, cssi->images[i].device_memory);
371 }
372
373 // Clear info, so ensure will recreate.
374 U_ZERO(&cssi->info);
375
376 // Clear unique id so to force recreate.
377 cssi->limited_unique_id.data = 0;
378
379 // Clear indices.
380 indices_init(&cssi->indices);
381}
382
383void
384comp_scratch_single_images_get(struct comp_scratch_single_images *cssi, uint32_t *out_index)
385{
386 indices_get(&cssi->indices, out_index);
387}
388
389void
390comp_scratch_single_images_done(struct comp_scratch_single_images *cssi)
391{
392 uint32_t last = indices_done(&cssi->indices);
393
394 assert(cssi->info.width > 0);
395 assert(cssi->info.height > 0);
396
397 u_native_images_debug_set( //
398 &cssi->unid, //
399 cssi->limited_unique_id, //
400 cssi->native_images, //
401 COMP_SCRATCH_NUM_IMAGES, //
402 &cssi->info, //
403 last, //
404 false); //
405}
406
407void
408comp_scratch_single_images_discard(struct comp_scratch_single_images *cssi)
409{
410 indices_discard(&cssi->indices);
411
412 u_native_images_debug_clear(&cssi->unid);
413}
414
415void
416comp_scratch_single_images_clear_debug(struct comp_scratch_single_images *cssi)
417{
418 u_native_images_debug_clear(&cssi->unid);
419}
420
421void
422comp_scratch_single_images_destroy(struct comp_scratch_single_images *cssi)
423{
424 u_native_images_debug_destroy(&cssi->unid);
425}
426
427
428/*
429 *
430 * 'Exported' stereo functions.
431 *
432 */
433
434void
435comp_scratch_stereo_images_init(struct comp_scratch_stereo_images *cssi)
436{
437 // Just to be sure.
438 U_ZERO(cssi);
439
440 indices_init(&cssi->indices);
441
442 u_native_images_debug_init(&cssi->views[0].unid);
443 u_native_images_debug_init(&cssi->views[1].unid);
444
445 for (uint32_t view = 0; view < 2; view++) {
446 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
447 cssi->views[view].native_images[i].handle = XRT_GRAPHICS_BUFFER_HANDLE_INVALID;
448 }
449 }
450}
451
452bool
453comp_scratch_stereo_images_ensure(struct comp_scratch_stereo_images *cssi, struct vk_bundle *vk, VkExtent2D extent)
454{
455 const VkFormat srgb_format = VK_FORMAT_R8G8B8A8_SRGB;
456 const VkFormat unorm_format = VK_FORMAT_R8G8B8A8_UNORM;
457
458 if (cssi->info.width == extent.width && cssi->info.height == extent.height) {
459 // Our work here is done!
460 return true;
461 }
462
463 // Get info we need to share with.
464 struct xrt_swapchain_create_info info = XRT_STRUCT_INIT;
465 fill_info(extent, srgb_format, unorm_format, &info);
466
467 struct tmp ts[2]; // Is initialized in function.
468 if (!tmp_init_and_create(&ts[0], vk, &info, srgb_format, unorm_format)) {
469 VK_ERROR(vk, "Failed to allocate images for view 0");
470 return false;
471 }
472
473 if (!tmp_init_and_create(&ts[1], vk, &info, srgb_format, unorm_format)) {
474 VK_ERROR(vk, "Failed to allocate images for view 1");
475 goto err_destroy;
476 }
477
478 // Clear old information, we haven't touched this struct yet.
479 comp_scratch_stereo_images_free(cssi, vk);
480
481 for (uint32_t view = 0; view < 2; view++) {
482 struct render_scratch_color_image images[COMP_SCRATCH_NUM_IMAGES];
483
484 tmp_take(&ts[view], cssi->views[view].native_images, images);
485
486 // Deal with SoA vs AoS.
487 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
488 cssi->rsis[i].extent = extent;
489 cssi->rsis[i].color[view] = images[i];
490 }
491 }
492
493 // Generate new unique id for caching and set info.
494 cssi->limited_unique_id = u_limited_unique_id_get();
495 cssi->info = info;
496
497 return true;
498
499err_destroy:
500 tmp_destroy(&ts[0], vk);
501 tmp_destroy(&ts[1], vk);
502
503 return false;
504}
505
506void
507comp_scratch_stereo_images_free(struct comp_scratch_stereo_images *cssi, struct vk_bundle *vk)
508{
509 // Make sure nothing refers to the images.
510 u_native_images_debug_clear(&cssi->views[0].unid);
511 u_native_images_debug_clear(&cssi->views[1].unid);
512
513 for (uint32_t view = 0; view < 2; view++) {
514 for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
515 // Organised into views, then native images.
516 u_graphics_buffer_unref(&cssi->views[view].native_images[i].handle);
517
518 // Organised into scratch images, then views.
519 D(ImageView, cssi->rsis[i].color[view].srgb_view);
520 D(ImageView, cssi->rsis[i].color[view].unorm_view);
521 D(Image, cssi->rsis[i].color[view].image);
522 DF(Memory, cssi->rsis[i].color[view].device_memory);
523 }
524 }
525
526 // Clear info, so ensure will recreate.
527 U_ZERO(&cssi->info);
528
529 // Clear unique id so to force recreate.
530 cssi->limited_unique_id.data = 0;
531
532 // Clear indices.
533 indices_init(&cssi->indices);
534}
535
536void
537comp_scratch_stereo_images_get(struct comp_scratch_stereo_images *cssi, uint32_t *out_index)
538{
539 indices_get(&cssi->indices, out_index);
540}
541
542void
543comp_scratch_stereo_images_done(struct comp_scratch_stereo_images *cssi)
544{
545 uint32_t last = indices_done(&cssi->indices);
546
547 assert(cssi->info.width > 0);
548 assert(cssi->info.height > 0);
549
550 for (uint32_t view = 0; view < 2; view++) {
551 u_native_images_debug_set( //
552 &cssi->views[view].unid, //
553 cssi->limited_unique_id, //
554 cssi->views[view].native_images, //
555 COMP_SCRATCH_NUM_IMAGES, //
556 &cssi->info, //
557 last, //
558 false); //
559 }
560}
561
562void
563comp_scratch_stereo_images_discard(struct comp_scratch_stereo_images *cssi)
564{
565 indices_discard(&cssi->indices);
566
567 u_native_images_debug_clear(&cssi->views[0].unid);
568 u_native_images_debug_clear(&cssi->views[1].unid);
569}
570
571void
572comp_scratch_stereo_images_clear_debug(struct comp_scratch_stereo_images *cssi)
573{
574 u_native_images_debug_clear(&cssi->views[0].unid);
575 u_native_images_debug_clear(&cssi->views[1].unid);
576}
577
578void
579comp_scratch_stereo_images_destroy(struct comp_scratch_stereo_images *cssi)
580{
581 // Make sure nothing refers to the images.
582 u_native_images_debug_destroy(&cssi->views[0].unid);
583 u_native_images_debug_destroy(&cssi->views[1].unid);
584}