MayaFlux 0.4.0
Digital-First Multimedia Processing Framework
Loading...
Searching...
No Matches
BackendResourceManager.cpp
Go to the documentation of this file.
2
6
10
11namespace MayaFlux::Core {
12
13namespace {
    // Pairs a one-shot command buffer with the fence that signals its GPU
    // completion. Instances are created by the execute_fenced service lambda,
    // type-erased as shared_ptr<void> across the service boundary, and later
    // consumed by wait_fenced / release_fenced. File-local on purpose.
14 struct FencedSubmission {
15 vk::CommandBuffer cmd;
16 vk::Fence fence;
17 };
18}
19
21 : m_context(context)
22 , m_command_manager(command_manager)
23{
24}
25
// Wires the type-erased BufferService callbacks to this manager's Vulkan
// implementations. Buffers cross the service boundary as shared_ptr<void>
// (downcast to Buffers::VKBuffer) and command buffers as void*.
// NOTE(review): several logging/error macro invocation lines were lost in
// this extraction (only their format-argument lines remain) -- confirm the
// exact macros against the original source before editing.
26void BackendResourceManager::setup_backend_service(const std::shared_ptr<Registry::Service::BufferService>& buffer_service)
27{
28 buffer_service->initialize_buffer = [this](const std::shared_ptr<void>& vk_buf) -> void {
29 auto buffer = std::static_pointer_cast<Buffers::VKBuffer>(vk_buf);
30 this->initialize_buffer(buffer);
31 };
32
33 buffer_service->destroy_buffer = [this](const std::shared_ptr<void>& vk_buf) {
34 auto buffer = std::static_pointer_cast<Buffers::VKBuffer>(vk_buf);
35 this->cleanup_buffer(buffer);
36 };
37
38 buffer_service->get_buffer_device_address = [this](const std::shared_ptr<void>& vk_buf) -> uint64_t {
39 auto buffer = std::static_pointer_cast<Buffers::VKBuffer>(vk_buf);
40 auto address = this->get_buffer_device_address(buffer);
41 return static_cast<uint64_t>(address);
42 };
43
    // Immediate path: record and run now. The vk::CommandBuffer is passed to
    // the caller's recorder as an opaque void*.
44 buffer_service->execute_immediate = [this](const std::function<void(void*)>& recorder) {
45 this->execute_immediate_commands([recorder](vk::CommandBuffer cmd) {
46 recorder(static_cast<void*>(cmd));
47 });
48 };
49
50 buffer_service->record_deferred = [this](const std::function<void(void*)>& recorder) {
51 this->record_deferred_commands([recorder](vk::CommandBuffer cmd) {
52 recorder(static_cast<void*>(cmd));
53 });
54 };
55
    // size == 0 is treated as "the whole allocation" (VK_WHOLE_SIZE) for the
    // flush/invalidate/map callbacks below.
56 buffer_service->flush_range = [this](void* memory, size_t offset, size_t size) {
57 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
58 vk::MappedMemoryRange range { mem, offset, size == 0 ? VK_WHOLE_SIZE : size };
59 if (auto result = m_context.get_device().flushMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
61 "Failed to flush mapped memory range: {}", vk::to_string(result));
62 }
63 };
64
65 buffer_service->invalidate_range = [this](void* memory, size_t offset, size_t size) {
66 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
67 vk::MappedMemoryRange range { mem, offset, size == 0 ? VK_WHOLE_SIZE : size };
68 if (auto result = m_context.get_device().invalidateMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
70 "Failed to invalidate mapped memory range: {}", vk::to_string(result));
71 }
72 };
73
74 buffer_service->map_buffer = [this](void* memory, size_t offset, size_t size) -> void* {
75 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
76 return m_context.get_device().mapMemory(mem, offset, size == 0 ? VK_WHOLE_SIZE : size);
77 };
78
79 buffer_service->unmap_buffer = [this](void* memory) {
80 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
81 m_context.get_device().unmapMemory(mem);
82 };
83
    // Fenced path: submit the recorded work with a fresh fence and hand back a
    // FencedSubmission handle (type-erased) so the caller can wait/release.
    // NOTE(review): the line that acquires `cmd` (between the lambda header
    // and `recorder(...)`) was dropped by the extraction -- presumably it
    // begins a one-shot command buffer; verify against the original source.
84 buffer_service->execute_fenced = [this](const std::function<void(void*)>& recorder)
85 -> std::shared_ptr<void> {
87
88 recorder(static_cast<void*>(cmd));
89
90 cmd.end();
91
92 auto device = m_context.get_device();
93 vk::FenceCreateInfo fence_info {};
94 vk::Fence fence = device.createFence(fence_info);
95
96 vk::SubmitInfo submit_info {};
97 submit_info.commandBufferCount = 1;
98 submit_info.pCommandBuffers = &cmd;
99
100 if (auto result = m_context.get_graphics_queue().submit(1, &submit_info, fence);
101 result != vk::Result::eSuccess) {
103 "execute_fenced: queue submit failed: {}", vk::to_string(result));
            // Failed submit: destroy the fence here; nullptr tells the caller
            // there is nothing to wait on or release.
104 device.destroyFence(fence);
106 return nullptr;
107 }
108
109 auto handle = std::make_shared<FencedSubmission>();
110 handle->cmd = cmd;
111 handle->fence = fence;
112 return handle;
113 };
114
    // Blocks until the submission's fence signals. Null handles and
    // already-released fences are silently ignored.
115 buffer_service->wait_fenced = [this](const std::shared_ptr<void>& handle) {
116 if (!handle)
117 return;
118 auto sub = std::static_pointer_cast<FencedSubmission>(handle);
119 if (!sub->fence)
120 return;
121
122 auto device = m_context.get_device();
123 if (auto result = device.waitForFences(1, &sub->fence, VK_TRUE, UINT64_MAX);
124 result != vk::Result::eSuccess) {
126 "wait_fenced: waitForFences failed: {}", vk::to_string(result));
127 }
128 };
129
    // Destroys the fence and nulls out the handle's members. Idempotent:
    // both members are guarded, so a second call is a no-op.
    // NOTE(review): line 141 (between the cmd guard and `sub->cmd = nullptr`)
    // was dropped by the extraction -- likely the command-buffer free/return.
130 buffer_service->release_fenced = [this](const std::shared_ptr<void>& handle) {
131 if (!handle)
132 return;
133 auto sub = std::static_pointer_cast<FencedSubmission>(handle);
134
135 auto device = m_context.get_device();
136 if (sub->fence) {
137 device.destroyFence(sub->fence);
138 sub->fence = nullptr;
139 }
140 if (sub->cmd) {
142 sub->cmd = nullptr;
143 }
144 };
145}
146
// Allocates the backing VkBuffer + VkDeviceMemory for a VKBuffer wrapper,
// binds them, persistently maps host-visible allocations, and registers the
// buffer in m_managed_buffers. Null or already-initialized buffers are
// skipped. On any Vulkan failure, partially created resources are destroyed
// before error_rethrow propagates the exception.
// NOTE(review): the dropped lines inside the error_rethrow calls (e.g. 171-172)
// presumably carried the component/context arguments -- confirm upstream.
147void BackendResourceManager::initialize_buffer(const std::shared_ptr<Buffers::VKBuffer>& buffer)
148{
149 if (!buffer) {
151 "Attempted to initialize null VulkanBuffer");
152 return;
153 }
154
155 if (buffer->is_initialized()) {
157 "VulkanBuffer already initialized, skipping");
158 return;
159 }
160
    // Step 1: create the VkBuffer from the wrapper's size/usage.
161 vk::BufferCreateInfo buffer_info {};
162 buffer_info.size = buffer->get_size_bytes();
163 buffer_info.usage = buffer->get_usage_flags();
164 buffer_info.sharingMode = vk::SharingMode::eExclusive;
165
166 vk::Buffer vk_buffer;
167 try {
168 vk_buffer = m_context.get_device().createBuffer(buffer_info);
169 } catch (const vk::SystemError& e) {
170 error_rethrow(
173 std::source_location::current(),
174 "Failed to create VkBuffer: " + std::string(e.what()));
175 }
176
    // Step 2: allocate memory matching the buffer's requirements and the
    // wrapper's requested property flags.
177 vk::MemoryRequirements mem_requirements;
178 mem_requirements = m_context.get_device().getBufferMemoryRequirements(vk_buffer);
179
180 vk::MemoryAllocateInfo alloc_info;
181 alloc_info.allocationSize = mem_requirements.size;
182
183 alloc_info.memoryTypeIndex = find_memory_type(
184 mem_requirements.memoryTypeBits,
185 vk::MemoryPropertyFlags(buffer->get_memory_properties()));
186
187 vk::DeviceMemory memory;
188 try {
189 memory = m_context.get_device().allocateMemory(alloc_info);
190 } catch (const vk::SystemError& e) {
        // Roll back the buffer created in step 1 before rethrowing.
191 m_context.get_device().destroyBuffer(vk_buffer);
192 error_rethrow(
195 std::source_location::current(),
196 "Failed to allocate VkDeviceMemory: " + std::string(e.what()));
197 }
198
    // Step 3: bind memory at offset 0; roll back both resources on failure.
199 try {
200 m_context.get_device().bindBufferMemory(vk_buffer, memory, 0);
201 } catch (const vk::SystemError& e) {
202 m_context.get_device().freeMemory(memory);
203 m_context.get_device().destroyBuffer(vk_buffer);
204
205 error_rethrow(
208 std::source_location::current(),
209 "Failed to bind buffer memory: " + std::string(e.what()));
210 }
211
    // Step 4: host-visible buffers are mapped once here and stay mapped for
    // their lifetime (unmapped in cleanup_buffer).
212 void* mapped_ptr = nullptr;
213 if (buffer->is_host_visible()) {
214 try {
215 mapped_ptr = m_context.get_device().mapMemory(memory, 0, buffer->get_size_bytes());
216 } catch (const vk::SystemError& e) {
217 m_context.get_device().freeMemory(memory);
218 m_context.get_device().destroyBuffer(vk_buffer);
219
220 error_rethrow(
223 std::source_location::current(),
224 "Failed to map buffer memory: " + std::string(e.what()));
225 }
226 }
227
    // Step 5: hand the native handles to the wrapper and track it so
    // flush_pending_buffer_operations / cleanup can find it later.
228 Buffers::VKBufferResources resources { .buffer = vk_buffer, .memory = memory, .mapped_ptr = mapped_ptr };
229 buffer->set_buffer_resources(resources);
230 m_managed_buffers.push_back(buffer);
231
233 "VulkanBuffer initialized: {} bytes, modality: {}, VkBuffer: {:p}",
234 buffer->get_size_bytes(),
235 Kakshya::modality_to_string(buffer->get_modality()),
236 (void*)buffer->get_buffer());
237}
238
// Destroys all Vulkan resources owned by a managed VKBuffer (mapped pointer,
// optional index buffer/memory, main buffer/memory) and removes it from
// m_managed_buffers. Buffers not found in the managed list are ignored, so
// double-cleanup is safe.
239void BackendResourceManager::cleanup_buffer(const std::shared_ptr<Buffers::VKBuffer>& buffer)
240{
241 if (!buffer) {
243 "Attempted to cleanup null VulkanBuffer");
244 return;
245 }
246
247 auto it = std::ranges::find(m_managed_buffers, buffer);
248 if (it == m_managed_buffers.end()) {
        // Not tracked here (never initialized or already cleaned) -- nothing to do.
249 return;
250 }
251
252 auto& res = it->get()->get_buffer_resources();
253
    // Unmap before freeing: mapped_ptr is only set for host-visible buffers.
254 if (res.mapped_ptr) {
255 m_context.get_device().unmapMemory(res.memory);
256 }
257
258 if (res.index_buffer) {
259 m_context.get_device().destroyBuffer(res.index_buffer);
260 }
261
262 if (res.index_memory) {
263 m_context.get_device().freeMemory(res.index_memory);
264 }
265
266 if (res.buffer) {
267 m_context.get_device().destroyBuffer(res.buffer);
268 }
269
270 if (res.memory) {
271 m_context.get_device().freeMemory(res.memory);
272 }
273
    // NOTE(review): res.buffer has already been destroyed above; logging its
    // handle value here is informational only -- do not dereference it.
275 "VulkanBuffer cleaned up: {:p}", static_cast<void*>(res.buffer));
276
277 m_managed_buffers.erase(it);
278}
279
// Body of flush_pending_buffer_operations (signature line lost in this
// extraction; see the header declaration). For every managed buffer it
// flushes any dirty (CPU-written) ranges and invalidates any ranges marked
// stale, consuming the wrapper's range lists via get_and_clear_*.
281{
282 for (auto& buffer_wrapper : m_managed_buffers) {
283 auto& resources = buffer_wrapper->get_buffer_resources();
        // Dirty ranges: host writes that must be made visible to the device.
284 auto dirty_ranges = buffer_wrapper->get_and_clear_dirty_ranges();
285 if (!dirty_ranges.empty()) {
286 for (auto [offset, size] : dirty_ranges) {
287 vk::MappedMemoryRange range;
288 range.memory = resources.memory;
289 range.offset = offset;
290 range.size = size;
291 if (auto result = m_context.get_device().flushMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
293 "Failed to flush mapped memory range: {}", vk::to_string(result));
294 }
295 }
297 "Flushed {} dirty ranges for buffer {:p}", dirty_ranges.size(),
298 (void*)buffer_wrapper->get_buffer());
299 }
300
        // Invalid ranges: device writes that must be made visible to the host.
301 auto invalid_ranges = buffer_wrapper->get_and_clear_invalid_ranges();
302 if (!invalid_ranges.empty()) {
303 for (auto [offset, size] : invalid_ranges) {
304 vk::MappedMemoryRange range;
305 range.memory = buffer_wrapper->get_buffer_resources().memory;
306 range.offset = offset;
307 range.size = size;
308 if (auto result = m_context.get_device().invalidateMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
310 "Failed to invalidate mapped memory range: {}", vk::to_string(result));
311 }
312 }
314 "Invalidated {} ranges for buffer {:p}", invalid_ranges.size(),
315 (void*)buffer_wrapper->get_buffer());
316 }
317 }
318}
319
// Tail of the get_buffer_device_address signature (leading line lost in this
// extraction). Returns the buffer's device address via vkGetBufferAddress,
// or 0 when the buffer is null/uninitialized.
321 const std::shared_ptr<Buffers::VKBuffer>& buffer) const
322{
323 if (!buffer || !buffer->is_initialized()) {
325 "get_buffer_device_address: buffer not initialized");
        // 0 is the documented sentinel for "no address".
326 return 0;
327 }
328
329 vk::BufferDeviceAddressInfo info {};
330 info.buffer = buffer->get_buffer();
331 // return static_cast<uint64_t>(m_context.get_device().getBufferAddress(info));
332 return m_context.get_device().getBufferAddress(info);
333}
334
// Creates the VkImage, allocates and binds its memory, builds a matching
// VkImageView, and stores all handles in the VKImage wrapper. Null or
// already-initialized images are skipped; Vulkan failures roll back partial
// work and propagate via error_rethrow.
// NOTE(review): the `case` labels of both switch statements were dropped by
// this extraction -- the mapping (1D/2D/3D/cube) must be confirmed against
// the original source before editing either switch.
335void BackendResourceManager::initialize_image(const std::shared_ptr<VKImage>& image)
336{
337 if (!image) {
339 "Attempted to initialize null VKImage");
340 return;
341 }
342
343 if (image->is_initialized()) {
345 "VKImage already initialized, skipping");
346 return;
347 }
348
349 // ========================================================================
350 // Step 1: Create VkImage
351 // ========================================================================
352
353 vk::ImageCreateInfo image_info {};
354
355 switch (image->get_type()) {
357 image_info.imageType = vk::ImageType::e1D;
358 break;
361 image_info.imageType = vk::ImageType::e2D;
362 break;
364 image_info.imageType = vk::ImageType::e3D;
365 break;
366 }
367
368 image_info.extent.width = image->get_width();
369 image_info.extent.height = image->get_height();
370 image_info.extent.depth = image->get_depth();
371 image_info.mipLevels = image->get_mip_levels();
372 image_info.arrayLayers = image->get_array_layers();
373 image_info.format = image->get_format();
374 image_info.tiling = vk::ImageTiling::eOptimal;
375 image_info.initialLayout = vk::ImageLayout::eUndefined;
376 image_info.usage = image->get_usage_flags();
377 image_info.sharingMode = vk::SharingMode::eExclusive;
378 image_info.samples = vk::SampleCountFlagBits::e1; // No MSAA for now
    // Cube maps need the compatibility flag so a cube view can be created.
379 image_info.flags = (image->get_type() == VKImage::Type::TYPE_CUBE)
380 ? vk::ImageCreateFlagBits::eCubeCompatible
381 : vk::ImageCreateFlags {};
382
383 vk::Image vk_image;
384 try {
385 vk_image = m_context.get_device().createImage(image_info);
386 } catch (const vk::SystemError& e) {
387 error_rethrow(
390 std::source_location::current(),
391 "Failed to create VkImage: " + std::string(e.what()));
392 }
393
394 // ========================================================================
395 // Step 2: Allocate memory
396 // ========================================================================
397
398 vk::MemoryRequirements mem_requirements;
399 mem_requirements = m_context.get_device().getImageMemoryRequirements(vk_image);
400
401 vk::MemoryAllocateInfo alloc_info {};
402 alloc_info.allocationSize = mem_requirements.size;
403 alloc_info.memoryTypeIndex = find_memory_type(
404 mem_requirements.memoryTypeBits,
405 image->get_memory_properties());
406
407 vk::DeviceMemory memory;
408 try {
409 memory = m_context.get_device().allocateMemory(alloc_info);
410 } catch (const vk::SystemError& e) {
        // Roll back the image created in step 1 before rethrowing.
411 m_context.get_device().destroyImage(vk_image);
412 error_rethrow(
415 std::source_location::current(),
416 "Failed to allocate VkDeviceMemory for image: " + std::string(e.what()));
417 }
418
419 // ========================================================================
420 // Step 3: Bind memory to image
421 // ========================================================================
422
423 try {
424 m_context.get_device().bindImageMemory(vk_image, memory, 0);
425 } catch (const vk::SystemError& e) {
426 m_context.get_device().freeMemory(memory);
427 m_context.get_device().destroyImage(vk_image);
428 error_rethrow(
431 std::source_location::current(),
432 "Failed to bind memory to VkImage: " + std::string(e.what()));
433 }
434
435 // ========================================================================
436 // Step 4: Create image view
437 // ========================================================================
438
439 vk::ImageViewCreateInfo view_info {};
440
    // View type follows image type; 1D/2D promote to their array variants
    // when more than one layer is present.
441 switch (image->get_type()) {
443 view_info.viewType = (image->get_array_layers() > 1)
444 ? vk::ImageViewType::e1DArray
445 : vk::ImageViewType::e1D;
446 break;
448 view_info.viewType = (image->get_array_layers() > 1)
449 ? vk::ImageViewType::e2DArray
450 : vk::ImageViewType::e2D;
451 break;
453 view_info.viewType = vk::ImageViewType::e3D;
454 break;
456 view_info.viewType = vk::ImageViewType::eCube;
457 break;
458 }
459
460 view_info.image = vk_image;
461 view_info.format = image->get_format();
462 view_info.subresourceRange.aspectMask = image->get_aspect_flags();
463 view_info.subresourceRange.baseMipLevel = 0;
464 view_info.subresourceRange.levelCount = image->get_mip_levels();
465 view_info.subresourceRange.baseArrayLayer = 0;
466 view_info.subresourceRange.layerCount = image->get_array_layers();
467
468 view_info.components.r = vk::ComponentSwizzle::eIdentity;
469 view_info.components.g = vk::ComponentSwizzle::eIdentity;
470 view_info.components.b = vk::ComponentSwizzle::eIdentity;
471 view_info.components.a = vk::ComponentSwizzle::eIdentity;
472
473 vk::ImageView image_view;
474 try {
475 image_view = m_context.get_device().createImageView(view_info);
476 } catch (const vk::SystemError& e) {
477 m_context.get_device().freeMemory(memory);
478 m_context.get_device().destroyImage(vk_image);
479 error_rethrow(
482 std::source_location::current(),
483 "Failed to create VkImageView: " + std::string(e.what()));
484 }
485
486 // ========================================================================
487 // Step 5: Store handles in VKImage
488 // ========================================================================
489
490 VKImageResources resources {};
491 resources.image = vk_image;
492 resources.image_view = image_view;
    // Sampler is created lazily elsewhere (create_sampler); none attached here.
493 resources.memory = memory;
494 resources.sampler = nullptr;
495
496 image->set_image_resources(resources);
497 image->set_current_layout(vk::ImageLayout::eUndefined);
498
500 "VKImage initialized: {}x{}x{}, format: {}, {} mips, {} layers",
501 image->get_width(), image->get_height(), image->get_depth(),
502 vk::to_string(image->get_format()),
503 image->get_mip_levels(), image->get_array_layers());
504}
505
// Destroys a VKImage's view, image, and memory (in that order). Null or
// uninitialized images are silently ignored. The sampler is not touched
// here; samplers are cached and destroyed separately.
506void BackendResourceManager::cleanup_image(const std::shared_ptr<VKImage>& image)
507{
508 if (!image || !image->is_initialized()) {
509 return;
510 }
511
512 const auto& resources = image->get_image_resources();
513
    // View must go before the image it references.
514 if (resources.image_view) {
515 m_context.get_device().destroyImageView(resources.image_view);
516 }
517
518 if (resources.image) {
519 m_context.get_device().destroyImage(resources.image);
520 }
521
522 if (resources.memory) {
523 m_context.get_device().freeMemory(resources.memory);
524 }
525
527 "VKImage cleaned up");
528}
529
// Parameter list of transition_image_layout (leading signature line lost in
// this extraction). Records and immediately executes a single image memory
// barrier covering all mips/layers, choosing access masks and pipeline
// stages from a table of known (old, new) layout pairs; unknown pairs fall
// back to a conservative all-commands / memory-read-write barrier.
531 vk::Image image,
532 vk::ImageLayout old_layout,
533 vk::ImageLayout new_layout,
534 uint32_t mip_levels,
535 uint32_t array_layers,
536 vk::ImageAspectFlags aspect_flags)
537{
538 execute_immediate_commands([&](vk::CommandBuffer cmd) {
539 vk::ImageMemoryBarrier barrier {};
540 barrier.oldLayout = old_layout;
541 barrier.newLayout = new_layout;
        // No queue-family ownership transfer.
542 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
543 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
544 barrier.image = image;
545 barrier.subresourceRange.aspectMask = aspect_flags;
546 barrier.subresourceRange.baseMipLevel = 0;
547 barrier.subresourceRange.levelCount = mip_levels;
548 barrier.subresourceRange.baseArrayLayer = 0;
549 barrier.subresourceRange.layerCount = array_layers;
550
551 vk::PipelineStageFlags src_stage;
552 vk::PipelineStageFlags dst_stage;
553
        // Known transitions, one branch per (old, new) pair.
554 if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eTransferDstOptimal) {
555 barrier.srcAccessMask = vk::AccessFlags {};
556 barrier.dstAccessMask = vk::AccessFlagBits::eTransferWrite;
557 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
558 dst_stage = vk::PipelineStageFlagBits::eTransfer;
559 } else if (old_layout == vk::ImageLayout::eTransferDstOptimal && new_layout == vk::ImageLayout::eShaderReadOnlyOptimal) {
560 barrier.srcAccessMask = vk::AccessFlagBits::eTransferWrite;
561 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
562 src_stage = vk::PipelineStageFlagBits::eTransfer;
563 dst_stage = vk::PipelineStageFlagBits::eFragmentShader;
564 } else if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eShaderReadOnlyOptimal) {
565 barrier.srcAccessMask = vk::AccessFlags {};
566 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
567 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
568 dst_stage = vk::PipelineStageFlagBits::eFragmentShader;
569 } else if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eColorAttachmentOptimal) {
570 barrier.srcAccessMask = vk::AccessFlags {};
571 barrier.dstAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;
572 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
573 dst_stage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
574 } else if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eDepthStencilAttachmentOptimal) {
575 barrier.srcAccessMask = vk::AccessFlags {};
576 barrier.dstAccessMask = vk::AccessFlagBits::eDepthStencilAttachmentRead | vk::AccessFlagBits::eDepthStencilAttachmentWrite;
577 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
578 dst_stage = vk::PipelineStageFlagBits::eEarlyFragmentTests;
579 } else if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eGeneral) {
580 barrier.srcAccessMask = vk::AccessFlags {};
581 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead | vk::AccessFlagBits::eShaderWrite;
582 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
583 dst_stage = vk::PipelineStageFlagBits::eComputeShader;
584 } else {
            // Fallback: correct but maximally heavy -- full pipeline sync.
585 barrier.srcAccessMask = vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite;
586 barrier.dstAccessMask = vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite;
587 src_stage = vk::PipelineStageFlagBits::eAllCommands;
588 dst_stage = vk::PipelineStageFlagBits::eAllCommands;
589
591 "Using generic image layout transition");
592 }
593
594 cmd.pipelineBarrier(
595 src_stage, dst_stage,
596 vk::DependencyFlags {},
597 0, nullptr, // Memory barriers
598 0, nullptr, // Buffer barriers
599 1, &barrier // Image barriers
600 );
601 });
602
604 "Image layout transitioned: {} -> {}",
605 vk::to_string(old_layout), vk::to_string(new_layout));
606}
607
// Parameter list of upload_image_data (leading signature line lost in this
// extraction). Uploads `size` bytes into mip 0 of the image via a temporary
// host-visible staging buffer: memcpy -> flush -> barrier to TransferDst ->
// copyBufferToImage -> barrier to ShaderReadOnly. The staging buffer created
// here is not explicitly cleaned up after the copy in the visible code --
// NOTE(review): confirm whether cleanup_buffer(staging) exists on a dropped
// line, otherwise this leaks one staging buffer per upload.
609 std::shared_ptr<VKImage> image,
610 const void* data,
611 size_t size)
612{
613 if (!image || !data) {
615 "Invalid parameters for upload_image_data");
616 return;
617 }
618
    // NOTE(review): the usage/memory-property constructor arguments (lines
    // 621-622) were dropped by this extraction -- presumably transfer-src,
    // host-visible staging flags; verify against the original source.
619 auto staging = std::make_shared<Buffers::VKBuffer>(
620 size,
623
624 initialize_buffer(staging);
625
626 void* mapped = staging->get_mapped_ptr();
627 if (!mapped) {
629 "Failed to map staging buffer for image upload");
630 cleanup_buffer(staging);
631 return;
632 }
633
634 std::memcpy(mapped, data, size);
635 staging->mark_dirty_range(0, size);
636
    // Flush the whole staging allocation so the device sees the host writes.
637 auto& resources = staging->get_buffer_resources();
638 vk::MappedMemoryRange range { resources.memory, 0, VK_WHOLE_SIZE };
639
640 if (auto result = m_context.get_device().flushMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
642 "Failed to flush mapped memory range: {}", vk::to_string(result));
643 }
644
645 execute_immediate_commands([&](vk::CommandBuffer cmd) {
        // Barrier 1: whatever layout the image is in -> TransferDstOptimal.
646 vk::ImageMemoryBarrier barrier {};
647 barrier.oldLayout = image->get_current_layout();
648 barrier.newLayout = vk::ImageLayout::eTransferDstOptimal;
649 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
650 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
651 barrier.image = image->get_image();
652 barrier.subresourceRange.aspectMask = image->get_aspect_flags();
653 barrier.subresourceRange.baseMipLevel = 0;
654 barrier.subresourceRange.levelCount = image->get_mip_levels();
655 barrier.subresourceRange.baseArrayLayer = 0;
656 barrier.subresourceRange.layerCount = image->get_array_layers();
657 barrier.srcAccessMask = vk::AccessFlagBits::eShaderRead;
658 barrier.dstAccessMask = vk::AccessFlagBits::eTransferWrite;
659
660 cmd.pipelineBarrier(
661 vk::PipelineStageFlagBits::eFragmentShader,
662 vk::PipelineStageFlagBits::eTransfer,
663 vk::DependencyFlags {},
664 0, nullptr, 0, nullptr, 1, &barrier);
665
        // Copy the full extent into mip level 0, all array layers.
666 vk::BufferImageCopy region {};
667 region.bufferOffset = 0;
668 region.bufferRowLength = 0;
669 region.bufferImageHeight = 0;
670 region.imageSubresource.aspectMask = image->get_aspect_flags();
671 region.imageSubresource.mipLevel = 0;
672 region.imageSubresource.baseArrayLayer = 0;
673 region.imageSubresource.layerCount = image->get_array_layers();
674 region.imageOffset = vk::Offset3D { 0, 0, 0 };
675 region.imageExtent = vk::Extent3D {
676 image->get_width(),
677 image->get_height(),
678 image->get_depth()
679 };
680
681 cmd.copyBufferToImage(
682 staging->get_buffer(),
683 image->get_image(),
684 vk::ImageLayout::eTransferDstOptimal,
685 1, &region);
686
        // Barrier 2: hand the image to fragment-shader sampling.
687 barrier.oldLayout = vk::ImageLayout::eTransferDstOptimal;
688 barrier.newLayout = vk::ImageLayout::eShaderReadOnlyOptimal;
689 barrier.srcAccessMask = vk::AccessFlagBits::eTransferWrite;
690 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
691
692 cmd.pipelineBarrier(
693 vk::PipelineStageFlagBits::eTransfer,
694 vk::PipelineStageFlagBits::eFragmentShader,
695 vk::DependencyFlags {},
696 0, nullptr, 0, nullptr, 1, &barrier);
697 });
698
699 image->set_current_layout(vk::ImageLayout::eShaderReadOnlyOptimal);
700
702 "Uploaded {} bytes to image {}x{}",
703 size, image->get_width(), image->get_height());
704}
705
// Parameter list of upload_image_data_with_staging (leading signature line
// lost in this extraction). Same upload path as upload_image_data but reuses
// a caller-supplied, already-initialized, persistently-mapped staging buffer
// instead of creating a temporary one; the caller retains ownership.
707 std::shared_ptr<VKImage> image,
708 const void* data,
709 size_t size,
710 const std::shared_ptr<Buffers::VKBuffer>& staging)
711{
712 if (!image || !data || !staging) {
714 "Invalid parameters for upload_image_data_with_staging");
715 return;
716 }
717
718 void* mapped = staging->get_mapped_ptr();
719 if (!mapped) {
721 "upload_image_data_with_staging: staging buffer has no mapped pointer");
722 return;
723 }
724
725 std::memcpy(mapped, data, size);
726 staging->mark_dirty_range(0, size);
727
    // Flush the whole staging allocation so the device sees the host writes.
728 auto& resources = staging->get_buffer_resources();
729 vk::MappedMemoryRange range { resources.memory, 0, VK_WHOLE_SIZE };
730
731 if (auto result = m_context.get_device().flushMappedMemoryRanges(1, &range);
732 result != vk::Result::eSuccess) {
734 "upload_image_data_with_staging: flush failed: {}", vk::to_string(result));
735 }
736
737 execute_immediate_commands([&](vk::CommandBuffer cmd) {
        // Barrier 1: current layout -> TransferDstOptimal, all mips/layers.
738 vk::ImageMemoryBarrier barrier {};
739 barrier.oldLayout = image->get_current_layout();
740 barrier.newLayout = vk::ImageLayout::eTransferDstOptimal;
741 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
742 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
743 barrier.image = image->get_image();
744 barrier.subresourceRange = {
745 image->get_aspect_flags(), 0,
746 image->get_mip_levels(), 0,
747 image->get_array_layers()
748 };
749 barrier.srcAccessMask = vk::AccessFlagBits::eShaderRead;
750 barrier.dstAccessMask = vk::AccessFlagBits::eTransferWrite;
751
752 cmd.pipelineBarrier(
753 vk::PipelineStageFlagBits::eFragmentShader,
754 vk::PipelineStageFlagBits::eTransfer,
755 {}, 0, nullptr, 0, nullptr, 1, &barrier);
756
        // Copy full extent into mip 0 (bufferOffset/rowLength left at 0 =
        // tightly packed).
757 vk::BufferImageCopy region {};
758 region.imageSubresource.aspectMask = image->get_aspect_flags();
759 region.imageSubresource.layerCount = image->get_array_layers();
760 region.imageOffset = vk::Offset3D { 0, 0, 0 };
761 region.imageExtent = vk::Extent3D {
762 image->get_width(),
763 image->get_height(),
764 image->get_depth()
765 };
766
767 cmd.copyBufferToImage(
768 staging->get_buffer(),
769 image->get_image(),
770 vk::ImageLayout::eTransferDstOptimal,
771 1, &region);
772
        // Barrier 2: hand the image back to fragment-shader sampling.
773 barrier.oldLayout = vk::ImageLayout::eTransferDstOptimal;
774 barrier.newLayout = vk::ImageLayout::eShaderReadOnlyOptimal;
775 barrier.srcAccessMask = vk::AccessFlagBits::eTransferWrite;
776 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
777
778 cmd.pipelineBarrier(
779 vk::PipelineStageFlagBits::eTransfer,
780 vk::PipelineStageFlagBits::eFragmentShader,
781 {}, 0, nullptr, 0, nullptr, 1, &barrier);
782 });
783
784 image->set_current_layout(vk::ImageLayout::eShaderReadOnlyOptimal);
785
787 "upload_image_data_with_staging: {} bytes to image {}x{}",
788 size, image->get_width(), image->get_height());
789}
790
// Parameter list of download_image_data (leading signature line lost in this
// extraction). Copies the image's full extent into a temporary staging
// buffer on the GPU, invalidates the mapped range, memcpys it into the
// caller's `data`, then restores the image to `restore_layout` visible at
// `restore_stage`. The temporary staging buffer IS cleaned up here.
792 std::shared_ptr<VKImage> image,
793 void* data,
794 size_t size,
795 vk::ImageLayout restore_layout,
796 vk::PipelineStageFlags restore_stage)
797{
798 if (!image || !data) {
800 "Invalid parameters for download_image_data");
801 return;
802 }
803
    // NOTE(review): the staging buffer's usage/memory-property constructor
    // arguments (lines 806-807) were dropped by this extraction -- verify
    // against the original source.
804 auto staging = std::make_shared<Buffers::VKBuffer>(
805 size,
808
809 initialize_buffer(staging);
810
811 execute_immediate_commands([&](vk::CommandBuffer cmd) {
        // Barrier 1: current layout -> TransferSrcOptimal for the readback.
812 vk::ImageMemoryBarrier barrier {};
813 barrier.oldLayout = image->get_current_layout();
814 barrier.newLayout = vk::ImageLayout::eTransferSrcOptimal;
815 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
816 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
817 barrier.image = image->get_image();
818 barrier.subresourceRange.aspectMask = image->get_aspect_flags();
819 barrier.subresourceRange.baseMipLevel = 0;
820 barrier.subresourceRange.levelCount = image->get_mip_levels();
821 barrier.subresourceRange.baseArrayLayer = 0;
822 barrier.subresourceRange.layerCount = image->get_array_layers();
823 barrier.srcAccessMask = vk::AccessFlagBits::eShaderRead;
824 barrier.dstAccessMask = vk::AccessFlagBits::eTransferRead;
825
826 cmd.pipelineBarrier(
827 vk::PipelineStageFlagBits::eFragmentShader,
828 vk::PipelineStageFlagBits::eTransfer,
829 vk::DependencyFlags {}, {}, {}, barrier);
830
831 vk::BufferImageCopy region {};
832 region.bufferOffset = 0;
833 region.bufferRowLength = 0;
834 region.bufferImageHeight = 0;
835 region.imageSubresource.aspectMask = image->get_aspect_flags();
836 region.imageSubresource.mipLevel = 0;
837 region.imageSubresource.baseArrayLayer = 0;
838 region.imageSubresource.layerCount = image->get_array_layers();
839 region.imageOffset = vk::Offset3D { 0, 0, 0 };
840 region.imageExtent = vk::Extent3D {
841 image->get_width(),
842 image->get_height(),
843 image->get_depth()
844 };
845
846 cmd.copyImageToBuffer(
847 image->get_image(),
848 vk::ImageLayout::eTransferSrcOptimal,
849 staging->get_buffer(),
850 1, &region);
851
        // Barrier 2: restore the caller-requested layout/stage.
852 barrier.oldLayout = vk::ImageLayout::eTransferSrcOptimal;
853 barrier.newLayout = restore_layout;
854 barrier.srcAccessMask = vk::AccessFlagBits::eTransferRead;
855 barrier.dstAccessMask = vk::AccessFlagBits::eMemoryRead;
856
857 cmd.pipelineBarrier(
858 vk::PipelineStageFlagBits::eTransfer,
859 restore_stage,
860 vk::DependencyFlags {}, {}, {}, barrier);
861 });
862
    // Make the device's writes visible to the host before reading them back.
863 staging->mark_invalid_range(0, size);
864 auto& resources = staging->get_buffer_resources();
865 vk::MappedMemoryRange range { resources.memory, 0, VK_WHOLE_SIZE };
866
867 if (auto result = m_context.get_device().invalidateMappedMemoryRanges(1, &range);
868 result != vk::Result::eSuccess) {
870 "Failed to invalidate mapped memory range: {}", vk::to_string(result));
871 }
872
873 if (void* mapped = staging->get_mapped_ptr()) {
874 std::memcpy(data, mapped, size);
875 }
876
877 cleanup_buffer(staging);
878
    // NOTE(review): image->set_current_layout(restore_layout) is not visible
    // here -- either it sits on a dropped line or the tracked layout goes
    // stale after a download with a non-default restore_layout; confirm.
880 "Downloaded {} bytes from image {}x{}",
881 size, image->get_width(), image->get_height());
882}
883
// Parameter list of create_sampler (leading signature line lost in this
// extraction). Returns a cached vk::Sampler keyed by (filter, address_mode,
// max_anisotropy), creating and caching a new one on miss; returns nullptr
// if creation fails.
// NOTE(review): the hash computed inline below duplicates
// compute_sampler_hash() verbatim -- this should call that helper so the
// two can never drift apart.
885 vk::Filter filter,
886 vk::SamplerAddressMode address_mode,
887 float max_anisotropy)
888{
889 size_t hash = 0;
890 auto hash_combine = [](size_t& seed, size_t value) {
891 seed ^= value + 0x9e3779b9 + (seed << 6) + (seed >> 2);
892 };
893
894 hash_combine(hash, static_cast<size_t>(filter));
895 hash_combine(hash, static_cast<size_t>(address_mode));
896 hash_combine(hash, std::hash<float> {}(max_anisotropy));
897
898 auto it = m_sampler_cache.find(hash);
899 if (it != m_sampler_cache.end()) {
901 "Reusing cached sampler (hash: 0x{:X})", hash);
902 return it->second;
903 }
904
905 vk::SamplerCreateInfo sampler_info;
906 sampler_info.magFilter = filter;
907 sampler_info.minFilter = filter;
908 sampler_info.mipmapMode = vk::SamplerMipmapMode::eLinear;
909 sampler_info.addressModeU = address_mode;
910 sampler_info.addressModeV = address_mode;
911 sampler_info.addressModeW = address_mode;
912 sampler_info.mipLodBias = 0.0F;
    // Anisotropy is enabled only when a positive max is requested.
913 sampler_info.anisotropyEnable = max_anisotropy > 0.0F;
914 sampler_info.maxAnisotropy = max_anisotropy;
915 sampler_info.compareEnable = VK_FALSE;
916 sampler_info.compareOp = vk::CompareOp::eAlways;
917 sampler_info.minLod = 0.0F;
918 sampler_info.maxLod = VK_LOD_CLAMP_NONE;
919 sampler_info.borderColor = vk::BorderColor::eFloatOpaqueBlack;
920 sampler_info.unnormalizedCoordinates = VK_FALSE;
921
922 vk::Sampler sampler;
923 try {
924 sampler = m_context.get_device().createSampler(sampler_info);
925 } catch (const vk::SystemError& e) {
927 "Failed to create sampler: {}", e.what());
        // Creation failure is non-fatal: caller receives a null sampler.
928 return nullptr;
929 }
930
931 m_sampler_cache[hash] = sampler;
932
934 "Created sampler (filter: {}, address: {}, anisotropy: {}, hash: 0x{:X})",
935 vk::to_string(filter), vk::to_string(address_mode), max_anisotropy, hash);
936
937 return sampler;
938}
939
// Body of destroy_sampler (signature line lost in this extraction). Removes
// the sampler from the cache (linear scan by value, since the cache is keyed
// by parameter hash) and destroys it. Null samplers are ignored.
941{
942 if (!sampler) {
943 return;
944 }
945
946 for (auto it = m_sampler_cache.begin(); it != m_sampler_cache.end(); ++it) {
947 if (it->second == sampler) {
948 m_sampler_cache.erase(it);
            // At most one cache entry can hold this handle; stop scanning.
949 break;
950 }
951 }
952
953 m_context.get_device().destroySampler(sampler);
954
956 "Destroyed sampler");
957}
958
// Picks the first device memory type whose bit is set in `type_filter` and
// whose property flags contain all requested `properties`. If none matches,
// reports via error<std::runtime_error> and returns 0.
// NOTE(review): error<> here appears to throw (it takes a std::runtime_error
// type parameter), making the trailing `return 0` unreachable -- but the
// dropped argument lines (971-972) prevent confirming; verify upstream.
959uint32_t BackendResourceManager::find_memory_type(uint32_t type_filter, vk::MemoryPropertyFlags properties) const
960{
961 vk::PhysicalDeviceMemoryProperties mem_properties;
962 mem_properties = m_context.get_physical_device().getMemoryProperties();
963
964 for (uint32_t i = 0; i < mem_properties.memoryTypeCount; i++) {
        // type_filter is a bitmask of acceptable indices from
        // vkGet*MemoryRequirements; properties must be fully contained.
965 if ((type_filter & (1 << i)) && (mem_properties.memoryTypes[i].propertyFlags & properties) == properties) {
966 return i;
967 }
968 }
969
970 error<std::runtime_error>(
973 std::source_location::current(),
974 "Failed to find suitable memory type");
975
976 return 0;
977}
978
// Runs `recorder` inside an immediate (begin/submit/wait) command scope.
// NOTE(review): lines 981 and 983 were dropped by this extraction --
// presumably the acquisition of `cmd` from m_command_manager and the
// matching submit/end call; verify against the original source.
979void BackendResourceManager::execute_immediate_commands(const std::function<void(vk::CommandBuffer)>& recorder)
980{
982 recorder(cmd);
984}
985
// Deferred recording entry point. Per the TODO, batching is not implemented
// yet; the call is currently serviced immediately.
// NOTE(review): line 990 (the forwarding call, presumably
// execute_immediate_commands(recorder)) was dropped by this extraction.
986void BackendResourceManager::record_deferred_commands(const std::function<void(vk::CommandBuffer)>& recorder)
987{
988 // TODO: batch commands for later submission
989 // For now, just execute immediately
991}
992
// Teardown body (signature line lost in this extraction). Destroys every
// cached sampler, then cleans up every still-initialized managed buffer, and
// clears both containers.
994{
995 for (auto& [hash, sampler] : m_sampler_cache) {
996 if (sampler) {
997 m_context.get_device().destroySampler(sampler);
998 }
999 }
1000 m_sampler_cache.clear();
1001
    // NOTE(review): cleanup_buffer() erases from m_managed_buffers while this
    // range-for iterates it -- erase during iteration invalidates the loop's
    // iterators. Confirm whether cleanup_buffer is safe to call here or the
    // loop should drain a copy of the container instead.
1002 for (auto& buffer : m_managed_buffers) {
1003 if (buffer && buffer->is_initialized()) {
1004 cleanup_buffer(buffer);
1005 }
1006 }
1007 m_managed_buffers.clear();
1008}
1009
1010size_t BackendResourceManager::compute_sampler_hash(vk::Filter filter, vk::SamplerAddressMode address_mode, float max_anisotropy) const
1011{
1012 size_t hash = 0;
1013 auto hash_combine = [](size_t& seed, size_t value) {
1014 seed ^= value + 0x9e3779b9 + (seed << 6) + (seed >> 2);
1015 };
1016
1017 hash_combine(hash, static_cast<size_t>(filter));
1018 hash_combine(hash, static_cast<size_t>(address_mode));
1019 hash_combine(hash, std::hash<float> {}(max_anisotropy));
1020
1021 return hash;
1022}
1023
1024}
#define MF_INFO(comp, ctx,...)
#define MF_ERROR(comp, ctx,...)
#define MF_WARN(comp, ctx,...)
#define MF_DEBUG(comp, ctx,...)
vk::Fence fence
vk::CommandBuffer cmd
IO::ImageData image
Range size
@ STAGING
Host-visible staging buffer (CPU-writable)
void cleanup_image(const std::shared_ptr< VKImage > &image)
Cleanup a VKImage (destroy view, image, and free memory)
void cleanup_buffer(const std::shared_ptr< Buffers::VKBuffer > &buffer)
Cleanup a buffer and release associated resources.
void flush_pending_buffer_operations()
Flush any pending buffer operations (e.g., uploads/downloads)
vk::DeviceAddress get_buffer_device_address(const std::shared_ptr< Buffers::VKBuffer > &buffer) const
Query the Vulkan device address of an initialized BDA-capable buffer.
size_t compute_sampler_hash(vk::Filter filter, vk::SamplerAddressMode address_mode, float max_anisotropy) const
void execute_immediate_commands(const std::function< void(vk::CommandBuffer)> &recorder)
Execute immediate command recording for buffer operations.
BackendResourceManager(VKContext &context, VKCommandManager &command_manager)
std::unordered_map< size_t, vk::Sampler > m_sampler_cache
void initialize_image(const std::shared_ptr< VKImage > &image)
Initialize a VKImage (allocate VkImage, memory, and create image view)
void destroy_sampler(vk::Sampler sampler)
Destroy sampler.
vk::Sampler create_sampler(vk::Filter filter=vk::Filter::eLinear, vk::SamplerAddressMode address_mode=vk::SamplerAddressMode::eRepeat, float max_anisotropy=0.0F)
Create sampler.
void download_image_data(std::shared_ptr< VKImage > image, void *data, size_t size, vk::ImageLayout restore_layout=vk::ImageLayout::eShaderReadOnlyOptimal, vk::PipelineStageFlags restore_stage=vk::PipelineStageFlagBits::eFragmentShader)
Download data from an image into a caller-supplied buffer.
uint32_t find_memory_type(uint32_t type_filter, vk::MemoryPropertyFlags properties) const
Find a suitable memory type for Vulkan buffer allocation.
void transition_image_layout(vk::Image image, vk::ImageLayout old_layout, vk::ImageLayout new_layout, uint32_t mip_levels=1, uint32_t array_layers=1, vk::ImageAspectFlags aspect_flags=vk::ImageAspectFlagBits::eColor)
Transition image layout using a pipeline barrier.
void upload_image_data_with_staging(std::shared_ptr< VKImage > image, const void *data, size_t size, const std::shared_ptr< Buffers::VKBuffer > &staging)
Upload image data using a caller-supplied persistent staging buffer.
void initialize_buffer(const std::shared_ptr< Buffers::VKBuffer > &buffer)
Initialize a buffer for use with the graphics backend.
void setup_backend_service(const std::shared_ptr< Registry::Service::BufferService > &buffer_service)
void record_deferred_commands(const std::function< void(vk::CommandBuffer)> &recorder)
Record deferred commands for buffer operations.
void upload_image_data(std::shared_ptr< VKImage > image, const void *data, size_t size)
Upload data to an image (creates staging buffer internally)
std::vector< std::shared_ptr< Buffers::VKBuffer > > m_managed_buffers
void end_single_time_commands(vk::CommandBuffer command_buffer, vk::Queue queue)
End and submit single-time command.
void free_command_buffer(vk::CommandBuffer command_buffer)
Free a command buffer back to the pool.
vk::CommandBuffer begin_single_time_commands()
Begin single-time command (for transfers, etc.)
Manages Vulkan command pools and command buffers.
vk::Device get_device() const
Get logical device.
Definition VKContext.hpp:49
vk::Queue get_graphics_queue() const
Get graphics queue.
Definition VKContext.hpp:54
vk::PhysicalDevice get_physical_device() const
Get physical device.
Definition VKContext.hpp:44
High-level wrapper for Vulkan instance and device.
Definition VKContext.hpp:16
@ GraphicsBackend
Graphics/visual rendering backend (Vulkan, OpenGL)
@ Core
Core engine, backend, subsystems.
@ IMAGE_COLOR
2D RGB/RGBA image
std::string_view modality_to_string(DataModality modality)
Convert DataModality enum to string representation.
Definition NDData.cpp:83
Raw Vulkan handles owned by a VKBuffer instance.
Definition VKBuffer.hpp:32
Vulkan image resource handles.
Definition VKImage.hpp:15