MayaFlux 0.1.0
Digital-First Multimedia Processing Framework
Loading...
Searching...
No Matches
BackendResourceManager.cpp
Go to the documentation of this file.
2
6
10
11namespace MayaFlux::Core {
12
// Constructor fragment. NOTE(review): this is a doxygen text dump — the
// signature line (original line 13) was stripped by extraction; per the
// trailing cross-reference it is
// BackendResourceManager(VKContext& context, VKCommandManager& command_manager).
// Stores non-owning references to the Vulkan context and command manager.
14 : m_context(context)
15 , m_command_manager(command_manager)
16{
17}
18
// Wires Vulkan-backed implementations into the type-erased BufferService.
// Buffers cross the service boundary as shared_ptr<void> and memory handles
// as void*, so each lambda casts back to the concrete Vulkan types before
// delegating to this manager's members.
// NOTE(review): doxygen dump — the MF_ERROR(...) invocation lines (original
// lines 47 and 56) were stripped by extraction; only their trailing
// format-string arguments remain below.
19void BackendResourceManager::setup_backend_service(const std::shared_ptr<Registry::Service::BufferService>& buffer_service)
20{
21 buffer_service->initialize_buffer = [this](const std::shared_ptr<void>& vk_buf) -> void {
22 auto buffer = std::static_pointer_cast<Buffers::VKBuffer>(vk_buf);
23 this->initialize_buffer(buffer);
24 };
25
26 buffer_service->destroy_buffer = [this](const std::shared_ptr<void>& vk_buf) {
27 auto buffer = std::static_pointer_cast<Buffers::VKBuffer>(vk_buf);
28 this->cleanup_buffer(buffer);
29 };
30
// The service records commands through a void* handle; the wrapper lambdas
// re-expose the vk::CommandBuffer as an opaque pointer to the recorder.
31 buffer_service->execute_immediate = [this](const std::function<void(void*)>& recorder) {
32 this->execute_immediate_commands([recorder](vk::CommandBuffer cmd) {
33 recorder(static_cast<void*>(cmd));
34 });
35 };
36
37 buffer_service->record_deferred = [this](const std::function<void(void*)>& recorder) {
38 this->record_deferred_commands([recorder](vk::CommandBuffer cmd) {
39 recorder(static_cast<void*>(cmd));
40 });
41 };
42
// size == 0 is the service's convention for "whole allocation"; it is mapped
// to VK_WHOLE_SIZE here and in invalidate_range/map_buffer below.
43 buffer_service->flush_range = [this](void* memory, size_t offset, size_t size) {
44 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
45 vk::MappedMemoryRange range { mem, offset, size == 0 ? VK_WHOLE_SIZE : size };
46 if (auto result = m_context.get_device().flushMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
48 "Failed to flush mapped memory range: {}", vk::to_string(result));
49 }
50 };
51
52 buffer_service->invalidate_range = [this](void* memory, size_t offset, size_t size) {
53 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
54 vk::MappedMemoryRange range { mem, offset, size == 0 ? VK_WHOLE_SIZE : size };
55 if (auto result = m_context.get_device().invalidateMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
57 "Failed to invalidate mapped memory range: {}", vk::to_string(result));
58 }
59 };
60
61 buffer_service->map_buffer = [this](void* memory, size_t offset, size_t size) -> void* {
62 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
63 return m_context.get_device().mapMemory(mem, offset, size == 0 ? VK_WHOLE_SIZE : size);
64 };
65
66 buffer_service->unmap_buffer = [this](void* memory) {
67 vk::DeviceMemory mem(reinterpret_cast<VkDeviceMemory>(memory));
68 m_context.get_device().unmapMemory(mem);
69 };
70}
71
// Allocates the backing Vulkan resources for a VKBuffer: creates the
// vk::Buffer, allocates matching device memory, binds it, persistently maps
// it when host-visible, then registers the buffer in m_managed_buffers.
// Failure at any stage manually unwinds the resources created so far and
// rethrows via error_rethrow. NOTE(review): cleanup is hand-rolled rather
// than RAII; the stripped log-macro lines (original 75, 81, 96-97, 118-119,
// 131-132, 146-147, 157) were removed by the doxygen extraction.
72void BackendResourceManager::initialize_buffer(const std::shared_ptr<Buffers::VKBuffer>& buffer)
73{
74 if (!buffer) {
76 "Attempted to initialize null VulkanBuffer");
77 return;
78 }
79
// Idempotent: a second call on an initialized buffer is a no-op.
80 if (buffer->is_initialized()) {
82 "VulkanBuffer already initialized, skipping");
83 return;
84 }
85
86 vk::BufferCreateInfo buffer_info {};
87 buffer_info.size = buffer->get_size_bytes();
88 buffer_info.usage = buffer->get_usage_flags();
89 buffer_info.sharingMode = vk::SharingMode::eExclusive;
90
91 vk::Buffer vk_buffer;
92 try {
93 vk_buffer = m_context.get_device().createBuffer(buffer_info);
94 } catch (const vk::SystemError& e) {
95 error_rethrow(
98 std::source_location::current(),
99 "Failed to create VkBuffer: " + std::string(e.what()));
100 }
101
102 vk::MemoryRequirements mem_requirements;
103 mem_requirements = m_context.get_device().getBufferMemoryRequirements(vk_buffer);
104
105 vk::MemoryAllocateInfo alloc_info;
106 alloc_info.allocationSize = mem_requirements.size;
107
108 alloc_info.memoryTypeIndex = find_memory_type(
109 mem_requirements.memoryTypeBits,
110 vk::MemoryPropertyFlags(buffer->get_memory_properties()));
111
112 vk::DeviceMemory memory;
113 try {
114 memory = m_context.get_device().allocateMemory(alloc_info);
115 } catch (const vk::SystemError& e) {
// Unwind: destroy the buffer created above before rethrowing.
116 m_context.get_device().destroyBuffer(vk_buffer);
117 error_rethrow(
120 std::source_location::current(),
121 "Failed to allocate VkDeviceMemory: " + std::string(e.what()));
122 }
123
124 try {
125 m_context.get_device().bindBufferMemory(vk_buffer, memory, 0);
126 } catch (const vk::SystemError& e) {
127 m_context.get_device().freeMemory(memory);
128 m_context.get_device().destroyBuffer(vk_buffer);
129
130 error_rethrow(
133 std::source_location::current(),
134 "Failed to bind buffer memory: " + std::string(e.what()));
135 }
136
// Host-visible buffers are mapped once here and stay mapped for their
// lifetime (persistent mapping); cleanup_buffer unmaps on teardown.
137 void* mapped_ptr = nullptr;
138 if (buffer->is_host_visible()) {
139 try {
140 mapped_ptr = m_context.get_device().mapMemory(memory, 0, buffer->get_size_bytes());
141 } catch (const vk::SystemError& e) {
142 m_context.get_device().freeMemory(memory);
143 m_context.get_device().destroyBuffer(vk_buffer);
144
145 error_rethrow(
148 std::source_location::current(),
149 "Failed to map buffer memory: " + std::string(e.what()));
150 }
151
152 }
153 Buffers::VKBufferResources resources { .buffer = vk_buffer, .memory = memory, .mapped_ptr = mapped_ptr };
154 buffer->set_buffer_resources(resources);
155 m_managed_buffers.push_back(buffer);
156
158 "VulkanBuffer initialized: {} bytes, modality: {}, VkBuffer: {:p}",
159 buffer->get_size_bytes(),
160 Kakshya::modality_to_string(buffer->get_modality()),
161 (void*)buffer->get_buffer());
162}
163
// Releases the Vulkan resources of a managed buffer (unmap, destroy buffer,
// free memory) and removes it from m_managed_buffers. Buffers not found in
// the managed list are silently ignored, which makes double-cleanup safe.
// NOTE(review): doxygen dump — stripped log-macro lines at original 167 and 193.
164void BackendResourceManager::cleanup_buffer(const std::shared_ptr<Buffers::VKBuffer>& buffer)
165{
166 if (!buffer) {
168 "Attempted to cleanup null VulkanBuffer");
169 return;
170 }
171
172 auto it = std::ranges::find(m_managed_buffers, buffer);
173 if (it == m_managed_buffers.end()) {
174 return;
175 }
176
// Structured binding of references into the buffer's resource struct.
177 auto& [vk_buffer, memory, mapped_ptr] = it->get()->get_buffer_resources();
178
179 if (mapped_ptr) {
180 m_context.get_device().unmapMemory(memory);
181 }
182
183 if (vk_buffer) {
184 m_context.get_device().destroyBuffer(vk_buffer);
185 }
186
187 if (memory) {
188 m_context.get_device().freeMemory(memory);
189 }
190
191 m_managed_buffers.erase(it);
192
// NOTE(review): vk_buffer is logged here after destroyBuffer() and after the
// managed entry was erased; the caller's shared_ptr keeps the wrapper alive,
// but the handle value printed refers to an already-destroyed VkBuffer.
194 "VulkanBuffer cleaned up: {:p}", (void*)vk_buffer);
195}
196
// flush_pending_buffer_operations(): for every managed buffer, flushes its
// accumulated dirty (CPU-written) ranges to the device and invalidates its
// accumulated invalid (GPU-written) ranges so subsequent CPU reads see fresh
// data. Both range lists are consumed (get_and_clear_*).
// NOTE(review): doxygen dump — the function signature line (original 196-197)
// and the log-macro lines (original 209, 213, 226, 230) were stripped by
// extraction; the name is taken from the trailing cross-reference docs.
198{
199 for (auto& buffer_wrapper : m_managed_buffers) {
200 auto& resources = buffer_wrapper->get_buffer_resources();
201 auto dirty_ranges = buffer_wrapper->get_and_clear_dirty_ranges();
202 if (!dirty_ranges.empty()) {
// One vkFlushMappedMemoryRanges call per dirty range.
203 for (auto [offset, size] : dirty_ranges) {
204 vk::MappedMemoryRange range;
205 range.memory = resources.memory;
206 range.offset = offset;
207 range.size = size;
208 if (auto result = m_context.get_device().flushMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
210 "Failed to flush mapped memory range: {}", vk::to_string(result));
211 }
212 }
214 "Flushed {} dirty ranges for buffer {:p}", dirty_ranges.size(),
215 (void*)buffer_wrapper->get_buffer());
216 }
217
218 auto invalid_ranges = buffer_wrapper->get_and_clear_invalid_ranges();
219 if (!invalid_ranges.empty()) {
220 for (auto [offset, size] : invalid_ranges) {
221 vk::MappedMemoryRange range;
222 range.memory = buffer_wrapper->get_buffer_resources().memory;
223 range.offset = offset;
224 range.size = size;
225 if (auto result = m_context.get_device().invalidateMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
227 "Failed to invalidate mapped memory range: {}", vk::to_string(result));
228 }
229 }
231 "Invalidated {} ranges for buffer {:p}", invalid_ranges.size(),
232 (void*)buffer_wrapper->get_buffer());
233 }
234 }
235}
236
// Allocates the full resource set for a VKImage: VkImage, device memory,
// memory binding, and a default image view, then stores the handles back on
// the VKImage. Failure at any step unwinds prior allocations and rethrows.
// NOTE(review): doxygen dump — the `case VKImage::Type::...:` labels of both
// switch statements (original lines 258/261-262/265 and 344/349/354/357) and
// all log-macro lines were stripped by extraction, so the switches below are
// textually incomplete here.
237void BackendResourceManager::initialize_image(const std::shared_ptr<VKImage>& image)
238{
239 if (!image) {
241 "Attempted to initialize null VKImage");
242 return;
243 }
244
245 if (image->is_initialized()) {
247 "VKImage already initialized, skipping");
248 return;
249 }
250
251 // ========================================================================
252 // Step 1: Create VkImage
253 // ========================================================================
254
255 vk::ImageCreateInfo image_info {};
256
257 switch (image->get_type()) {
259 image_info.imageType = vk::ImageType::e1D;
260 break;
263 image_info.imageType = vk::ImageType::e2D;
264 break;
266 image_info.imageType = vk::ImageType::e3D;
267 break;
268 }
269
270 image_info.extent.width = image->get_width();
271 image_info.extent.height = image->get_height();
272 image_info.extent.depth = image->get_depth();
273 image_info.mipLevels = image->get_mip_levels();
274 image_info.arrayLayers = image->get_array_layers();
275 image_info.format = image->get_format();
276 image_info.tiling = vk::ImageTiling::eOptimal;
277 image_info.initialLayout = vk::ImageLayout::eUndefined;
278 image_info.usage = image->get_usage_flags();
279 image_info.sharingMode = vk::SharingMode::eExclusive;
280 image_info.samples = vk::SampleCountFlagBits::e1; // No MSAA for now
// Cube maps need the cube-compatible flag; all other types get no flags.
281 image_info.flags = (image->get_type() == VKImage::Type::TYPE_CUBE)
282 ? vk::ImageCreateFlagBits::eCubeCompatible
283 : vk::ImageCreateFlags {};
284
285 vk::Image vk_image;
286 try {
287 vk_image = m_context.get_device().createImage(image_info);
288 } catch (const vk::SystemError& e) {
289 error_rethrow(
292 std::source_location::current(),
293 "Failed to create VkImage: " + std::string(e.what()));
294 }
295
296 // ========================================================================
297 // Step 2: Allocate memory
298 // ========================================================================
299
300 vk::MemoryRequirements mem_requirements;
301 mem_requirements = m_context.get_device().getImageMemoryRequirements(vk_image);
302
303 vk::MemoryAllocateInfo alloc_info {};
304 alloc_info.allocationSize = mem_requirements.size;
305 alloc_info.memoryTypeIndex = find_memory_type(
306 mem_requirements.memoryTypeBits,
307 image->get_memory_properties());
308
309 vk::DeviceMemory memory;
310 try {
311 memory = m_context.get_device().allocateMemory(alloc_info);
312 } catch (const vk::SystemError& e) {
// Unwind step 1 before rethrowing.
313 m_context.get_device().destroyImage(vk_image);
314 error_rethrow(
317 std::source_location::current(),
318 "Failed to allocate VkDeviceMemory for image: " + std::string(e.what()));
319 }
320
321 // ========================================================================
322 // Step 3: Bind memory to image
323 // ========================================================================
324
325 try {
326 m_context.get_device().bindImageMemory(vk_image, memory, 0);
327 } catch (const vk::SystemError& e) {
328 m_context.get_device().freeMemory(memory);
329 m_context.get_device().destroyImage(vk_image);
330 error_rethrow(
333 std::source_location::current(),
334 "Failed to bind memory to VkImage: " + std::string(e.what()));
335 }
336
337 // ========================================================================
338 // Step 4: Create image view
339 // ========================================================================
340
341 vk::ImageViewCreateInfo view_info {};
342
343 switch (image->get_type()) {
345 view_info.viewType = (image->get_array_layers() > 1)
346 ? vk::ImageViewType::e1DArray
347 : vk::ImageViewType::e1D;
348 break;
350 view_info.viewType = (image->get_array_layers() > 1)
351 ? vk::ImageViewType::e2DArray
352 : vk::ImageViewType::e2D;
353 break;
355 view_info.viewType = vk::ImageViewType::e3D;
356 break;
358 view_info.viewType = vk::ImageViewType::eCube;
359 break;
360 }
361
362 view_info.image = vk_image;
363 view_info.format = image->get_format();
364 view_info.subresourceRange.aspectMask = image->get_aspect_flags();
365 view_info.subresourceRange.baseMipLevel = 0;
366 view_info.subresourceRange.levelCount = image->get_mip_levels();
367 view_info.subresourceRange.baseArrayLayer = 0;
368 view_info.subresourceRange.layerCount = image->get_array_layers();
369
370 view_info.components.r = vk::ComponentSwizzle::eIdentity;
371 view_info.components.g = vk::ComponentSwizzle::eIdentity;
372 view_info.components.b = vk::ComponentSwizzle::eIdentity;
373 view_info.components.a = vk::ComponentSwizzle::eIdentity;
374
375 vk::ImageView image_view;
376 try {
377 image_view = m_context.get_device().createImageView(view_info);
378 } catch (const vk::SystemError& e) {
379 m_context.get_device().freeMemory(memory);
380 m_context.get_device().destroyImage(vk_image);
381 error_rethrow(
384 std::source_location::current(),
385 "Failed to create VkImageView: " + std::string(e.what()));
386 }
387
388 // ========================================================================
389 // Step 5: Store handles in VKImage
390 // ========================================================================
391
// Sampler creation is deferred; see create_sampler().
392 VKImageResources resources {};
393 resources.image = vk_image;
394 resources.image_view = image_view;
395 resources.memory = memory;
396 resources.sampler = nullptr;
397
398 image->set_image_resources(resources);
399 image->set_current_layout(vk::ImageLayout::eUndefined);
400
402 "VKImage initialized: {}x{}x{}, format: {}, {} mips, {} layers",
403 image->get_width(), image->get_height(), image->get_depth(),
404 vk::to_string(image->get_format()),
405 image->get_mip_levels(), image->get_array_layers());
406}
407
// Destroys a VKImage's view, image, and device memory, in that order.
// No-op for null or never-initialized images.
// NOTE(review): unlike cleanup_buffer, this does not clear the handles stored
// on the VKImage — presumably the caller resets its state; verify upstream.
408void BackendResourceManager::cleanup_image(const std::shared_ptr<VKImage>& image)
409{
410 if (!image || !image->is_initialized()) {
411 return;
412 }
413
414 const auto& resources = image->get_image_resources();
415
416 if (resources.image_view) {
417 m_context.get_device().destroyImageView(resources.image_view);
418 }
419
420 if (resources.image) {
421 m_context.get_device().destroyImage(resources.image);
422 }
423
424 if (resources.memory) {
425 m_context.get_device().freeMemory(resources.memory);
426 }
427
429 "VKImage cleaned up");
430}
431
// transition_image_layout(): records and submits a pipeline barrier that
// moves `image` from old_layout to new_layout, covering all requested mips
// and array layers. A handful of common transitions get precise stage/access
// masks; anything else falls back to a conservative all-commands barrier.
// NOTE(review): doxygen dump — the signature's first line (original 432,
// `void BackendResourceManager::transition_image_layout(` per the trailing
// cross-reference) and the log-macro lines (487, 500) were stripped.
433 vk::Image image,
434 vk::ImageLayout old_layout,
435 vk::ImageLayout new_layout,
436 uint32_t mip_levels,
437 uint32_t array_layers,
438 vk::ImageAspectFlags aspect_flags)
439{
440 execute_immediate_commands([&](vk::CommandBuffer cmd) {
441 vk::ImageMemoryBarrier barrier {};
442 barrier.oldLayout = old_layout;
443 barrier.newLayout = new_layout;
444 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
445 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
446 barrier.image = image;
447 barrier.subresourceRange.aspectMask = aspect_flags;
448 barrier.subresourceRange.baseMipLevel = 0;
449 barrier.subresourceRange.levelCount = mip_levels;
450 barrier.subresourceRange.baseArrayLayer = 0;
451 barrier.subresourceRange.layerCount = array_layers;
452
453 vk::PipelineStageFlags src_stage;
454 vk::PipelineStageFlags dst_stage;
455
// Undefined -> transfer-dst: nothing to wait on, transfer writes after.
456 if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eTransferDstOptimal) {
457 barrier.srcAccessMask = vk::AccessFlags {};
458 barrier.dstAccessMask = vk::AccessFlagBits::eTransferWrite;
459 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
460 dst_stage = vk::PipelineStageFlagBits::eTransfer;
// Transfer-dst -> shader-read: typical post-upload sampling transition.
461 } else if (old_layout == vk::ImageLayout::eTransferDstOptimal && new_layout == vk::ImageLayout::eShaderReadOnlyOptimal) {
462 barrier.srcAccessMask = vk::AccessFlagBits::eTransferWrite;
463 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
464 src_stage = vk::PipelineStageFlagBits::eTransfer;
465 dst_stage = vk::PipelineStageFlagBits::eFragmentShader;
466 } else if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eColorAttachmentOptimal) {
467 barrier.srcAccessMask = vk::AccessFlags {};
468 barrier.dstAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;
469 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
470 dst_stage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
471 } else if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eDepthStencilAttachmentOptimal) {
472 barrier.srcAccessMask = vk::AccessFlags {};
473 barrier.dstAccessMask = vk::AccessFlagBits::eDepthStencilAttachmentRead | vk::AccessFlagBits::eDepthStencilAttachmentWrite;
474 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
475 dst_stage = vk::PipelineStageFlagBits::eEarlyFragmentTests;
// Undefined -> general: used for compute-shader storage images.
476 } else if (old_layout == vk::ImageLayout::eUndefined && new_layout == vk::ImageLayout::eGeneral) {
477 barrier.srcAccessMask = vk::AccessFlags {};
478 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead | vk::AccessFlagBits::eShaderWrite;
479 src_stage = vk::PipelineStageFlagBits::eTopOfPipe;
480 dst_stage = vk::PipelineStageFlagBits::eComputeShader;
481 } else {
// Fallback: maximally conservative full-pipeline barrier (correct but slow).
482 barrier.srcAccessMask = vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite;
483 barrier.dstAccessMask = vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite;
484 src_stage = vk::PipelineStageFlagBits::eAllCommands;
485 dst_stage = vk::PipelineStageFlagBits::eAllCommands;
486
488 "Using generic image layout transition");
489 }
490
491 cmd.pipelineBarrier(
492 src_stage, dst_stage,
493 vk::DependencyFlags {},
494 0, nullptr, // Memory barriers
495 0, nullptr, // Buffer barriers
496 1, &barrier // Image barriers
497 );
498 });
499
501 "Image layout transitioned: {} -> {}",
502 vk::to_string(old_layout), vk::to_string(new_layout));
503}
504
// upload_image_data(): copies `size` bytes of host data into `image` via a
// host-visible staging buffer: memcpy into the mapped staging memory, flush,
// then a single immediate submission that transitions the image to
// transfer-dst, copies buffer->image, and transitions to shader-read-only.
// NOTE(review): doxygen dump — the signature's first line (original 505) and
// the staging-buffer constructor's trailing arguments (original 518-519,
// usage/memory flags, see "@ STAGING" in the trailing docs) were stripped.
// NOTE(review): unlike download_image_data, `staging` is never passed to
// cleanup_buffer here — it stays registered in m_managed_buffers until
// shutdown, which looks like a staging-buffer leak; confirm and fix.
// NOTE(review): the first barrier uses srcAccessMask=eShaderRead /
// eFragmentShader even when the current layout is eUndefined — verify this
// matches the intended usage of freshly created images.
506 std::shared_ptr<VKImage> image,
507 const void* data,
508 size_t size)
509{
510 if (!image || !data) {
512 "Invalid parameters for upload_image_data");
513 return;
514 }
515
516 auto staging = std::make_shared<Buffers::VKBuffer>(
517 size,
520
521 initialize_buffer(staging);
522
523 void* mapped = staging->get_mapped_ptr();
524 if (!mapped) {
526 "Failed to map staging buffer for image upload");
527 cleanup_buffer(staging);
528 return;
529 }
530
531 std::memcpy(mapped, data, size);
532 staging->mark_dirty_range(0, size);
533
// Explicit flush so the GPU sees the CPU writes (staging may be non-coherent).
534 auto& resources = staging->get_buffer_resources();
535 vk::MappedMemoryRange range { resources.memory, 0, VK_WHOLE_SIZE };
536
537 if (auto result = m_context.get_device().flushMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
539 "Failed to flush mapped memory range: {}", vk::to_string(result));
540 }
541
542 execute_immediate_commands([&](vk::CommandBuffer cmd) {
543 vk::ImageMemoryBarrier barrier {};
544 barrier.oldLayout = image->get_current_layout();
545 barrier.newLayout = vk::ImageLayout::eTransferDstOptimal;
546 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
547 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
548 barrier.image = image->get_image();
549 barrier.subresourceRange.aspectMask = image->get_aspect_flags();
550 barrier.subresourceRange.baseMipLevel = 0;
551 barrier.subresourceRange.levelCount = image->get_mip_levels();
552 barrier.subresourceRange.baseArrayLayer = 0;
553 barrier.subresourceRange.layerCount = image->get_array_layers();
554 barrier.srcAccessMask = vk::AccessFlagBits::eShaderRead;
555 barrier.dstAccessMask = vk::AccessFlagBits::eTransferWrite;
556
557 cmd.pipelineBarrier(
558 vk::PipelineStageFlagBits::eFragmentShader,
559 vk::PipelineStageFlagBits::eTransfer,
560 vk::DependencyFlags {},
561 0, nullptr, 0, nullptr, 1, &barrier);
562
// Tightly-packed copy of mip 0, all array layers.
563 vk::BufferImageCopy region {};
564 region.bufferOffset = 0;
565 region.bufferRowLength = 0;
566 region.bufferImageHeight = 0;
567 region.imageSubresource.aspectMask = image->get_aspect_flags();
568 region.imageSubresource.mipLevel = 0;
569 region.imageSubresource.baseArrayLayer = 0;
570 region.imageSubresource.layerCount = image->get_array_layers();
571 region.imageOffset = vk::Offset3D { 0, 0, 0 };
572 region.imageExtent = vk::Extent3D {
573 image->get_width(),
574 image->get_height(),
575 image->get_depth()
576 };
577
578 cmd.copyBufferToImage(
579 staging->get_buffer(),
580 image->get_image(),
581 vk::ImageLayout::eTransferDstOptimal,
582 1, &region);
583
// Second barrier: hand the image back to fragment-shader sampling.
584 barrier.oldLayout = vk::ImageLayout::eTransferDstOptimal;
585 barrier.newLayout = vk::ImageLayout::eShaderReadOnlyOptimal;
586 barrier.srcAccessMask = vk::AccessFlagBits::eTransferWrite;
587 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
588
589 cmd.pipelineBarrier(
590 vk::PipelineStageFlagBits::eTransfer,
591 vk::PipelineStageFlagBits::eFragmentShader,
592 vk::DependencyFlags {},
593 0, nullptr, 0, nullptr, 1, &barrier);
594 });
595
596 image->set_current_layout(vk::ImageLayout::eShaderReadOnlyOptimal);
597
599 "Uploaded {} bytes to image {}x{}",
600 size, image->get_width(), image->get_height());
601}
602
// download_image_data(): reads `size` bytes from `image` into host memory
// via a staging buffer: immediate submission transitions the image to
// transfer-src, copies image->buffer, and restores the original layout;
// then the staging range is invalidated and memcpy'd out, and the staging
// buffer is cleaned up.
// NOTE(review): doxygen dump — the signature's first line (original 603) and
// the staging-buffer constructor's trailing arguments (original 616-617)
// were stripped by extraction.
604 std::shared_ptr<VKImage> image,
605 void* data,
606 size_t size)
607{
608 if (!image || !data) {
610 "Invalid parameters for download_image_data");
611 return;
612 }
613
614 auto staging = std::make_shared<Buffers::VKBuffer>(
615 size,
618
619 initialize_buffer(staging);
620
621 execute_immediate_commands([&](vk::CommandBuffer cmd) {
622 vk::ImageMemoryBarrier barrier {};
623 barrier.oldLayout = image->get_current_layout();
624 barrier.newLayout = vk::ImageLayout::eTransferSrcOptimal;
625 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
626 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
627 barrier.image = image->get_image();
628 barrier.subresourceRange.aspectMask = image->get_aspect_flags();
629 barrier.subresourceRange.baseMipLevel = 0;
630 barrier.subresourceRange.levelCount = image->get_mip_levels();
631 barrier.subresourceRange.baseArrayLayer = 0;
632 barrier.subresourceRange.layerCount = image->get_array_layers();
633 barrier.srcAccessMask = vk::AccessFlagBits::eShaderRead;
634 barrier.dstAccessMask = vk::AccessFlagBits::eTransferRead;
635
636 cmd.pipelineBarrier(
637 vk::PipelineStageFlagBits::eFragmentShader,
638 vk::PipelineStageFlagBits::eTransfer,
639 vk::DependencyFlags {},
640 0, nullptr, 0, nullptr, 1, &barrier);
641
// Tightly-packed copy of mip 0, all array layers.
642 vk::BufferImageCopy region {};
643 region.bufferOffset = 0;
644 region.bufferRowLength = 0;
645 region.bufferImageHeight = 0;
646 region.imageSubresource.aspectMask = image->get_aspect_flags();
647 region.imageSubresource.mipLevel = 0;
648 region.imageSubresource.baseArrayLayer = 0;
649 region.imageSubresource.layerCount = image->get_array_layers();
650 region.imageOffset = vk::Offset3D { 0, 0, 0 };
651 region.imageExtent = vk::Extent3D {
652 image->get_width(),
653 image->get_height(),
654 image->get_depth()
655 };
656
657 cmd.copyImageToBuffer(
658 image->get_image(),
659 vk::ImageLayout::eTransferSrcOptimal,
660 staging->get_buffer(),
661 1, &region);
662
// Restore the layout the image had before the download.
663 barrier.oldLayout = vk::ImageLayout::eTransferSrcOptimal;
664 barrier.newLayout = image->get_current_layout();
665 barrier.srcAccessMask = vk::AccessFlagBits::eTransferRead;
666 barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
667
668 cmd.pipelineBarrier(
669 vk::PipelineStageFlagBits::eTransfer,
670 vk::PipelineStageFlagBits::eFragmentShader,
671 vk::DependencyFlags {},
672 0, nullptr, 0, nullptr, 1, &barrier);
673 });
674
// Invalidate before the CPU reads GPU-written, possibly non-coherent memory.
675 staging->mark_invalid_range(0, size);
676 auto& resources = staging->get_buffer_resources();
677 vk::MappedMemoryRange range { resources.memory, 0, VK_WHOLE_SIZE };
678
679 if (auto result = m_context.get_device().invalidateMappedMemoryRanges(1, &range); result != vk::Result::eSuccess) {
681 "Failed to invalidate mapped memory range: {}", vk::to_string(result));
682 }
683
684 void* mapped = staging->get_mapped_ptr();
685 if (mapped) {
686 std::memcpy(data, mapped, size);
687 }
688
689 cleanup_buffer(staging);
690
692 "Downloaded {} bytes from image {}x{}",
693 size, image->get_width(), image->get_height());
694}
695
// create_sampler(): returns a cached vk::Sampler for the (filter,
// address_mode, max_anisotropy) combination, creating and caching one on
// miss. Returns nullptr if sampler creation fails.
// NOTE(review): doxygen dump — the signature's first line (original 696,
// `vk::Sampler BackendResourceManager::create_sampler(` per the trailing
// cross-reference) and log-macro lines were stripped by extraction.
// NOTE(review): the hash computation below duplicates
// compute_sampler_hash() verbatim — calling that helper instead would keep
// the two from drifting apart.
697 vk::Filter filter,
698 vk::SamplerAddressMode address_mode,
699 float max_anisotropy)
700{
701 size_t hash = 0;
702 auto hash_combine = [](size_t& seed, size_t value) {
703 seed ^= value + 0x9e3779b9 + (seed << 6) + (seed >> 2);
704 };
705
706 hash_combine(hash, static_cast<size_t>(filter));
707 hash_combine(hash, static_cast<size_t>(address_mode));
708 hash_combine(hash, std::hash<float> {}(max_anisotropy));
709
710 auto it = m_sampler_cache.find(hash);
711 if (it != m_sampler_cache.end()) {
713 "Reusing cached sampler (hash: 0x{:X})", hash);
714 return it->second;
715 }
716
717 vk::SamplerCreateInfo sampler_info;
718 sampler_info.magFilter = filter;
719 sampler_info.minFilter = filter;
720 sampler_info.mipmapMode = vk::SamplerMipmapMode::eLinear;
721 sampler_info.addressModeU = address_mode;
722 sampler_info.addressModeV = address_mode;
723 sampler_info.addressModeW = address_mode;
724 sampler_info.mipLodBias = 0.0F;
// Anisotropy is opt-in: any positive max_anisotropy enables it.
725 sampler_info.anisotropyEnable = max_anisotropy > 0.0F;
726 sampler_info.maxAnisotropy = max_anisotropy;
727 sampler_info.compareEnable = VK_FALSE;
728 sampler_info.compareOp = vk::CompareOp::eAlways;
729 sampler_info.minLod = 0.0F;
730 sampler_info.maxLod = VK_LOD_CLAMP_NONE;
731 sampler_info.borderColor = vk::BorderColor::eFloatOpaqueBlack;
732 sampler_info.unnormalizedCoordinates = VK_FALSE;
733
734 vk::Sampler sampler;
735 try {
736 sampler = m_context.get_device().createSampler(sampler_info);
737 } catch (const vk::SystemError& e) {
739 "Failed to create sampler: {}", e.what());
740 return nullptr;
741 }
742
743 m_sampler_cache[hash] = sampler;
744
746 "Created sampler (filter: {}, address: {}, anisotropy: {}, hash: 0x{:X})",
747 vk::to_string(filter), vk::to_string(address_mode), max_anisotropy, hash);
748
749 return sampler;
750}
751
// destroy_sampler(): removes `sampler` from the cache (linear scan by handle,
// since the cache is keyed by hash) and destroys it on the device. The
// device-level destroy happens even if the sampler was not found in the cache.
// NOTE(review): doxygen dump — the signature line (original 752,
// `void BackendResourceManager::destroy_sampler(vk::Sampler sampler)` per the
// trailing cross-reference) and the log-macro line (767) were stripped.
753{
754 if (!sampler) {
755 return;
756 }
757
758 for (auto it = m_sampler_cache.begin(); it != m_sampler_cache.end(); ++it) {
759 if (it->second == sampler) {
760 m_sampler_cache.erase(it);
761 break;
762 }
763 }
764
765 m_context.get_device().destroySampler(sampler);
766
768 "Destroyed sampler");
769}
770
// Finds the index of the first device memory type that is allowed by
// `type_filter` (a bitmask from vk::MemoryRequirements::memoryTypeBits) and
// supports all of the requested `properties`.
// NOTE(review): error<std::runtime_error>(...) presumably throws, making the
// trailing `return 0` unreachable defensive code — confirm against the
// error helper's contract (extraction stripped its leading arguments,
// original lines 783-784).
771uint32_t BackendResourceManager::find_memory_type(uint32_t type_filter, vk::MemoryPropertyFlags properties) const
772{
773 vk::PhysicalDeviceMemoryProperties mem_properties;
774 mem_properties = m_context.get_physical_device().getMemoryProperties();
775
776 for (uint32_t i = 0; i < mem_properties.memoryTypeCount; i++) {
// Bit i of type_filter set => memory type i is usable for the resource;
// the flag subset test requires every requested property to be present.
777 if ((type_filter & (1 << i)) && (mem_properties.memoryTypes[i].propertyFlags & properties) == properties) {
778 return i;
779 }
780 }
781
782 error<std::runtime_error>(
785 std::source_location::current(),
786 "Failed to find suitable memory type");
787
788 return 0;
789}
790
// Runs `recorder` inside a single-time command buffer from the command
// manager (blocking helper for transfers and barriers).
// NOTE(review): doxygen dump — original line 795 was stripped by extraction;
// per VKCommandManager's documented API it presumably called
// end_single_time_commands(cmd, <queue>) to submit and finish the buffer.
791void BackendResourceManager::execute_immediate_commands(const std::function<void(vk::CommandBuffer)>& recorder)
792{
793 vk::CommandBuffer cmd = m_command_manager.begin_single_time_commands();
794 recorder(cmd);
796}
797
// Placeholder for deferred/batched command recording.
// NOTE(review): doxygen dump — original line 802 was stripped by extraction;
// given the in-body comments it presumably forwarded to
// execute_immediate_commands(recorder).
798void BackendResourceManager::record_deferred_commands(const std::function<void(vk::CommandBuffer)>& recorder)
799{
800 // TODO: batch commands for later submission
801 // For now, just execute immediately
803}
804
// Shutdown teardown: destroys every cached sampler, then cleans up every
// still-initialized managed buffer.
// NOTE(review): doxygen dump — the function signature line (original 805)
// was stripped by extraction, so the exact name is not visible here.
// NOTE(review): the buffer loop range-iterates m_managed_buffers while
// cleanup_buffer() calls m_managed_buffers.erase(it) on the same vector —
// that invalidates the range-for iterators (undefined behavior). Iterate a
// copy, or drain with while(!empty()) cleanup_buffer(back()).
806{
807 for (auto& [hash, sampler] : m_sampler_cache) {
808 if (sampler) {
809 m_context.get_device().destroySampler(sampler);
810 }
811 }
812 m_sampler_cache.clear();
813
814 for (auto& buffer : m_managed_buffers) {
815 if (buffer && buffer->is_initialized()) {
816 cleanup_buffer(buffer);
817 }
818 }
819 m_managed_buffers.clear();
820}
821
822size_t BackendResourceManager::compute_sampler_hash(vk::Filter filter, vk::SamplerAddressMode address_mode, float max_anisotropy) const
823{
824 size_t hash = 0;
825 auto hash_combine = [](size_t& seed, size_t value) {
826 seed ^= value + 0x9e3779b9 + (seed << 6) + (seed >> 2);
827 };
828
829 hash_combine(hash, static_cast<size_t>(filter));
830 hash_combine(hash, static_cast<size_t>(address_mode));
831 hash_combine(hash, std::hash<float> {}(max_anisotropy));
832
833 return hash;
834}
835
836}
#define MF_INFO(comp, ctx,...)
#define MF_ERROR(comp, ctx,...)
#define MF_WARN(comp, ctx,...)
#define MF_DEBUG(comp, ctx,...)
@ STAGING
Host-visible staging buffer (CPU-writable)
void download_image_data(std::shared_ptr< VKImage > image, void *data, size_t size)
Download data from an image (creates staging buffer internally)
void cleanup_image(const std::shared_ptr< VKImage > &image)
Cleanup a VKImage (destroy view, image, and free memory)
void cleanup_buffer(const std::shared_ptr< Buffers::VKBuffer > &buffer)
Cleanup a buffer and release associated resources.
void flush_pending_buffer_operations()
Flush any pending buffer operations (e.g., uploads/downloads)
size_t compute_sampler_hash(vk::Filter filter, vk::SamplerAddressMode address_mode, float max_anisotropy) const
void execute_immediate_commands(const std::function< void(vk::CommandBuffer)> &recorder)
Execute immediate command recording for buffer operations.
BackendResourceManager(VKContext &context, VKCommandManager &command_manager)
std::unordered_map< size_t, vk::Sampler > m_sampler_cache
void initialize_image(const std::shared_ptr< VKImage > &image)
Initialize a VKImage (allocate VkImage, memory, and create image view)
void destroy_sampler(vk::Sampler sampler)
Destroy sampler.
vk::Sampler create_sampler(vk::Filter filter=vk::Filter::eLinear, vk::SamplerAddressMode address_mode=vk::SamplerAddressMode::eRepeat, float max_anisotropy=0.0F)
Create sampler.
uint32_t find_memory_type(uint32_t type_filter, vk::MemoryPropertyFlags properties) const
Find a suitable memory type for Vulkan buffer allocation.
void transition_image_layout(vk::Image image, vk::ImageLayout old_layout, vk::ImageLayout new_layout, uint32_t mip_levels=1, uint32_t array_layers=1, vk::ImageAspectFlags aspect_flags=vk::ImageAspectFlagBits::eColor)
Transition image layout using a pipeline barrier.
void initialize_buffer(const std::shared_ptr< Buffers::VKBuffer > &buffer)
Initialize a buffer for use with the graphics backend.
void setup_backend_service(const std::shared_ptr< Registry::Service::BufferService > &buffer_service)
void record_deferred_commands(const std::function< void(vk::CommandBuffer)> &recorder)
Record deferred command recording for buffer operations.
void upload_image_data(std::shared_ptr< VKImage > image, const void *data, size_t size)
Upload data to an image (creates staging buffer internally)
std::vector< std::shared_ptr< Buffers::VKBuffer > > m_managed_buffers
void end_single_time_commands(vk::CommandBuffer command_buffer, vk::Queue queue)
End and submit single-time command.
vk::CommandBuffer begin_single_time_commands()
Begin single-time command (for transfers, etc.)
Manages Vulkan command pools and command buffers.
vk::Device get_device() const
Get logical device.
Definition VKContext.hpp:49
vk::Queue get_graphics_queue() const
Get graphics queue.
Definition VKContext.hpp:54
vk::PhysicalDevice get_physical_device() const
Get physical device.
Definition VKContext.hpp:44
High-level wrapper for Vulkan instance and device.
Definition VKContext.hpp:16
@ GraphicsBackend
Graphics/visual rendering backend (Vulkan, OpenGL)
@ Core
Core engine, backend, subsystems.
@ IMAGE_COLOR
2D RGB/RGBA image
std::string_view modality_to_string(DataModality modality)
Convert DataModality enum to string representation.
Definition NDData.cpp:80
Vulkan image resource handles.
Definition VKImage.hpp:15