From 66e57a40e3a2515734cb09a9dd09310cb6605bfb Mon Sep 17 00:00:00 2001
From: Lander Gallastegi
Date: Thu, 1 May 2025 13:23:11 +0200
Subject: [PATCH] Remove memory coverage logic

---
 src/video_core/buffer_cache/buffer_cache.cpp | 45 -------------------
 src/video_core/buffer_cache/buffer_cache.h   | 11 -----
 .../renderer_vulkan/vk_rasterizer.cpp        |  4 --
 3 files changed, 60 deletions(-)

diff --git a/src/video_core/buffer_cache/buffer_cache.cpp b/src/video_core/buffer_cache/buffer_cache.cpp
index a6d2ecf18..70d45ad3e 100644
--- a/src/video_core/buffer_cache/buffer_cache.cpp
+++ b/src/video_core/buffer_cache/buffer_cache.cpp
@@ -415,45 +415,6 @@ BufferId BufferCache::FindBuffer(VAddr device_addr, u32 size) {
     return CreateBuffer(device_addr, size);
 }
 
-void BufferCache::QueueMemoryCoverage(VAddr device_addr, u64 size) {
-    std::scoped_lock lk{covered_regions_mutex};
-    const VAddr start = device_addr;
-    const VAddr end = device_addr + size;
-    auto queue_range = decltype(queued_converages)::interval_type::right_open(start, end);
-    queued_converages += queue_range;
-}
-
-void BufferCache::CoverQueuedRegions() {
-    std::scoped_lock lk{covered_regions_mutex};
-    if (queued_converages.empty()) {
-        return;
-    }
-    for (const auto& range : queued_converages) {
-        CoverMemory(range.lower(), range.upper());
-    }
-    queued_converages.clear();
-}
-
-void BufferCache::CoverMemory(u64 start, u64 end) {
-    const u64 page_start = start >> CACHING_PAGEBITS;
-    const u64 page_end = Common::DivCeil(end, CACHING_PAGESIZE);
-    auto interval = decltype(convered_regions)::interval_type::right_open(page_start, page_end);
-    auto interval_set = boost::icl::interval_set<u64>{interval};
-    auto uncovered_ranges = interval_set - convered_regions;
-    if (uncovered_ranges.empty()) {
-        return;
-    }
-    // We fill any holes within the given range
-    for (const auto& range : uncovered_ranges) {
-        const u64 range_start = range.lower();
-        const u64 range_end = range.upper();
-        void* cpu_addr = reinterpret_cast<void*>(range_start << CACHING_PAGEBITS);
-        const u64 range_size = (range_end - range_start) << CACHING_PAGEBITS;
-        // Here to implement import of the mapped region
-        convered_regions += range;
-    }
-}
-
 BufferCache::OverlapResult BufferCache::ResolveOverlaps(VAddr device_addr, u32 wanted_size) {
     static constexpr int STREAM_LEAP_THRESHOLD = 16;
     boost::container::small_vector overlap_ids;
@@ -608,12 +569,6 @@ BufferId BufferCache::CreateBuffer(VAddr device_addr, u32 wanted_size) {
     }
     WriteDataBuffer(bda_pagetable_buffer, start_page * sizeof(vk::DeviceAddress), bda_addrs.data(),
                     bda_addrs.size() * sizeof(vk::DeviceAddress));
-    {
-        // Mark the pages as covered
-        std::scoped_lock lk{covered_regions_mutex};
-        convered_regions += boost::icl::interval_set<u64>::interval_type::right_open(
-            start_page, start_page + size_pages);
-    }
     const size_t size_bytes = new_buffer.SizeBytes();
     const auto cmdbuf = scheduler.CommandBuffer();
     scheduler.EndRendering();
diff --git a/src/video_core/buffer_cache/buffer_cache.h b/src/video_core/buffer_cache/buffer_cache.h
index b60790c28..9da7bd804 100644
--- a/src/video_core/buffer_cache/buffer_cache.h
+++ b/src/video_core/buffer_cache/buffer_cache.h
@@ -133,12 +133,6 @@ public:
     /// Return buffer id for the specified region
    BufferId FindBuffer(VAddr device_addr, u32 size);
 
-    /// Queue a region for coverage for DMA.
-    void QueueMemoryCoverage(VAddr device_addr, u64 size);
-
-    /// Covers all queued regions.
-    void CoverQueuedRegions();
-
     /// Processes the fault buffer.
     void ProcessFaultBuffer();
 
@@ -191,8 +185,6 @@ private:
 
     void DeleteBuffer(BufferId buffer_id);
 
-    void CoverMemory(u64 start, u64 end);
-
     const Vulkan::Instance& instance;
     Vulkan::Scheduler& scheduler;
     Vulkan::Rasterizer& rasterizer;
@@ -205,9 +197,6 @@ private:
     Buffer gds_buffer;
     Buffer bda_pagetable_buffer;
     Buffer fault_buffer;
-    boost::icl::interval_set<VAddr> queued_converages;
-    boost::icl::interval_set<u64> convered_regions;
-    std::shared_mutex covered_regions_mutex;
     std::shared_mutex slot_buffers_mutex;
     Common::SlotVector slot_buffers;
     RangeSet gpu_modified_ranges;
diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.cpp b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
index 04e8484c9..aca39e77d 100644
--- a/src/video_core/renderer_vulkan/vk_rasterizer.cpp
+++ b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
@@ -477,9 +477,6 @@ bool Rasterizer::BindResources(const Pipeline* pipeline) {
     if (uses_dma) {
         fault_process_pending = true;
         // We only use fault buffer for DMA right now.
-        // First, import any queued host memory, then sync every mapped
-        // region that is cached on GPU memory.
-        buffer_cache.CoverQueuedRegions();
         {
             std::shared_lock lock{dma_sync_mapped_ranges_mutex};
             for (const auto& range : dma_sync_mapped_ranges) {
@@ -991,7 +988,6 @@ void Rasterizer::MapMemory(VAddr addr, u64 size) {
         dma_sync_mapped_ranges = mapped_ranges & dma_sync_ranges;
     }
     page_manager.OnGpuMap(addr, size);
-    buffer_cache.QueueMemoryCoverage(addr, size);
 }
 
 void Rasterizer::UnmapMemory(VAddr addr, u64 size) {