Queue coverage

This commit is contained in:
Lander Gallastegi 2025-04-17 14:26:07 +02:00
parent d5e45fb492
commit 83255ee68f
3 changed files with 32 additions and 4 deletions

View File

@ -323,9 +323,28 @@ BufferId BufferCache::FindBuffer(VAddr device_addr, u32 size) {
return CreateBuffer(device_addr, size); return CreateBuffer(device_addr, size);
} }
void BufferCache::MapMemory(VAddr device_addr, u64 size) { void BufferCache::QueueCoverage(VAddr device_addr, u64 size) {
const u64 page_start = device_addr >> CACHING_PAGEBITS; std::scoped_lock lk{mutex};
const u64 page_end = Common::DivCeil(device_addr + size, CACHING_PAGESIZE); const u64 start = device_addr;
const u64 end = device_addr + size;
auto queue_range = decltype(covered_regions)::interval_type::right_open(start, end);
queued_coverage += queue_range;
}
/// Applies DMA coverage to every region previously recorded by
/// QueueCoverage(), then empties the queue. No-op when nothing is queued.
void BufferCache::CoverQueuedRegions() {
    std::scoped_lock coverage_lock{mutex};
    if (!queued_coverage.empty()) {
        // Cover each merged interval; boost::icl has already coalesced
        // overlapping/adjacent queued ranges for us.
        for (const auto& region : queued_coverage) {
            CoverMemory(region.lower(), region.upper());
        }
        queued_coverage.clear();
    }
}
void BufferCache::CoverMemory(u64 start, u64 end) {
const u64 page_start = start >> CACHING_PAGEBITS;
const u64 page_end = Common::DivCeil(end, CACHING_PAGESIZE);
auto interval = decltype(covered_regions)::interval_type::right_open(page_start, page_end); auto interval = decltype(covered_regions)::interval_type::right_open(page_start, page_end);
auto interval_set = boost::icl::interval_set<u64>{interval}; auto interval_set = boost::icl::interval_set<u64>{interval};
auto uncovered_ranges = interval_set - covered_regions; auto uncovered_ranges = interval_set - covered_regions;

View File

@ -119,8 +119,15 @@ public:
/// Return true when a CPU region is modified from the GPU /// Return true when a CPU region is modified from the GPU
[[nodiscard]] bool IsRegionGpuModified(VAddr addr, size_t size); [[nodiscard]] bool IsRegionGpuModified(VAddr addr, size_t size);
/// Return buffer id for the specified region
[[nodiscard]] BufferId FindBuffer(VAddr device_addr, u32 size); [[nodiscard]] BufferId FindBuffer(VAddr device_addr, u32 size);
/// Queue a region for coverage for DMA.
void QueueCoverage(VAddr device_addr, u64 size);
/// Covers all queued regions.
void CoverQueuedRegions();
private: private:
template <typename Func> template <typename Func>
void ForEachBufferInRange(VAddr device_addr, u64 size, Func&& func) { void ForEachBufferInRange(VAddr device_addr, u64 size, Func&& func) {
@ -164,7 +171,7 @@ private:
void DeleteBuffer(BufferId buffer_id); void DeleteBuffer(BufferId buffer_id);
void MapMemory(VAddr device_addr, u64 size); void CoverMemory(u64 start, u64 end);
const Vulkan::Instance& instance; const Vulkan::Instance& instance;
Vulkan::Scheduler& scheduler; Vulkan::Scheduler& scheduler;
@ -176,6 +183,7 @@ private:
StreamBuffer stream_buffer; StreamBuffer stream_buffer;
Buffer gds_buffer; Buffer gds_buffer;
Buffer bda_pagetable_buffer; Buffer bda_pagetable_buffer;
boost::icl::interval_set<VAddr> queued_coverage;
boost::icl::interval_set<u64> covered_regions; boost::icl::interval_set<u64> covered_regions;
std::vector<ImportedHostBuffer> imported_buffers; std::vector<ImportedHostBuffer> imported_buffers;
std::shared_mutex mutex; std::shared_mutex mutex;

View File

@ -946,6 +946,7 @@ void Rasterizer::MapMemory(VAddr addr, u64 size) {
mapped_ranges += decltype(mapped_ranges)::interval_type::right_open(addr, addr + size); mapped_ranges += decltype(mapped_ranges)::interval_type::right_open(addr, addr + size);
} }
page_manager.OnGpuMap(addr, size); page_manager.OnGpuMap(addr, size);
buffer_cache.QueueCoverage(addr, size);
} }
void Rasterizer::UnmapMemory(VAddr addr, u64 size) { void Rasterizer::UnmapMemory(VAddr addr, u64 size) {