vulkan: tighten buffer merging & enforce alignment to fix Bloodborne cracks

andreas-patsalos 2025-05-27 01:06:51 +03:00
parent 99ccf56938
commit 4beb21f19b
3 changed files with 22 additions and 14 deletions


@@ -216,16 +216,18 @@ void BufferCache::BindVertexBuffers(const Vulkan::GraphicsPipeline& pipeline) {
     // Merge connecting ranges together
     Vulkan::VertexInputs<BufferRange> ranges_merged{};
     if (!ranges.empty()) {
-        std::ranges::sort(ranges, [](const BufferRange& lhv, const BufferRange& rhv) {
-            return lhv.base_address < rhv.base_address;
-        });
+        std::ranges::sort(ranges, [](auto& a, auto& b) { return a.base_address < b.base_address; });
         ranges_merged.emplace_back(ranges[0]);
-        for (auto range : ranges) {
-            auto& prev_range = ranges_merged.back();
-            if (prev_range.end_address < range.base_address) {
-                ranges_merged.emplace_back(range);
+        static constexpr VAddr MAX_GAP = 64;     // max gap (bytes) to merge
+        static constexpr VAddr SAFETY_PAD = 128; // pad merged ranges
+        for (size_t i = 1; i < ranges.size(); ++i) {
+            auto& curr = ranges[i];
+            auto& prev = ranges_merged.back();
+            const VAddr gap = curr.base_address - prev.end_address;
+            if (gap <= MAX_GAP) {
+                prev.end_address = curr.end_address + SAFETY_PAD;
             } else {
-                prev_range.end_address = std::max(prev_range.end_address, range.end_address);
+                ranges_merged.emplace_back(curr);
             }
         }
     }
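
For reference, a minimal standalone sketch of the merge policy in the hunk above, using a simplified stand-in for BufferRange (the real buffer-cache type carries more state). It shows how neighbouring ranges whose gap is at most MAX_GAP bytes collapse into a single padded range, while a larger gap starts a new entry:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

using VAddr = std::uint64_t;

// Simplified stand-in for the buffer cache's BufferRange.
struct BufferRange {
    VAddr base_address;
    VAddr end_address;
};

// Mirrors the merge policy above: sort by base address, then fold in any
// neighbour whose gap to the previous range is at most MAX_GAP, padding the
// merged end by SAFETY_PAD.
std::vector<BufferRange> MergeRanges(std::vector<BufferRange> ranges) {
    static constexpr VAddr MAX_GAP = 64;
    static constexpr VAddr SAFETY_PAD = 128;
    std::vector<BufferRange> merged;
    if (ranges.empty()) {
        return merged;
    }
    std::sort(ranges.begin(), ranges.end(),
              [](const BufferRange& a, const BufferRange& b) { return a.base_address < b.base_address; });
    merged.push_back(ranges[0]);
    for (std::size_t i = 1; i < ranges.size(); ++i) {
        const BufferRange& curr = ranges[i];
        BufferRange& prev = merged.back();
        const VAddr gap = curr.base_address - prev.end_address;
        if (gap <= MAX_GAP) {
            prev.end_address = curr.end_address + SAFETY_PAD;
        } else {
            merged.push_back(curr);
        }
    }
    return merged;
}

int main() {
    // The first two ranges are 32 bytes apart, so they merge (and gain the
    // safety pad); the third range is far away and stays separate.
    const auto merged = MergeRanges({{0x1000, 0x1100}, {0x1120, 0x1200}, {0x4000, 0x4100}});
    for (const auto& r : merged) {
        std::printf("[0x%llx, 0x%llx)\n", static_cast<unsigned long long>(r.base_address),
                    static_cast<unsigned long long>(r.end_address));
    }
    return 0;
}

Because the addresses are unsigned, the gap computation assumes the sorted ranges do not overlap; an overlapping neighbour would wrap around to a huge gap and stay unmerged.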


@@ -324,9 +324,12 @@ void GraphicsPipeline::GetVertexInputs(VertexInputs<Attribute>& attributes,
             .format = LiverpoolToVK::SurfaceFormat(buffer.GetDataFmt(), buffer.GetNumberFmt()),
             .offset = 0,
         });
+        // ensure vertex stride is 4-byte aligned
+        const u32 rawStride = buffer.GetStride();
+        const u32 safeStride = Common::AlignUp(rawStride, 4u);
         bindings.push_back(Binding{
             .binding = attrib.semantic,
-            .stride = buffer.GetStride(),
+            .stride = safeStride,
             .inputRate = attrib.GetStepRate() == Shader::Gcn::VertexAttribute::InstanceIdType::None
                              ? vk::VertexInputRate::eVertex
                              : vk::VertexInputRate::eInstance,
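
As a sanity check of the stride rounding, the sketch below reimplements the assumed behaviour of Common::AlignUp for a power-of-two alignment (round the value up to the next multiple) and applies it to a few strides. Only strides that are not already multiples of 4 change:

#include <cstdint>
#include <cstdio>
#include <initializer_list>

using u32 = std::uint32_t;

// Assumed behaviour of Common::AlignUp for a power-of-two alignment:
// round value up to the next multiple of align.
constexpr u32 AlignUp(u32 value, u32 align) {
    return (value + align - 1) & ~(align - 1);
}

int main() {
    // Strides that are already 4-byte aligned pass through unchanged; odd or
    // half-word strides get rounded up to the next multiple of 4.
    for (const u32 stride : {12u, 6u, 13u, 0u}) {
        std::printf("stride %u -> %u\n", stride, AlignUp(stride, 4u));
    }
    return 0;
}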


@@ -577,13 +577,16 @@ void Rasterizer::BindBuffers(const Shader::Info& stage, Shader::Backend::Binding
         } else {
             const auto [vk_buffer, offset] = buffer_cache.ObtainBuffer(
                 vsharp.base_address, size, desc.is_written, desc.is_formatted, buffer_id);
-            const u32 alignment =
+            // enforce at least 16-byte alignment on vertex buffer offsets
+            const u32 baseAlign =
                 is_storage ? instance.StorageMinAlignment() : instance.UniformMinAlignment();
-            const u32 offset_aligned = Common::AlignDown(offset, alignment);
-            const u32 adjust = offset - offset_aligned;
-            ASSERT(adjust % 4 == 0);
+            const u32 minAlign = std::max<u32>(baseAlign, 16u);
+            const u32 offset_align = Common::AlignDown(offset, minAlign);
+            const u32 adjust = offset - offset_align;
+            ASSERT_MSG(adjust % 4 == 0 && adjust < minAlign,
+                       "Unsafe vertex offset adjust %u >= align %u", adjust, minAlign);
             push_data.AddOffset(binding.buffer, adjust);
-            buffer_infos.emplace_back(vk_buffer->Handle(), offset_aligned, size + adjust);
+            buffer_infos.emplace_back(vk_buffer->Handle(), offset_align, size + adjust);
             if (auto barrier =
                     vk_buffer->GetBarrier(desc.is_written ? vk::AccessFlagBits2::eShaderWrite
                                                           : vk::AccessFlagBits2::eShaderRead,
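
The offset handling in this last hunk can be illustrated in isolation. Below is a sketch that assumes Common::AlignDown(value, align) rounds down to the previous multiple of a power-of-two alignment; the descriptor is bound at the aligned-down offset while the leftover adjust is pushed to the shader, so adjust is always smaller than the chosen alignment. The alignment value and offset here are hypothetical, chosen only to show the arithmetic:

#include <algorithm>
#include <cstdint>
#include <cstdio>

using u32 = std::uint32_t;

// Assumed behaviour of Common::AlignDown for a power-of-two alignment:
// round value down to the previous multiple of align.
constexpr u32 AlignDown(u32 value, u32 align) {
    return value & ~(align - 1);
}

int main() {
    // Hypothetical device minimum alignment and buffer offset.
    const u32 base_align = 4;                             // e.g. a permissive min offset alignment
    const u32 min_align = std::max<u32>(base_align, 16u); // clamp to at least 16 bytes
    const u32 offset = 0x1238;

    const u32 offset_aligned = AlignDown(offset, min_align);
    const u32 adjust = offset - offset_aligned;

    // The descriptor is bound at offset_aligned; adjust (here 8 bytes) is
    // pushed to the shader, and the bound size grows by the same amount.
    std::printf("offset=0x%x aligned=0x%x adjust=%u\n", offset, offset_aligned, adjust);
    return 0;
}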