// Copyright 2018 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
|
#include <algorithm>
#include <array>
#include <bit>
#include <optional>
#include <span>
#include <utility>
#include <vector>

#include "common/alignment.h"
#include "common/assert.h"
#include "common/common_types.h"
#include "common/logging/log.h"
#include "video_core/vulkan_common/vulkan_device.h"
#include "video_core/vulkan_common/vulkan_memory_allocator.h"
#include "video_core/vulkan_common/vulkan_wrapper.h"
|
2019-02-19 04:46:06 +01:00
|
|
|
|
|
|
|
namespace Vulkan {
|
2020-01-06 21:14:41 +01:00
|
|
|
namespace {
|
2020-12-31 02:58:05 +01:00
|
|
|
struct Range {
|
|
|
|
u64 begin;
|
|
|
|
u64 end;
|
2020-01-06 21:14:41 +01:00
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
[[nodiscard]] bool Contains(u64 iterator, u64 size) const noexcept {
|
|
|
|
return iterator < end && begin < iterator + size;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2021-01-04 00:51:11 +01:00
|
|
|
/// Returns the size of the VkDeviceMemory chunk to allocate for a resource that
/// needs at least required_size bytes. Sub-allocating from a few large chunks
/// keeps the number of device memory objects low.
[[nodiscard]] u64 AllocationChunkSize(u64 required_size) {
    // Preferred chunk sizes in ascending order (all expressed in KiB shifts).
    static constexpr std::array sizes{
        0x1000ULL << 10, 0x1400ULL << 10, 0x1800ULL << 10, 0x1c00ULL << 10, 0x2000ULL << 10,
        0x3200ULL << 10, 0x4000ULL << 10, 0x6000ULL << 10, 0x8000ULL << 10, 0xA000ULL << 10,
        0x10000ULL << 10, 0x18000ULL << 10, 0x20000ULL << 10,
    };
    static_assert(std::is_sorted(sizes.begin(), sizes.end()));

    // Pick the smallest predefined chunk that fits the request.
    const auto first_fit = std::lower_bound(sizes.begin(), sizes.end(), required_size);
    if (first_fit != sizes.end()) {
        return *first_fit;
    }
    // Oversized request: round it up to a 4 MiB boundary instead.
    return Common::AlignUp(required_size, 4ULL << 20);
}
|
2021-01-04 00:51:11 +01:00
|
|
|
|
|
|
|
/// Translates the abstract MemoryUsage into the Vulkan memory property flags we
/// would ideally allocate from for that usage.
[[nodiscard]] VkMemoryPropertyFlags MemoryUsagePropertyFlags(MemoryUsage usage) {
    // Host-visible coherent memory is the baseline for anything the CPU touches.
    constexpr VkMemoryPropertyFlags host_flags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    switch (usage) {
    case MemoryUsage::DeviceLocal:
        return VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    case MemoryUsage::Upload:
        return host_flags;
    case MemoryUsage::Download:
        // Downloads additionally want cached memory for fast CPU readback.
        return host_flags | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    }
    UNREACHABLE_MSG("Invalid memory usage={}", usage);
    return host_flags;
}
|
2020-01-06 21:14:41 +01:00
|
|
|
} // Anonymous namespace
|
2019-02-19 04:46:06 +01:00
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
class MemoryAllocation {
|
2019-02-19 04:46:06 +01:00
|
|
|
public:
|
2020-12-31 02:58:05 +01:00
|
|
|
explicit MemoryAllocation(const Device& device_, vk::DeviceMemory memory_,
|
2021-01-04 00:51:11 +01:00
|
|
|
VkMemoryPropertyFlags properties, u64 allocation_size_, u32 type)
|
|
|
|
: device{device_}, memory{std::move(memory_)}, allocation_size{allocation_size_},
|
|
|
|
property_flags{properties}, shifted_memory_type{1U << type} {}
|
2019-02-19 04:46:06 +01:00
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
[[nodiscard]] std::optional<MemoryCommit> Commit(VkDeviceSize size, VkDeviceSize alignment) {
|
|
|
|
const std::optional<u64> alloc = FindFreeRegion(size, alignment);
|
|
|
|
if (!alloc) {
|
|
|
|
// Signal out of memory, it'll try to do more allocations.
|
|
|
|
return std::nullopt;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
2020-12-31 02:58:05 +01:00
|
|
|
const Range range{
|
|
|
|
.begin = *alloc,
|
|
|
|
.end = *alloc + size,
|
|
|
|
};
|
|
|
|
commits.insert(std::ranges::upper_bound(commits, *alloc, {}, &Range::begin), range);
|
2021-01-06 05:18:37 +01:00
|
|
|
return std::make_optional<MemoryCommit>(this, *memory, *alloc, *alloc + size);
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
void Free(u64 begin) {
|
|
|
|
const auto it = std::ranges::find(commits, begin, &Range::begin);
|
|
|
|
ASSERT_MSG(it != commits.end(), "Invalid commit");
|
|
|
|
commits.erase(it);
|
|
|
|
}
|
2020-01-06 21:14:41 +01:00
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
[[nodiscard]] std::span<u8> Map() {
|
2021-01-04 00:51:11 +01:00
|
|
|
if (memory_mapped_span.empty()) {
|
|
|
|
u8* const raw_pointer = memory.Map(0, allocation_size);
|
|
|
|
memory_mapped_span = std::span<u8>(raw_pointer, allocation_size);
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
2020-12-31 02:58:05 +01:00
|
|
|
return memory_mapped_span;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns whether this allocation is compatible with the arguments.
|
2021-01-04 00:51:11 +01:00
|
|
|
[[nodiscard]] bool IsCompatible(VkMemoryPropertyFlags flags, u32 type_mask) const {
|
|
|
|
return (flags & property_flags) && (type_mask & shifted_memory_type) != 0;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
2020-12-31 02:58:05 +01:00
|
|
|
[[nodiscard]] static constexpr u32 ShiftType(u32 type) {
|
2019-02-19 04:46:06 +01:00
|
|
|
return 1U << type;
|
|
|
|
}
|
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
[[nodiscard]] std::optional<u64> FindFreeRegion(u64 size, u64 alignment) noexcept {
|
|
|
|
ASSERT(std::has_single_bit(alignment));
|
|
|
|
const u64 alignment_log2 = std::countr_zero(alignment);
|
|
|
|
std::optional<u64> candidate;
|
|
|
|
u64 iterator = 0;
|
|
|
|
auto commit = commits.begin();
|
|
|
|
while (iterator + size <= allocation_size) {
|
|
|
|
candidate = candidate.value_or(iterator);
|
|
|
|
if (commit == commits.end()) {
|
|
|
|
break;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
2020-12-31 02:58:05 +01:00
|
|
|
if (commit->Contains(*candidate, size)) {
|
|
|
|
candidate = std::nullopt;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
2020-12-31 02:58:05 +01:00
|
|
|
iterator = Common::AlignUpLog2(commit->end, alignment_log2);
|
|
|
|
++commit;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
2020-12-31 02:58:05 +01:00
|
|
|
return candidate;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
|
|
|
|
2021-01-04 00:51:11 +01:00
|
|
|
const Device& device; ///< Vulkan device.
|
|
|
|
const vk::DeviceMemory memory; ///< Vulkan memory allocation handler.
|
|
|
|
const u64 allocation_size; ///< Size of this allocation.
|
|
|
|
const VkMemoryPropertyFlags property_flags; ///< Vulkan memory property flags.
|
|
|
|
const u32 shifted_memory_type; ///< Shifted Vulkan memory type.
|
|
|
|
std::vector<Range> commits; ///< All commit ranges done from this allocation.
|
|
|
|
std::span<u8> memory_mapped_span; ///< Memory mapped span. Empty if not queried before.
|
2020-12-31 02:58:05 +01:00
|
|
|
};
|
2019-02-19 04:46:06 +01:00
|
|
|
|
2021-01-06 05:18:37 +01:00
|
|
|
// Binds this commit to its parent allocation and to the [interval_begin, interval_end)
// byte interval inside the backing VkDeviceMemory.
MemoryCommit::MemoryCommit(MemoryAllocation* parent, VkDeviceMemory backing, u64 interval_begin,
                           u64 interval_end) noexcept
    : allocation{parent}, memory{backing}, begin{interval_begin}, end{interval_end} {}
|
2019-02-19 04:46:06 +01:00
|
|
|
|
2020-12-31 02:58:05 +01:00
|
|
|
// Returns the committed interval to the parent allocation (no-op when this commit
// has been moved-from, since Release checks for a null allocation).
MemoryCommit::~MemoryCommit() {
    Release();
}
|
|
|
|
|
|
|
|
/// Move assignment: releases the currently held interval (if any) and adopts rhs's.
/// The moved-from commit is left detached so its destructor is a no-op.
MemoryCommit& MemoryCommit::operator=(MemoryCommit&& rhs) noexcept {
    if (this == &rhs) {
        // Guard against self-move: without it we would Release() our interval and then
        // re-adopt the now-dangling state, double-freeing in the destructor.
        return *this;
    }
    Release();
    allocation = std::exchange(rhs.allocation, nullptr);
    memory = rhs.memory;
    begin = rhs.begin;
    end = rhs.end;
    span = std::exchange(rhs.span, std::span<u8>{});
    return *this;
}
|
|
|
|
|
|
|
|
// Move construction: transfers ownership of the interval. The moved-from commit is
// left with a null allocation and an empty span, making its destructor a no-op.
MemoryCommit::MemoryCommit(MemoryCommit&& rhs) noexcept
    : allocation{std::exchange(rhs.allocation, nullptr)}, memory{rhs.memory}, begin{rhs.begin},
      end{rhs.end}, span{std::exchange(rhs.span, std::span<u8>{})} {}
|
2020-12-31 02:58:05 +01:00
|
|
|
|
|
|
|
/// Lazily maps this commit's interval of the parent allocation and caches the span.
std::span<u8> MemoryCommit::Map() {
    if (!span.empty()) {
        // Already mapped; return the cached sub-span.
        return span;
    }
    span = allocation->Map().subspan(begin, end - begin);
    return span;
}
|
|
|
|
|
|
|
|
void MemoryCommit::Release() {
|
|
|
|
if (allocation) {
|
2021-01-06 05:18:37 +01:00
|
|
|
allocation->Free(begin);
|
2020-12-31 02:58:05 +01:00
|
|
|
}
|
|
|
|
}
|
2019-02-19 04:46:06 +01:00
|
|
|
|
2021-01-03 22:11:01 +01:00
|
|
|
// Caches the physical device's memory properties up front so per-allocation type
// lookups (FindType) don't have to re-query Vulkan.
MemoryAllocator::MemoryAllocator(const Device& device_)
    : device{device_}, properties{device_.GetPhysical().GetMemoryProperties()} {}
|
2019-02-19 04:46:06 +01:00
|
|
|
|
2021-01-03 22:11:01 +01:00
|
|
|
// NOTE(review): defaulted out-of-line, presumably so std::unique_ptr<MemoryAllocation>
// can destroy a type that is only forward-declared in the header -- confirm there.
MemoryAllocator::~MemoryAllocator() = default;
|
2019-02-19 04:46:06 +01:00
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
/// Commits memory satisfying 'requirements' for the given usage, growing the pool
/// with a fresh VkDeviceMemory allocation when no existing allocation has room.
MemoryCommit MemoryAllocator::Commit(const VkMemoryRequirements& requirements, MemoryUsage usage) {
    // Find the fastest memory flags we can afford with the current requirements
    const VkMemoryPropertyFlags flags = MemoryPropertyFlags(requirements.memoryTypeBits, usage);
    std::optional<MemoryCommit> commit = TryCommit(requirements, flags);
    if (!commit) {
        // Commit has failed, allocate more memory.
        // TODO(Rodrigo): Handle out of memory situations in some way like flushing to guest memory.
        AllocMemory(flags, requirements.memoryTypeBits, AllocationChunkSize(requirements.size));
        // Commit again, this time it won't fail since there's a fresh allocation above.
        // If it does, there's a bug.
        commit = TryCommit(requirements, flags);
    }
    return std::move(commit).value();
}
|
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
/// Commits memory suitable for the buffer and binds the buffer to it.
MemoryCommit MemoryAllocator::Commit(const vk::Buffer& buffer, MemoryUsage usage) {
    const auto requirements = device.GetLogical().GetBufferMemoryRequirements(*buffer);
    MemoryCommit commit = Commit(requirements, usage);
    buffer.BindMemory(commit.Memory(), commit.Offset());
    return commit;
}
|
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
/// Commits memory suitable for the image and binds the image to it.
MemoryCommit MemoryAllocator::Commit(const vk::Image& image, MemoryUsage usage) {
    const auto requirements = device.GetLogical().GetImageMemoryRequirements(*image);
    MemoryCommit commit = Commit(requirements, usage);
    image.BindMemory(commit.Memory(), commit.Offset());
    return commit;
}
|
|
|
|
|
2021-01-04 00:51:11 +01:00
|
|
|
/// Allocates a new VkDeviceMemory chunk of 'size' bytes from a memory type matching
/// 'flags' within 'type_mask' and registers it in the allocation pool.
void MemoryAllocator::AllocMemory(VkMemoryPropertyFlags flags, u32 type_mask, u64 size) {
    // .value() throws if no type matches; callers pass a combination that
    // MemoryPropertyFlags() already validated as feasible.
    const u32 type = FindType(flags, type_mask).value();
    const VkMemoryAllocateInfo allocate_info{
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = nullptr,
        .allocationSize = size,
        .memoryTypeIndex = type,
    };
    vk::DeviceMemory memory = device.GetLogical().AllocateMemory(allocate_info);
    allocations.push_back(
        std::make_unique<MemoryAllocation>(device, std::move(memory), flags, size, type));
}
|
|
|
|
|
2021-01-04 00:51:11 +01:00
|
|
|
std::optional<MemoryCommit> MemoryAllocator::TryCommit(const VkMemoryRequirements& requirements,
|
|
|
|
VkMemoryPropertyFlags flags) {
|
2020-01-06 21:14:41 +01:00
|
|
|
for (auto& allocation : allocations) {
|
2021-01-04 00:51:11 +01:00
|
|
|
if (!allocation->IsCompatible(flags, requirements.memoryTypeBits)) {
|
2020-01-06 21:14:41 +01:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if (auto commit = allocation->Commit(requirements.size, requirements.alignment)) {
|
|
|
|
return commit;
|
|
|
|
}
|
|
|
|
}
|
2020-12-31 02:58:05 +01:00
|
|
|
return std::nullopt;
|
2019-02-19 04:46:06 +01:00
|
|
|
}
|
|
|
|
|
2021-01-04 00:51:11 +01:00
|
|
|
/// Resolves the property flags to allocate from for a usage, degrading gracefully
/// to whatever the device actually supports.
VkMemoryPropertyFlags MemoryAllocator::MemoryPropertyFlags(u32 type_mask, MemoryUsage usage) const {
    const VkMemoryPropertyFlags wanted_flags = MemoryUsagePropertyFlags(usage);
    return MemoryPropertyFlags(type_mask, wanted_flags);
}
|
|
|
|
|
|
|
|
/// Returns 'flags' when a matching memory type exists; otherwise recursively strips
/// optional bits (host-cached first, then device-local) until one does.
VkMemoryPropertyFlags MemoryAllocator::MemoryPropertyFlags(u32 type_mask,
                                                           VkMemoryPropertyFlags flags) const {
    if (FindType(flags, type_mask).has_value()) {
        // Found a memory type with those requirements
        return flags;
    }
    if ((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0) {
        // Remove host cached bit in case it's not supported
        return MemoryPropertyFlags(type_mask, flags & ~VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
    }
    if ((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0) {
        // Remove device local, if it's not supported by the requested resource
        return MemoryPropertyFlags(type_mask, flags & ~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
    }
    UNREACHABLE_MSG("No compatible memory types found");
    return 0;
}
|
|
|
|
|
|
|
|
/// Finds the index of a memory type allowed by 'type_mask' whose properties include
/// *all* of the requested 'flags'; std::nullopt when no such type exists.
std::optional<u32> MemoryAllocator::FindType(VkMemoryPropertyFlags flags, u32 type_mask) const {
    for (u32 type_index = 0; type_index < properties.memoryTypeCount; ++type_index) {
        if ((type_mask & (1U << type_index)) == 0) {
            // Type not allowed by the resource's memoryTypeBits.
            continue;
        }
        const VkMemoryPropertyFlags type_flags = properties.memoryTypes[type_index].propertyFlags;
        // BUG FIX: require every requested flag. The previous truthy test
        // ((type_flags & flags) != 0) accepted a type with only a subset of the wanted
        // properties, e.g. HOST_VISIBLE without HOST_COHERENT, which callers then treat
        // as coherent memory. It also made a flags==0 query never match any type.
        if ((type_flags & flags) == flags) {
            // The type matches in type and in the wanted properties.
            return type_index;
        }
    }
    // Failed to find index
    return std::nullopt;
}
|
|
|
|
|
2021-01-03 22:38:15 +01:00
|
|
|
/// Returns true when the given usage implies CPU-accessible (host-visible) memory.
bool IsHostVisible(MemoryUsage usage) noexcept {
    switch (usage) {
    case MemoryUsage::Upload:
    case MemoryUsage::Download:
        return true;
    case MemoryUsage::DeviceLocal:
        return false;
    }
    UNREACHABLE_MSG("Invalid memory usage={}", usage);
    return false;
}
|
|
|
|
|
2019-02-19 04:46:06 +01:00
|
|
|
} // namespace Vulkan
|