2022-04-23 10:59:50 +02:00
|
|
|
// SPDX-FileCopyrightText: Copyright 2019 yuzu Emulator Project
|
|
|
|
// SPDX-License-Identifier: GPL-2.0-or-later
|
2019-02-22 05:27:25 +01:00
|
|
|
|
|
|
|
#pragma once

#include <array>
#include <condition_variable>
#include <cstddef>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>
#include <utility>
#include <vector>

#include "common/alignment.h"
#include "common/common_types.h"
#include "common/polyfill_thread.h"
#include "video_core/renderer_vulkan/vk_master_semaphore.h"
#include "video_core/vulkan_common/vulkan_wrapper.h"
|
2019-02-22 05:27:25 +01:00
|
|
|
|
|
|
|
namespace Vulkan {
|
|
|
|
|
2020-09-10 08:43:30 +02:00
|
|
|
class CommandPool;
|
2020-12-26 05:10:53 +01:00
|
|
|
class Device;
|
2020-12-30 06:25:23 +01:00
|
|
|
class Framebuffer;
|
2021-04-01 06:36:22 +02:00
|
|
|
class GraphicsPipeline;
|
2020-02-21 00:35:53 +01:00
|
|
|
class StateTracker;
|
2022-06-26 06:38:44 +02:00
|
|
|
class QueryCache;
|
2019-07-07 08:12:21 +02:00
|
|
|
|
2019-12-13 06:24:48 +01:00
|
|
|
/// The scheduler abstracts command buffer and fence management with an interface that's able to do
/// OpenGL-like operations on Vulkan command buffers.
///
/// Commands are recorded host-side as type-erased callables into fixed-size chunks
/// (CommandChunk) and replayed onto a Vulkan command buffer by a worker thread; GPU/CPU
/// synchronization is tracked through a timeline-style MasterSemaphore tick counter.
class Scheduler {
public:
    explicit Scheduler(const Device& device, StateTracker& state_tracker);
    ~Scheduler();

    /// Sends the current execution context to the GPU.
    /// @param signal_semaphore Optional semaphore signaled when the submission completes.
    /// @param wait_semaphore   Optional semaphore the submission waits on before executing.
    void Flush(VkSemaphore signal_semaphore = nullptr, VkSemaphore wait_semaphore = nullptr);

    /// Sends the current execution context to the GPU and waits for it to complete.
    void Finish(VkSemaphore signal_semaphore = nullptr, VkSemaphore wait_semaphore = nullptr);

    /// Waits for the worker thread to finish executing everything. After this function returns it's
    /// safe to touch worker resources.
    void WaitWorker();

    /// Sends currently recorded work to the worker thread.
    void DispatchWork();

    /// Requests to begin a renderpass.
    void RequestRenderpass(const Framebuffer* framebuffer);

    /// Requests the current execution context to be able to execute operations only allowed outside
    /// of a renderpass.
    void RequestOutsideRenderPassOperationContext();

    /// Update the pipeline to the current execution context.
    /// @returns Whether the pipeline changed (presumably; defined in the .cpp — confirm there).
    bool UpdateGraphicsPipeline(GraphicsPipeline* pipeline);

    /// Update the rescaling state. Returns true if the state has to be updated.
    bool UpdateRescaling(bool is_rescaling);

    /// Invalidates current command buffer state except for render passes
    void InvalidateState();

    /// Assigns the query cache.
    /// NOTE: stored as a non-owning pointer; the cache must outlive the scheduler.
    void SetQueryCache(QueryCache& query_cache_) {
        query_cache = &query_cache_;
    }

    /// Send work to a separate thread.
    /// The callable is moved into the current chunk; if the chunk is full, the pending work is
    /// dispatched first and the command is recorded into a fresh chunk.
    template <typename T>
    void Record(T&& command) {
        // Record moves from `command` only on success, so retrying after a full
        // chunk is safe: a failed Record leaves the callable untouched.
        if (chunk->Record(command)) {
            return;
        }
        DispatchWork();
        // After DispatchWork a fresh chunk is available; a second failure would mean the
        // command alone exceeds the chunk size, which Record's static_assert rules out.
        (void)chunk->Record(command);
    }

    /// Returns the current command buffer tick.
    [[nodiscard]] u64 CurrentTick() const noexcept {
        return master_semaphore->CurrentTick();
    }

    /// Returns true when a tick has been triggered by the GPU.
    [[nodiscard]] bool IsFree(u64 tick) const noexcept {
        return master_semaphore->IsFree(tick);
    }

    /// Waits for the given tick to trigger on the GPU.
    void Wait(u64 tick) {
        if (tick >= master_semaphore->CurrentTick()) {
            // Make sure we are not waiting for the current tick without signalling
            Flush();
        }
        master_semaphore->Wait(tick);
    }

    /// Returns the master timeline semaphore.
    [[nodiscard]] MasterSemaphore& GetMasterSemaphore() const noexcept {
        return *master_semaphore;
    }

private:
    /// Type-erased base class for a recorded command. Commands form an intrusive
    /// singly-linked list inside a CommandChunk, in recording order.
    class Command {
    public:
        virtual ~Command() = default;

        /// Replays this command onto the given command buffer.
        virtual void Execute(vk::CommandBuffer cmdbuf) const = 0;

        Command* GetNext() const {
            return next;
        }

        void SetNext(Command* next_) {
            next = next_;
        }

    private:
        Command* next = nullptr; ///< Next command in the chunk's intrusive list.
    };

    /// Wraps a concrete callable (typically a lambda) as a Command.
    template <typename T>
    class TypedCommand final : public Command {
    public:
        explicit TypedCommand(T&& command_) : command{std::move(command_)} {}
        ~TypedCommand() override = default;

        // Commands are constructed in place inside a chunk and never relocated.
        TypedCommand(TypedCommand&&) = delete;
        TypedCommand& operator=(TypedCommand&&) = delete;

        void Execute(vk::CommandBuffer cmdbuf) const override {
            command(cmdbuf);
        }

    private:
        T command;
    };

    /// Fixed-size arena that stores recorded commands contiguously (placement-new)
    /// and links them into an execution list. Chunks are recycled via chunk_reserve.
    class CommandChunk final {
    public:
        /// Executes every recorded command in order (defined in the .cpp).
        void ExecuteAll(vk::CommandBuffer cmdbuf);

        /// Tries to move `command` into the arena.
        /// @returns False when the chunk is full; `command` is left unmodified in that case.
        template <typename T>
        bool Record(T& command) {
            using FuncType = TypedCommand<T>;
            static_assert(sizeof(FuncType) < sizeof(data), "Lambda is too large");

            // Align the write cursor for FuncType before checking for space.
            command_offset = Common::AlignUp(command_offset, alignof(FuncType));
            if (command_offset > sizeof(data) - sizeof(FuncType)) {
                return false;
            }
            Command* const current_last = last;
            // Construct in place inside the arena; destruction happens on replay
            // (presumably in ExecuteAll — confirm in the .cpp).
            last = new (data.data() + command_offset) FuncType(std::move(command));

            if (current_last) {
                current_last->SetNext(last);
            } else {
                first = last;
            }
            command_offset += sizeof(FuncType);
            return true;
        }

        /// Flags this chunk as ending in a queue submission.
        void MarkSubmit() {
            submit = true;
        }

        bool Empty() const {
            return command_offset == 0;
        }

        bool HasSubmit() const {
            return submit;
        }

    private:
        Command* first = nullptr; ///< Head of the recorded-command list.
        Command* last = nullptr;  ///< Tail of the recorded-command list.

        size_t command_offset = 0; ///< Write cursor into `data`.
        bool submit = false;       ///< True when the chunk ends in a submission.
        // Backing storage for placement-new'd commands; max_align_t alignment keeps
        // the AlignUp-based offsets valid for any FuncType.
        alignas(std::max_align_t) std::array<u8, 0x8000> data{};
    };

    /// Cached render state of the command buffer being recorded.
    struct State {
        VkRenderPass renderpass = nullptr;
        VkFramebuffer framebuffer = nullptr;
        VkExtent2D render_area = {0, 0};
        GraphicsPipeline* graphics_pipeline = nullptr;
        bool is_rescaling = false;
        bool rescaling_defined = false; ///< Whether is_rescaling holds a meaningful value.
    };

    /// Worker thread loop: drains work_queue until stop is requested.
    void WorkerThread(std::stop_token stop_token);

    void AllocateWorkerCommandBuffer();

    void SubmitExecution(VkSemaphore signal_semaphore, VkSemaphore wait_semaphore);

    void AllocateNewContext();

    void EndPendingOperations();

    void EndRenderPass();

    /// Grabs a recycled chunk from chunk_reserve or allocates a new one.
    void AcquireNewChunk();

    const Device& device;
    StateTracker& state_tracker;

    std::unique_ptr<MasterSemaphore> master_semaphore;
    std::unique_ptr<CommandPool> command_pool;

    QueryCache* query_cache = nullptr; ///< Non-owning; set via SetQueryCache.

    vk::CommandBuffer current_cmdbuf;

    std::unique_ptr<CommandChunk> chunk; ///< Chunk currently being recorded into.

    State state;

    // Images attached to the current render pass (9 = 8 color + 1 depth/stencil,
    // presumably — confirm against the renderer's attachment limit).
    u32 num_renderpass_images = 0;
    std::array<VkImage, 9> renderpass_images{};
    std::array<VkImageSubresourceRange, 9> renderpass_image_ranges{};

    std::queue<std::unique_ptr<CommandChunk>> work_queue;         ///< Chunks pending execution.
    std::vector<std::unique_ptr<CommandChunk>> chunk_reserve;     ///< Recycled chunks.
    std::mutex reserve_mutex;                                     ///< Guards chunk_reserve.
    std::mutex work_mutex;                                        ///< Guards work_queue.
    std::condition_variable_any work_cv; ///< Signals the worker; _any for stop_token waits.
    std::condition_variable wait_cv;     ///< Signals WaitWorker when the queue drains.
    std::jthread worker_thread;          ///< Auto-joining, stop_token-aware worker.
};
|
|
|
|
|
|
|
|
} // namespace Vulkan
|