// Copyright 2018 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <bitset>
#include <cstring>

#include "common/assert.h"
#include "common/logging/log.h"
#include "core/core.h"
#include "video_core/engines/kepler_compute.h"
#include "video_core/engines/maxwell_3d.h"
#include "video_core/engines/shader_type.h"
#include "video_core/memory_manager.h"
#include "video_core/rasterizer_interface.h"
#include "video_core/renderer_base.h"
#include "video_core/textures/decoders.h"

namespace Tegra::Engines {

KeplerCompute::KeplerCompute(Core::System& system, VideoCore::RasterizerInterface& rasterizer,
                             MemoryManager& memory_manager)
    : system{system}, rasterizer{rasterizer}, memory_manager{memory_manager},
      upload_state{memory_manager, regs.upload} {}

KeplerCompute::~KeplerCompute() = default;

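// Writes a single method argument into the register file and triggers any side effect tied to
// the written register (upload state machine, compute launch).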
void KeplerCompute::CallMethod(u32 method, u32 method_argument, bool is_last_call) {
    ASSERT_MSG(method < Regs::NUM_REGS,
               "Invalid KeplerCompute register, increase the size of the Regs structure");

    regs.reg_array[method] = method_argument;

    switch (method) {
    case KEPLER_COMPUTE_REG_INDEX(exec_upload): {
        upload_state.ProcessExec(regs.exec_upload.linear != 0);
        break;
    }
    case KEPLER_COMPUTE_REG_INDEX(data_upload): {
        upload_state.ProcessData(method_argument, is_last_call);
        if (is_last_call) {
            system.GPU().Maxwell3D().OnMemoryWrite();
        }
        break;
    }
    case KEPLER_COMPUTE_REG_INDEX(launch):
        ProcessLaunch();
        break;
    default:
        break;
    }
}

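// Writes a block of consecutive arguments to the same method, forwarding each value to
// CallMethod and flagging the final pending write as the last call.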
void KeplerCompute::CallMultiMethod(u32 method, const u32* base_start, u32 amount,
                                    u32 methods_pending) {
    for (std::size_t i = 0; i < amount; i++) {
        CallMethod(method, base_start[i], methods_pending - static_cast<u32>(i) <= 1);
    }
}

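// Reads the texture handle stored at the given offset of the texture constant buffer and
// resolves it into the full texture and sampler information.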
Texture::FullTextureInfo KeplerCompute::GetTexture(std::size_t offset) const {
    const std::bitset<8> cbuf_mask = launch_description.const_buffer_enable_mask.Value();
    ASSERT(cbuf_mask[regs.tex_cb_index]);

    const auto& texinfo = launch_description.const_buffer_config[regs.tex_cb_index];
    ASSERT(texinfo.Address() != 0);

    const GPUVAddr address = texinfo.Address() + offset * sizeof(Texture::TextureHandle);
    ASSERT(address < texinfo.Address() + texinfo.size);

    const Texture::TextureHandle tex_handle{memory_manager.Read<u32>(address)};
    return GetTextureInfo(tex_handle);
}

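// Splits a texture handle into its TIC (image) and TSC (sampler) indices and fetches both
// descriptors from GPU memory.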
Texture::FullTextureInfo KeplerCompute::GetTextureInfo(Texture::TextureHandle tex_handle) const {
    return Texture::FullTextureInfo{GetTICEntry(tex_handle.tic_id), GetTSCEntry(tex_handle.tsc_id)};
}

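// Reads a 32-bit value at the given byte offset of the compute constant buffer selected by
// const_buffer.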
u32 KeplerCompute::AccessConstBuffer32(ShaderType stage, u64 const_buffer, u64 offset) const {
    ASSERT(stage == ShaderType::Compute);
    const auto& buffer = launch_description.const_buffer_config[const_buffer];
    u32 result;
    std::memcpy(&result, memory_manager.GetPointer(buffer.Address() + offset), sizeof(u32));
    return result;
}

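// Resolves a sampler bound through the texture constant buffer selected by regs.tex_cb_index.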
SamplerDescriptor KeplerCompute::AccessBoundSampler(ShaderType stage, u64 offset) const {
    return AccessBindlessSampler(stage, regs.tex_cb_index, offset * sizeof(Texture::TextureHandle));
}

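// Resolves a bindless sampler by reading its texture handle from an arbitrary constant buffer.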
SamplerDescriptor KeplerCompute::AccessBindlessSampler(ShaderType stage, u64 const_buffer,
                                                       u64 offset) const {
    ASSERT(stage == ShaderType::Compute);
    const auto& tex_info_buffer = launch_description.const_buffer_config[const_buffer];
    const GPUVAddr tex_info_address = tex_info_buffer.Address() + offset;
    return AccessSampler(memory_manager.Read<u32>(tex_info_address));
}

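// Builds a sampler descriptor from a raw texture handle, taking the depth-compare (shadow)
// flag from the TSC entry.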
SamplerDescriptor KeplerCompute::AccessSampler(u32 handle) const {
    const Texture::TextureHandle tex_handle{handle};
    const Texture::FullTextureInfo tex_info = GetTextureInfo(tex_handle);
    SamplerDescriptor result = SamplerDescriptor::FromTIC(tex_info.tic);
    result.is_shadow.Assign(tex_info.tsc.depth_compare_enabled.Value());
    return result;
}

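// Guest driver profile accesses are forwarded to the rasterizer.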
VideoCore::GuestDriverProfile& KeplerCompute::AccessGuestDriverProfile() {
    return rasterizer.AccessGuestDriverProfile();
}

const VideoCore::GuestDriverProfile& KeplerCompute::AccessGuestDriverProfile() const {
    return rasterizer.AccessGuestDriverProfile();
}

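// Reads the launch descriptor from GPU memory and dispatches the compute invocation whose code
// starts at code_loc plus the program offset given in the descriptor.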
void KeplerCompute::ProcessLaunch() {
    const GPUVAddr launch_desc_loc = regs.launch_desc_loc.Address();
    memory_manager.ReadBlockUnsafe(launch_desc_loc, &launch_description,
                                   LaunchParams::NUM_LAUNCH_PARAMETERS * sizeof(u32));

    const GPUVAddr code_addr = regs.code_loc.Address() + launch_description.program_start;
    LOG_TRACE(HW_GPU, "Compute invocation launched at address 0x{:016x}", code_addr);

    rasterizer.DispatchCompute(code_addr);
}

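// Fetches a texture image control (TIC) entry from the descriptor pool pointed to by regs.tic.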
Texture::TICEntry KeplerCompute::GetTICEntry(u32 tic_index) const {
    const GPUVAddr tic_address_gpu{regs.tic.Address() + tic_index * sizeof(Texture::TICEntry)};

    Texture::TICEntry tic_entry;
    memory_manager.ReadBlockUnsafe(tic_address_gpu, &tic_entry, sizeof(Texture::TICEntry));

    return tic_entry;
}

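// Fetches a texture sampler control (TSC) entry from the descriptor pool pointed to by regs.tsc.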
Texture::TSCEntry KeplerCompute::GetTSCEntry(u32 tsc_index) const {
    const GPUVAddr tsc_address_gpu{regs.tsc.Address() + tsc_index * sizeof(Texture::TSCEntry)};

    Texture::TSCEntry tsc_entry;
    memory_manager.ReadBlockUnsafe(tsc_address_gpu, &tsc_entry, sizeof(Texture::TSCEntry));
    return tsc_entry;
}

} // namespace Tegra::Engines