2022-08-20 12:03:09 +02:00
|
|
|
// Copyright 2022 Citra Emulator Project
|
2015-05-19 06:21:33 +02:00
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
2018-10-05 12:37:55 +02:00
|
|
|
#include <optional>
|
2017-11-25 23:38:30 +01:00
|
|
|
#include <boost/range/iterator_range.hpp>
|
2022-08-20 23:50:20 +02:00
|
|
|
#include "common/alignment.h"
|
2016-04-30 17:34:51 +02:00
|
|
|
#include "common/logging/log.h"
|
2015-08-17 23:25:21 +02:00
|
|
|
#include "common/microprofile.h"
|
2023-04-21 09:14:55 +02:00
|
|
|
#include "core/memory.h"
|
2022-08-20 10:40:49 +02:00
|
|
|
#include "video_core/rasterizer_cache/rasterizer_cache.h"
|
2023-04-21 09:14:55 +02:00
|
|
|
#include "video_core/regs.h"
|
2023-03-30 13:24:49 +02:00
|
|
|
#include "video_core/renderer_base.h"
|
2023-04-21 09:14:55 +02:00
|
|
|
#include "video_core/renderer_opengl/gl_texture_runtime.h"
|
2017-12-10 00:00:55 +01:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
namespace VideoCore {
|
2017-12-10 00:00:55 +01:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
namespace {
|
2017-12-10 00:00:55 +01:00
|
|
|
|
2022-11-04 23:32:57 +01:00
|
|
|
MICROPROFILE_DEFINE(RasterizerCache_CopySurface, "RasterizerCache", "CopySurface",
|
|
|
|
MP_RGB(128, 192, 64));
|
2019-09-07 08:43:20 +02:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
constexpr auto RangeFromInterval(const auto& map, const auto& interval) {
|
|
|
|
return boost::make_iterator_range(map.equal_range(interval));
|
2017-12-10 00:00:55 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// Bitmask of surface-match categories; values are combined with | and tested
// with & inside FindMatch (hence a plain enum rather than enum class).
enum MatchFlags {
    Exact = 1 << 0,   ///< Surfaces perfectly match
    SubRect = 1 << 1, ///< Surface encompasses params
    Copy = 1 << 2,    ///< Surface we can copy from
    Expand = 1 << 3,  ///< Surface that can expand params
    TexCopy = 1 << 4, ///< Surface that will match a display transfer "texture copy" parameters
};
|
|
|
|
|
|
|
|
/// Get the best surface match (and its match type) for the given flags
///
/// Scans every surface overlapping params' address interval and keeps the best
/// candidate according to a three-tier priority: higher res_scale wins first,
/// then validity, then the longer matched interval. Returns nullptr when no
/// surface satisfies any of the requested match flags.
template <MatchFlags find_flags>
auto FindMatch(const auto& surface_cache, const SurfaceParams& params, ScaleMatch match_scale_type,
               std::optional<SurfaceInterval> validate_interval = std::nullopt) {
    RasterizerCache::SurfaceRef match_surface = nullptr;
    bool match_valid = false;
    u32 match_scale = 0;
    SurfaceInterval match_interval{};

    for (const auto& pair : RangeFromInterval(surface_cache, params.GetInterval())) {
        for (const auto& surface : pair.second) {
            // Exact scale matching requires equality; otherwise any surface with at
            // least the requested res_scale is acceptable.
            const bool res_scale_matched = match_scale_type == ScaleMatch::Exact
                                               ? (params.res_scale == surface->res_scale)
                                               : (params.res_scale <= surface->res_scale);
            // Validity will be checked in GetCopyableInterval
            const bool is_valid =
                find_flags & MatchFlags::Copy
                    ? true
                    : surface->IsRegionValid(validate_interval.value_or(params.GetInterval()));

            // Shared evaluation routine: runs match_fn only when the flag is requested,
            // then applies the best-candidate priority rules described above.
            auto IsMatch_Helper = [&](auto check_type, auto match_fn) {
                if (!(find_flags & check_type))
                    return;

                bool matched;
                SurfaceInterval surface_interval;
                std::tie(matched, surface_interval) = match_fn();
                if (!matched)
                    return;

                // Fill surfaces are scale-agnostic, so the res_scale filter is skipped for them.
                if (!res_scale_matched && match_scale_type != ScaleMatch::Ignore &&
                    surface->type != SurfaceType::Fill)
                    return;

                // Found a match, update only if this is better than the previous one
                auto UpdateMatch = [&] {
                    match_surface = surface;
                    match_valid = is_valid;
                    match_scale = surface->res_scale;
                    match_interval = surface_interval;
                };

                // Priority 1: prefer higher res_scale.
                if (surface->res_scale > match_scale) {
                    UpdateMatch();
                    return;
                } else if (surface->res_scale < match_scale) {
                    return;
                }

                // Priority 2 (equal scale): prefer a valid region over an invalid one.
                if (is_valid && !match_valid) {
                    UpdateMatch();
                    return;
                } else if (is_valid != match_valid) {
                    return;
                }

                // Priority 3 (equal scale and validity): prefer the longer interval.
                if (boost::icl::length(surface_interval) > boost::icl::length(match_interval)) {
                    UpdateMatch();
                }
            };
            IsMatch_Helper(std::integral_constant<MatchFlags, MatchFlags::Exact>{}, [&] {
                return std::make_pair(surface->ExactMatch(params), surface->GetInterval());
            });
            IsMatch_Helper(std::integral_constant<MatchFlags, MatchFlags::SubRect>{}, [&] {
                return std::make_pair(surface->CanSubRect(params), surface->GetInterval());
            });
            IsMatch_Helper(std::integral_constant<MatchFlags, MatchFlags::Copy>{}, [&] {
                // Copy matches always need an explicit interval to validate against.
                ASSERT(validate_interval);
                auto copy_interval =
                    surface->GetCopyableInterval(params.FromInterval(*validate_interval));
                bool matched = boost::icl::length(copy_interval & *validate_interval) != 0 &&
                               surface->CanCopy(params, copy_interval);
                return std::make_pair(matched, copy_interval);
            });
            IsMatch_Helper(std::integral_constant<MatchFlags, MatchFlags::Expand>{}, [&] {
                return std::make_pair(surface->CanExpand(params), surface->GetInterval());
            });
            IsMatch_Helper(std::integral_constant<MatchFlags, MatchFlags::TexCopy>{}, [&] {
                return std::make_pair(surface->CanTexCopy(params), surface->GetInterval());
            });
        }
    }
    return match_surface;
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
} // Anonymous namespace
|
|
|
|
|
|
|
|
// Snapshots the renderer's current resolution scale and texture-filter setting;
// GetFramebufferSurfaces re-checks both and flushes the cache when they change.
RasterizerCache::RasterizerCache(Memory::MemorySystem& memory_, OpenGL::TextureRuntime& runtime_,
                                 Pica::Regs& regs_, RendererBase& renderer_)
    : memory{memory_}, runtime{runtime_}, regs{regs_}, renderer{renderer_},
      resolution_scale_factor{renderer.GetResolutionScaleFactor()},
      use_filter{Settings::values.texture_filter.GetValue() != Settings::TextureFilter::None} {}
|
2017-12-10 00:00:55 +01:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
RasterizerCache::~RasterizerCache() {
#ifndef ANDROID
    // This is for switching renderers, which is unsupported on Android, and costly on shutdown
    ClearAll(false);
#endif
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
// Attempts to perform a display-transfer "texture copy" on the GPU instead of
// through guest memory. Returns false when the configuration cannot be expressed
// as a surface-to-surface copy, signalling the caller to fall back.
bool RasterizerCache::AccelerateTextureCopy(const GPU::Regs::DisplayTransferConfig& config) {
    // Texture copy size is aligned to 16 byte units
    const u32 copy_size = Common::AlignDown(config.texture_copy.size, 16);
    if (copy_size == 0) {
        return false;
    }

    // Input gap/width are specified in 16-byte units; normalize to bytes.
    u32 input_gap = config.texture_copy.input_gap * 16;
    u32 input_width = config.texture_copy.input_width * 16;
    if (input_width == 0 && input_gap != 0) {
        return false;
    }
    // A zero gap (or a width covering the whole copy) degenerates to one linear row.
    if (input_gap == 0 || input_width >= copy_size) {
        input_width = copy_size;
        input_gap = 0;
    }
    if (copy_size % input_width != 0) {
        return false;
    }

    // Same normalization for the output side.
    u32 output_gap = config.texture_copy.output_gap * 16;
    u32 output_width = config.texture_copy.output_width * 16;
    if (output_width == 0 && output_gap != 0) {
        return false;
    }
    if (output_gap == 0 || output_width >= copy_size) {
        output_width = copy_size;
        output_gap = 0;
    }
    if (copy_size % output_width != 0) {
        return false;
    }

    // Describe the source as a raw byte-addressed rectangle; pixel format is
    // inferred by GetTexCopySurface from whatever surface covers this range.
    SurfaceParams src_params;
    src_params.addr = config.GetPhysicalInputAddress();
    src_params.stride = input_width + input_gap; // stride in bytes
    src_params.width = input_width;              // width in bytes
    src_params.height = copy_size / input_width;
    src_params.size = ((src_params.height - 1) * src_params.stride) + src_params.width;
    src_params.end = src_params.addr + src_params.size;

    const auto [src_surface, src_rect] = GetTexCopySurface(src_params);
    if (!src_surface) {
        return false;
    }

    // If the output gap is nonzero ensure the output width matches the source rectangle width,
    // otherwise we cannot use hardware accelerated texture copy. The former is in terms of bytes
    // not pixels so first get the unscaled copy width and calculate the bytes this corresponds to.
    // Note that tiled textures are laid out sequentially in memory, so we multiply that by eight
    // to get the correct byte count.
    if (output_gap != 0 &&
        (output_width != src_surface->BytesInPixels(src_rect.GetWidth() / src_surface->res_scale) *
                             (src_surface->is_tiled ? 8 : 1) ||
         output_gap % src_surface->BytesInPixels(src_surface->is_tiled ? 64 : 1) != 0)) {
        return false;
    }

    // Derive the destination from the source surface, adjusting address/geometry
    // for the output stride (gap is converted from bytes to pixels).
    SurfaceParams dst_params = *src_surface;
    dst_params.addr = config.GetPhysicalOutputAddress();
    dst_params.width = src_rect.GetWidth() / src_surface->res_scale;
    dst_params.stride = dst_params.width + src_surface->PixelsInBytes(
                                               src_surface->is_tiled ? output_gap / 8 : output_gap);
    dst_params.height = src_rect.GetHeight() / src_surface->res_scale;
    dst_params.res_scale = src_surface->res_scale;
    dst_params.UpdateParams();

    // Since we are going to invalidate the gap if there is one, we will have to load it first
    const bool load_gap = output_gap != 0;
    const auto [dst_surface, dst_rect] =
        GetSurfaceSubRect(dst_params, ScaleMatch::Upscale, load_gap);

    if (!dst_surface || dst_surface->type == SurfaceType::Texture ||
        !CheckFormatsBlittable(src_surface->pixel_format, dst_surface->pixel_format)) {
        return false;
    }

    ASSERT(src_rect.GetWidth() == dst_rect.GetWidth());

    const TextureCopy texture_copy = {
        .src_level = src_surface->LevelOf(src_params.addr),
        .dst_level = dst_surface->LevelOf(dst_params.addr),
        .src_offset = {src_rect.left, src_rect.bottom},
        .dst_offset = {dst_rect.left, dst_rect.bottom},
        .extent = {src_rect.GetWidth(), src_rect.GetHeight()},
    };
    runtime.CopyTextures(*src_surface, *dst_surface, texture_copy);

    // The destination's guest memory is now stale relative to the GPU copy.
    InvalidateRegion(dst_params.addr, dst_params.size, dst_surface);
    return true;
}
|
|
|
|
|
|
|
|
// Attempts to perform a display transfer (with optional downscaling and format
// conversion) as a GPU blit between cached surfaces. Returns false to request
// a software fallback when no suitable surfaces or formats are available.
bool RasterizerCache::AccelerateDisplayTransfer(const GPU::Regs::DisplayTransferConfig& config) {
    SurfaceParams src_params;
    src_params.addr = config.GetPhysicalInputAddress();
    src_params.width = config.output_width;
    src_params.stride = config.input_width;
    src_params.height = config.output_height;
    src_params.is_tiled = !config.input_linear;
    src_params.pixel_format = PixelFormatFromGPUPixelFormat(config.input_format);
    src_params.UpdateParams();

    // Output dimensions are halved when the transfer downscales (X for any
    // scaling mode, Y only for ScaleXY).
    SurfaceParams dst_params;
    dst_params.addr = config.GetPhysicalOutputAddress();
    dst_params.width = config.scaling != config.NoScale ? config.output_width.Value() / 2
                                                        : config.output_width.Value();
    dst_params.height = config.scaling == config.ScaleXY ? config.output_height.Value() / 2
                                                         : config.output_height.Value();
    dst_params.is_tiled = config.input_linear != config.dont_swizzle;
    dst_params.pixel_format = PixelFormatFromGPUPixelFormat(config.output_format);
    dst_params.UpdateParams();

    auto [src_surface, src_rect] = GetSurfaceSubRect(src_params, ScaleMatch::Ignore, true);
    if (!src_surface) {
        return false;
    }

    // Match the destination's scale to the source so the blit is 1:1 in scaled space.
    dst_params.res_scale = src_surface->res_scale;

    const auto [dst_surface, dst_rect] = GetSurfaceSubRect(dst_params, ScaleMatch::Upscale, false);
    if (!dst_surface) {
        return false;
    }

    // Tiled and linear layouts have opposite vertical orientation; a second flip
    // request from the config cancels (or introduces) the inversion.
    if (src_surface->is_tiled != dst_surface->is_tiled) {
        std::swap(src_rect.top, src_rect.bottom);
    }
    if (config.flip_vertically) {
        std::swap(src_rect.top, src_rect.bottom);
    }

    if (!CheckFormatsBlittable(src_surface->pixel_format, dst_surface->pixel_format)) {
        return false;
    }

    const TextureBlit texture_blit = {
        .src_level = src_surface->LevelOf(src_params.addr),
        .dst_level = dst_surface->LevelOf(dst_params.addr),
        .src_rect = src_rect,
        .dst_rect = dst_rect,
    };
    runtime.BlitTextures(*src_surface, *dst_surface, texture_blit);

    // Guest memory for the destination is now stale relative to the GPU result.
    InvalidateRegion(dst_params.addr, dst_params.size, dst_surface);
    return true;
}
|
|
|
|
|
|
|
|
// Registers a lightweight Fill surface covering the memory-fill range. The fill
// value and its byte width are stored on the surface so later copies can expand
// it into a clear; the covered region is invalidated since the fill supersedes
// any cached contents.
bool RasterizerCache::AccelerateFill(const GPU::Regs::MemoryFillConfig& config) {
    SurfaceParams fill_params;
    fill_params.addr = config.GetStartAddress();
    fill_params.end = config.GetEndAddress();
    fill_params.size = fill_params.end - fill_params.addr;
    fill_params.type = SurfaceType::Fill;
    // Maximum res_scale so the fill surface always wins scale-based match priority.
    fill_params.res_scale = std::numeric_limits<u16>::max();

    SurfaceRef fill_surface = std::make_shared<OpenGL::Surface>(runtime, fill_params);

    // Copy the raw fill pattern; fill_size records how many of those bytes are used.
    std::memcpy(&fill_surface->fill_data[0], &config.value_32bit, sizeof(u32));
    fill_surface->fill_size = config.fill_32bit ? 4 : (config.fill_24bit ? 3 : 2);

    RegisterSurface(fill_surface);
    InvalidateRegion(fill_surface->addr, fill_surface->size, fill_surface);
    return true;
}
|
|
|
|
|
|
|
|
// Copies the region described by copy_interval from src_surface into dst_surface.
// Fill-type sources become a clear of the destination rectangle; everything else
// is a surface-to-surface blit. copy_interval must map exactly onto dst_surface.
void RasterizerCache::CopySurface(const SurfaceRef& src_surface, const SurfaceRef& dst_surface,
                                  SurfaceInterval copy_interval) {
    MICROPROFILE_SCOPE(RasterizerCache_CopySurface);

    const PAddr copy_addr = copy_interval.lower();
    const SurfaceParams subrect_params = dst_surface->FromInterval(copy_interval);
    const auto dst_rect = dst_surface->GetScaledSubRect(subrect_params);
    ASSERT(subrect_params.GetInterval() == copy_interval && src_surface != dst_surface);

    // A fill source carries no texels; expand its fill value into a clear instead.
    if (src_surface->type == SurfaceType::Fill) {
        const TextureClear clear = {
            .texture_level = dst_surface->LevelOf(copy_addr),
            .texture_rect = dst_rect,
            .value = src_surface->MakeClearValue(copy_addr, dst_surface->pixel_format),
        };
        runtime.ClearTexture(*dst_surface, clear);
        return;
    }

    const TextureBlit blit = {
        .src_level = src_surface->LevelOf(copy_addr),
        .dst_level = dst_surface->LevelOf(copy_addr),
        .src_rect = src_surface->GetScaledSubRect(subrect_params),
        .dst_rect = dst_rect,
    };
    runtime.BlitTextures(*src_surface, *dst_surface, blit);
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
// Returns a cached surface exactly matching params, creating (and optionally
// validating) one when no exact match exists. Requires width == stride; callers
// with strided layouts must use GetSurfaceSubRect instead.
RasterizerCache::SurfaceRef RasterizerCache::GetSurface(const SurfaceParams& params,
                                                        ScaleMatch match_res_scale,
                                                        bool load_if_create) {
    if (params.addr == 0 || params.height * params.width == 0) {
        return nullptr;
    }
    // Use GetSurfaceSubRect instead
    ASSERT(params.width == params.stride);

    // Tiled surfaces are composed of 8x8 tiles, so dimensions must be tile-aligned.
    ASSERT(!params.is_tiled || (params.width % 8 == 0 && params.height % 8 == 0));

    // Check for an exact match in existing surfaces
    SurfaceRef surface = FindMatch<MatchFlags::Exact>(surface_cache, params, match_res_scale);

    if (!surface) {
        u16 target_res_scale = params.res_scale;
        if (match_res_scale != ScaleMatch::Exact) {
            // This surface may have a subrect of another surface with a higher res_scale, find
            // it to adjust our params
            SurfaceParams find_params = params;
            SurfaceRef expandable =
                FindMatch<MatchFlags::Expand>(surface_cache, find_params, match_res_scale);
            if (expandable && expandable->res_scale > target_res_scale) {
                target_res_scale = expandable->res_scale;
            }
            // Keep res_scale when reinterpreting d24s8 -> rgba8
            if (params.pixel_format == PixelFormat::RGBA8) {
                find_params.pixel_format = PixelFormat::D24S8;
                expandable =
                    FindMatch<MatchFlags::Expand>(surface_cache, find_params, match_res_scale);
                if (expandable && expandable->res_scale > target_res_scale) {
                    target_res_scale = expandable->res_scale;
                }
            }
        }
        // No match at all: create a fresh surface at the chosen res_scale.
        SurfaceParams new_params = params;
        new_params.res_scale = target_res_scale;
        surface = CreateSurface(new_params);
        RegisterSurface(surface);
    }

    if (load_if_create) {
        ValidateSurface(surface, params.addr, params.size);
    }

    return surface;
}
|
2016-04-17 00:57:57 +02:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
// Returns a surface that encompasses params together with the scaled sub-rectangle
// within it that params describes. Falls back through several strategies: an
// existing encompassing surface, a res_scale-relaxed match, expanding an adjacent
// surface, and finally creating a brand new one via GetSurface.
RasterizerCache::SurfaceRect_Tuple RasterizerCache::GetSurfaceSubRect(const SurfaceParams& params,
                                                                      ScaleMatch match_res_scale,
                                                                      bool load_if_create) {
    if (params.addr == 0 || params.height * params.width == 0) {
        return std::make_tuple(nullptr, Common::Rectangle<u32>{});
    }

    // Attempt to find encompassing surface
    SurfaceRef surface = FindMatch<MatchFlags::SubRect>(surface_cache, params, match_res_scale);

    // Check if FindMatch failed because of res scaling
    // If that's the case create a new surface with
    // the dimensions of the lower res_scale surface
    // to suggest it should not be used again
    if (!surface && match_res_scale != ScaleMatch::Ignore) {
        surface = FindMatch<MatchFlags::SubRect>(surface_cache, params, ScaleMatch::Ignore);
        if (surface) {
            SurfaceParams new_params = *surface;
            new_params.res_scale = params.res_scale;

            surface = CreateSurface(new_params);
            RegisterSurface(surface);
        }
    }

    // Tiled layouts operate on 8x8 tiles, so round the requested geometry up.
    SurfaceParams aligned_params = params;
    if (params.is_tiled) {
        aligned_params.height = Common::AlignUp(params.height, 8);
        aligned_params.width = Common::AlignUp(params.width, 8);
        aligned_params.stride = Common::AlignUp(params.stride, 8);
        aligned_params.UpdateParams();
    }

    // Check for a surface we can expand before creating a new one
    if (!surface) {
        surface = FindMatch<MatchFlags::Expand>(surface_cache, aligned_params, match_res_scale);
        if (surface) {
            aligned_params.width = aligned_params.stride;
            aligned_params.UpdateParams();

            // Grow the matched surface's interval to cover both it and the request,
            // then replace it with a new surface spanning the union.
            SurfaceParams new_params = *surface;
            new_params.addr = std::min(aligned_params.addr, surface->addr);
            new_params.end = std::max(aligned_params.end, surface->end);
            new_params.size = new_params.end - new_params.addr;
            new_params.height =
                new_params.size / aligned_params.BytesInPixels(aligned_params.stride);
            new_params.UpdateParams();
            ASSERT(new_params.size % aligned_params.BytesInPixels(aligned_params.stride) == 0);

            SurfaceRef new_surface = CreateSurface(new_params);
            DuplicateSurface(surface, new_surface);
            UnregisterSurface(surface);

            surface = new_surface;
            RegisterSurface(new_surface);
        }
    }

    // No subrect found - create and return a new surface
    if (!surface) {
        SurfaceParams new_params = aligned_params;
        // Can't have gaps in a surface
        new_params.width = aligned_params.stride;
        new_params.UpdateParams();
        // GetSurface will create the new surface and possibly adjust res_scale if necessary
        surface = GetSurface(new_params, match_res_scale, load_if_create);
    } else if (load_if_create) {
        ValidateSurface(surface, aligned_params.addr, aligned_params.size);
    }

    return std::make_tuple(surface, surface->GetScaledSubRect(params));
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
// Convenience overload: decodes the Pica texture registers into a TextureInfo,
// derives the highest mip level from the LOD configuration, and delegates to the
// TextureInfo-based overload.
RasterizerCache::SurfaceRef RasterizerCache::GetTextureSurface(
    const Pica::TexturingRegs::FullTextureConfig& config) {
    const auto tex_info =
        Pica::Texture::TextureInfo::FromPicaRegister(config.config, config.format);
    const u32 num_levels = MipLevels(tex_info.width, tex_info.height, config.config.lod.max_level);
    return GetTextureSurface(tex_info, num_levels - 1);
}
|
2016-04-17 00:57:57 +02:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
// Returns a surface for the given decoded texture, including max_level + 1 mip
// levels. Small non-tile-aligned textures (down to 4x4) are handled by blitting
// out of an encompassing surface; other unaligned or mip-inconsistent sizes are
// rejected with nullptr.
RasterizerCache::SurfaceRef RasterizerCache::GetTextureSurface(
    const Pica::Texture::TextureInfo& info, u32 max_level) {
    if (info.physical_address == 0) [[unlikely]] {
        return nullptr;
    }

    SurfaceParams params;
    params.addr = info.physical_address;
    params.width = info.width;
    params.height = info.height;
    params.levels = max_level + 1;
    params.is_tiled = true;
    params.pixel_format = PixelFormatFromTextureFormat(info.format);
    // Only allocate at the scaled resolution when texture filtering is enabled.
    params.res_scale = use_filter ? resolution_scale_factor : 1;
    params.UpdateParams();

    // Dimensions of the smallest requested mip level.
    u32 min_width = info.width >> max_level;
    u32 min_height = info.height >> max_level;
    if (min_width % 8 != 0 || min_height % 8 != 0) {
        // This code is for 8x4 and 4x4 textures (commonly used by games for health bar)
        // The implementation might not be accurate and needs further testing.
        if (min_width % 4 == 0 && min_height % 4 == 0 && min_width * min_height <= 32) {
            const auto [src_surface, rect] = GetSurfaceSubRect(params, ScaleMatch::Ignore, true);
            params.res_scale = src_surface->res_scale;
            SurfaceRef tmp_surface = CreateSurface(params);

            const TextureBlit blit = {
                .src_level = src_surface->LevelOf(params.addr),
                .dst_level = 0,
                .src_rect = rect,
                .dst_rect = tmp_surface->GetScaledRect(),
            };
            runtime.BlitTextures(*src_surface, *tmp_surface, blit);
            return tmp_surface;
        }

        LOG_CRITICAL(HW_GPU, "Texture size ({}x{}) is not multiple of 4", min_width, min_height);
        return nullptr;
    }
    // Reject sizes whose mip chain would lose bits when shifted down to max_level.
    if (info.width != (min_width << max_level) || info.height != (min_height << max_level)) {
        LOG_CRITICAL(HW_GPU, "Texture size ({}x{}) does not support required mipmap level ({})",
                     params.width, params.height, max_level);
        return nullptr;
    }

    return GetSurface(params, ScaleMatch::Ignore, true);
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
// Returns the cube-map surface for the given configuration, creating it on first
// use and re-synchronizing any face whose backing 2D surface has been modified
// since the last copy (tracked via per-face modification ticks).
RasterizerCache::SurfaceRef RasterizerCache::GetTextureCube(const TextureCubeConfig& config) {
    auto [it, new_surface] = texture_cube_cache.try_emplace(config);
    TextureCube& cube = it->second;

    if (new_surface) {
        // Cube faces are square; width doubles as height and stride.
        SurfaceParams cube_params = {
            .addr = config.px,
            .width = config.width,
            .height = config.width,
            .stride = config.width,
            .levels = config.levels,
            .res_scale = use_filter ? resolution_scale_factor : 1,
            .texture_type = TextureType::CubeMap,
            .pixel_format = PixelFormatFromTextureFormat(config.format),
            .type = SurfaceType::Texture,
        };
        cube_params.UpdateParams();
        cube.surface = CreateSurface(cube_params);
    }

    const u32 scaled_size = cube.surface->GetScaledWidth();
    // Face order matches the layer order used by dst_layer below.
    const std::array addresses = {config.px, config.nx, config.py, config.ny, config.pz, config.nz};

    for (u32 i = 0; i < addresses.size(); i++) {
        if (!addresses[i]) {
            continue;
        }

        Pica::Texture::TextureInfo info = {
            .physical_address = addresses[i],
            .width = config.width,
            .height = config.width,
            .format = config.format,
        };
        info.SetDefaultStride();

        // (Re)acquire the 2D surface backing this face if it was never fetched or
        // has since been evicted from the cache.
        SurfaceRef& face_surface = cube.faces[i];
        if (!face_surface || !face_surface->registered) {
            face_surface = GetTextureSurface(info, config.levels - 1);
            ASSERT(face_surface->levels == config.levels);
        }
        // Copy every mip level into the cube layer only when the face has changed.
        if (cube.ticks[i] != face_surface->ModificationTick()) {
            for (u32 level = 0; level < face_surface->levels; level++) {
                const TextureCopy texture_copy = {
                    .src_level = level,
                    .dst_level = level,
                    .src_layer = 0,
                    .dst_layer = i,
                    .src_offset = {0, 0},
                    .dst_offset = {0, 0},
                    .extent = {scaled_size >> level, scaled_size >> level},
                };
                runtime.CopyTextures(*face_surface, *cube.surface, texture_copy);
            }
            cube.ticks[i] = face_surface->ModificationTick();
        }
    }

    return cube.surface;
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
// Builds an OpenGL framebuffer from the currently configured color/depth render
// targets, resetting the whole cache first if the resolution scale or texture
// filter setting changed. Only the viewport-covered region of each surface is
// validated.
OpenGL::Framebuffer RasterizerCache::GetFramebufferSurfaces(bool using_color_fb,
                                                            bool using_depth_fb) {
    const auto& config = regs.framebuffer.framebuffer;

    // Update resolution_scale_factor and reset cache if changed
    const u32 scale_factor = renderer.GetResolutionScaleFactor();
    const bool resolution_scale_changed = resolution_scale_factor != scale_factor;
    const bool texture_filter_changed =
        renderer.Settings().texture_filter_update_requested.exchange(false);

    if (resolution_scale_changed || texture_filter_changed) {
        resolution_scale_factor = scale_factor;
        use_filter = Settings::values.texture_filter.GetValue() != Settings::TextureFilter::None;
        // Flush pending writes, then drop every cached surface and cube map.
        FlushAll();
        while (!surface_cache.empty())
            UnregisterSurface(*surface_cache.begin()->second.begin());
        texture_cube_cache.clear();
    }

    // Clamp the viewport to the framebuffer so validation intervals stay in range.
    const s32 framebuffer_width = config.GetWidth();
    const s32 framebuffer_height = config.GetHeight();
    const auto viewport_rect = regs.rasterizer.GetViewportRect();
    const Common::Rectangle<u32> viewport_clamped = {
        static_cast<u32>(std::clamp(viewport_rect.left, 0, framebuffer_width)),
        static_cast<u32>(std::clamp(viewport_rect.top, 0, framebuffer_height)),
        static_cast<u32>(std::clamp(viewport_rect.right, 0, framebuffer_width)),
        static_cast<u32>(std::clamp(viewport_rect.bottom, 0, framebuffer_height)),
    };

    // get color and depth surfaces
    SurfaceParams color_params;
    color_params.is_tiled = true;
    color_params.res_scale = resolution_scale_factor;
    color_params.width = config.GetWidth();
    color_params.height = config.GetHeight();
    SurfaceParams depth_params = color_params;

    color_params.addr = config.GetColorBufferPhysicalAddress();
    color_params.pixel_format = PixelFormatFromColorFormat(config.color_format);
    color_params.UpdateParams();

    depth_params.addr = config.GetDepthBufferPhysicalAddress();
    depth_params.pixel_format = PixelFormatFromDepthFormat(config.depth_format);
    depth_params.UpdateParams();

    auto color_vp_interval = color_params.GetSubRectInterval(viewport_clamped);
    auto depth_vp_interval = depth_params.GetSubRectInterval(viewport_clamped);

    // Make sure that framebuffers don't overlap if both color and depth are being used
    if (using_color_fb && using_depth_fb &&
        boost::icl::length(color_vp_interval & depth_vp_interval)) {
        LOG_CRITICAL(HW_GPU, "Color and depth framebuffer memory regions overlap; "
                             "overlapping framebuffers not supported!");
        using_depth_fb = false;
    }

    Common::Rectangle<u32> color_rect{};
    SurfaceRef color_surface = nullptr;
    u32 color_level{};
    if (using_color_fb)
        std::tie(color_surface, color_rect) =
            GetSurfaceSubRect(color_params, ScaleMatch::Exact, false);

    Common::Rectangle<u32> depth_rect{};
    SurfaceRef depth_surface = nullptr;
    u32 depth_level{};
    if (using_depth_fb)
        std::tie(depth_surface, depth_rect) =
            GetSurfaceSubRect(depth_params, ScaleMatch::Exact, false);

    Common::Rectangle<u32> fb_rect{};
    if (color_surface && depth_surface) {
        fb_rect = color_rect;
        // Color and Depth surfaces must have the same dimensions and offsets
        if (color_rect.bottom != depth_rect.bottom || color_rect.top != depth_rect.top ||
            color_rect.left != depth_rect.left || color_rect.right != depth_rect.right) {
            // Mismatched subrects: refetch both as full (non-subrect) surfaces.
            color_surface = GetSurface(color_params, ScaleMatch::Exact, false);
            depth_surface = GetSurface(depth_params, ScaleMatch::Exact, false);
            fb_rect = color_surface->GetScaledRect();
        }
    } else if (color_surface) {
        fb_rect = color_rect;
    } else if (depth_surface) {
        fb_rect = depth_rect;
    }

    // Validate only the viewport-covered portion of each attachment.
    if (color_surface) {
        color_level = color_surface->LevelOf(color_params.addr);
        ValidateSurface(color_surface, boost::icl::first(color_vp_interval),
                        boost::icl::length(color_vp_interval));
    }
    if (depth_surface) {
        depth_level = depth_surface->LevelOf(depth_params.addr);
        ValidateSurface(depth_surface, boost::icl::first(depth_vp_interval),
                        boost::icl::length(depth_vp_interval));
    }

    render_targets = RenderTargets{
        .color_surface = color_surface,
        .depth_surface = depth_surface,
    };

    return OpenGL::Framebuffer{
        runtime, color_surface.get(), color_level, depth_surface.get(), depth_level, regs, fb_rect};
}
|
2017-12-24 00:09:25 +01:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::InvalidateFramebuffer(const OpenGL::Framebuffer& framebuffer) {
    // Invalidates the guest memory ranges backing each bound attachment, with
    // the corresponding render target recorded as the owner of the new data.
    const auto invalidate_attachment = [&](SurfaceType type, const SurfaceRef& owner) {
        if (!framebuffer.HasAttachment(type)) {
            return;
        }
        const auto interval = framebuffer.Interval(type);
        InvalidateRegion(boost::icl::first(interval), boost::icl::length(interval), owner);
    };
    invalidate_attachment(SurfaceType::Color, render_targets.color_surface);
    invalidate_attachment(SurfaceType::DepthStencil, render_targets.depth_surface);
}
|
2016-04-17 00:57:57 +02:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
RasterizerCache::SurfaceRect_Tuple RasterizerCache::GetTexCopySurface(const SurfaceParams& params) {
    // Locate a cached surface that matches a display-transfer "texture copy"
    // request, returning it together with the scaled rectangle it covers.
    SurfaceRef surface = FindMatch<MatchFlags::TexCopy>(surface_cache, params, ScaleMatch::Ignore);
    if (!surface) {
        return std::make_tuple(nullptr, Common::Rectangle<u32>{});
    }

    ValidateSurface(surface, params.addr, params.size);

    SurfaceParams subrect_params;
    if (params.width == params.stride) {
        // Contiguous copy: the requested interval maps directly onto the surface.
        subrect_params = surface->FromInterval(params.GetInterval());
        ASSERT(subrect_params.GetInterval() == params.GetInterval());
    } else {
        // Strided copy: convert the byte-based width/stride into pixel units,
        // accounting for the 8-row tiles of tiled surfaces.
        const u32 tiled_size = surface->is_tiled ? 8 : 1;
        subrect_params = params;
        subrect_params.width = surface->PixelsInBytes(params.width) / tiled_size;
        subrect_params.stride = surface->PixelsInBytes(params.stride) / tiled_size;
        subrect_params.height *= tiled_size;
    }

    const Common::Rectangle<u32> rect = surface->GetScaledSubRect(subrect_params);
    return std::make_tuple(surface, rect);
}
|
2016-04-17 00:57:57 +02:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::DuplicateSurface(const SurfaceRef& src_surface,
                                       const SurfaceRef& dest_surface) {
    // Copies all texel data of src_surface into dest_surface (which must fully
    // contain the source's address range), then transfers validity state and
    // dirty-region ownership so dest_surface becomes the authoritative copy.
    ASSERT(dest_surface->addr <= src_surface->addr && dest_surface->end >= src_surface->end);

    const auto src_rect = src_surface->GetScaledRect();
    const auto dst_rect = dest_surface->GetScaledSubRect(*src_surface);
    ASSERT(src_rect.GetWidth() == dst_rect.GetWidth());

    const TextureCopy copy = {
        .src_level = 0,
        .dst_level = 0,
        .src_offset = {src_rect.left, src_rect.bottom},
        .dst_offset = {dst_rect.left, dst_rect.bottom},
        .extent = {src_rect.GetWidth(), src_rect.GetHeight()},
    };
    runtime.CopyTextures(*src_surface, *dest_surface, copy);

    // The copied range becomes valid in the destination, except for whatever
    // was already invalid in the source.
    dest_surface->invalid_regions -= src_surface->GetInterval();
    dest_surface->invalid_regions += src_surface->invalid_regions;

    // Re-point every dirty region currently owned by the source at the
    // destination. Collected first, then applied, to avoid mutating
    // dirty_regions while iterating over it.
    SurfaceRegions regions;
    for (const auto& pair : RangeFromInterval(dirty_regions, src_surface->GetInterval())) {
        if (pair.second == src_surface) {
            regions += pair.first;
        }
    }
    for (const auto& interval : regions) {
        dirty_regions.set({interval, dest_surface});
    }
}
|
2017-11-17 18:27:51 +01:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::ValidateSurface(const SurfaceRef& surface, PAddr addr, u32 size) {
    // Ensures [addr, addr + size) of `surface` holds up-to-date data. Invalid
    // sub-regions are filled, in order of preference, by copying from another
    // cached surface, reinterpreting a same-size surface of another format,
    // or decoding and uploading the data from guest memory.
    if (size == 0) [[unlikely]] {
        return;
    }

    const SurfaceInterval validate_interval(addr, addr + size);
    if (surface->type == SurfaceType::Fill) {
        // Fill surfaces carry their fill pattern implicitly; the whole region
        // must already be valid and there is nothing to upload.
        ASSERT(surface->IsRegionValid(validate_interval));
        return;
    }

    SurfaceRegions validate_regions = surface->invalid_regions & validate_interval;

    // Marks `interval` as validated and removes it from the remaining work set.
    auto notify_validated = [&](SurfaceInterval interval) {
        surface->MarkValid(interval);
        validate_regions.erase(interval);
    };

    u32 level = surface->LevelOf(addr);
    SurfaceInterval level_interval = surface->LevelInterval(level);
    while (!validate_regions.empty()) {
        // Take an invalid interval from the validation regions and clamp it
        // to the current level interval since FromInterval cannot process
        // intervals that span multiple levels. If the interval is empty
        // then we have validated the entire level so move to the next.
        const auto interval = *validate_regions.begin() & level_interval;
        if (boost::icl::is_empty(interval)) {
            level_interval = surface->LevelInterval(++level);
            continue;
        }

        // Look for a valid surface to copy from.
        const SurfaceParams params = surface->FromInterval(interval);
        const SurfaceRef copy_surface =
            FindMatch<MatchFlags::Copy>(surface_cache, params, ScaleMatch::Ignore, interval);

        if (copy_surface) {
            const SurfaceInterval copy_interval = copy_surface->GetCopyableInterval(params);
            CopySurface(copy_surface, surface, copy_interval);
            notify_validated(copy_interval);
            continue;
        }

        // Try to find a surface in the cache with a different format
        // that can be reinterpreted to the requested format.
        if (ValidateByReinterpretation(surface, params, interval)) {
            notify_validated(interval);
            continue;
        }
        // Could not find a matching reinterpreter, check if we need to implement a
        // reinterpreter
        if (NoUnimplementedReinterpretations(surface, params, interval) &&
            !IntervalHasInvalidPixelFormat(params, interval)) {
            // No surfaces were found in the cache that had a matching bit-width.
            // If the region was created entirely on the GPU,
            // assume it was a developer mistake and skip flushing.
            if (boost::icl::contains(dirty_regions, interval)) {
                LOG_DEBUG(HW_GPU, "Region created fully on GPU and reinterpretation is "
                                  "invalid. Skipping validation");
                validate_regions.erase(interval);
                continue;
            }
        }

        // Load data from 3DS memory
        FlushRegion(params.addr, params.size);
        UploadSurface(surface, interval);
        notify_validated(params.GetInterval());
    }

    // Filtered mipmaps often look really bad. We can achieve better quality by
    // generating them from the base level.
    if (surface->res_scale != 1 && level != 0) {
        runtime.GenerateMipmaps(*surface, surface->levels - 1);
    }
}
|
|
|
|
|
|
|
|
void RasterizerCache::UploadSurface(const SurfaceRef& surface, SurfaceInterval interval) {
|
|
|
|
const SurfaceParams load_info = surface->FromInterval(interval);
|
|
|
|
ASSERT(load_info.addr >= surface->addr && load_info.end <= surface->end);
|
|
|
|
|
|
|
|
const auto staging = runtime.FindStaging(
|
|
|
|
load_info.width * load_info.height * surface->GetInternalBytesPerPixel(), true);
|
|
|
|
|
|
|
|
MemoryRef source_ptr = memory.GetPhysicalRef(load_info.addr);
|
|
|
|
if (!source_ptr) [[unlikely]] {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
const auto upload_data = source_ptr.GetWriteBytes(load_info.end - load_info.addr);
|
|
|
|
DecodeTexture(load_info, load_info.addr, load_info.end, upload_data, staging.mapped,
|
|
|
|
runtime.NeedsConversion(surface->pixel_format));
|
|
|
|
|
|
|
|
const BufferTextureCopy upload = {
|
|
|
|
.buffer_offset = 0,
|
|
|
|
.buffer_size = staging.size,
|
|
|
|
.texture_rect = surface->GetSubRect(load_info),
|
|
|
|
.texture_level = surface->LevelOf(load_info.addr),
|
|
|
|
};
|
|
|
|
surface->Upload(upload, staging);
|
|
|
|
}
|
|
|
|
|
|
|
|
void RasterizerCache::DownloadSurface(const SurfaceRef& surface, SurfaceInterval interval) {
|
|
|
|
const SurfaceParams flush_info = surface->FromInterval(interval);
|
|
|
|
const u32 flush_start = boost::icl::first(interval);
|
|
|
|
const u32 flush_end = boost::icl::last_next(interval);
|
|
|
|
ASSERT(flush_start >= surface->addr && flush_end <= surface->end);
|
|
|
|
|
|
|
|
const auto staging = runtime.FindStaging(
|
|
|
|
flush_info.width * flush_info.height * surface->GetInternalBytesPerPixel(), false);
|
|
|
|
|
|
|
|
const BufferTextureCopy download = {
|
|
|
|
.buffer_offset = 0,
|
|
|
|
.buffer_size = staging.size,
|
|
|
|
.texture_rect = surface->GetSubRect(flush_info),
|
|
|
|
.texture_level = surface->LevelOf(flush_start),
|
|
|
|
};
|
|
|
|
surface->Download(download, staging);
|
|
|
|
|
|
|
|
MemoryRef dest_ptr = memory.GetPhysicalRef(flush_start);
|
|
|
|
if (!dest_ptr) [[unlikely]] {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
const auto download_dest = dest_ptr.GetWriteBytes(flush_end - flush_start);
|
|
|
|
EncodeTexture(flush_info, flush_start, flush_end, staging.mapped, download_dest,
|
|
|
|
runtime.NeedsConversion(surface->pixel_format));
|
2016-04-17 00:57:57 +02:00
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::DownloadFillSurface(const SurfaceRef& surface, SurfaceInterval interval) {
    // Writes a fill surface's repeating fill pattern back to guest memory over
    // `interval`, preserving the bytes of a partially-overlapped fill unit at
    // the start of the range.
    const u32 flush_start = boost::icl::first(interval);
    const u32 flush_end = boost::icl::last_next(interval);
    ASSERT(flush_start >= surface->addr && flush_end <= surface->end);

    MemoryRef dest_ptr = memory.GetPhysicalRef(flush_start);
    if (!dest_ptr) [[unlikely]] {
        // Unmapped destination memory; nothing to write back.
        return;
    }

    const u32 start_offset = flush_start - surface->addr;
    // Clamp the write size to what the backing memory ref can actually hold.
    const u32 download_size =
        std::clamp(flush_end - flush_start, 0u, static_cast<u32>(dest_ptr.GetSize()));
    // Round the write start down to the previous fill-pattern boundary so the
    // pattern stays phase-aligned with the surface base address.
    const u32 coarse_start_offset = start_offset - (start_offset % surface->fill_size);
    const u32 backup_bytes = start_offset % surface->fill_size;

    // Back up the bytes before flush_start that the aligned write would
    // clobber (fill_size is at most 4 bytes, hence the fixed-size buffer).
    // NOTE(review): offsets below are computed relative to surface->addr while
    // dest_ptr is based at flush_start — presumably flushed fill intervals
    // start at the surface base so the two coincide; verify against callers.
    std::array<u8, 4> backup_data;
    if (backup_bytes) {
        std::memcpy(backup_data.data(), &dest_ptr[coarse_start_offset], backup_bytes);
    }

    // Stamp the fill pattern across the range, truncating the last repetition.
    for (u32 offset = coarse_start_offset; offset < download_size; offset += surface->fill_size) {
        std::memcpy(&dest_ptr[offset], &surface->fill_data[0],
                    std::min(surface->fill_size, download_size - offset));
    }

    // Restore the clobbered prefix bytes.
    if (backup_bytes) {
        std::memcpy(&dest_ptr[coarse_start_offset], &backup_data[0], backup_bytes);
    }
}
|
|
|
|
|
|
|
|
bool RasterizerCache::NoUnimplementedReinterpretations(const SurfaceRef& surface,
|
|
|
|
SurfaceParams params,
|
|
|
|
const SurfaceInterval& interval) {
|
2020-04-07 16:12:32 +02:00
|
|
|
static constexpr std::array<PixelFormat, 17> all_formats{
|
|
|
|
PixelFormat::RGBA8, PixelFormat::RGB8, PixelFormat::RGB5A1, PixelFormat::RGB565,
|
|
|
|
PixelFormat::RGBA4, PixelFormat::IA8, PixelFormat::RG8, PixelFormat::I8,
|
|
|
|
PixelFormat::A8, PixelFormat::IA4, PixelFormat::I4, PixelFormat::A4,
|
|
|
|
PixelFormat::ETC1, PixelFormat::ETC1A4, PixelFormat::D16, PixelFormat::D24,
|
|
|
|
PixelFormat::D24S8,
|
|
|
|
};
|
|
|
|
bool implemented = true;
|
|
|
|
for (PixelFormat format : all_formats) {
|
2022-08-20 11:17:31 +02:00
|
|
|
if (GetFormatBpp(format) == surface->GetFormatBpp()) {
|
2020-04-07 16:12:32 +02:00
|
|
|
params.pixel_format = format;
|
|
|
|
// This could potentially be expensive,
|
|
|
|
// although experimentally it hasn't been too bad
|
2023-04-21 09:14:55 +02:00
|
|
|
SurfaceRef test_surface =
|
2020-04-07 16:12:32 +02:00
|
|
|
FindMatch<MatchFlags::Copy>(surface_cache, params, ScaleMatch::Ignore, interval);
|
2023-04-21 09:14:55 +02:00
|
|
|
if (test_surface) {
|
|
|
|
LOG_WARNING(HW_GPU, "Missing pixel_format reinterpreter: {} -> {}",
|
2022-08-20 11:17:31 +02:00
|
|
|
PixelFormatAsString(format),
|
|
|
|
PixelFormatAsString(surface->pixel_format));
|
2020-04-07 16:12:32 +02:00
|
|
|
implemented = false;
|
|
|
|
}
|
|
|
|
}
|
2017-11-17 18:27:51 +01:00
|
|
|
}
|
2020-04-07 16:12:32 +02:00
|
|
|
return implemented;
|
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
bool RasterizerCache::IntervalHasInvalidPixelFormat(const SurfaceParams& params,
|
|
|
|
const SurfaceInterval& interval) {
|
|
|
|
for (const auto& set : RangeFromInterval(surface_cache, interval)) {
|
|
|
|
for (const auto& surface : set.second) {
|
2022-09-04 11:54:52 +02:00
|
|
|
if (surface->pixel_format == PixelFormat::Invalid) {
|
2023-04-21 09:14:55 +02:00
|
|
|
LOG_DEBUG(HW_GPU, "Surface {:#x} found with invalid pixel format", surface->addr);
|
2020-04-07 16:12:32 +02:00
|
|
|
return true;
|
|
|
|
}
|
2023-04-21 09:14:55 +02:00
|
|
|
}
|
|
|
|
}
|
2020-04-07 16:12:32 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
bool RasterizerCache::ValidateByReinterpretation(const SurfaceRef& surface, SurfaceParams params,
|
|
|
|
const SurfaceInterval& interval) {
|
|
|
|
const PixelFormat dest_format = surface->pixel_format;
|
|
|
|
for (const auto& reinterpreter : runtime.GetPossibleReinterpretations(dest_format)) {
|
2022-08-20 23:50:20 +02:00
|
|
|
params.pixel_format = reinterpreter->GetSourceFormat();
|
2023-04-21 09:14:55 +02:00
|
|
|
SurfaceRef reinterpret_surface =
|
2020-04-07 16:12:32 +02:00
|
|
|
FindMatch<MatchFlags::Copy>(surface_cache, params, ScaleMatch::Ignore, interval);
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
if (reinterpret_surface) {
|
|
|
|
auto reinterpret_interval = reinterpret_surface->GetCopyableInterval(params);
|
2022-08-20 23:50:20 +02:00
|
|
|
auto reinterpret_params = surface->FromInterval(reinterpret_interval);
|
2020-04-07 16:12:32 +02:00
|
|
|
auto src_rect = reinterpret_surface->GetScaledSubRect(reinterpret_params);
|
|
|
|
auto dest_rect = surface->GetScaledSubRect(reinterpret_params);
|
2023-04-21 09:14:55 +02:00
|
|
|
reinterpreter->Reinterpret(*reinterpret_surface, src_rect, *surface, dest_rect);
|
2022-08-20 23:50:20 +02:00
|
|
|
|
2020-04-07 16:12:32 +02:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
2022-08-20 23:50:20 +02:00
|
|
|
|
2020-04-07 16:12:32 +02:00
|
|
|
return false;
|
2016-04-17 00:57:57 +02:00
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::ClearAll(bool flush) {
|
2020-03-07 22:23:08 +01:00
|
|
|
const auto flush_interval = PageMap::interval_type::right_open(0x0, 0xFFFFFFFF);
|
2020-01-17 07:17:55 +01:00
|
|
|
// Force flush all surfaces from the cache
|
|
|
|
if (flush) {
|
|
|
|
FlushRegion(0x0, 0xFFFFFFFF);
|
|
|
|
}
|
|
|
|
// Unmark all of the marked pages
|
|
|
|
for (auto& pair : RangeFromInterval(cached_pages, flush_interval)) {
|
|
|
|
const auto interval = pair.first & flush_interval;
|
|
|
|
|
2022-11-04 23:32:57 +01:00
|
|
|
const PAddr interval_start_addr = boost::icl::first(interval) << Memory::CITRA_PAGE_BITS;
|
|
|
|
const PAddr interval_end_addr = boost::icl::last_next(interval) << Memory::CITRA_PAGE_BITS;
|
2020-01-17 07:17:55 +01:00
|
|
|
const u32 interval_size = interval_end_addr - interval_start_addr;
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
memory.RasterizerMarkRegionCached(interval_start_addr, interval_size, false);
|
2020-01-17 07:17:55 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// Remove the whole cache without really looking at it.
|
|
|
|
cached_pages -= flush_interval;
|
2020-03-07 22:23:08 +01:00
|
|
|
dirty_regions -= SurfaceInterval(0x0, 0xFFFFFFFF);
|
|
|
|
surface_cache -= SurfaceInterval(0x0, 0xFFFFFFFF);
|
2020-01-17 07:17:55 +01:00
|
|
|
remove_surfaces.clear();
|
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::FlushRegion(PAddr addr, u32 size, SurfaceRef flush_surface) {
    // Writes dirty surface data overlapping [addr, addr + size) back to guest
    // memory. When flush_surface is provided, only regions owned by that
    // surface are flushed.
    if (size == 0)
        return;

    const SurfaceInterval flush_interval(addr, addr + size);
    SurfaceRegions flushed_intervals;

    for (auto& pair : RangeFromInterval(dirty_regions, flush_interval)) {
        // small sizes imply that this most likely comes from the cpu, flush the entire region
        // the point is to avoid thousands of small writes every frame if the cpu decides to
        // access that region, anything higher than 8 you're guaranteed it comes from a service
        const auto interval = size <= 8 ? pair.first : pair.first & flush_interval;
        auto& surface = pair.second;

        if (flush_surface && surface != flush_surface)
            continue;

        // Sanity check, this surface is the last one that marked this region dirty
        ASSERT(surface->IsRegionValid(interval));

        // Fill surfaces are written back by stamping the fill pattern; regular
        // surfaces require a texture readback and re-encode.
        if (surface->type == SurfaceType::Fill) {
            DownloadFillSurface(surface, interval);
        } else {
            DownloadSurface(surface, interval);
        }

        flushed_intervals += interval;
    }

    // Reset dirty regions
    dirty_regions -= flushed_intervals;
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::FlushAll() {
|
2017-11-25 21:21:32 +01:00
|
|
|
FlushRegion(0, 0xFFFFFFFF);
|
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::InvalidateRegion(PAddr addr, u32 size, const SurfaceRef& region_owner) {
    // Marks [addr, addr + size) as changed. When region_owner is non-null the
    // write came from the GPU and that surface becomes the dirty owner of the
    // range; otherwise the write came from the CPU and overlapping cached data
    // is invalidated/dropped.
    if (size == 0)
        return;

    const SurfaceInterval invalid_interval(addr, addr + size);

    if (region_owner) {
        ASSERT(region_owner->type != SurfaceType::Texture);
        ASSERT(addr >= region_owner->addr && addr + size <= region_owner->end);
        // Surfaces can't have a gap
        ASSERT(region_owner->width == region_owner->stride);
        region_owner->MarkValid(invalid_interval);
    }

    for (const auto& pair : RangeFromInterval(surface_cache, invalid_interval)) {
        for (const auto& cached_surface : pair.second) {
            if (cached_surface == region_owner)
                continue;

            // If cpu is invalidating this region we want to remove it
            // to (likely) mark the memory pages as uncached
            if (!region_owner && size <= 8) {
                FlushRegion(cached_surface->addr, cached_surface->size, cached_surface);
                remove_surfaces.push_back(cached_surface);
                continue;
            }

            const auto interval = cached_surface->GetInterval() & invalid_interval;
            cached_surface->MarkInvalid(interval);

            // If the surface has no salvageable data it should be removed from the cache to avoid
            // clogging the data structure
            if (cached_surface->IsFullyInvalid()) {
                remove_surfaces.push_back(cached_surface);
            }
        }
    }

    // Record or clear dirty ownership for the invalidated range.
    if (region_owner) {
        dirty_regions.set({invalid_interval, region_owner});
    } else {
        dirty_regions.erase(invalid_interval);
    }

    // Unregistration is deferred until after iteration over surface_cache
    // above, since it mutates that map.
    for (const SurfaceRef& remove_surface : remove_surfaces) {
        UnregisterSurface(remove_surface);
    }
    remove_surfaces.clear();
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
RasterizerCache::SurfaceRef RasterizerCache::CreateSurface(const SurfaceParams& params) {
    // Allocates a fresh host surface for `params`. Its entire range starts out
    // invalid until data is validated or uploaded into it.
    auto new_surface = std::make_shared<OpenGL::Surface>(runtime, params);
    new_surface->MarkInvalid(new_surface->GetInterval());
    return new_surface;
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::RegisterSurface(const SurfaceRef& surface) {
|
2017-12-30 07:42:32 +01:00
|
|
|
if (surface->registered) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
surface->registered = true;
|
2017-11-25 21:21:32 +01:00
|
|
|
surface_cache.add({surface->GetInterval(), SurfaceSet{surface}});
|
|
|
|
UpdatePagesCachedCount(surface->addr, surface->size, 1);
|
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::UnregisterSurface(const SurfaceRef& surface) {
|
2017-12-30 07:42:32 +01:00
|
|
|
if (!surface->registered) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
surface->registered = false;
|
2017-11-25 21:21:32 +01:00
|
|
|
UpdatePagesCachedCount(surface->addr, surface->size, -1);
|
|
|
|
surface_cache.subtract({surface->GetInterval(), SurfaceSet{surface}});
|
|
|
|
}
|
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
void RasterizerCache::UpdatePagesCachedCount(PAddr addr, u32 size, int delta) {
    // Adjusts the reference count of every guest page overlapping
    // [addr, addr + size) by `delta`, and toggles rasterizer caching on pages
    // whose count transitions between zero and non-zero.
    const u32 num_pages =
        ((addr + size - 1) >> Memory::CITRA_PAGE_BITS) - (addr >> Memory::CITRA_PAGE_BITS) + 1;
    const u32 page_start = addr >> Memory::CITRA_PAGE_BITS;
    const u32 page_end = page_start + num_pages;

    // Interval maps will erase segments if count reaches 0, so if delta is negative we have to
    // subtract after iterating
    const auto pages_interval = PageMap::interval_type::right_open(page_start, page_end);
    if (delta > 0) {
        cached_pages.add({pages_interval, delta});
    }

    for (const auto& pair : RangeFromInterval(cached_pages, pages_interval)) {
        const auto interval = pair.first & pages_interval;
        const int count = pair.second;

        const PAddr interval_start_addr = boost::icl::first(interval) << Memory::CITRA_PAGE_BITS;
        const PAddr interval_end_addr = boost::icl::last_next(interval) << Memory::CITRA_PAGE_BITS;
        const u32 interval_size = interval_end_addr - interval_start_addr;

        // count == delta: these pages just became cached (the add above made
        // them non-zero); count == -delta: they are about to become uncached
        // by the subtract below.
        if (delta > 0 && count == delta) {
            memory.RasterizerMarkRegionCached(interval_start_addr, interval_size, true);
        } else if (delta < 0 && count == -delta) {
            memory.RasterizerMarkRegionCached(interval_start_addr, interval_size, false);
        } else {
            ASSERT(count >= 0);
        }
    }

    if (delta < 0) {
        cached_pages.add({pages_interval, delta});
    }
}
|
2018-11-17 08:29:10 +01:00
|
|
|
|
2023-04-21 09:14:55 +02:00
|
|
|
} // namespace VideoCore
|