Garbage Collection: Final tuning.
commit 9edbbf2af4
parent 5e982a7812
@@ -464,9 +464,9 @@ BufferCache<P>::BufferCache(VideoCore::RasterizerInterface& rasterizer_,
         const s64 device_memory = static_cast<s64>(runtime.GetDeviceLocalMemory());
         const s64 min_spacing_expected = device_memory - 1_GiB - 512_MiB;
         const s64 min_spacing_critical = device_memory - 1_GiB;
-        const s64 mem_tresshold = std::min(device_memory, TARGET_THRESHOLD);
-        const s64 min_vacancy_expected = (6 * mem_tresshold) / 10;
-        const s64 min_vacancy_critical = (3 * mem_tresshold) / 10;
+        const s64 mem_threshold = std::min(device_memory, TARGET_THRESHOLD);
+        const s64 min_vacancy_expected = (6 * mem_threshold) / 10;
+        const s64 min_vacancy_critical = (3 * mem_threshold) / 10;
         minimum_memory = static_cast<u64>(
             std::max(std::min(device_memory - min_vacancy_expected, min_spacing_expected),
                      DEFAULT_EXPECTED_MEMORY));
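Note: to make the clamp above concrete, here is a minimal standalone sketch of the same math with a hypothetical device_memory of 8 GiB and placeholder constants; the real TARGET_THRESHOLD and DEFAULT_EXPECTED_MEMORY values are defined in the cache headers and are not reproduced in this diff.

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
    constexpr int64_t MiB = 1LL << 20;
    constexpr int64_t GiB = 1LL << 30;
    // Placeholder values for illustration only; not the cache's real defaults.
    constexpr int64_t TARGET_THRESHOLD = 4 * GiB;
    constexpr int64_t DEFAULT_EXPECTED_MEMORY = 1 * GiB + 512 * MiB;
    const int64_t device_memory = 8 * GiB; // hypothetical 8 GiB card

    const int64_t min_spacing_expected = device_memory - 1 * GiB - 512 * MiB; // 6.5 GiB
    const int64_t mem_threshold = std::min(device_memory, TARGET_THRESHOLD);  // 4 GiB
    const int64_t min_vacancy_expected = (6 * mem_threshold) / 10;            // ~2.4 GiB
    // Budget = device memory minus the vacancy to keep free, capped by the spacing
    // limit and floored by the default.
    const int64_t minimum_memory =
        std::max(std::min(device_memory - min_vacancy_expected, min_spacing_expected),
                 DEFAULT_EXPECTED_MEMORY); // ~5.6 GiB with these inputs
    std::printf("minimum_memory = %lld MiB\n", static_cast<long long>(minimum_memory / MiB));
    return 0;
}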
@@ -150,7 +150,7 @@ u64 BufferCacheRuntime::GetDeviceMemoryUsage() const {
     if (GLAD_GL_NVX_gpu_memory_info) {
         GLint cur_avail_mem_kb = 0;
         glGetIntegerv(GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX, &cur_avail_mem_kb);
-        return static_cast<u64>(cur_avail_mem_kb) * 1_KiB;
+        return device_access_memory - static_cast<u64>(cur_avail_mem_kb) * 1_KiB;
     }
     return 2_GiB;
 }
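The behavioral fix in both GetDeviceMemoryUsage() hunks (this one and the texture-cache one below) is the same: GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX reports the memory still available, in KiB, so returning it directly reported free memory where the garbage collector expects usage. Subtracting it from device_access_memory yields actual usage. A minimal sketch of the corrected accounting, assuming device_access_memory holds the total device-local pool sized at startup:

#include <cstdint>

uint64_t UsedDeviceMemory(uint64_t device_access_memory, int32_t cur_avail_mem_kb) {
    // The NVX query reports availability in KiB; convert and invert to get usage.
    const uint64_t available = static_cast<uint64_t>(cur_avail_mem_kb) * 1024;
    return device_access_memory - available; // usage = total - available
}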
@@ -513,7 +513,7 @@ u64 TextureCacheRuntime::GetDeviceMemoryUsage() const {
     if (GLAD_GL_NVX_gpu_memory_info) {
         GLint cur_avail_mem_kb = 0;
         glGetIntegerv(GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX, &cur_avail_mem_kb);
-        return static_cast<u64>(cur_avail_mem_kb) * 1_KiB;
+        return device_access_memory - static_cast<u64>(cur_avail_mem_kb) * 1_KiB;
     }
     return 2_GiB;
 }
@@ -695,7 +695,7 @@ Image::Image(TextureCacheRuntime& runtime_, const VideoCommon::ImageInfo& info_,
     }
     if (IsConverted(runtime->device, info.format, info.type)) {
         flags |= ImageFlagBits::Converted;
-        flags |= ImageFlagBits::GCProtected;
+        flags |= ImageFlagBits::CostlyLoad;
         gl_internal_format = IsPixelFormatSRGB(info.format) ? GL_SRGB8_ALPHA8 : GL_RGBA8;
         gl_format = GL_RGBA;
         gl_type = GL_UNSIGNED_INT_8_8_8_8_REV;
@@ -1211,7 +1211,7 @@ Image::Image(TextureCacheRuntime& runtime_, const ImageInfo& info_, GPUVAddr gpu
         } else {
             flags |= VideoCommon::ImageFlagBits::Converted;
         }
-        flags |= VideoCommon::ImageFlagBits::GCProtected;
+        flags |= VideoCommon::ImageFlagBits::CostlyLoad;
     }
     if (runtime->device.HasDebuggingToolAttached()) {
         original_image.SetObjectNameEXT(VideoCommon::Name(*this).c_str());
@@ -29,11 +29,11 @@ enum class ImageFlagBits : u32 {
     Sparse = 1 << 9, ///< Image has non continous submemory.

     // Garbage Collection Flags
     BadOverlap = 1 << 10,  ///< This image overlaps other but doesn't fit, has higher
                            ///< garbage collection priority
     Alias = 1 << 11,       ///< This image has aliases and has priority on garbage
                            ///< collection
-    GCProtected = 1 << 12, ///< Protected from low-tier GC as they are costy to load back.
+    CostlyLoad = 1 << 12,  ///< Protected from low-tier GC as it is costly to load back.

     // Rescaler
     Rescaled = 1 << 13,
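The rename from GCProtected to CostlyLoad is cosmetic in this hunk (bit 12 and its meaning are unchanged): the two Image constructors above set it for converted images, and the collector below only evicts such images in high-priority mode. For readers unfamiliar with the idiom, a self-contained sketch of how enum-class flags and True()/False() helpers like the ones used in the next hunks typically look; the project's actual definitions are not part of this diff:

#include <cstdint>

enum class ImageFlagBits : uint32_t {
    BadOverlap = 1u << 10,
    Alias = 1u << 11,
    CostlyLoad = 1u << 12, // protected from low-tier GC: costly to load back
};

constexpr ImageFlagBits operator|(ImageFlagBits a, ImageFlagBits b) {
    return static_cast<ImageFlagBits>(static_cast<uint32_t>(a) | static_cast<uint32_t>(b));
}
constexpr ImageFlagBits operator&(ImageFlagBits a, ImageFlagBits b) {
    return static_cast<ImageFlagBits>(static_cast<uint32_t>(a) & static_cast<uint32_t>(b));
}
constexpr bool True(ImageFlagBits f) { return static_cast<uint32_t>(f) != 0; }
constexpr bool False(ImageFlagBits f) { return static_cast<uint32_t>(f) == 0; }

// Usage: skip cheap eviction for images that are costly to reload.
// if (True(image_flags & ImageFlagBits::CostlyLoad)) { /* keep the image */ }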
@@ -53,17 +53,16 @@ TextureCache<P>::TextureCache(Runtime& runtime_, VideoCore::RasterizerInterface&
         const s64 device_memory = static_cast<s64>(runtime.GetDeviceLocalMemory());
         const s64 min_spacing_expected = device_memory - 1_GiB - 512_MiB;
         const s64 min_spacing_critical = device_memory - 1_GiB;
-        const s64 mem_tresshold = std::min(device_memory, TARGET_THRESHOLD);
-        const s64 min_vacancy_expected = (6 * mem_tresshold) / 10;
-        const s64 min_vacancy_critical = (3 * mem_tresshold) / 10;
+        const s64 mem_threshold = std::min(device_memory, TARGET_THRESHOLD);
+        const s64 min_vacancy_expected = (6 * mem_threshold) / 10;
+        const s64 min_vacancy_critical = (3 * mem_threshold) / 10;
         expected_memory = static_cast<u64>(
             std::max(std::min(device_memory - min_vacancy_expected, min_spacing_expected),
                      DEFAULT_EXPECTED_MEMORY));
         critical_memory = static_cast<u64>(
             std::max(std::min(device_memory - min_vacancy_critical, min_spacing_critical),
                      DEFAULT_CRITICAL_MEMORY));
-        minimum_memory = static_cast<u64>((device_memory - mem_tresshold) / 2);
-        LOG_CRITICAL(Debug, "Available Memory: {}", device_memory / 1_MiB);
+        minimum_memory = static_cast<u64>((device_memory - mem_threshold) / 2);
     } else {
         expected_memory = DEFAULT_EXPECTED_MEMORY + 512_MiB;
         critical_memory = DEFAULT_CRITICAL_MEMORY + 1_GiB;
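Using the same hypothetical inputs as the buffer-cache sketch above (device_memory = 8 GiB, TARGET_THRESHOLD = 4 GiB) plus placeholder defaults of 1.5 GiB / 2 GiB for DEFAULT_EXPECTED_MEMORY / DEFAULT_CRITICAL_MEMORY, the three texture-cache budgets work out as follows; the collector escalates once usage passes expected_memory and again past critical_memory (see the RunGarbageCollector hunks below):

// Worked numbers under the hypothetical inputs above (not the cache's real defaults):
// mem_threshold        = min(8 GiB, 4 GiB)                   = 4 GiB
// min_vacancy_expected = (6 * 4 GiB) / 10                    = 2.4 GiB
// min_vacancy_critical = (3 * 4 GiB) / 10                    = 1.2 GiB
// expected_memory      = max(min(8 - 2.4, 6.5) GiB, 1.5 GiB) = 5.6 GiB
// critical_memory      = max(min(8 - 1.2, 7.0) GiB, 2.0 GiB) = 6.8 GiB
// minimum_memory       = (8 GiB - 4 GiB) / 2                 = 2 GiB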
@@ -73,11 +72,12 @@ TextureCache<P>::TextureCache(Runtime& runtime_, VideoCore::RasterizerInterface&

 template <class P>
 void TextureCache<P>::RunGarbageCollector() {
-    const bool high_priority_mode = total_used_memory >= expected_memory;
-    const bool aggressive_mode = total_used_memory >= critical_memory;
-    const u64 ticks_to_destroy = aggressive_mode ? 10ULL : high_priority_mode ? 25ULL : 100ULL;
-    size_t num_iterations = aggressive_mode ? 300 : (high_priority_mode ? 50 : 10);
-    const auto clean_up = [this, &num_iterations, high_priority_mode](ImageId image_id) {
+    bool high_priority_mode = total_used_memory >= expected_memory;
+    bool aggressive_mode = total_used_memory >= critical_memory;
+    const u64 ticks_to_destroy = aggressive_mode ? 10ULL : high_priority_mode ? 25ULL : 50ULL;
+    size_t num_iterations = aggressive_mode ? 40 : (high_priority_mode ? 20 : 10);
+    const auto clean_up = [this, &num_iterations, &high_priority_mode,
+                           &aggressive_mode](ImageId image_id) {
         if (num_iterations == 0) {
             return true;
         }
@@ -85,7 +85,8 @@ void TextureCache<P>::RunGarbageCollector() {
         auto& image = slot_images[image_id];
         const bool must_download =
             image.IsSafeDownload() && False(image.flags & ImageFlagBits::BadOverlap);
-        if (!high_priority_mode && must_download) {
+        if (!high_priority_mode &&
+            (must_download || True(image.flags & ImageFlagBits::CostlyLoad))) {
             return false;
         }
         if (must_download) {
@@ -100,6 +101,18 @@ void TextureCache<P>::RunGarbageCollector() {
         }
         UnregisterImage(image_id);
         DeleteImage(image_id, image.scale_tick > frame_tick + 5);
+        if (total_used_memory < critical_memory) {
+            if (aggressive_mode) {
+                // Sink the aggresiveness.
+                num_iterations >>= 2;
+                aggressive_mode = false;
+                return false;
+            }
+            if (high_priority_mode && total_used_memory < expected_memory) {
+                num_iterations >>= 1;
+                high_priority_mode = false;
+            }
+        }
         return false;
     };
     lru_cache.ForEachItemBelow(frame_tick - ticks_to_destroy, clean_up);
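The twelve added lines above are the core of the tuning: once a sweep has brought usage back under critical_memory, it sheds intensity instead of continuing to evict at full rate, quartering the remaining iteration budget when leaving aggressive mode and halving it again when usage drops below expected_memory. A self-contained sketch of that de-escalation pattern, with illustrative names; the real logic lives inside the clean_up lambda and captures the cache's own state:

#include <cstddef>
#include <cstdint>

struct SweepState {
    std::size_t num_iterations;
    bool aggressive_mode;
    bool high_priority_mode;
};

void Deescalate(SweepState& s, uint64_t used, uint64_t expected, uint64_t critical) {
    if (used >= critical) {
        return; // still over the critical line: keep sweeping at full intensity
    }
    if (s.aggressive_mode) {
        s.num_iterations >>= 2; // quarter the remaining budget
        s.aggressive_mode = false;
        return;
    }
    if (s.high_priority_mode && used < expected) {
        s.num_iterations >>= 1; // halve the remaining budget
        s.high_priority_mode = false;
    }
}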
@@ -120,7 +133,6 @@ void TextureCache<P>::TickFrame() {
     runtime.TickFrame();
     critical_gc = 0;
     ++frame_tick;
-    LOG_CRITICAL(Debug, "Current memory: {}", total_used_memory / 1_MiB);
 }

 template <class P>