2022-04-23 01:59:50 -07:00
|
|
|
// SPDX-FileCopyrightText: Copyright 2018 yuzu Emulator Project
|
|
|
|
// SPDX-License-Identifier: GPL-2.0-or-later
|
2018-02-07 18:54:35 -08:00
|
|
|
|
|
|
|
#pragma once
|
|
|
|
|
2021-12-17 07:45:06 -08:00
|
|
|
#include <atomic>
#include <limits>
#include <map>
#include <memory>
#include <optional>
#include <type_traits>
#include <utility>
#include <vector>

#include "common/common_types.h"
#include "common/multi_level_page_table.h"
#include "common/range_map.h"
#include "common/virtual_buffer.h"
#include "video_core/cache_types.h"
#include "video_core/pte_kind.h"
|
2018-02-07 18:54:35 -08:00
|
|
|
|
2020-02-15 14:47:15 -08:00
|
|
|
namespace VideoCore {
|
|
|
|
class RasterizerInterface;
|
|
|
|
}
|
|
|
|
|
2023-01-04 19:05:20 -08:00
|
|
|
namespace VideoCommon {
|
|
|
|
class InvalidationAccumulator;
|
|
|
|
}
|
|
|
|
|
2019-07-08 23:17:44 -07:00
|
|
|
namespace Core {
|
2022-02-05 09:15:26 -08:00
|
|
|
class DeviceMemory;
|
|
|
|
namespace Memory {
|
|
|
|
class Memory;
|
|
|
|
} // namespace Memory
|
2019-07-08 23:17:44 -07:00
|
|
|
class System;
|
2022-02-05 09:15:26 -08:00
|
|
|
} // namespace Core
|
2019-07-08 23:17:44 -07:00
|
|
|
|
2018-02-11 20:44:12 -08:00
|
|
|
namespace Tegra {
|
|
|
|
|
2018-02-07 18:54:35 -08:00
|
|
|
class MemoryManager final {
|
|
|
|
public:
|
2021-11-11 12:24:40 -08:00
|
|
|
explicit MemoryManager(Core::System& system_, u64 address_space_bits_ = 40,
|
2022-02-05 09:15:26 -08:00
|
|
|
u64 big_page_bits_ = 16, u64 page_bits_ = 12);
|
2019-05-09 16:04:41 -07:00
|
|
|
~MemoryManager();
|
2018-02-07 18:54:35 -08:00
|
|
|
|
2021-12-17 07:45:06 -08:00
|
|
|
size_t GetID() const {
|
|
|
|
return unique_identifier;
|
|
|
|
}
|
|
|
|
|
2020-06-10 20:58:57 -07:00
|
|
|
/// Binds a renderer to the memory manager.
|
2021-01-04 23:09:39 -08:00
|
|
|
void BindRasterizer(VideoCore::RasterizerInterface* rasterizer);
|
2020-06-10 20:58:57 -07:00
|
|
|
|
2020-08-26 17:14:13 -07:00
|
|
|
[[nodiscard]] std::optional<VAddr> GpuToCpuAddress(GPUVAddr addr) const;
|
2018-02-07 18:54:35 -08:00
|
|
|
|
2021-06-12 18:34:06 -07:00
|
|
|
[[nodiscard]] std::optional<VAddr> GpuToCpuAddress(GPUVAddr addr, std::size_t size) const;
|
|
|
|
|
2019-03-03 20:54:16 -08:00
|
|
|
template <typename T>
|
2020-08-26 17:14:13 -07:00
|
|
|
[[nodiscard]] T Read(GPUVAddr addr) const;
|
2019-02-23 21:15:35 -08:00
|
|
|
|
2019-03-03 20:54:16 -08:00
|
|
|
template <typename T>
|
2019-03-09 11:06:51 -08:00
|
|
|
void Write(GPUVAddr addr, T data);
|
2019-02-23 21:15:35 -08:00
|
|
|
|
2020-08-26 17:14:13 -07:00
|
|
|
[[nodiscard]] u8* GetPointer(GPUVAddr addr);
|
|
|
|
[[nodiscard]] const u8* GetPointer(GPUVAddr addr) const;
|
2019-02-23 21:15:35 -08:00
|
|
|
|
2019-04-16 12:45:24 -07:00
|
|
|
/**
|
2019-04-16 07:11:35 -07:00
|
|
|
* ReadBlock and WriteBlock are full read and write operations over virtual
|
2019-05-09 16:02:52 -07:00
|
|
|
* GPU Memory. It's important to use these when GPU memory may not be continuous
|
2019-04-16 07:11:35 -07:00
|
|
|
* in the Host Memory counterpart. Note: This functions cause Host GPU Memory
|
|
|
|
* Flushes and Invalidations, respectively to each operation.
|
|
|
|
*/
|
2022-11-19 18:07:14 -08:00
|
|
|
void ReadBlock(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size,
|
|
|
|
VideoCommon::CacheType which = VideoCommon::CacheType::All) const;
|
|
|
|
void WriteBlock(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size,
|
|
|
|
VideoCommon::CacheType which = VideoCommon::CacheType::All);
|
|
|
|
void CopyBlock(GPUVAddr gpu_dest_addr, GPUVAddr gpu_src_addr, std::size_t size,
|
|
|
|
VideoCommon::CacheType which = VideoCommon::CacheType::All);
|
2019-04-16 07:11:35 -07:00
|
|
|
|
2019-04-16 12:45:24 -07:00
|
|
|
/**
|
2019-04-16 07:11:35 -07:00
|
|
|
* ReadBlockUnsafe and WriteBlockUnsafe are special versions of ReadBlock and
|
|
|
|
* WriteBlock respectively. In this versions, no flushing or invalidation is actually
|
|
|
|
* done and their performance is similar to a memcpy. This functions can be used
|
|
|
|
* on either of this 2 scenarios instead of their safe counterpart:
|
|
|
|
* - Memory which is sure to never be represented in the Host GPU.
|
|
|
|
* - Memory Managed by a Cache Manager. Example: Texture Flushing should use
|
|
|
|
* WriteBlockUnsafe instead of WriteBlock since it shouldn't invalidate the texture
|
|
|
|
* being flushed.
|
|
|
|
*/
|
2020-06-19 19:02:56 -07:00
|
|
|
void ReadBlockUnsafe(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size) const;
|
|
|
|
void WriteBlockUnsafe(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size);
|
2023-01-04 19:05:20 -08:00
|
|
|
void WriteBlockCached(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size);
|
2019-04-16 07:11:35 -07:00
|
|
|
|
2020-04-08 10:34:59 -07:00
|
|
|
/**
|
2021-06-20 03:25:59 -07:00
|
|
|
* Checks if a gpu region can be simply read with a pointer.
|
2020-04-08 10:34:59 -07:00
|
|
|
*/
|
2020-08-26 17:14:13 -07:00
|
|
|
[[nodiscard]] bool IsGranularRange(GPUVAddr gpu_addr, std::size_t size) const;
|
2020-04-05 14:23:49 -07:00
|
|
|
|
2021-06-12 18:34:06 -07:00
|
|
|
/**
|
2021-06-20 03:25:59 -07:00
|
|
|
* Checks if a gpu region is mapped by a single range of cpu addresses.
|
2021-06-12 18:34:06 -07:00
|
|
|
*/
|
|
|
|
[[nodiscard]] bool IsContinousRange(GPUVAddr gpu_addr, std::size_t size) const;
|
|
|
|
|
|
|
|
/**
|
2021-06-20 03:25:59 -07:00
|
|
|
* Checks if a gpu region is mapped entirely.
|
2021-06-12 18:34:06 -07:00
|
|
|
*/
|
|
|
|
[[nodiscard]] bool IsFullyMappedRange(GPUVAddr gpu_addr, std::size_t size) const;
|
|
|
|
|
|
|
|
/**
|
2021-06-20 03:25:59 -07:00
|
|
|
* Returns a vector with all the subranges of cpu addresses mapped beneath.
|
2021-06-12 18:34:06 -07:00
|
|
|
* if the region is continous, a single pair will be returned. If it's unmapped, an empty vector
|
|
|
|
* will be returned;
|
|
|
|
*/
|
|
|
|
std::vector<std::pair<GPUVAddr, std::size_t>> GetSubmappedRange(GPUVAddr gpu_addr,
|
2023-01-04 19:05:20 -08:00
|
|
|
std::size_t size) const;
|
2021-06-12 18:34:06 -07:00
|
|
|
|
2022-10-16 08:49:32 -07:00
|
|
|
GPUVAddr Map(GPUVAddr gpu_addr, VAddr cpu_addr, std::size_t size,
|
|
|
|
PTEKind kind = PTEKind::INVALID, bool is_big_pages = true);
|
2022-02-05 09:15:26 -08:00
|
|
|
GPUVAddr MapSparse(GPUVAddr gpu_addr, std::size_t size, bool is_big_pages = true);
|
2020-07-25 21:16:21 -07:00
|
|
|
void Unmap(GPUVAddr gpu_addr, std::size_t size);
|
2019-03-03 20:54:16 -08:00
|
|
|
|
2022-11-19 18:07:14 -08:00
|
|
|
void FlushRegion(GPUVAddr gpu_addr, size_t size,
|
|
|
|
VideoCommon::CacheType which = VideoCommon::CacheType::All) const;
|
2022-01-29 08:42:28 -08:00
|
|
|
|
2022-11-19 18:07:14 -08:00
|
|
|
void InvalidateRegion(GPUVAddr gpu_addr, size_t size,
|
|
|
|
VideoCommon::CacheType which = VideoCommon::CacheType::All) const;
|
2022-08-14 02:36:36 -07:00
|
|
|
|
2022-11-19 18:07:14 -08:00
|
|
|
bool IsMemoryDirty(GPUVAddr gpu_addr, size_t size,
|
|
|
|
VideoCommon::CacheType which = VideoCommon::CacheType::All) const;
|
2022-08-14 02:36:36 -07:00
|
|
|
|
|
|
|
size_t MaxContinousRange(GPUVAddr gpu_addr, size_t size) const;
|
|
|
|
|
2022-04-13 07:20:34 -07:00
|
|
|
bool IsWithinGPUAddressRange(GPUVAddr gpu_addr) const {
|
|
|
|
return gpu_addr < address_space_size;
|
|
|
|
}
|
|
|
|
|
2022-10-16 08:49:32 -07:00
|
|
|
PTEKind GetPageKind(GPUVAddr gpu_addr) const;
|
|
|
|
|
2022-11-09 08:58:10 -08:00
|
|
|
size_t GetMemoryLayoutSize(GPUVAddr gpu_addr,
|
|
|
|
size_t max_size = std::numeric_limits<size_t>::max()) const;
|
2022-10-20 16:46:51 -07:00
|
|
|
|
2023-01-04 19:05:20 -08:00
|
|
|
void FlushCaching();
|
|
|
|
|
2020-07-25 21:16:21 -07:00
|
|
|
private:
|
2022-02-05 09:15:26 -08:00
|
|
|
template <bool is_big_pages, typename FuncMapped, typename FuncReserved, typename FuncUnmapped>
|
|
|
|
inline void MemoryOperation(GPUVAddr gpu_src_addr, std::size_t size, FuncMapped&& func_mapped,
|
|
|
|
FuncReserved&& func_reserved, FuncUnmapped&& func_unmapped) const;
|
|
|
|
|
2023-01-05 03:06:33 -08:00
|
|
|
template <bool is_safe, bool use_fastmem>
|
2022-11-19 18:07:14 -08:00
|
|
|
void ReadBlockImpl(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size,
|
|
|
|
VideoCommon::CacheType which) const;
|
2022-02-05 09:15:26 -08:00
|
|
|
|
|
|
|
template <bool is_safe>
|
2022-11-19 18:07:14 -08:00
|
|
|
void WriteBlockImpl(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size,
|
|
|
|
VideoCommon::CacheType which);
|
2021-12-30 20:36:00 -08:00
|
|
|
|
2022-02-05 09:15:26 -08:00
|
|
|
template <bool is_big_page>
|
2022-08-31 20:45:22 -07:00
|
|
|
[[nodiscard]] std::size_t PageEntryIndex(GPUVAddr gpu_addr) const {
|
2022-02-05 09:15:26 -08:00
|
|
|
if constexpr (is_big_page) {
|
|
|
|
return (gpu_addr >> big_page_bits) & big_page_table_mask;
|
|
|
|
} else {
|
|
|
|
return (gpu_addr >> page_bits) & page_table_mask;
|
|
|
|
}
|
2020-07-25 21:16:21 -07:00
|
|
|
}
|
2018-04-21 11:40:51 -07:00
|
|
|
|
2022-02-06 09:51:07 -08:00
|
|
|
inline bool IsBigPageContinous(size_t big_page_index) const;
|
|
|
|
inline void SetBigPageContinous(size_t big_page_index, bool value);
|
|
|
|
|
2023-01-04 19:05:20 -08:00
|
|
|
template <bool is_gpu_address>
|
|
|
|
void GetSubmappedRangeImpl(
|
|
|
|
GPUVAddr gpu_addr, std::size_t size,
|
|
|
|
std::vector<std::pair<std::conditional_t<is_gpu_address, GPUVAddr, VAddr>, std::size_t>>&
|
|
|
|
result) const;
|
|
|
|
|
2020-07-25 21:16:21 -07:00
|
|
|
Core::System& system;
|
2022-02-05 09:15:26 -08:00
|
|
|
Core::Memory::Memory& memory;
|
|
|
|
Core::DeviceMemory& device_memory;
|
2019-03-03 20:54:16 -08:00
|
|
|
|
2021-11-11 12:24:40 -08:00
|
|
|
const u64 address_space_bits;
|
|
|
|
const u64 page_bits;
|
|
|
|
u64 address_space_size;
|
|
|
|
u64 page_size;
|
|
|
|
u64 page_mask;
|
|
|
|
u64 page_table_mask;
|
|
|
|
static constexpr u64 cpu_page_bits{12};
|
|
|
|
|
2022-02-05 09:15:26 -08:00
|
|
|
const u64 big_page_bits;
|
|
|
|
u64 big_page_size;
|
|
|
|
u64 big_page_mask;
|
|
|
|
u64 big_page_table_mask;
|
|
|
|
|
2020-06-10 20:58:57 -07:00
|
|
|
VideoCore::RasterizerInterface* rasterizer = nullptr;
|
2019-07-08 23:17:44 -07:00
|
|
|
|
2021-11-11 12:24:40 -08:00
|
|
|
enum class EntryType : u64 {
|
|
|
|
Free = 0,
|
|
|
|
Reserved = 1,
|
|
|
|
Mapped = 2,
|
|
|
|
};
|
|
|
|
|
|
|
|
std::vector<u64> entries;
|
2022-02-05 09:15:26 -08:00
|
|
|
std::vector<u64> big_entries;
|
2021-11-11 12:24:40 -08:00
|
|
|
|
|
|
|
template <EntryType entry_type>
|
2022-10-16 08:49:32 -07:00
|
|
|
GPUVAddr PageTableOp(GPUVAddr gpu_addr, [[maybe_unused]] VAddr cpu_addr, size_t size,
|
|
|
|
PTEKind kind);
|
2021-11-11 12:24:40 -08:00
|
|
|
|
2022-02-05 09:15:26 -08:00
|
|
|
template <EntryType entry_type>
|
2022-10-16 08:49:32 -07:00
|
|
|
GPUVAddr BigPageTableOp(GPUVAddr gpu_addr, [[maybe_unused]] VAddr cpu_addr, size_t size,
|
|
|
|
PTEKind kind);
|
2022-02-05 09:15:26 -08:00
|
|
|
|
|
|
|
template <bool is_big_page>
|
|
|
|
inline EntryType GetEntry(size_t position) const;
|
2021-01-22 13:31:08 -08:00
|
|
|
|
2022-02-05 09:15:26 -08:00
|
|
|
template <bool is_big_page>
|
|
|
|
inline void SetEntry(size_t position, EntryType entry);
|
2021-01-22 13:33:10 -08:00
|
|
|
|
2021-11-11 12:24:40 -08:00
|
|
|
Common::MultiLevelPageTable<u32> page_table;
|
2022-11-17 07:36:53 -08:00
|
|
|
Common::RangeMap<GPUVAddr, PTEKind> kind_map;
|
2022-02-05 09:15:26 -08:00
|
|
|
Common::VirtualBuffer<u32> big_page_table_cpu;
|
2022-02-06 09:51:07 -08:00
|
|
|
|
|
|
|
std::vector<u64> big_page_continous;
|
2023-01-04 19:05:20 -08:00
|
|
|
std::vector<std::pair<VAddr, std::size_t>> page_stash{};
|
2023-01-05 03:06:33 -08:00
|
|
|
u8* fastmem_arena{};
|
2022-02-06 09:51:07 -08:00
|
|
|
|
|
|
|
constexpr static size_t continous_bits = 64;
|
2021-12-17 07:45:06 -08:00
|
|
|
|
|
|
|
const size_t unique_identifier;
|
2023-01-04 19:05:20 -08:00
|
|
|
std::unique_ptr<VideoCommon::InvalidationAccumulator> accumulator;
|
2021-12-17 07:45:06 -08:00
|
|
|
|
|
|
|
static std::atomic<size_t> unique_identifier_generator;
|
2018-02-07 18:54:35 -08:00
|
|
|
};
|
|
|
|
|
2018-02-11 20:44:12 -08:00
|
|
|
} // namespace Tegra
|