Add toggle for LRU

MrPurple666 2025-04-30 03:41:46 -03:00
parent b695ca5a2a
commit b5bec371ae
8 changed files with 61 additions and 11 deletions


@@ -6,13 +6,21 @@
template<typename KeyType, typename ValueType>
class LRUCache {
private:
bool enabled = true;
size_t capacity;
std::list<KeyType> cache_list;
std::unordered_map<KeyType, std::pair<typename std::list<KeyType>::iterator, ValueType>> cache_map;
public:
explicit LRUCache(size_t capacity) : capacity(capacity) {
explicit LRUCache(size_t capacity, bool enabled = true) : enabled(enabled), capacity(capacity) {
cache_map.reserve(capacity);
}
// Returns pointer to value if found, nullptr otherwise
ValueType* get(const KeyType& key) {
if (!enabled) return nullptr;
auto it = cache_map.find(key);
if (it == cache_map.end()) {
return nullptr;
@@ -25,12 +33,16 @@ public:
// Returns pointer to value if found (without promoting it), nullptr otherwise
ValueType* peek(const KeyType& key) const {
if (!enabled) return nullptr;
auto it = cache_map.find(key);
return it != cache_map.end() ? &(it->second.second) : nullptr;
}
// Inserts or updates a key-value pair
void put(const KeyType& key, const ValueType& value) {
if (!enabled) return;
auto it = cache_map.find(key);
if (it != cache_map.end()) {
@@ -52,6 +64,19 @@ public:
cache_map[key] = {cache_list.begin(), value};
}
// Enable or disable the LRU cache
void setEnabled(bool state) {
enabled = state;
if (!enabled) {
clear();
}
}
// Check if the cache is enabled
bool isEnabled() const {
return enabled;
}
// Attempts to get value, returns std::nullopt if not found
std::optional<ValueType> try_get(const KeyType& key) {
auto* val = get(key);
@@ -60,16 +85,18 @@ public:
// Checks if key exists in cache
bool contains(const KeyType& key) const {
if (!enabled) return false;
return cache_map.find(key) != cache_map.end();
}
// Removes a key from the cache if it exists
bool erase(const KeyType& key) {
if (!enabled) return false;
auto it = cache_map.find(key);
if (it == cache_map.end()) {
return false;
}
cache_list.erase(it->second.first);
cache_map.erase(it);
return true;
@@ -83,7 +110,7 @@ public:
// Returns current number of elements in cache
size_t size() const {
return cache_map.size();
return enabled ? cache_map.size() : 0;
}
// Returns maximum capacity of cache
@@ -93,6 +120,8 @@ public:
// Resizes the cache, evicting LRU items if new capacity is smaller
void resize(size_t new_capacity) {
if (!enabled) return;
capacity = new_capacity;
while (cache_map.size() > capacity) {
auto last = cache_list.back();
@@ -101,9 +130,4 @@ public:
}
cache_map.reserve(capacity);
}
private:
size_t capacity;
std::list<KeyType> cache_list;
std::unordered_map<KeyType, std::pair<typename std::list<KeyType>::iterator, ValueType>> cache_map;
};
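
For reference, a minimal usage sketch of the new toggle (illustrative only, not part of the commit; it assumes the header above is available as "lru_cache.h" and provides its own includes):

#include <cassert>
#include <string>
#include "lru_cache.h"

void ToggleExample() {
    LRUCache<int, std::string> cache(2, /*enabled=*/true);
    cache.put(1, "one");
    assert(cache.contains(1));

    cache.setEnabled(false);            // disabling clears all entries
    assert(cache.get(1) == nullptr);    // lookups now always miss
    assert(cache.size() == 0);

    cache.setEnabled(true);             // re-enabling starts from an empty cache
    cache.put(2, "two");
    assert(cache.peek(2) != nullptr);
}

Disabling via setEnabled(false) both clears the cache and turns every subsequent get/put/contains call into a no-op until the cache is re-enabled.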


@@ -10,6 +10,7 @@
#include <oaknut/oaknut.hpp>
#include "common/common_types.h"
#include "common/settings.h"
#include "core/hle/kernel/code_set.h"
#include "core/hle/kernel/k_typed_address.h"
#include "core/hle/kernel/physical_memory.h"
@@ -61,8 +62,8 @@ private:
void WriteCntpctHandler(ModuleDestLabel module_dest, oaknut::XReg dest_reg);
private:
static constexpr size_t CACHE_SIZE = 1024; // Cache size for patch entries
LRUCache<uintptr_t, PatchTextAddress> patch_cache{CACHE_SIZE};
static constexpr size_t CACHE_SIZE = 4096; // Cache size for patch entries
LRUCache<uintptr_t, PatchTextAddress> patch_cache{CACHE_SIZE, Settings::values.lru_cache_enabled.GetValue()};
void BranchToPatch(uintptr_t module_dest) {
// Try to get existing patch entry from cache
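
For context, a hedged sketch of how the disabled cache behaves at a call site like this: with Settings::values.lru_cache_enabled set to false, get() returns nullptr and put() is a no-op, so the patcher simply recomputes on every lookup. ComputePatch below is a hypothetical stand-in for the real patch-resolution logic, not a function from this file:

PatchTextAddress LookupWithCache(LRUCache<uintptr_t, PatchTextAddress>& cache,
                                 uintptr_t module_dest) {
    if (auto* cached = cache.get(module_dest)) {
        return *cached;                  // cache hit: entry promoted to most-recently-used
    }
    const PatchTextAddress addr = ComputePatch(module_dest);  // hypothetical helper
    cache.put(module_dest, addr);        // ignored while the cache is disabled
    return addr;
}

Note that the setting is read once via GetValue() when patch_cache is constructed, so flipping it at runtime would not affect an already-constructed instance of this class.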