Compare commits
master ... android-su
2 commits

| Author | SHA1 | Date |
|---|---|---|
| | ffea6e2eaf | |
| | 1f7b08c960 | |

8 changed files with 48 additions and 94 deletions
EmulationActivity (Android frontend):

```diff
@@ -230,7 +230,9 @@ class EmulationActivity : AppCompatActivity(), SensorEventListener {
 
     override fun dispatchKeyEvent(event: KeyEvent): Boolean {
         if (event.source and InputDevice.SOURCE_JOYSTICK != InputDevice.SOURCE_JOYSTICK &&
-            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD
+            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD &&
+            event.source and InputDevice.SOURCE_KEYBOARD != InputDevice.SOURCE_KEYBOARD &&
+            event.source and InputDevice.SOURCE_MOUSE != InputDevice.SOURCE_MOUSE
         ) {
             return super.dispatchKeyEvent(event)
         }
@@ -244,7 +246,9 @@ class EmulationActivity : AppCompatActivity(), SensorEventListener {
 
     override fun dispatchGenericMotionEvent(event: MotionEvent): Boolean {
         if (event.source and InputDevice.SOURCE_JOYSTICK != InputDevice.SOURCE_JOYSTICK &&
-            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD
+            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD &&
+            event.source and InputDevice.SOURCE_KEYBOARD != InputDevice.SOURCE_KEYBOARD &&
+            event.source and InputDevice.SOURCE_MOUSE != InputDevice.SOURCE_MOUSE
         ) {
             return super.dispatchGenericMotionEvent(event)
         }
```
InputDialogFragment (Android frontend):

```diff
@@ -1,3 +1,6 @@
+// SPDX-FileCopyrightText: Copyright 2025 Eden Emulator Project
+// SPDX-License-Identifier: GPL-3.0-or-later
+
 // SPDX-FileCopyrightText: 2024 yuzu Emulator Project
 // SPDX-License-Identifier: GPL-2.0-or-later
 
@@ -149,7 +152,9 @@ class InputDialogFragment : DialogFragment() {
 
     private fun onKeyEvent(event: KeyEvent): Boolean {
         if (event.source and InputDevice.SOURCE_JOYSTICK != InputDevice.SOURCE_JOYSTICK &&
-            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD
+            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD &&
+            event.source and InputDevice.SOURCE_KEYBOARD != InputDevice.SOURCE_KEYBOARD &&
+            event.source and InputDevice.SOURCE_MOUSE != InputDevice.SOURCE_MOUSE
         ) {
             return false
         }
@@ -173,7 +178,9 @@ class InputDialogFragment : DialogFragment() {
 
     private fun onMotionEvent(event: MotionEvent): Boolean {
         if (event.source and InputDevice.SOURCE_JOYSTICK != InputDevice.SOURCE_JOYSTICK &&
-            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD
+            event.source and InputDevice.SOURCE_GAMEPAD != InputDevice.SOURCE_GAMEPAD &&
+            event.source and InputDevice.SOURCE_KEYBOARD != InputDevice.SOURCE_KEYBOARD &&
+            event.source and InputDevice.SOURCE_MOUSE != InputDevice.SOURCE_MOUSE
         ) {
             return false
         }
```
qt_common CMakeLists:

```diff
@@ -45,10 +45,6 @@ if (NOT APPLE AND ENABLE_OPENGL)
     target_compile_definitions(qt_common PUBLIC HAS_OPENGL)
 endif()
 
-if (UNIX AND NOT APPLE)
-    if (TARGET Qt6::GuiPrivate)
-        target_link_libraries(qt_common PRIVATE Qt6::GuiPrivate)
-    else()
-        target_include_directories(qt_common PRIVATE ${Qt6Gui_PRIVATE_INCLUDE_DIRS})
-    endif()
+if (NOT WIN32)
+    target_include_directories(qt_common PRIVATE ${Qt6Gui_PRIVATE_INCLUDE_DIRS})
 endif()
```
BufferCache<P> template (common buffer cache):

```diff
@@ -386,9 +386,11 @@ void BufferCache<P>::BindHostComputeBuffers() {
 template <class P>
 void BufferCache<P>::SetUniformBuffersState(const std::array<u32, NUM_STAGES>& mask,
                                             const UniformBufferSizes* sizes) {
-    if (channel_state->enabled_uniform_buffer_masks != mask) {
-        channel_state->fast_bound_uniform_buffers.fill(0);
-        if constexpr (HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS) {
+    if constexpr (HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS) {
+        if (channel_state->enabled_uniform_buffer_masks != mask) {
+            if constexpr (IS_OPENGL) {
+                channel_state->fast_bound_uniform_buffers.fill(0);
+            }
             channel_state->dirty_uniform_buffers.fill(~u32{0});
             channel_state->uniform_buffer_binding_sizes.fill({});
         }
@@ -804,7 +806,7 @@ void BufferCache<P>::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32
                 channel_state->uniform_buffer_binding_sizes[stage][binding_index] != size;
             if (should_fast_bind) {
                 // We only have to bind when the currently bound buffer is not the fast version
-                channel_state->fast_bound_uniform_buffers[stage] |= 1u << binding_index;
+                channel_state->fast_bound_uniform_buffers[stage] |= 1U << binding_index;
                 channel_state->uniform_buffer_binding_sizes[stage][binding_index] = size;
                 runtime.BindFastUniformBuffer(stage, binding_index, size);
             }
@@ -813,8 +815,10 @@ void BufferCache<P>::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32
             return;
         }
     }
-    channel_state->fast_bound_uniform_buffers[stage] |= 1u << binding_index;
-    channel_state->uniform_buffer_binding_sizes[stage][binding_index] = size;
+    if constexpr (IS_OPENGL) {
+        channel_state->fast_bound_uniform_buffers[stage] |= 1U << binding_index;
+        channel_state->uniform_buffer_binding_sizes[stage][binding_index] = size;
+    }
     // Stream buffer path to avoid stalling on non-Nvidia drivers or Vulkan
     const std::span<u8> span = runtime.BindMappedUniformBuffer(stage, binding_index, size);
     device_memory.ReadBlockUnsafe(device_addr, span.data(), size);
@@ -835,6 +839,9 @@ void BufferCache<P>::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32
     }
     const u32 offset = buffer.Offset(device_addr);
     if constexpr (IS_OPENGL) {
+        // Fast buffer will be unbound
+        channel_state->fast_bound_uniform_buffers[stage] &= ~(1U << binding_index);
+
         // Mark the index as dirty if offset doesn't match
         const bool is_copy_bind = offset != 0 && !runtime.SupportsNonZeroUniformOffset();
         channel_state->dirty_uniform_buffers[stage] |= (is_copy_bind ? 1U : 0U) << index;
@@ -848,7 +855,6 @@ void BufferCache<P>::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32
     } else {
         runtime.BindUniformBuffer(buffer, offset, size);
     }
-    channel_state->fast_bound_uniform_buffers[stage] &= ~(1u << binding_index);
 }
 
 template <class P>
@@ -1783,7 +1789,12 @@ std::span<u8> BufferCache<P>::ImmediateBuffer(size_t wanted_capacity) {
 
 template <class P>
 bool BufferCache<P>::HasFastUniformBufferBound(size_t stage, u32 binding_index) const noexcept {
-    return ((channel_state->fast_bound_uniform_buffers[stage] >> binding_index) & 1u) != 0;
+    if constexpr (IS_OPENGL) {
+        return ((channel_state->fast_bound_uniform_buffers[stage] >> binding_index) & 1) != 0;
+    } else {
+        // Only OpenGL has fast uniform buffers
+        return false;
+    }
 }
 
 template <class P>
```
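The hunks above revolve around `fast_bound_uniform_buffers`, a per-stage `u32` bitmask in which bit `binding_index` records whether that uniform-buffer slot currently holds a fast binding. A minimal standalone sketch of the same set/clear/test idiom follows; the type and function names here are illustrative only, not taken from the repository:

```cpp
#include <array>
#include <cstddef>
#include <cstdint>

using u32 = std::uint32_t;
constexpr std::size_t NUM_STAGES = 5;

// Per-stage bitmask: bit `binding_index` is set while that uniform binding
// holds a "fast" bound buffer. Names are hypothetical, for illustration.
struct FastBindingMask {
    std::array<u32, NUM_STAGES> bits{};

    void MarkFastBound(std::size_t stage, u32 binding_index) {
        bits[stage] |= 1U << binding_index;      // as in BindHostGraphicsUniformBuffer
    }
    void ClearFastBound(std::size_t stage, u32 binding_index) {
        bits[stage] &= ~(1U << binding_index);   // "Fast buffer will be unbound"
    }
    bool HasFastBound(std::size_t stage, u32 binding_index) const {
        return ((bits[stage] >> binding_index) & 1U) != 0;  // as in HasFastUniformBufferBound
    }
};
```

Because the mask is a single `u32`, it can describe at most 32 bindings per stage, which is what the `static_assert` removed in the next hunk had been checking.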
Buffer cache declarations (constants and channel state):

```diff
@@ -54,8 +54,6 @@ constexpr u32 NUM_STORAGE_BUFFERS = 16;
 constexpr u32 NUM_TEXTURE_BUFFERS = 32;
 constexpr u32 NUM_STAGES = 5;
 
-static_assert(NUM_GRAPHICS_UNIFORM_BUFFERS <= 32, "fast bitmask must fit u32");
-
 using UniformBufferSizes = std::array<std::array<u32, NUM_GRAPHICS_UNIFORM_BUFFERS>, NUM_STAGES>;
 using ComputeUniformBufferSizes = std::array<u32, NUM_COMPUTE_UNIFORM_BUFFERS>;
 
@@ -139,8 +137,8 @@ public:
     u32 written_compute_texture_buffers = 0;
     u32 image_compute_texture_buffers = 0;
 
-    std::array<u32, NUM_GRAPHICS_UNIFORM_BUFFERS> uniform_cache_hits{};
-    std::array<u32, NUM_GRAPHICS_UNIFORM_BUFFERS> uniform_cache_shots{};
+    std::array<u32, 16> uniform_cache_hits{};
+    std::array<u32, 16> uniform_cache_shots{};
 
     u32 uniform_buffer_skip_cache_size = DEFAULT_SKIP_CACHE_SIZE;
 
```
BufferCacheRuntime (Vulkan), source:

```diff
@@ -337,11 +337,6 @@ BufferCacheRuntime::BufferCacheRuntime(const Device& device_, MemoryAllocator& m
         uint8_pass = std::make_unique<Uint8Pass>(device, scheduler, descriptor_pool, staging_pool,
                                                  compute_pass_descriptor_queue);
     }
-    const u32 ubo_align = static_cast<u32>(
-        device.GetUniformBufferAlignment() //check if the device has it
-    );
-    // add the ability to change the size in settings in future
-    uniform_ring.Init(device, memory_allocator, 8 * 1024 * 1024 /* 8 MiB */, ubo_align ? ubo_align : 256);
     quad_array_index_buffer = std::make_shared<QuadArrayIndexBuffer>(device_, memory_allocator_,
                                                                      scheduler_, staging_pool_);
     quad_strip_index_buffer = std::make_shared<QuadStripIndexBuffer>(device_, memory_allocator_,
@@ -360,42 +355,6 @@ void BufferCacheRuntime::FreeDeferredStagingBuffer(StagingBufferRef& ref) {
     staging_pool.FreeDeferred(ref);
 }
 
-void BufferCacheRuntime::UniformRing::Init(const Device& device,
-                                           MemoryAllocator& alloc,
-                                           u64 bytes, u32 alignment) {
-    for (size_t i = 0; i < NUM_FRAMES; ++i) {
-        VkBufferCreateInfo ci{
-            .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
-            .pNext = nullptr,
-            .flags = 0,
-            .size = bytes,
-            .usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
-            .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
-            .queueFamilyIndexCount = 0,
-            .pQueueFamilyIndices = nullptr,
-        };
-        buffers[i] = alloc.CreateBuffer(ci, MemoryUsage::Upload);
-        mapped[i] = buffers[i].Mapped().data();
-    }
-    size = bytes;
-    align = alignment ? alignment : 256;
-    head = 0;
-    current_frame = 0;
-}
-
-std::span<u8> BufferCacheRuntime::UniformRing::Alloc(u32 bytes, u32& out_offset) {
-    const u64 aligned = Common::AlignUp(head, static_cast<u64>(align));
-    u64 end = aligned + bytes;
-
-    if (end > size) {
-        return {}; // Fallback to staging pool
-    }
-
-    out_offset = static_cast<u32>(aligned);
-    head = end;
-    return {mapped[current_frame] + out_offset, bytes};
-}
-
 u64 BufferCacheRuntime::GetDeviceLocalMemory() const {
     return device.GetDeviceLocalMemory();
 }
@@ -416,7 +375,6 @@ void BufferCacheRuntime::TickFrame(Common::SlotVector<Buffer>& slot_buffers) noe
     for (auto it = slot_buffers.begin(); it != slot_buffers.end(); it++) {
         it->ResetUsageTracking();
     }
-    uniform_ring.BeginFrame();
 }
 
 void BufferCacheRuntime::Finish() {
```
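The `UniformRing` removed above was a small per-frame bump allocator: `BeginFrame()` rotates to the next of three fixed buffers and rewinds `head`, while `Alloc()` rounds `head` up to the UBO alignment and returns an empty span once the ring is exhausted so the caller falls back to the staging pool. A host-only sketch of that allocation arithmetic, assuming plain byte arrays in place of the mapped Vulkan buffers:

```cpp
#include <cstddef>
#include <cstdint>
#include <span>
#include <vector>

// Host-only sketch of the removed UniformRing allocation logic.
// std::vector stands in for the per-frame Vulkan buffer mappings.
struct RingSketch {
    static constexpr std::size_t NUM_FRAMES = 3;
    std::vector<std::uint8_t> storage[NUM_FRAMES];
    std::uint64_t size = 0;
    std::uint64_t head = 0;
    std::uint32_t align = 256;
    std::size_t current_frame = 0;

    void Init(std::uint64_t bytes, std::uint32_t alignment) {
        for (auto& frame : storage) {
            frame.resize(bytes);
        }
        size = bytes;
        align = alignment ? alignment : 256;
        head = 0;
        current_frame = 0;
    }

    // Called once per frame (the removed TickFrame hook).
    void BeginFrame() {
        current_frame = (current_frame + 1) % NUM_FRAMES;
        head = 0;
    }

    std::span<std::uint8_t> Alloc(std::uint32_t bytes, std::uint32_t& out_offset) {
        const std::uint64_t aligned = (head + align - 1) / align * align; // AlignUp
        const std::uint64_t end = aligned + bytes;
        if (end > size) {
            return {}; // caller falls back to the staging pool
        }
        out_offset = static_cast<std::uint32_t>(aligned);
        head = end;
        return {storage[current_frame].data() + out_offset, bytes};
    }
};
```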
BufferCacheRuntime (Vulkan), header:

```diff
@@ -1,6 +1,3 @@
-// SPDX-FileCopyrightText: Copyright 2025 Eden Emulator Project
-// SPDX-License-Identifier: GPL-3.0-or-later
-
 // SPDX-FileCopyrightText: Copyright 2019 yuzu Emulator Project
 // SPDX-License-Identifier: GPL-2.0-or-later
 
@@ -127,15 +124,8 @@ public:
 
     void BindTransformFeedbackBuffers(VideoCommon::HostBindings<Buffer>& bindings);
 
-    std::span<u8> BindMappedUniformBuffer([[maybe_unused]] size_t /*stage*/,
-                                          [[maybe_unused]] u32 /*binding_index*/,
-                                          u32 size) {
-        u32 offset = 0;
-        if (auto span = uniform_ring.Alloc(size, offset); !span.empty()) {
-            BindBuffer(*uniform_ring.buffers[uniform_ring.current_frame], offset, size);
-            return span;
-        }
-        // Fallback for giant requests
+    std::span<u8> BindMappedUniformBuffer([[maybe_unused]] size_t stage,
+                                          [[maybe_unused]] u32 binding_index, u32 size) {
         const StagingBufferRef ref = staging_pool.Request(size, MemoryUsage::Upload);
         BindBuffer(ref.buffer, static_cast<u32>(ref.offset), size);
         return ref.mapped_span;
@@ -163,24 +153,6 @@ private:
     void ReserveNullBuffer();
     vk::Buffer CreateNullBuffer();
 
-    struct UniformRing {
-        static constexpr size_t NUM_FRAMES = 3;
-        std::array<vk::Buffer, NUM_FRAMES> buffers{};
-        std::array<u8*, NUM_FRAMES> mapped{};
-        u64 size = 0;
-        u64 head = 0;
-        u32 align = 256;
-        size_t current_frame = 0;
-
-        void Init(const Device& device, MemoryAllocator& alloc, u64 bytes, u32 alignment);
-        void BeginFrame() {
-            current_frame = (current_frame + 1) % NUM_FRAMES;
-            head = 0;
-        }
-        std::span<u8> Alloc(u32 bytes, u32& out_offset);
-    };
-    UniformRing uniform_ring;
-
     const Device& device;
     MemoryAllocator& memory_allocator;
     Scheduler& scheduler;
```
yuzu CMakeLists:

```diff
@@ -393,8 +393,16 @@ target_link_libraries(yuzu PRIVATE common core input_common frontend_common netw
 target_link_libraries(yuzu PRIVATE Boost::headers glad Qt6::Widgets)
 target_link_libraries(yuzu PRIVATE ${PLATFORM_LIBRARIES} Threads::Threads)
 
+if (NOT WIN32)
+    target_include_directories(yuzu PRIVATE ${Qt6Gui_PRIVATE_INCLUDE_DIRS})
+endif()
+
 if (UNIX AND NOT APPLE)
     target_link_libraries(yuzu PRIVATE Qt6::DBus)
+
+    if (TARGET Qt6::GuiPrivate)
+        target_link_libraries(yuzu PRIVATE Qt6::GuiPrivate)
+    endif()
 endif()
 
 target_compile_definitions(yuzu PRIVATE
```