// Copyright 2020 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <algorithm>
#include <queue>

#include "common/common_types.h"
#include "core/core.h"
#include "video_core/gpu.h"
#include "video_core/memory_manager.h"
#include "video_core/rasterizer_interface.h"

namespace VideoCommon {

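/// Common fence state shared by backend-specific fence types. A fence is either a semaphore
/// release (writes a payload to a GPU virtual address) or a sync point increment.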
class FenceBase {
public:
    FenceBase(u32 payload, bool is_stubbed)
        : address{}, payload{payload}, is_semaphore{false}, is_stubbed{is_stubbed} {}

    FenceBase(GPUVAddr address, u32 payload, bool is_stubbed)
        : address{address}, payload{payload}, is_semaphore{true}, is_stubbed{is_stubbed} {}

    GPUVAddr GetAddress() const {
        return address;
    }

    u32 GetPayload() const {
        return payload;
    }

    bool IsSemaphore() const {
        return is_semaphore;
    }

private:
    GPUVAddr address;
    u32 payload;
    bool is_semaphore;

protected:
    bool is_stubbed;
};

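/// Tracks queued fences and releases them once their work has completed on the host GPU,
/// propagating semaphore writes and sync point increments back to guest-visible state.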
template <typename TFence, typename TTextureCache, typename TTBufferCache, typename TQueryCache>
class FenceManager {
public:
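    /// Enqueues a semaphore fence that writes 'value' to 'addr' once the host GPU reaches it.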
    void SignalSemaphore(GPUVAddr addr, u32 value) {
        TryReleasePendingFences();
        const bool should_flush = ShouldFlush();
        CommitAsyncFlushes();
        TFence new_fence = CreateFence(addr, value, !should_flush);
        fences.push(new_fence);
        QueueFence(new_fence);
        if (should_flush) {
            rasterizer.FlushCommands();
        }
        rasterizer.SyncGuestHost();
    }

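    /// Enqueues a sync point fence that increments sync point 'value' once the host GPU reaches
    /// it.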
    void SignalSyncPoint(u32 value) {
        TryReleasePendingFences();
        const bool should_flush = ShouldFlush();
        CommitAsyncFlushes();
        TFence new_fence = CreateFence(value, !should_flush);
        fences.push(new_fence);
        QueueFence(new_fence);
        if (should_flush) {
            rasterizer.FlushCommands();
        }
        rasterizer.SyncGuestHost();
    }

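    /// Releases every queued fence, waiting on the host GPU when pending cache flushes require
    /// it.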
    void WaitPendingFences() {
        while (!fences.empty()) {
            TFence& current_fence = fences.front();
            if (ShouldWait()) {
                WaitFence(current_fence);
            }
            PopAsyncFlushes();
            if (current_fence->IsSemaphore()) {
                gpu_memory.template Write<u32>(current_fence->GetAddress(),
                                               current_fence->GetPayload());
            } else {
                gpu.IncrementSyncPoint(current_fence->GetPayload());
            }
            fences.pop();
        }
    }

protected:
    explicit FenceManager(VideoCore::RasterizerInterface& rasterizer_, Tegra::GPU& gpu_,
                          TTextureCache& texture_cache_, TTBufferCache& buffer_cache_,
                          TQueryCache& query_cache_)
        : rasterizer{rasterizer_}, gpu{gpu_}, gpu_memory{gpu.MemoryManager()},
          texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, query_cache{query_cache_} {}

    virtual ~FenceManager() = default;

    /// Creates a sync point fence interface; does not create a backend fence if 'is_stubbed' is
    /// true.
    virtual TFence CreateFence(u32 value, bool is_stubbed) = 0;

    /// Creates a semaphore fence interface; does not create a backend fence if 'is_stubbed' is
    /// true.
    virtual TFence CreateFence(GPUVAddr addr, u32 value, bool is_stubbed) = 0;

    /// Queues a fence into the backend if the fence isn't stubbed.
    virtual void QueueFence(TFence& fence) = 0;

    /// Returns true when the backend fence has been signaled/reached by the host GPU.
    virtual bool IsFenceSignaled(TFence& fence) const = 0;

    /// Waits until a fence has been signaled by the host GPU.
    virtual void WaitFence(TFence& fence) = 0;

    VideoCore::RasterizerInterface& rasterizer;
    Tegra::GPU& gpu;
    Tegra::MemoryManager& gpu_memory;
    TTextureCache& texture_cache;
    TTBufferCache& buffer_cache;
    TQueryCache& query_cache;

private:
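    /// Releases queued fences that have already been signaled (or are stubbed), stopping at the
    /// first fence that is still pending when a wait would be required.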
    void TryReleasePendingFences() {
        while (!fences.empty()) {
            TFence& current_fence = fences.front();
            if (ShouldWait() && !IsFenceSignaled(current_fence)) {
                return;
            }
            PopAsyncFlushes();
            if (current_fence->IsSemaphore()) {
                gpu_memory.template Write<u32>(current_fence->GetAddress(),
                                               current_fence->GetPayload());
            } else {
                gpu.IncrementSyncPoint(current_fence->GetPayload());
            }
            fences.pop();
        }
    }

    bool ShouldWait() const {
        return texture_cache.ShouldWaitAsyncFlushes() || buffer_cache.ShouldWaitAsyncFlushes() ||
               query_cache.ShouldWaitAsyncFlushes();
    }

    bool ShouldFlush() const {
        return texture_cache.HasUncommittedFlushes() || buffer_cache.HasUncommittedFlushes() ||
               query_cache.HasUncommittedFlushes();
    }

    void PopAsyncFlushes() {
        texture_cache.PopAsyncFlushes();
        buffer_cache.PopAsyncFlushes();
        query_cache.PopAsyncFlushes();
    }

    void CommitAsyncFlushes() {
        texture_cache.CommitAsyncFlushes();
        buffer_cache.CommitAsyncFlushes();
        query_cache.CommitAsyncFlushes();
    }

    std::queue<TFence> fences;
};

} // namespace VideoCommon