mirror of https://github.com/stenzek/duckstation
Vulkan: Use Vulkan Memory Allocator
parent 56293e4d8f
commit 9d27f7095f
staging_buffer.cpp:
@@ -1,253 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.

#include "staging_buffer.h"
#include "../assert.h"
#include "context.h"
#include "util.h"

namespace Vulkan {
StagingBuffer::StagingBuffer() = default;

StagingBuffer::StagingBuffer(StagingBuffer&& move)
  : m_type(move.m_type), m_buffer(move.m_buffer), m_memory(move.m_memory), m_size(move.m_size),
    m_coherent(move.m_coherent), m_map_pointer(move.m_map_pointer), m_map_offset(move.m_map_offset),
    m_map_size(move.m_map_size)
{
  move.m_type = Type::Upload;
  move.m_buffer = VK_NULL_HANDLE;
  move.m_memory = VK_NULL_HANDLE;
  move.m_size = 0;
  move.m_coherent = false;
  move.m_map_pointer = nullptr;
  move.m_map_offset = 0;
  move.m_map_size = 0;
}

StagingBuffer::~StagingBuffer()
{
  if (IsValid())
    Destroy(true);
}

StagingBuffer& StagingBuffer::operator=(StagingBuffer&& move)
{
  if (IsValid())
    Destroy(true);

  std::swap(m_type, move.m_type);
  std::swap(m_buffer, move.m_buffer);
  std::swap(m_memory, move.m_memory);
  std::swap(m_size, move.m_size);
  std::swap(m_coherent, move.m_coherent);
  std::swap(m_map_pointer, move.m_map_pointer);
  std::swap(m_map_offset, move.m_map_offset);
  std::swap(m_map_size, move.m_map_size);
  return *this;
}

bool StagingBuffer::Map(VkDeviceSize offset, VkDeviceSize size)
{
  m_map_offset = offset;
  if (size == VK_WHOLE_SIZE)
    m_map_size = m_size - offset;
  else
    m_map_size = size;

  Assert(!m_map_pointer);
  Assert(m_map_offset + m_map_size <= m_size);

  void* map_pointer;
  VkResult res = vkMapMemory(g_vulkan_context->GetDevice(), m_memory, m_map_offset, m_map_size, 0, &map_pointer);
  if (res != VK_SUCCESS)
  {
    LOG_VULKAN_ERROR(res, "vkMapMemory failed: ");
    return false;
  }

  m_map_pointer = reinterpret_cast<char*>(map_pointer);
  return true;
}

void StagingBuffer::Unmap()
{
  Assert(m_map_pointer);

  vkUnmapMemory(g_vulkan_context->GetDevice(), m_memory);
  m_map_pointer = nullptr;
  m_map_offset = 0;
  m_map_size = 0;
}

void StagingBuffer::FlushCPUCache(VkDeviceSize offset, VkDeviceSize size)
{
  Assert(offset >= m_map_offset);
  if (m_coherent || !IsMapped())
    return;

  VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory, offset - m_map_offset, size};
  vkFlushMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range);
}

void StagingBuffer::InvalidateGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits dest_access_flags,
                                       VkPipelineStageFlagBits dest_pipeline_stage, VkDeviceSize offset,
                                       VkDeviceSize size)
{
  if (m_coherent)
    return;

  Assert((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
  Util::BufferMemoryBarrier(command_buffer, m_buffer, VK_ACCESS_HOST_WRITE_BIT, dest_access_flags, offset, size,
                            VK_PIPELINE_STAGE_HOST_BIT, dest_pipeline_stage);
}

void StagingBuffer::PrepareForGPUWrite(VkCommandBuffer command_buffer, VkAccessFlagBits dst_access_flags,
                                       VkPipelineStageFlagBits dst_pipeline_stage, VkDeviceSize offset,
                                       VkDeviceSize size)
{
  if (m_coherent)
    return;

  Assert((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
  Util::BufferMemoryBarrier(command_buffer, m_buffer, 0, dst_access_flags, offset, size,
                            VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, dst_pipeline_stage);
}

void StagingBuffer::FlushGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits src_access_flags,
                                  VkPipelineStageFlagBits src_pipeline_stage, VkDeviceSize offset, VkDeviceSize size)
{
  if (m_coherent)
    return;

  Assert((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
  Util::BufferMemoryBarrier(command_buffer, m_buffer, src_access_flags, VK_ACCESS_HOST_READ_BIT, offset, size,
                            src_pipeline_stage, VK_PIPELINE_STAGE_HOST_BIT);
}

void StagingBuffer::InvalidateCPUCache(VkDeviceSize offset, VkDeviceSize size)
{
  Assert(offset >= m_map_offset);
  if (m_coherent || !IsMapped())
    return;

  VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory, offset - m_map_offset, size};
  vkInvalidateMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range);
}

void StagingBuffer::Read(VkDeviceSize offset, void* data, size_t size, bool invalidate_caches)
{
  Assert((offset + size) <= m_size);
  Assert(offset >= m_map_offset && size <= (m_map_size + (offset - m_map_offset)));
  if (invalidate_caches)
    InvalidateCPUCache(offset, size);

  memcpy(data, m_map_pointer + (offset - m_map_offset), size);
}

void StagingBuffer::Write(VkDeviceSize offset, const void* data, size_t size, bool invalidate_caches)
{
  Assert((offset + size) <= m_size);
  Assert(offset >= m_map_offset && size <= (m_map_size + (offset - m_map_offset)));

  memcpy(m_map_pointer + (offset - m_map_offset), data, size);
  if (invalidate_caches)
    FlushCPUCache(offset, size);
}

bool StagingBuffer::AllocateBuffer(Type type, VkDeviceSize size, VkBufferUsageFlags usage, VkBuffer* out_buffer,
                                   VkDeviceMemory* out_memory, bool* out_coherent)
{
  VkBufferCreateInfo buffer_create_info = {
    VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType        sType
    nullptr,                              // const void*            pNext
    0,                                    // VkBufferCreateFlags    flags
    size,                                 // VkDeviceSize           size
    usage,                                // VkBufferUsageFlags     usage
    VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode          sharingMode
    0,                                    // uint32_t               queueFamilyIndexCount
    nullptr                               // const uint32_t*        pQueueFamilyIndices
  };
  VkResult res = vkCreateBuffer(g_vulkan_context->GetDevice(), &buffer_create_info, nullptr, out_buffer);
  if (res != VK_SUCCESS)
  {
    LOG_VULKAN_ERROR(res, "vkCreateBuffer failed: ");
    return false;
  }

  VkMemoryRequirements requirements;
  vkGetBufferMemoryRequirements(g_vulkan_context->GetDevice(), *out_buffer, &requirements);

  u32 type_index;
  if (type == Type::Upload)
    type_index = g_vulkan_context->GetUploadMemoryType(requirements.memoryTypeBits, out_coherent);
  else
    type_index = g_vulkan_context->GetReadbackMemoryType(requirements.memoryTypeBits, out_coherent);

  VkMemoryAllocateInfo memory_allocate_info = {
    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType    sType
    nullptr,                                // const void*        pNext
    requirements.size,                      // VkDeviceSize       allocationSize
    type_index                              // uint32_t           memoryTypeIndex
  };
  res = vkAllocateMemory(g_vulkan_context->GetDevice(), &memory_allocate_info, nullptr, out_memory);
  if (res != VK_SUCCESS)
  {
    LOG_VULKAN_ERROR(res, "vkAllocateMemory failed: ");
    vkDestroyBuffer(g_vulkan_context->GetDevice(), *out_buffer, nullptr);
    return false;
  }

  res = vkBindBufferMemory(g_vulkan_context->GetDevice(), *out_buffer, *out_memory, 0);
  if (res != VK_SUCCESS)
  {
    LOG_VULKAN_ERROR(res, "vkBindBufferMemory failed: ");
    vkDestroyBuffer(g_vulkan_context->GetDevice(), *out_buffer, nullptr);
    vkFreeMemory(g_vulkan_context->GetDevice(), *out_memory, nullptr);
    return false;
  }

  return true;
}

bool StagingBuffer::Create(Type type, VkDeviceSize size, VkBufferUsageFlags usage)
{
  if (!AllocateBuffer(type, size, usage, &m_buffer, &m_memory, &m_coherent))
    return false;

  m_type = type;
  m_size = size;
  return true;
}

void StagingBuffer::Destroy(bool defer /* = true */)
{
  if (!IsValid())
    return;

  // Unmap before destroying
  if (m_map_pointer)
    Unmap();

  if (defer)
    g_vulkan_context->DeferBufferDestruction(m_buffer);
  else
    vkDestroyBuffer(g_vulkan_context->GetDevice(), m_buffer, nullptr);

  if (defer)
    g_vulkan_context->DeferDeviceMemoryDestruction(m_memory);
  else
    vkFreeMemory(g_vulkan_context->GetDevice(), m_memory, nullptr);

  m_type = Type::Upload;
  m_buffer = VK_NULL_HANDLE;
  m_memory = VK_NULL_HANDLE;
  m_size = 0;
  m_coherent = false;
  m_map_pointer = nullptr;
  m_map_offset = 0;
  m_map_size = 0;
}

} // namespace Vulkan
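
Per the commit title, the hand-rolled vkCreateBuffer / vkAllocateMemory / vkBindBufferMemory / vkMapMemory sequence in AllocateBuffer() above is the kind of code that Vulkan Memory Allocator takes over. The following is a hedged sketch of what that allocation roughly collapses to with VMA; it is illustrative only, not the replacement code from this commit, and the helper name and memory-usage flags are assumptions.

// Illustrative sketch: allocating a host-visible staging buffer through VMA.
// Assumes a VmaAllocator has already been created for the device; the function
// name and the choice of usage flags are hypothetical.
#include "vk_mem_alloc.h"

static bool AllocateStagingBufferWithVMA(VmaAllocator allocator, bool readback, VkDeviceSize size,
                                         VkBufferUsageFlags usage, VkBuffer* out_buffer,
                                         VmaAllocation* out_allocation, void** out_map_pointer)
{
  VkBufferCreateInfo buffer_create_info = {};
  buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  buffer_create_info.size = size;
  buffer_create_info.usage = usage;
  buffer_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

  // VMA picks a suitable memory type; persistent mapping replaces the explicit vkMapMemory() call.
  VmaAllocationCreateInfo allocation_create_info = {};
  allocation_create_info.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
  allocation_create_info.usage = readback ? VMA_MEMORY_USAGE_GPU_TO_CPU : VMA_MEMORY_USAGE_CPU_TO_GPU;

  // Buffer creation, memory allocation, binding and mapping all happen in this one call.
  VmaAllocationInfo allocation_info = {};
  VkResult res = vmaCreateBuffer(allocator, &buffer_create_info, &allocation_create_info, out_buffer,
                                 out_allocation, &allocation_info);
  if (res != VK_SUCCESS)
    return false;

  *out_map_pointer = allocation_info.pMappedData;
  return true; // released later with vmaDestroyBuffer(allocator, *out_buffer, *out_allocation)
}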

staging_buffer.h:
@@ -1,91 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.

#pragma once
#include "../types.h"
#include "loader.h"
#include <memory>

namespace Vulkan {

class StagingBuffer
{
public:
  enum class Type
  {
    Upload,
    Readback,
    Mutable
  };

  StagingBuffer();
  StagingBuffer(StagingBuffer&& move);
  StagingBuffer(const StagingBuffer&) = delete;
  virtual ~StagingBuffer();

  StagingBuffer& operator=(StagingBuffer&& move);
  StagingBuffer& operator=(const StagingBuffer&) = delete;

  ALWAYS_INLINE Type GetType() const { return m_type; }
  ALWAYS_INLINE VkDeviceSize GetSize() const { return m_size; }
  ALWAYS_INLINE VkBuffer GetBuffer() const { return m_buffer; }
  ALWAYS_INLINE bool IsMapped() const { return m_map_pointer != nullptr; }
  ALWAYS_INLINE const char* GetMapPointer() const { return m_map_pointer; }
  ALWAYS_INLINE char* GetMapPointer() { return m_map_pointer; }
  ALWAYS_INLINE VkDeviceSize GetMapOffset() const { return m_map_offset; }
  ALWAYS_INLINE VkDeviceSize GetMapSize() const { return m_map_size; }
  ALWAYS_INLINE bool IsValid() const { return (m_buffer != VK_NULL_HANDLE); }
  ALWAYS_INLINE bool IsCoherent() const { return m_coherent; }

  bool Map(VkDeviceSize offset = 0, VkDeviceSize size = VK_WHOLE_SIZE);
  void Unmap();

  // Upload part 1: Prepare for device read from the CPU side
  void FlushCPUCache(VkDeviceSize offset = 0, VkDeviceSize size = VK_WHOLE_SIZE);

  // Upload part 2: Prepare for device read from the GPU side
  // Implicit when submitting the command buffer, so rarely needed.
  void InvalidateGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits dst_access_flags,
                          VkPipelineStageFlagBits dst_pipeline_stage, VkDeviceSize offset = 0,
                          VkDeviceSize size = VK_WHOLE_SIZE);

  // Readback part 0: Prepare for GPU usage (if necessary)
  void PrepareForGPUWrite(VkCommandBuffer command_buffer, VkAccessFlagBits dst_access_flags,
                          VkPipelineStageFlagBits dst_pipeline_stage, VkDeviceSize offset = 0,
                          VkDeviceSize size = VK_WHOLE_SIZE);

  // Readback part 1: Prepare for host readback from the GPU side
  void FlushGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits src_access_flags,
                     VkPipelineStageFlagBits src_pipeline_stage, VkDeviceSize offset = 0,
                     VkDeviceSize size = VK_WHOLE_SIZE);

  // Readback part 2: Prepare for host readback from the CPU side
  void InvalidateCPUCache(VkDeviceSize offset = 0, VkDeviceSize size = VK_WHOLE_SIZE);

  // offset is from the start of the buffer, not from the map offset
  void Read(VkDeviceSize offset, void* data, size_t size, bool invalidate_caches = true);
  void Write(VkDeviceSize offset, const void* data, size_t size, bool invalidate_caches = true);

  // Creates the buffer and allocates/binds its backing device memory.
  bool Create(Type type, VkDeviceSize size, VkBufferUsageFlags usage);

  void Destroy(bool defer = true);

  // Allocates the resources needed to create a staging buffer.
  static bool AllocateBuffer(Type type, VkDeviceSize size, VkBufferUsageFlags usage, VkBuffer* out_buffer,
                             VkDeviceMemory* out_memory, bool* out_coherent);

protected:
  Type m_type = Type::Upload;
  VkBuffer m_buffer = VK_NULL_HANDLE;
  VkDeviceMemory m_memory = VK_NULL_HANDLE;
  VkDeviceSize m_size = 0;
  bool m_coherent = false;

  char* m_map_pointer = nullptr;
  VkDeviceSize m_map_offset = 0;
  VkDeviceSize m_map_size = 0;
};
} // namespace Vulkan
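
As a reading aid, here is a hedged sketch of how the upload flow documented by the "Upload part 1/2" comments above fits together. It is not code from the repository; the destination buffer, the function name, and the reuse of the g_vulkan_context helpers seen elsewhere in this diff are assumptions.

// Illustrative upload flow for a (possibly non-coherent) staging buffer; sketch only.
// dst_buffer is a hypothetical device-local buffer created elsewhere.
bool UploadThroughStagingBuffer(VkBuffer dst_buffer, const void* data, VkDeviceSize size)
{
  Vulkan::StagingBuffer staging;
  if (!staging.Create(Vulkan::StagingBuffer::Type::Upload, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT) ||
      !staging.Map())
  {
    return false;
  }

  // Write() copies into the mapped pointer and, by default, flushes the CPU cache
  // ("Upload part 1") when the memory type is not host-coherent.
  staging.Write(0, data, size, true);

  // Record the GPU-side copy. InvalidateGPUCache ("Upload part 2") is normally implicit
  // in the queue submit, so it is shown here only for completeness.
  VkCommandBuffer cmdbuf = g_vulkan_context->GetCurrentCommandBuffer();
  staging.InvalidateGPUCache(cmdbuf, VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);

  const VkBufferCopy region = {0, 0, size};
  vkCmdCopyBuffer(cmdbuf, staging.GetBuffer(), dst_buffer, 1, &region);

  // The destructor defers destruction of the buffer until the GPU has finished with it.
  return true;
}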

staging_texture.cpp:
@@ -1,291 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.

#include "staging_texture.h"
#include "../assert.h"
#include "context.h"
#include "util.h"

namespace Vulkan {

StagingTexture::StagingTexture() = default;

StagingTexture::StagingTexture(StagingTexture&& move)
  : m_staging_buffer(std::move(move.m_staging_buffer)), m_flush_fence_counter(move.m_flush_fence_counter),
    m_width(move.m_width), m_height(move.m_height), m_texel_size(move.m_texel_size), m_map_stride(move.m_map_stride)
{
  move.m_flush_fence_counter = 0;
  move.m_width = 0;
  move.m_height = 0;
  move.m_texel_size = 0;
  move.m_map_stride = 0;
}

StagingTexture& StagingTexture::operator=(StagingTexture&& move)
{
  if (IsValid())
    Destroy(true);

  std::swap(m_staging_buffer, move.m_staging_buffer);
  std::swap(m_flush_fence_counter, move.m_flush_fence_counter);
  std::swap(m_width, move.m_width);
  std::swap(m_height, move.m_height);
  std::swap(m_texel_size, move.m_texel_size);
  std::swap(m_map_stride, move.m_map_stride);
  return *this;
}

StagingTexture::~StagingTexture()
{
  if (IsValid())
    Destroy(true);
}

bool StagingTexture::Create(StagingBuffer::Type type, VkFormat format, u32 width, u32 height)
{
  const u32 texel_size = Util::GetTexelSize(format);
  const u32 map_stride = texel_size * width;
  const u32 buffer_size = map_stride * height;

  VkBufferUsageFlags usage_flags;
  switch (type)
  {
    case StagingBuffer::Type::Readback:
      usage_flags = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
      break;
    case StagingBuffer::Type::Upload:
      usage_flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
      break;
    case StagingBuffer::Type::Mutable:
    default:
      usage_flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
      break;
  }

  StagingBuffer new_buffer;
  if (!new_buffer.Create(type, buffer_size, usage_flags) || !new_buffer.Map())
    return false;

  if (IsValid())
    Destroy(true);

  m_staging_buffer = std::move(new_buffer);
  m_width = width;
  m_height = height;
  m_texel_size = texel_size;
  m_map_stride = map_stride;
  return true;
}

void StagingTexture::Destroy(bool defer /* = true */)
{
  if (!IsValid())
    return;

  m_staging_buffer.Destroy(defer);
  m_flush_fence_counter = 0;
  m_width = 0;
  m_height = 0;
  m_texel_size = 0;
  m_map_stride = 0;
}

void StagingTexture::CopyFromTexture(VkCommandBuffer command_buffer, Texture& src_texture, u32 src_x, u32 src_y,
                                     u32 src_layer, u32 src_level, u32 dst_x, u32 dst_y, u32 width, u32 height)
{
  Assert(m_staging_buffer.GetType() == StagingBuffer::Type::Readback ||
         m_staging_buffer.GetType() == StagingBuffer::Type::Mutable);
  Assert((src_x + width) <= src_texture.GetWidth() && (src_y + height) <= src_texture.GetHeight());
  Assert((dst_x + width) <= m_width && (dst_y + height) <= m_height);

  const Vulkan::Util::DebugScope debugScope(command_buffer,
                                            "StagingTexture::CopyFromTexture: {%u,%u} Lyr:%u Lvl:%u {%u,%u} %ux%u",
                                            src_x, src_y, src_layer, src_level, dst_x, dst_y, width, height);

  VkImageLayout old_layout = src_texture.GetLayout();
  src_texture.TransitionToLayout(command_buffer, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

  // Record the image->buffer copy; it executes when the command buffer is submitted.
  VkBufferImageCopy image_copy = {};
  const VkImageAspectFlags aspect =
    Util::IsDepthFormat(src_texture.GetFormat()) ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT;
  image_copy.bufferOffset = static_cast<VkDeviceSize>(dst_y * m_map_stride + dst_x * m_texel_size);
  image_copy.bufferRowLength = m_width;
  image_copy.bufferImageHeight = 0;
  image_copy.imageSubresource = {aspect, src_level, src_layer, 1};
  image_copy.imageOffset = {static_cast<int32_t>(src_x), static_cast<int32_t>(src_y), 0};
  image_copy.imageExtent = {width, height, 1u};
  vkCmdCopyImageToBuffer(command_buffer, src_texture.GetImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                         m_staging_buffer.GetBuffer(), 1, &image_copy);

  // Restore old source texture layout.
  src_texture.TransitionToLayout(command_buffer, old_layout);
}

void StagingTexture::CopyFromTexture(Texture& src_texture, u32 src_x, u32 src_y, u32 src_layer, u32 src_level,
                                     u32 dst_x, u32 dst_y, u32 width, u32 height)
{
  const Vulkan::Util::DebugScope debugScope(g_vulkan_context->GetCurrentCommandBuffer(),
                                            "StagingTexture::CopyFromTexture: {%u,%u} Lyr:%u Lvl:%u {%u,%u} %ux%u",
                                            src_x, src_y, src_layer, src_level, dst_x, dst_y, width, height);
  CopyFromTexture(g_vulkan_context->GetCurrentCommandBuffer(), src_texture, src_x, src_y, src_layer, src_level, dst_x,
                  dst_y, width, height);

  m_needs_flush = true;
  m_flush_fence_counter = g_vulkan_context->GetCurrentFenceCounter();
}

void StagingTexture::CopyToTexture(VkCommandBuffer command_buffer, u32 src_x, u32 src_y, Texture& dst_texture,
                                   u32 dst_x, u32 dst_y, u32 dst_layer, u32 dst_level, u32 width, u32 height)
{
  Assert(m_staging_buffer.GetType() == StagingBuffer::Type::Upload ||
         m_staging_buffer.GetType() == StagingBuffer::Type::Mutable);
  Assert((dst_x + width) <= dst_texture.GetWidth() && (dst_y + height) <= dst_texture.GetHeight());
  Assert((src_x + width) <= m_width && (src_y + height) <= m_height);

  // Flush caches before copying.
  m_staging_buffer.FlushCPUCache();

  VkImageLayout old_layout = dst_texture.GetLayout();
  dst_texture.TransitionToLayout(command_buffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

  // Record the buffer->image copy; it executes when the command buffer is submitted.
  VkBufferImageCopy image_copy = {};
  image_copy.bufferOffset = static_cast<VkDeviceSize>(src_y * m_map_stride + src_x * m_texel_size);
  image_copy.bufferRowLength = m_width;
  image_copy.bufferImageHeight = 0;
  image_copy.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, dst_level, dst_layer, 1};
  image_copy.imageOffset = {static_cast<int32_t>(dst_x), static_cast<int32_t>(dst_y), 0};
  image_copy.imageExtent = {width, height, 1u};
  vkCmdCopyBufferToImage(command_buffer, m_staging_buffer.GetBuffer(), dst_texture.GetImage(),
                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy);

  // Restore old destination texture layout.
  dst_texture.TransitionToLayout(command_buffer, old_layout);
}

void StagingTexture::CopyToTexture(u32 src_x, u32 src_y, Texture& dst_texture, u32 dst_x, u32 dst_y, u32 dst_layer,
                                   u32 dst_level, u32 width, u32 height)
{
  const Vulkan::Util::DebugScope debugScope(g_vulkan_context->GetCurrentCommandBuffer(),
                                            "StagingTexture::CopyToTexture: {%u,%u} | {%u,%u} Lyr:%u Lvl:%u %ux%u",
                                            src_x, src_y, dst_x, dst_y, dst_layer, dst_level, width, height);
  CopyToTexture(g_vulkan_context->GetCurrentCommandBuffer(), src_x, src_y, dst_texture, dst_x, dst_y, dst_layer,
                dst_level, width, height);

  m_needs_flush = true;
  m_flush_fence_counter = g_vulkan_context->GetCurrentFenceCounter();
}

void StagingTexture::Flush()
{
  if (!m_needs_flush)
    return;

  // Is this copy in the current command buffer?
  if (g_vulkan_context->GetCurrentFenceCounter() == m_flush_fence_counter)
  {
    // Execute the command buffer and wait for it to finish.
    g_vulkan_context->ExecuteCommandBuffer(true);
  }
  else
  {
    // Wait for the GPU to finish with it.
    g_vulkan_context->WaitForFenceCounter(m_flush_fence_counter);
  }

  // For readback textures, invalidate the CPU cache as there is new data there.
  if (m_staging_buffer.GetType() == StagingBuffer::Type::Readback ||
      m_staging_buffer.GetType() == StagingBuffer::Type::Mutable)
  {
    m_staging_buffer.InvalidateCPUCache();
  }

  m_needs_flush = false;
}

void StagingTexture::ReadTexels(u32 src_x, u32 src_y, u32 width, u32 height, void* out_ptr, u32 out_stride)
{
  Assert(m_staging_buffer.GetType() != StagingBuffer::Type::Upload);
  Assert((src_x + width) <= m_width && (src_y + height) <= m_height);
  PrepareForAccess();

  // Offset pointer to point to start of region being copied out.
  const char* current_ptr = m_staging_buffer.GetMapPointer();
  current_ptr += src_y * m_map_stride;
  current_ptr += src_x * m_texel_size;

  // Optimal path: same dimensions, same stride.
  if (src_x == 0 && width == m_width && m_map_stride == out_stride)
  {
    std::memcpy(out_ptr, current_ptr, m_map_stride * height);
    return;
  }

  size_t copy_size = std::min<u32>(width * m_texel_size, m_map_stride);
  char* dst_ptr = reinterpret_cast<char*>(out_ptr);
  for (u32 row = 0; row < height; row++)
  {
    std::memcpy(dst_ptr, current_ptr, copy_size);
    current_ptr += m_map_stride;
    dst_ptr += out_stride;
  }
}

void StagingTexture::ReadTexel(u32 x, u32 y, void* out_ptr)
{
  Assert(m_staging_buffer.GetType() != StagingBuffer::Type::Upload);
  Assert(x < m_width && y < m_height);
  PrepareForAccess();

  const char* src_ptr = GetMappedPointer() + y * GetMappedStride() + x * m_texel_size;
  std::memcpy(out_ptr, src_ptr, m_texel_size);
}

void StagingTexture::WriteTexels(u32 dst_x, u32 dst_y, u32 width, u32 height, const void* in_ptr, u32 in_stride)
{
  Assert(m_staging_buffer.GetType() != StagingBuffer::Type::Readback);
  Assert((dst_x + width) <= m_width && (dst_y + height) <= m_height);
  PrepareForAccess();

  // Offset pointer to point to start of region being copied to.
  char* current_ptr = GetMappedPointer();
  current_ptr += dst_y * m_map_stride;
  current_ptr += dst_x * m_texel_size;

  // Optimal path: same dimensions, same stride.
  if (dst_x == 0 && width == m_width && m_map_stride == in_stride)
  {
    std::memcpy(current_ptr, in_ptr, m_map_stride * height);
    return;
  }

  size_t copy_size = std::min<u32>(width * m_texel_size, m_map_stride);
  const char* src_ptr = reinterpret_cast<const char*>(in_ptr);
  for (u32 row = 0; row < height; row++)
  {
    std::memcpy(current_ptr, src_ptr, copy_size);
    current_ptr += m_map_stride;
    src_ptr += in_stride;
  }
}

void StagingTexture::WriteTexel(u32 x, u32 y, const void* in_ptr)
{
  Assert(x < m_width && y < m_height);
  PrepareForAccess();

  char* dest_ptr = GetMappedPointer() + y * m_map_stride + x * m_texel_size;
  std::memcpy(dest_ptr, in_ptr, m_texel_size);
}

void StagingTexture::PrepareForAccess()
{
  Assert(IsMapped());
  if (m_needs_flush)
    Flush();
}

} // namespace Vulkan
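
A hedged sketch of the upload direction implemented above (WriteTexels into the mapped staging buffer, then CopyToTexture to record the buffer-to-image copy). It is illustrative only; the texture object, format and dimensions are assumptions, not repository code.

// Illustrative only: uploading CPU pixel data to a GPU texture through StagingTexture.
// 'texture' is a hypothetical Vulkan::Texture already usable as a transfer destination.
bool UploadPixels(Vulkan::Texture& texture, const void* pixels, u32 width, u32 height, u32 stride)
{
  Vulkan::StagingTexture staging;
  if (!staging.Create(Vulkan::StagingBuffer::Type::Upload, VK_FORMAT_R8G8B8A8_UNORM, width, height))
    return false;

  // Copy rows into the persistently-mapped staging buffer.
  staging.WriteTexels(0, 0, width, height, pixels, stride);

  // Records vkCmdCopyBufferToImage on the current command buffer; the data reaches the
  // texture once that command buffer is submitted.
  staging.CopyToTexture(0, 0, texture, 0, 0, 0, 0, width, height);

  // The destructor defers buffer destruction until the GPU has finished with it.
  return true;
}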

staging_texture.h:
@@ -1,77 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.

#pragma once
#include "staging_buffer.h"
#include "texture.h"

namespace Vulkan {

class StagingTexture final
{
public:
  StagingTexture();
  StagingTexture(StagingTexture&& move);
  StagingTexture(const StagingTexture&) = delete;
  ~StagingTexture();

  StagingTexture& operator=(StagingTexture&& move);
  StagingTexture& operator=(const StagingTexture&) = delete;

  ALWAYS_INLINE bool IsValid() const { return m_staging_buffer.IsValid(); }
  ALWAYS_INLINE bool IsMapped() const { return m_staging_buffer.IsMapped(); }
  ALWAYS_INLINE const char* GetMappedPointer() const { return m_staging_buffer.GetMapPointer(); }
  ALWAYS_INLINE char* GetMappedPointer() { return m_staging_buffer.GetMapPointer(); }
  ALWAYS_INLINE u32 GetMappedStride() const { return m_map_stride; }
  ALWAYS_INLINE u32 GetWidth() const { return m_width; }
  ALWAYS_INLINE u32 GetHeight() const { return m_height; }

  bool Create(StagingBuffer::Type type, VkFormat format, u32 width, u32 height);
  void Destroy(bool defer = true);

  // Copies from the GPU texture object to the staging texture, which can be mapped/read by the CPU.
  // Both the source and destination regions must be within the bounds of the specified textures.
  void CopyFromTexture(VkCommandBuffer command_buffer, Texture& src_texture, u32 src_x, u32 src_y, u32 src_layer,
                       u32 src_level, u32 dst_x, u32 dst_y, u32 width, u32 height);
  void CopyFromTexture(Texture& src_texture, u32 src_x, u32 src_y, u32 src_layer, u32 src_level, u32 dst_x, u32 dst_y,
                       u32 width, u32 height);

  // Copies from the staging texture to the GPU texture object.
  // Both the source and destination regions must be within the bounds of the specified textures.
  void CopyToTexture(VkCommandBuffer command_buffer, u32 src_x, u32 src_y, Texture& dst_texture, u32 dst_x, u32 dst_y,
                     u32 dst_layer, u32 dst_level, u32 width, u32 height);
  void CopyToTexture(u32 src_x, u32 src_y, Texture& dst_texture, u32 dst_x, u32 dst_y, u32 dst_layer, u32 dst_level,
                     u32 width, u32 height);

  // Flushes pending writes from the CPU to the GPU, and reads from the GPU to the CPU.
  // This may cause a command buffer flush depending on whether one has occurred between the last
  // call to CopyFromTexture()/CopyToTexture() and the Flush() call.
  void Flush();

  // Reads the specified rectangle from the staging texture to out_ptr, with the specified stride
  // (length in bytes of each row). CopyFromTexture must be called first. The contents of any
  // texels outside of the rectangle used for CopyFromTexture are undefined.
  void ReadTexels(u32 src_x, u32 src_y, u32 width, u32 height, void* out_ptr, u32 out_stride);
  void ReadTexel(u32 x, u32 y, void* out_ptr);

  // Copies the texels from in_ptr to the staging texture, which can be read by the GPU, with the
  // specified stride (length in bytes of each row). After updating the staging texture with all
  // changes, call CopyToTexture() to update the GPU copy.
  void WriteTexels(u32 dst_x, u32 dst_y, u32 width, u32 height, const void* in_ptr, u32 in_stride);
  void WriteTexel(u32 x, u32 y, const void* in_ptr);

private:
  void PrepareForAccess();

  StagingBuffer m_staging_buffer;
  u64 m_flush_fence_counter = 0;
  u32 m_width = 0;
  u32 m_height = 0;
  u32 m_texel_size = 0;
  u32 m_map_stride = 0;
  bool m_needs_flush = false;
};

} // namespace Vulkan
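
To tie the interface together, here is a hedged sketch of the readback path described above: CopyFromTexture records the image-to-buffer copy, Flush waits for the GPU and invalidates the CPU cache, and ReadTexels copies rows out of the mapped staging buffer. It is illustrative only; the source texture, the output container, and the 4-bytes-per-texel stride are assumptions.

// Illustrative only: reading back a region of a GPU texture through StagingTexture.
// 'texture' is a hypothetical Vulkan::Texture the GPU has finished rendering to.
#include <vector>

bool ReadbackPixels(Vulkan::Texture& texture, u32 width, u32 height, std::vector<u8>& out_pixels)
{
  Vulkan::StagingTexture staging;
  if (!staging.Create(Vulkan::StagingBuffer::Type::Readback, texture.GetFormat(), width, height))
    return false;

  // Records vkCmdCopyImageToBuffer on the current command buffer and remembers the fence counter.
  staging.CopyFromTexture(texture, 0, 0, 0, 0, 0, 0, width, height);

  // Submits or waits as needed, then invalidates the CPU cache for non-coherent memory.
  staging.Flush();

  // Copy rows out of the mapped staging buffer with a tightly-packed stride.
  const u32 out_stride = width * 4; // assumes a 4-byte-per-texel format
  out_pixels.resize(static_cast<size_t>(out_stride) * height);
  staging.ReadTexels(0, 0, width, height, out_pixels.data(), out_stride);
  return true;
}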