Mirror of https://git.suyu.dev/suyu/suyu.git (synced 2024-11-15 22:54:00 +00:00)
vk_update_descriptor: Upload descriptor sets data directly
Instead of copying to a temporary payload before sending the update task to the worker thread, insert elements to the payload directly.
commit 7d763f060e (parent bfa6193eb9)

3 changed files with 30 additions and 42 deletions
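In other words, the staging step that Send() used to perform is gone: entries are constructed in the payload as they are added, and only a pointer to the start of the current slice is handed to the worker thread. Below is a minimal sketch of that pattern with stand-in types; none of these names are the project's real API, and the real entries hold VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView instead of the toy structs used here.

#include <cstdint>
#include <vector>

// Stand-ins for the Vulkan descriptor structs.
struct ImageInfo {
    std::uint64_t view;
    std::uint32_t layout;
};
struct BufferInfo {
    std::uint64_t buffer, offset, size;
};

// Mirrors the idea of DescriptorUpdateEntry: a tag-less union, so any
// descriptor kind fits in one tightly packed payload slot.
struct UpdateEntry {
    UpdateEntry(ImageInfo image_) : image{image_} {}
    UpdateEntry(BufferInfo buffer_) : buffer{buffer_} {}
    union {
        ImageInfo image;
        BufferInfo buffer;
    };
};

class UpdateQueue {
public:
    // Writers append straight into the payload; there is no intermediate
    // std::variant vector to copy from later.
    void AddImage(std::uint64_t view) {
        payload.emplace_back(ImageInfo{view, 0});
    }
    void AddBuffer(std::uint64_t buffer, std::uint64_t offset, std::uint64_t size) {
        payload.emplace_back(BufferInfo{buffer, offset, size});
    }
    // The caller may still patch the entry it just wrote, like the
    // rasterizer does through LastImageLayout() in the hunks below.
    std::uint32_t* LastImageLayout() {
        return &payload.back().image.layout;
    }

private:
    std::vector<UpdateEntry> payload;
};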
src/video_core/renderer_vulkan/vk_rasterizer.cpp

@@ -1154,7 +1154,7 @@ void RasterizerVulkan::SetupTexture(const Tegra::Texture::FullTextureInfo& textu
     const auto sampler = sampler_cache.GetSampler(texture.tsc);
     update_descriptor_queue.AddSampledImage(sampler, image_view);
 
-    const auto image_layout = update_descriptor_queue.GetLastImageLayout();
+    VkImageLayout* const image_layout = update_descriptor_queue.LastImageLayout();
     *image_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
     sampled_views.push_back(ImageView{std::move(view), image_layout});
 }
@@ -1180,7 +1180,7 @@ void RasterizerVulkan::SetupImage(const Tegra::Texture::TICEntry& tic, const Ima
         view->GetImageView(tic.x_source, tic.y_source, tic.z_source, tic.w_source);
     update_descriptor_queue.AddImage(image_view);
 
-    const auto image_layout = update_descriptor_queue.GetLastImageLayout();
+    VkImageLayout* const image_layout = update_descriptor_queue.LastImageLayout();
     *image_layout = VK_IMAGE_LAYOUT_GENERAL;
     image_views.push_back(ImageView{std::move(view), image_layout});
 }
src/video_core/renderer_vulkan/vk_update_descriptor.cpp

@@ -24,35 +24,25 @@ void VKUpdateDescriptorQueue::TickFrame() {
 }
 
 void VKUpdateDescriptorQueue::Acquire() {
-    entries.clear();
-}
+    // Minimum number of entries required.
+    // This is the maximum number of entries a single draw call might use.
+    static constexpr std::size_t MIN_ENTRIES = 0x400;
 
-void VKUpdateDescriptorQueue::Send(VkDescriptorUpdateTemplateKHR update_template,
-                                   VkDescriptorSet set) {
-    if (payload.size() + entries.size() >= payload.max_size()) {
+    if (payload.size() + MIN_ENTRIES >= payload.max_size()) {
         LOG_WARNING(Render_Vulkan, "Payload overflow, waiting for worker thread");
         scheduler.WaitWorker();
         payload.clear();
     }
+    upload_start = &*payload.end();
+}
 
-    // TODO(Rodrigo): Rework to write the payload directly
-    const auto payload_start = payload.data() + payload.size();
-    for (const auto& entry : entries) {
-        if (const auto image = std::get_if<VkDescriptorImageInfo>(&entry)) {
-            payload.push_back(*image);
-        } else if (const auto buffer = std::get_if<VkDescriptorBufferInfo>(&entry)) {
-            payload.push_back(*buffer);
-        } else if (const auto texel = std::get_if<VkBufferView>(&entry)) {
-            payload.push_back(*texel);
-        } else {
-            UNREACHABLE();
-        }
-    }
-
-    scheduler.Record(
-        [payload_start, set, update_template, logical = &device.GetLogical()](vk::CommandBuffer) {
-            logical->UpdateDescriptorSet(set, update_template, payload_start);
-        });
+void VKUpdateDescriptorQueue::Send(VkDescriptorUpdateTemplateKHR update_template,
+                                   VkDescriptorSet set) {
+    const void* const data = upload_start;
+    const vk::Device* const logical = &device.GetLogical();
+    scheduler.Record([data, logical, set, update_template](vk::CommandBuffer) {
+        logical->UpdateDescriptorSet(set, update_template, data);
+    });
 }
 
 } // namespace Vulkan
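To make the control flow above concrete: Acquire() now both reserves headroom (at least MIN_ENTRIES free slots, stalling on the worker thread and clearing the payload otherwise) and records upload_start, the position the next Add* call will write to; Send() merely hands that pointer to the scheduler. A minimal sketch of that split, with a plain fixed array standing in for boost::container::static_vector (every name here is illustrative, not the project's API):

#include <cstddef>

struct Entry {
    int dummy;
};

constexpr std::size_t CAPACITY = 0x10000;   // payload capacity
constexpr std::size_t MIN_ENTRIES = 0x400;  // headroom one draw may need

Entry payload[CAPACITY];
std::size_t payload_size = 0;
const Entry* upload_start = nullptr;

void WaitWorkerAndClear() {
    // Stand-in for scheduler.WaitWorker() + payload.clear(): once the worker
    // has consumed every previously sent slice, the buffer can be reused.
    payload_size = 0;
}

void Acquire() {
    if (payload_size + MIN_ENTRIES >= CAPACITY) {
        WaitWorkerAndClear();
    }
    upload_start = payload + payload_size; // first entry of the upcoming draw
}

void Add(Entry entry) {
    payload[payload_size++] = entry; // written directly, no staging copy
}

const Entry* Send() {
    return upload_start; // only this pointer crosses to the worker thread
}

Because the backing storage never relocates, the pointer captured in Acquire() stays valid while the draw's entries are appended behind it.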
src/video_core/renderer_vulkan/vk_update_descriptor.h

@@ -15,17 +15,13 @@ namespace Vulkan {
 class VKDevice;
 class VKScheduler;
 
-class DescriptorUpdateEntry {
-public:
-    explicit DescriptorUpdateEntry() {}
+struct DescriptorUpdateEntry {
+    DescriptorUpdateEntry(VkDescriptorImageInfo image_) : image{image_} {}
 
-    DescriptorUpdateEntry(VkDescriptorImageInfo image) : image{image} {}
+    DescriptorUpdateEntry(VkDescriptorBufferInfo buffer_) : buffer{buffer_} {}
 
-    DescriptorUpdateEntry(VkDescriptorBufferInfo buffer) : buffer{buffer} {}
+    DescriptorUpdateEntry(VkBufferView texel_buffer_) : texel_buffer{texel_buffer_} {}
 
-    DescriptorUpdateEntry(VkBufferView texel_buffer) : texel_buffer{texel_buffer} {}
-
-private:
     union {
         VkDescriptorImageInfo image;
         VkDescriptorBufferInfo buffer;
@@ -45,32 +41,34 @@ public:
     void Send(VkDescriptorUpdateTemplateKHR update_template, VkDescriptorSet set);
 
     void AddSampledImage(VkSampler sampler, VkImageView image_view) {
-        entries.emplace_back(VkDescriptorImageInfo{sampler, image_view, {}});
+        payload.emplace_back(VkDescriptorImageInfo{sampler, image_view, {}});
     }
 
     void AddImage(VkImageView image_view) {
-        entries.emplace_back(VkDescriptorImageInfo{{}, image_view, {}});
+        payload.emplace_back(VkDescriptorImageInfo{{}, image_view, {}});
    }
 
     void AddBuffer(VkBuffer buffer, u64 offset, std::size_t size) {
-        entries.emplace_back(VkDescriptorBufferInfo{buffer, offset, size});
+        payload.emplace_back(VkDescriptorBufferInfo{buffer, offset, size});
     }
 
     void AddTexelBuffer(VkBufferView texel_buffer) {
-        entries.emplace_back(texel_buffer);
+        payload.emplace_back(texel_buffer);
     }
 
-    VkImageLayout* GetLastImageLayout() {
-        return &std::get<VkDescriptorImageInfo>(entries.back()).imageLayout;
+    VkImageLayout* LastImageLayout() {
+        return &payload.back().image.imageLayout;
     }
 
+    const VkImageLayout* LastImageLayout() const {
+        return &payload.back().image.imageLayout;
+    }
+
 private:
-    using Variant = std::variant<VkDescriptorImageInfo, VkDescriptorBufferInfo, VkBufferView>;
-
     const VKDevice& device;
     VKScheduler& scheduler;
 
-    boost::container::static_vector<Variant, 0x400> entries;
+    const DescriptorUpdateEntry* upload_start = nullptr;
+    boost::container::static_vector<DescriptorUpdateEntry, 0x10000> payload;
 };
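The header change works because descriptor update templates read descriptor data straight from application memory: each VkDescriptorUpdateTemplateEntry carries an offset and stride, so the driver pulls VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView values directly out of the packed entry array with no type tag, which is why the std::variant (and the dispatch loop in the old Send()) could be dropped. A hedged illustration of such a template entry follows; the binding number and descriptor type are made up, and the stride in the real code would presumably be sizeof(DescriptorUpdateEntry) so that every descriptor occupies one payload slot.

#include <vulkan/vulkan.h>

// Illustration only: how an update template entry addresses packed entries.
constexpr VkDescriptorUpdateTemplateEntry kExampleEntry{
    0,                                         // dstBinding (made-up)
    0,                                         // dstArrayElement
    1,                                         // descriptorCount
    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, // descriptorType (made-up)
    0,                                         // offset of this descriptor's data in pData
    sizeof(VkDescriptorImageInfo),             // stride between consecutive array elements
};

With the template describing the layout, UpdateDescriptorSet can consume the payload slice in place, which is exactly what the new Send() relies on.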