mirror of https://github.com/hyprwm/wlroots-hyprland.git
render/vulkan: release stage buffers after command buffer completes
We need to wait for the pending command buffer to complete before re-using stage buffers. Otherwise we'll overwrite the stage buffer with new contents before the texture is fully uploaded.
commit 10f543d579 (parent 2a414c896e)

2 changed files with 25 additions and 13 deletions
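
As an aside, here is a minimal sketch, in plain C, of the synchronization pattern the commit message describes: a staging buffer may only be recycled once the command buffer that last read from it has completed, which a Vulkan timeline semaphore can report. The struct stage_buf type and both helper functions below are hypothetical illustrations, not wlroots or wlr_vk_* API; only vkGetSemaphoreCounterValue() and vkWaitSemaphores() are real Vulkan calls (core in 1.2 / VK_KHR_timeline_semaphore).

// Minimal sketch (not wlroots code): gate staging-buffer reuse on the GPU
// having passed the timeline point signalled by the command buffer that
// last read from the buffer. Struct and helper names are hypothetical.
#include <stdbool.h>
#include <stdint.h>
#include <vulkan/vulkan.h>

struct stage_buf {
	VkBuffer buffer;
	VkDeviceMemory memory;
	uint64_t last_used_point; // timeline value signalled by the last upload
	bool in_flight;           // still referenced by a pending command buffer
};

// Returns true if the pending upload that read from 'buf' has completed,
// i.e. the timeline semaphore has reached the point recorded at submit time.
static bool can_recycle_stage_buffer(VkDevice dev, VkSemaphore timeline,
		const struct stage_buf *buf) {
	if (!buf->in_flight) {
		return true;
	}
	uint64_t current = 0;
	if (vkGetSemaphoreCounterValue(dev, timeline, &current) != VK_SUCCESS) {
		return false; // be conservative on error: do not overwrite the buffer
	}
	return current >= buf->last_used_point;
}

// Blocking variant: wait until the command buffer that used 'buf' completes.
static bool wait_for_stage_buffer(VkDevice dev, VkSemaphore timeline,
		const struct stage_buf *buf) {
	const VkSemaphoreWaitInfo wait_info = {
		.sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
		.semaphoreCount = 1,
		.pSemaphores = &timeline,
		.pValues = &buf->last_used_point,
	};
	return vkWaitSemaphores(dev, &wait_info, UINT64_MAX) == VK_SUCCESS;
}

The commit below takes the non-blocking route: staging buffers used by an in-flight command buffer are parked on that command buffer's stage_buffers list and only moved back to wlr_vk_renderer.stage.buffers in release_command_buffer_resources(), once the buffer's timeline point has been reached.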
@@ -150,6 +150,8 @@ struct wlr_vk_command_buffer {
 	uint64_t timeline_point;
 	// Textures to destroy after the command buffer completes
 	struct wl_list destroy_textures; // wlr_vk_texture.destroy_link
+	// Staging shared buffers to release after the command buffer completes
+	struct wl_list stage_buffers; // wlr_vk_shared_buffer.link
 };
 
 #define VULKAN_COMMAND_BUFFERS_CAP 64

@@ -292,7 +294,7 @@ struct wlr_vk_allocation {
 // List of suballocated staging buffers.
 // Used to upload to/read from device local images.
 struct wlr_vk_shared_buffer {
-	struct wl_list link; // wlr_vk_renderer.stage.buffers
+	struct wl_list link; // wlr_vk_renderer.stage.buffers or wlr_vk_command_buffer.stage_buffers
 	VkBuffer buffer;
 	VkDeviceMemory memory;
 	VkDeviceSize buf_size;
@@ -192,13 +192,6 @@ static void shared_buffer_destroy(struct wlr_vk_renderer *r,
 	free(buffer);
 }
 
-static void release_stage_allocations(struct wlr_vk_renderer *renderer) {
-	struct wlr_vk_shared_buffer *buf;
-	wl_list_for_each(buf, &renderer->stage.buffers, link) {
-		buf->allocs.size = 0;
-	}
-}
-
 struct wlr_vk_buffer_span vulkan_get_stage_span(struct wlr_vk_renderer *r,
 		VkDeviceSize size, VkDeviceSize alignment) {
 	// try to find free span

@@ -426,6 +419,7 @@ static bool init_command_buffer(struct wlr_vk_command_buffer *cb,
 		.vk = vk_cb,
 	};
 	wl_list_init(&cb->destroy_textures);
+	wl_list_init(&cb->stage_buffers);
 	return true;
 }
 

@@ -450,13 +444,22 @@ static bool wait_command_buffer(struct wlr_vk_command_buffer *cb,
 	return true;
 }
 
-static void release_command_buffer_resources(struct wlr_vk_command_buffer *cb) {
+static void release_command_buffer_resources(struct wlr_vk_command_buffer *cb,
+		struct wlr_vk_renderer *renderer) {
 	struct wlr_vk_texture *texture, *texture_tmp;
 	wl_list_for_each_safe(texture, texture_tmp, &cb->destroy_textures, destroy_link) {
 		wl_list_remove(&texture->destroy_link);
 		texture->last_used_cb = NULL;
 		wlr_texture_destroy(&texture->wlr_texture);
 	}
+
+	struct wlr_vk_shared_buffer *buf, *buf_tmp;
+	wl_list_for_each_safe(buf, buf_tmp, &cb->stage_buffers, link) {
+		buf->allocs.size = 0;
+
+		wl_list_remove(&buf->link);
+		wl_list_insert(&renderer->stage.buffers, &buf->link);
+	}
 }
 
 static struct wlr_vk_command_buffer *get_command_buffer(

@@ -476,7 +479,7 @@ static struct wlr_vk_command_buffer *get_command_buffer(
 		struct wlr_vk_command_buffer *cb = &renderer->command_buffers[i];
 		if (cb->vk != VK_NULL_HANDLE && !cb->recording &&
 				cb->timeline_point <= current_point) {
-			release_command_buffer_resources(cb);
+			release_command_buffer_resources(cb, renderer);
 		}
 	}
 

@@ -955,14 +958,21 @@ static void vulkan_end(struct wlr_renderer *wlr_renderer) {
 		return;
 	}
 
+	struct wlr_vk_shared_buffer *stage_buf, *stage_buf_tmp;
+	wl_list_for_each_safe(stage_buf, stage_buf_tmp, &renderer->stage.buffers, link) {
+		if (stage_buf->allocs.size == 0) {
+			continue;
+		}
+		wl_list_remove(&stage_buf->link);
+		wl_list_insert(&stage_cb->stage_buffers, &stage_buf->link);
+	}
+
 	// sadly this is required due to the current api/rendering model of wlr
 	// ideally we could use gpu and cpu in parallel (_without_ the
 	// implicit synchronization overhead and mess of opengl drivers)
 	if (!wait_command_buffer(render_cb, renderer)) {
 		return;
 	}
-
-	release_stage_allocations(renderer);
 }
 
 static bool vulkan_render_subtexture_with_matrix(struct wlr_renderer *wlr_renderer,

@@ -1162,7 +1172,7 @@ static void vulkan_destroy(struct wlr_renderer *wlr_renderer) {
 		if (cb->vk == VK_NULL_HANDLE) {
 			continue;
 		}
-		release_command_buffer_resources(cb);
+		release_command_buffer_resources(cb, renderer);
 	}
 
 	// stage.cb automatically freed with command pool