15 #include "vulkan/vulkan_handles.hpp"
16 #include "vulkan/vulkan_structs.hpp"
// Constructor (fragment): takes ownership of a command pool and the command
// buffers allocated from it, and remembers the recycler that will receive
// both when this object is dropped.
// NOTE(review): this listing is fragmentary — the parameter that supplies
// `unused_count` is not visible here, but the initializer list stores it;
// confirm against the full source.
30 vk::UniqueCommandPool&& pool,
31 std::vector<vk::UniqueCommandBuffer>&& buffers,
33 std::weak_ptr<CommandPoolRecyclerVK> recycler)
34 : pool_(
std::move(pool)),
35 buffers_(
std::move(buffers)),
36 unused_count_(unused_count),
37 recycler_(
std::move(recycler)) {}
// Destructor (fragment): promotes the weak recycler reference; if it is
// still alive, the pool and its buffers are handed back via Reclaim() so
// they can be reset and reused.
40 auto const recycler = recycler_.lock();
// Iterates over the first `unused_count_` entries; the loop body is not
// visible in this fragment — presumably it drops/releases the trailing
// unused command buffers before reclaiming. TODO(review): confirm.
51 for (
auto i = 0u; i < unused_count_; i++) {
// Hand the pool and surviving buffers back to the recycler for reuse.
56 recycler->Reclaim(std::move(pool_), std::move(buffers_));
// Owned command pool, moved in by the constructor.
64 vk::UniqueCommandPool pool_;
// Command buffers allocated from pool_; returned to the recycler on destruction.
69 std::vector<vk::UniqueCommandBuffer> buffers_;
// Number of buffers that were never used by the frame (set once at construction).
70 const size_t unused_count_;
// Weak reference: the recycler may already be gone when we are destroyed.
71 std::weak_ptr<CommandPoolRecyclerVK> recycler_;
// Destructor (fragment): if the context is still alive, package up the pool
// and all of its buffers and hand them to a background object so the
// (potentially expensive) pool reset happens off this thread.
79 auto const context = context_.lock();
83 auto const recycler = context->GetCommandPoolRecycler();
// Record how many buffers were never handed out, then fold the unused
// buffers into the collected set so everything travels together.
89 size_t unused_count = unused_command_buffers_.size();
90 for (
auto i = 0u; i < unused_command_buffers_.size(); i++) {
91 collected_buffers_.push_back(std::move(unused_command_buffers_[i]));
93 unused_command_buffers_.clear();
// Build the background holder (constructor call is split across missing
// lines) and park it in the resource manager, which will drop it — and
// thereby trigger the reclaim — on its own thread.
96 std::move(pool_), std::move(collected_buffers_), unused_count, recycler);
99 context->GetResourceManager(), std::move(reset_pool_when_dropped));
// CreateCommandBuffer (fragment): returns a command buffer, preferring to
// recycle one from the unused list before allocating a fresh one.
// All list/pool access happens under pool_mutex_.
104 auto const context = context_.lock();
109 Lock lock(pool_mutex_);
// Fast path: pop a previously allocated but unused buffer.
113 if (!unused_command_buffers_.empty()) {
114 vk::UniqueCommandBuffer buffer = std::move(unused_command_buffers_.back());
115 unused_command_buffers_.pop_back();
// Slow path: allocate a single primary command buffer from pool_.
119 auto const device = context->GetDevice();
120 vk::CommandBufferAllocateInfo info;
121 info.setCommandPool(pool_.get());
122 info.setCommandBufferCount(1u);
123 info.setLevel(vk::CommandBufferLevel::ePrimary);
124 auto [result, buffers] = device.allocateCommandBuffersUnique(info);
// Failure branch body is not visible in this fragment — presumably
// returns an empty handle. TODO(review): confirm.
125 if (result != vk::Result::eSuccess) {
// Exactly one buffer was requested, so hand back the first element.
128 return std::move(buffers[0]);
// CollectCommandBuffer (fragment): takes back a finished buffer and stores
// it in collected_buffers_ under pool_mutex_, keeping it alive until the
// pool itself is recycled or destroyed.
132 Lock lock(pool_mutex_);
139 collected_buffers_.push_back(std::move(buffer));
// Destroy (fragment): tears down this pool's buffer bookkeeping under
// pool_mutex_. The per-buffer loop bodies are not visible in this
// fragment — presumably each unique handle is released rather than freed
// individually, since the pool that owns them is being destroyed wholesale.
// TODO(review): confirm against the full source.
143 Lock lock(pool_mutex_);
148 for (
auto& buffer : collected_buffers_) {
151 for (
auto& buffer : unused_command_buffers_) {
// Drop all tracking; the vectors are left empty but usable.
154 unused_command_buffers_.clear();
155 collected_buffers_.clear();
// Alias (fragment): maps a context hash (uint64_t) to the thread's shared
// CommandPoolVK. The `using NAME =` left-hand side is on a line not visible
// in this fragment.
160 std::unordered_map<uint64_t, std::shared_ptr<CommandPoolVK>>;
// Global registry of every live CommandPoolVK across all threads, held
// weakly so it never extends a pool's lifetime. The key type and the
// guarding mutex annotation are on lines missing from this fragment —
// presumably keyed by context pointer and guarded by a global mutex;
// TODO(review): confirm.
175 static std::unordered_map<
177 std::vector<std::weak_ptr<CommandPoolVK>>> g_all_pools_map
// Get (fragment): returns the thread-local CommandPoolVK for this context,
// creating (or reusing via Create()) one on first use and registering it in
// both the thread-local map and the global registry.
182 auto const strong_context = context_.lock();
// Bail out if the context has already been torn down.
183 if (!strong_context) {
// Look up an existing pool for this context in the thread-local map,
// keyed by the context's hash.
192 auto const hash = strong_context->GetHash();
193 auto const it = pool_map.find(hash);
194 if (it != pool_map.end()) {
// Cache miss: obtain a (possibly recycled) pool + buffers.
199 auto data = Create();
200 if (!data || !data->pool) {
204 auto const resource = std::make_shared<CommandPoolVK>(
205 std::move(data->pool), std::move(data->buffers), context_);
206 pool_map.emplace(hash, resource);
// Also track it weakly in the global registry so DestroyThreadLocalPools
// can reach pools owned by other threads.
210 g_all_pools_map[strong_context.get()].push_back(resource);
// Create: returns recycled pool data when available, otherwise creates a
// brand-new transient command pool on the context's graphics queue family.
// Returns nullopt on failure (context gone or Vulkan error) — the early
// return lines are not all visible in this fragment.
217 std::optional<CommandPoolRecyclerVK::RecycledData>
218 CommandPoolRecyclerVK::Create() {
// Prefer a previously reclaimed pool over allocating a new one.
220 if (
auto data = Reuse()) {
225 auto context = context_.lock();
// eTransient hints the driver that buffers from this pool are short-lived.
229 vk::CommandPoolCreateInfo info;
230 info.setQueueFamilyIndex(context->GetGraphicsQueue()->GetIndex().family);
231 info.setFlags(vk::CommandPoolCreateFlagBits::eTransient);
233 auto device = context->GetDevice();
234 auto [result, pool] = device.createCommandPoolUnique(info);
235 if (result != vk::Result::eSuccess) {
// Fresh pool: no pre-allocated buffers accompany it (the brace-init
// continuation is on a line missing from this fragment).
238 return CommandPoolRecyclerVK::RecycledData{.pool = std::move(pool),
// Reuse: pops the most recently reclaimed pool (LIFO) under recycled_mutex_;
// the empty-case return (presumably nullopt) is on a line not visible here.
242 std::optional<CommandPoolRecyclerVK::RecycledData>
243 CommandPoolRecyclerVK::Reuse() {
245 Lock recycled_lock(recycled_mutex_);
246 if (recycled_.empty()) {
251 auto data = std::move(recycled_.back());
252 recycled_.pop_back();
253 return std::move(data);
// Reclaim (fragment): called from the background/resource thread with a
// retired pool and its buffers. Resets the pool (returning all of its
// buffers to the initial state in one call) and then stores it on the
// recycled list under recycled_mutex_ for a future Create()/Reuse().
257 vk::UniqueCommandPool&& pool,
258 std::vector<vk::UniqueCommandBuffer>&& buffers) {
260 auto strong_context = context_.lock();
// Without a live device there is nothing to reset or keep.
261 if (!strong_context) {
264 auto device = strong_context->GetDevice();
265 device.resetCommandPool(pool.get());
268 Lock recycled_lock(recycled_mutex_);
// (Fragment, presumably DestroyThreadLocalPools): walks the global registry
// entry for this context and destroys every still-alive pool, then removes
// the context's entry entirely. The lock acquisition and the per-pool
// Destroy() call are on lines not visible in this fragment —
// TODO(review): confirm this runs under the global registry mutex.
294 auto found = g_all_pools_map.find(context);
295 if (found != g_all_pools_map.end()) {
296 for (
auto& weak_pool : found->second) {
// Weak entries may refer to pools that have already been destroyed.
297 auto pool = weak_pool.lock();
305 g_all_pools_map.erase(found);