Flutter Impeller
command_pool_vk.cc
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "impeller/renderer/backend/vulkan/command_pool_vk.h"

#include <memory>
#include <optional>
#include <utility>

#include "impeller/renderer/backend/vulkan/context_vk.h"
#include "impeller/renderer/backend/vulkan/resource_manager_vk.h"

#include "impeller/renderer/backend/vulkan/vk.h"  // IWYU pragma: keep.
#include "vulkan/vulkan_enums.hpp"
#include "vulkan/vulkan_handles.hpp"
#include "vulkan/vulkan_structs.hpp"
namespace impeller {

// Holds the command pool in a background thread, recycling it when not in use.
class BackgroundCommandPoolVK final {
 public:
  BackgroundCommandPoolVK(BackgroundCommandPoolVK&&) = default;

  // The recycler also recycles command buffers that were never used, up to a
  // limit of 16 per frame. This number was somewhat arbitrarily chosen.
  static constexpr size_t kUnusedCommandBufferLimit = 16u;

  explicit BackgroundCommandPoolVK(
      vk::UniqueCommandPool&& pool,
      std::vector<vk::UniqueCommandBuffer>&& buffers,
      size_t unused_count,
      std::weak_ptr<CommandPoolRecyclerVK> recycler)
      : pool_(std::move(pool)),
        buffers_(std::move(buffers)),
        unused_count_(unused_count),
        recycler_(std::move(recycler)) {}

  ~BackgroundCommandPoolVK() {
    auto const recycler = recycler_.lock();

    // Not only does this prevent recycling when the context is being destroyed,
    // but it also prevents the destructor from effectively being called twice;
    // once for the original BackgroundCommandPoolVK() and once for the moved
    // BackgroundCommandPoolVK().
    if (!recycler) {
      return;
    }
    // If there are many unused command buffers, release some of them and
    // trim the command pool.
    bool should_trim = unused_count_ > kUnusedCommandBufferLimit;
    recycler->Reclaim(std::move(pool_), std::move(buffers_),
                      /*should_trim=*/should_trim);
  }

 private:
  BackgroundCommandPoolVK(const BackgroundCommandPoolVK&) = delete;

  BackgroundCommandPoolVK& operator=(const BackgroundCommandPoolVK&) = delete;

  vk::UniqueCommandPool pool_;

  // These are retained because the destructor of the C++ UniqueCommandBuffer
  // wrapper type will attempt to reset the cmd buffer, and doing so may be a
  // thread safety violation as this may happen on the fence waiter thread.
  std::vector<vk::UniqueCommandBuffer> buffers_;
  const size_t unused_count_;
  std::weak_ptr<CommandPoolRecyclerVK> recycler_;
};

CommandPoolVK::~CommandPoolVK() {
  if (!pool_) {
    return;
  }

  auto const context = context_.lock();
  if (!context) {
    return;
  }
  auto const recycler = context->GetCommandPoolRecycler();
  if (!recycler) {
    return;
  }
  // Any unused command buffers are added to the set of used command buffers.
  // Both will be reset to the initial state when the pool is reset.
  size_t unused_count = unused_command_buffers_.size();
  for (auto i = 0u; i < unused_command_buffers_.size(); i++) {
    collected_buffers_.push_back(std::move(unused_command_buffers_[i]));
  }
  unused_command_buffers_.clear();

  auto reset_pool_when_dropped = BackgroundCommandPoolVK(
      std::move(pool_), std::move(collected_buffers_), unused_count, recycler);

  UniqueResourceVKT<BackgroundCommandPoolVK> pool(
      context->GetResourceManager(), std::move(reset_pool_when_dropped));
}

// TODO(matanlurey): Return a status_or<> instead of {} when we have one.
vk::UniqueCommandBuffer CommandPoolVK::CreateCommandBuffer() {
  auto const context = context_.lock();
  if (!context) {
    return {};
  }

  Lock lock(pool_mutex_);
  if (!pool_) {
    return {};
  }
  if (!unused_command_buffers_.empty()) {
    vk::UniqueCommandBuffer buffer = std::move(unused_command_buffers_.back());
    unused_command_buffers_.pop_back();
    return buffer;
  }

  auto const device = context->GetDevice();
  vk::CommandBufferAllocateInfo info;
  info.setCommandPool(pool_.get());
  info.setCommandBufferCount(1u);
  info.setLevel(vk::CommandBufferLevel::ePrimary);
  auto [result, buffers] = device.allocateCommandBuffersUnique(info);
  if (result != vk::Result::eSuccess) {
    return {};
  }
  return std::move(buffers[0]);
}

void CommandPoolVK::CollectCommandBuffer(vk::UniqueCommandBuffer&& buffer) {
  Lock lock(pool_mutex_);
  if (!pool_) {
    // If the command pool has already been destroyed, then its buffers have
    // already been freed.
    buffer.release();
    return;
  }
  collected_buffers_.push_back(std::move(buffer));
}

void CommandPoolVK::Destroy() {
  Lock lock(pool_mutex_);
  pool_.reset();

  // When the command pool is destroyed, all of its command buffers are freed.
  // Handles allocated from that pool are now invalid and must be discarded.
  for (auto& buffer : collected_buffers_) {
    buffer.release();
  }
  for (auto& buffer : unused_command_buffers_) {
    buffer.release();
  }
  unused_command_buffers_.clear();
  collected_buffers_.clear();
}

// Associates a resource with a thread and context.
using CommandPoolMap =
    std::unordered_map<uint64_t, std::shared_ptr<CommandPoolVK>>;

// CommandPoolVK Lifecycle:
// 1. End of frame will reset the command pool (clearing this on a thread).
//    There will still be references to the command pool from the uncompleted
//    command buffers.
// 2. The last reference to the command pool will be released from the fence
//    waiter thread, which will schedule a task on the resource manager
//    thread, which in turn will reset the command pool and make it available
//    for reuse ("recycle").
static thread_local std::unique_ptr<CommandPoolMap> tls_command_pool_map;

// Map each context to a list of all thread-local command pools associated
// with that context.
static Mutex g_all_pools_map_mutex;
static std::unordered_map<
    uint64_t,
    std::unordered_map<std::thread::id, std::weak_ptr<CommandPoolVK>>>
    g_all_pools_map IPLR_GUARDED_BY(g_all_pools_map_mutex);

CommandPoolRecyclerVK::CommandPoolRecyclerVK(
    const std::shared_ptr<ContextVK>& context)
    : context_(context), context_hash_(context->GetHash()) {}

// Visible for testing.
// Returns the number of pools in g_all_pools_map for the given context.
int CommandPoolRecyclerVK::GetGlobalPoolCount(const ContextVK& context) {
  Lock all_pools_lock(g_all_pools_map_mutex);
  auto it = g_all_pools_map.find(context.GetHash());
  return it != g_all_pools_map.end() ? it->second.size() : 0;
}

// TODO(matanlurey): Return a status_or<> instead of nullptr when we have one.
std::shared_ptr<CommandPoolVK> CommandPoolRecyclerVK::Get() {
  auto const strong_context = context_.lock();
  if (!strong_context) {
    return nullptr;
  }

  // If there is a resource in use for this thread and context, return it.
  if (!tls_command_pool_map.get()) {
    tls_command_pool_map.reset(new CommandPoolMap());
  }
  CommandPoolMap& pool_map = *tls_command_pool_map.get();
  auto const it = pool_map.find(context_hash_);
  if (it != pool_map.end()) {
    return it->second;
  }

  // Otherwise, create a new resource and return it.
  auto data = Create();
  if (!data || !data->pool) {
    return nullptr;
  }

  auto const resource = std::make_shared<CommandPoolVK>(
      std::move(data->pool), std::move(data->buffers), context_);
  pool_map.emplace(context_hash_, resource);

  {
    Lock all_pools_lock(g_all_pools_map_mutex);
    g_all_pools_map[context_hash_][std::this_thread::get_id()] = resource;
  }

  return resource;
}

// TODO(matanlurey): Return a status_or<> instead of nullopt when we have one.
std::optional<CommandPoolRecyclerVK::RecycledData>
CommandPoolRecyclerVK::Create() {
  // If we can reuse a command pool and its buffers, do so.
  if (auto data = Reuse()) {
    return data;
  }

  // Otherwise, create a new one.
  auto context = context_.lock();
  if (!context) {
    return std::nullopt;
  }
  vk::CommandPoolCreateInfo info;
  info.setQueueFamilyIndex(context->GetGraphicsQueue()->GetIndex().family);
  info.setFlags(vk::CommandPoolCreateFlagBits::eTransient);

  auto device = context->GetDevice();
  auto [result, pool] = device.createCommandPoolUnique(info);
  if (result != vk::Result::eSuccess) {
    return std::nullopt;
  }
  return CommandPoolRecyclerVK::RecycledData{.pool = std::move(pool),
                                             .buffers = {}};
}

std::optional<CommandPoolRecyclerVK::RecycledData>
CommandPoolRecyclerVK::Reuse() {
  // If there are no recycled pools, return nullopt.
  Lock recycled_lock(recycled_mutex_);
  if (recycled_.empty()) {
    return std::nullopt;
  }

  // Otherwise, remove and return a recycled pool.
  auto data = std::move(recycled_.back());
  recycled_.pop_back();
  return std::move(data);
}

void CommandPoolRecyclerVK::Reclaim(
    vk::UniqueCommandPool&& pool,
    std::vector<vk::UniqueCommandBuffer>&& buffers,
    bool should_trim) {
  // Reset the pool on a background thread.
  auto strong_context = context_.lock();
  if (!strong_context) {
    return;
  }
  auto device = strong_context->GetDevice();
  if (should_trim) {
    buffers.clear();
    device.resetCommandPool(pool.get(),
                            vk::CommandPoolResetFlagBits::eReleaseResources);
  } else {
    device.resetCommandPool(pool.get(), {});
  }

  // Move the pool to the recycled list.
  Lock recycled_lock(recycled_mutex_);
  recycled_.push_back(
      RecycledData{.pool = std::move(pool), .buffers = std::move(buffers)});
}

void CommandPoolRecyclerVK::Dispose() {
  CommandPoolMap* pool_map = tls_command_pool_map.get();
  if (pool_map) {
    pool_map->erase(context_hash_);
  }

  {
    Lock all_pools_lock(g_all_pools_map_mutex);
    auto found = g_all_pools_map.find(context_hash_);
    if (found != g_all_pools_map.end()) {
      found->second.erase(std::this_thread::get_id());
    }
  }
}

void CommandPoolRecyclerVK::DestroyThreadLocalPools() {
  // Delete the context's entry in this thread's command pool map.
  if (tls_command_pool_map.get()) {
    tls_command_pool_map.get()->erase(context_hash_);
  }

  // Destroy all other thread-local CommandPoolVK instances associated with
  // this context.
  Lock all_pools_lock(g_all_pools_map_mutex);
  auto found = g_all_pools_map.find(context_hash_);
  if (found != g_all_pools_map.end()) {
    for (auto& [thread_id, weak_pool] : found->second) {
      auto pool = weak_pool.lock();
      if (!pool) {
        continue;
      }
      // Delete all objects held by this pool. The destroyed pool will still
      // remain in its thread's TLS map until that thread exits.
      pool->Destroy();
    }
    g_all_pools_map.erase(found);
  }
}

}  // namespace impeller
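
For orientation, here is a minimal usage sketch (not part of command_pool_vk.cc) of how a per-thread pool is typically obtained and handed back during frame encoding. The `context` variable stands in for a fully constructed ContextVK, and command recording is elided; only calls that appear in this file or its header are used.

// Sketch only: `context` is assumed to be a valid std::shared_ptr<ContextVK>.
auto recycler = context->GetCommandPoolRecycler();

// Returns (or lazily creates) the CommandPoolVK for this thread and context.
std::shared_ptr<CommandPoolVK> pool = recycler->Get();

// Allocate a primary command buffer from the thread-local pool. A previously
// collected-but-unused buffer is returned first, if one is available.
vk::UniqueCommandBuffer cmd = pool->CreateCommandBuffer();

// ... record commands into cmd.get() ...

// Hand the buffer back so it is retained until the pool itself is reset,
// rather than being freed from an arbitrary thread.
pool->CollectCommandBuffer(std::move(cmd));

// At end of frame the thread-local entry is dropped (e.g. via Dispose()).
// The last CommandPoolVK reference then wraps the pool in a
// BackgroundCommandPoolVK, and the resource manager thread later resets it
// and pushes it back into the recycler via Reclaim().
recycler->Dispose();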
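When a context is torn down, every live per-thread pool registered for it is destroyed from the teardown thread. A sketch of that path, again assuming the same `context`; the FML_DCHECK is illustrative only.

// Sketch only: destroys the Vulkan objects held by every thread-local pool
// created for this context, regardless of which thread created it.
context->GetCommandPoolRecycler()->DestroyThreadLocalPools();

// Visible-for-testing check: no pools remain registered for this context.
FML_DCHECK(CommandPoolRecyclerVK::GetGlobalPoolCount(*context) == 0);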