#include <unordered_map>

#include "fml/concurrent_message_loop.h"

#include <sys/resource.h>

#include "flutter/fml/cpu_affinity.h"
#include "flutter/fml/trace_event.h"

VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
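// The macro above provides the program-wide storage for vulkan.hpp's dynamic
// dispatcher; ContextVK::Setup() initializes that dispatcher twice, first with
// the embedder-supplied proc-address callback and again once an instance
// exists, so instance-level entry points resolve correctly.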
static std::optional<vk::PhysicalDevice> PickPhysicalDevice(
    const CapabilitiesVK& caps,
    const vk::Instance& instance) {
  for (const auto& device : instance.enumeratePhysicalDevices().value) {
    if (caps.GetEnabledDeviceExtensions(device).has_value()) {
      return device;
    }
  }
  return std::nullopt;
}
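// GetQueueCreateInfos() collapses the requested queues into one
// vk::DeviceQueueCreateInfo per queue family: queues that share a family only
// raise that family's queue count (highest requested index plus one), and all
// queues use the same static priority.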
static std::vector<vk::DeviceQueueCreateInfo> GetQueueCreateInfos(
    std::initializer_list<QueueIndexVK> queues) {
  std::map<size_t /*family*/, size_t /*index*/> family_index_map;
  for (const auto& queue : queues) {
    family_index_map[queue.family] = 0;
  }
  for (const auto& queue : queues) {
    auto value = family_index_map[queue.family];
    family_index_map[queue.family] = std::max(value, queue.index);
  }

  static float kQueuePriority = 1.0f;
  std::vector<vk::DeviceQueueCreateInfo> infos;
  for (const auto& item : family_index_map) {
    vk::DeviceQueueCreateInfo info;
    info.setQueueFamilyIndex(item.first);
    info.setQueueCount(item.second + 1);
    info.setQueuePriorities(kQueuePriority);
    infos.push_back(info);
  }
  return infos;
}
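// PickQueue() returns the first queue family advertising the requested
// capability bit; it makes no attempt to prefer a dedicated transfer or
// compute family.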
static std::optional<QueueIndexVK> PickQueue(const vk::PhysicalDevice& device,
                                             vk::QueueFlagBits flags) {
  const auto families = device.getQueueFamilyProperties();
  for (size_t i = 0u; i < families.size(); i++) {
    if (!(families[i].queueFlags & flags)) {
      continue;
    }
    return QueueIndexVK{.family = i, .index = 0};
  }
  return std::nullopt;
}
std::shared_ptr<ContextVK> ContextVK::Create(Settings settings) {
  auto context = std::shared_ptr<ContextVK>(new ContextVK(settings.flags));
  context->Setup(std::move(settings));
  if (!context->IsValid()) {
    return nullptr;
  }
  return context;
}
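// A minimal usage sketch (not from this file): only Settings fields that
// appear below are shown, and the proc-address callback is assumed to come
// from the embedder's Vulkan loader, e.g. vkGetInstanceProcAddr.
//
//   ContextVK::Settings settings;
//   settings.proc_address_callback = &vkGetInstanceProcAddr;
//   settings.enable_validation = true;
//   auto context = ContextVK::Create(std::move(settings));
//   if (!context) {
//     // Setup() failed; Create() returns nullptr for invalid contexts.
//   }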
size_t ContextVK::ChooseThreadCountForWorkers(size_t hardware_concurrency) {
  return std::clamp(hardware_concurrency / 2ull, 1ull, 4ull);
}
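// ChooseThreadCountForWorkers() gives the worker pool at most half of the
// reported hardware concurrency, clamped to [1, 4]: a concurrency of 2 yields
// 1 worker, 8 yields 4, and anything larger still yields 4.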
std::atomic_uint64_t context_count = 0;

uint64_t CalculateHash(void* ptr) {
  // The pointer is ignored; each context simply receives the next value of a
  // process-wide counter.
  return context_count.fetch_add(1);
}
ContextVK::ContextVK(const Flags& flags)
    : Context(flags), hash_(CalculateHash(this)) {}
ContextVK::~ContextVK() {
  if (device_holder_ && device_holder_->device) {
    [[maybe_unused]] auto result = device_holder_->device->waitIdle();
  }
  if (command_pool_recycler_) {
    command_pool_recycler_->DestroyThreadLocalPools();
  }
}
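// Setup() below performs the whole context bring-up in order: initialize the
// dynamic dispatcher, create (or adopt from the embedder) the vk::Instance,
// install the debug reporter, pick a physical device and its
// graphics/compute/transfer queues, create (or adopt) the logical device, then
// build the allocator, pipeline/sampler/shader libraries, fence waiter,
// resource manager, pool recyclers, queue wrappers, and GPU tracer before
// committing everything to member state.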
void ContextVK::Setup(Settings settings) {
  TRACE_EVENT0("impeller", "ContextVK::Setup");

  if (!settings.proc_address_callback) {
    return;
  }

  raster_message_loop_ = fml::ConcurrentMessageLoop::Create(
      ChooseThreadCountForWorkers(std::thread::hardware_concurrency()));

  auto& dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER;
  dispatcher.init(settings.proc_address_callback);
  std::vector<std::string> embedder_instance_extensions;
  std::vector<std::string> embedder_device_extensions;
  if (settings.embedder_data.has_value()) {
    embedder_instance_extensions = settings.embedder_data->instance_extensions;
    embedder_device_extensions = settings.embedder_data->device_extensions;
  }

  auto caps = std::shared_ptr<CapabilitiesVK>(
      new CapabilitiesVK(settings.enable_validation,
                         settings.fatal_missing_validations,
                         settings.embedder_data.has_value(),
                         embedder_instance_extensions,
                         embedder_device_extensions));

  if (!caps->IsValid()) {
    return;
  }

  auto enabled_layers = caps->GetEnabledLayers();
  auto enabled_extensions = caps->GetEnabledInstanceExtensions();

  if (!enabled_layers.has_value() || !enabled_extensions.has_value()) {
    return;
  }
  vk::InstanceCreateFlags instance_flags = {};

  if (std::find(enabled_extensions.value().begin(),
                enabled_extensions.value().end(),
                "VK_KHR_portability_enumeration") !=
      enabled_extensions.value().end()) {
    instance_flags |= vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR;
  }
  std::vector<const char*> enabled_layers_c;
  std::vector<const char*> enabled_extensions_c;

  for (const auto& layer : enabled_layers.value()) {
    enabled_layers_c.push_back(layer.c_str());
  }

  for (const auto& ext : enabled_extensions.value()) {
    enabled_extensions_c.push_back(ext.c_str());
  }
  vk::ApplicationInfo application_info;
  application_info.setApplicationVersion(VK_MAKE_API_VERSION(0, 2, 0, 0));
  application_info.setApiVersion(VK_API_VERSION_1_1);
  application_info.setEngineVersion(VK_API_VERSION_1_0);
  application_info.setPEngineName("Impeller");
  application_info.setPApplicationName("Impeller");
  vk::StructureChain<vk::InstanceCreateInfo, vk::ValidationFeaturesEXT>
      instance_chain;

  if (!caps->AreValidationsEnabled()) {
    instance_chain.unlink<vk::ValidationFeaturesEXT>();
  }

  std::vector<vk::ValidationFeatureEnableEXT> enabled_validations = {
      vk::ValidationFeatureEnableEXT::eSynchronizationValidation,
  };

  auto& validation = instance_chain.get<vk::ValidationFeaturesEXT>();
  validation.setEnabledValidationFeatures(enabled_validations);

  auto& instance_info = instance_chain.get<vk::InstanceCreateInfo>();
  instance_info.setPEnabledLayerNames(enabled_layers_c);
  instance_info.setPEnabledExtensionNames(enabled_extensions_c);
  instance_info.setPApplicationInfo(&application_info);
  instance_info.setFlags(instance_flags);
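// The StructureChain links the ValidationFeaturesEXT struct into the
// InstanceCreateInfo's pNext chain; unlink<>() above detaches it when
// validation is off, so the extra synchronization-validation request is only
// passed to the loader when the validation layers are actually enabled.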
  auto device_holder = std::make_shared<DeviceHolderImpl>();
  if (!settings.embedder_data.has_value()) {
    auto instance = vk::createInstanceUnique(instance_info);
    if (instance.result != vk::Result::eSuccess) {
      VALIDATION_LOG << "Could not create Vulkan instance: "
                     << vk::to_string(instance.result);
      return;
    }
    device_holder->instance = std::move(instance.value);
  } else {
    device_holder->instance.reset(settings.embedder_data->instance);
    device_holder->owned = false;
  }
  dispatcher.init(device_holder->instance.get());
  auto debug_report =
      std::make_unique<DebugReportVK>(*caps, device_holder->instance.get());

  if (!debug_report->IsValid()) {
    return;
  }

  if (!settings.embedder_data.has_value()) {
    auto physical_device =
        PickPhysicalDevice(*caps, device_holder->instance.get());
    if (!physical_device.has_value()) {
      return;
    }
    device_holder->physical_device = physical_device.value();
  } else {
    device_holder->physical_device = settings.embedder_data->physical_device;
  }
  auto graphics_queue =
      PickQueue(device_holder->physical_device, vk::QueueFlagBits::eGraphics);
  auto transfer_queue =
      PickQueue(device_holder->physical_device, vk::QueueFlagBits::eTransfer);
  auto compute_queue =
      PickQueue(device_holder->physical_device, vk::QueueFlagBits::eCompute);

  if (!graphics_queue.has_value()) {
    return;
  }
  if (!transfer_queue.has_value()) {
    transfer_queue = graphics_queue.value();
  }
  if (!compute_queue.has_value()) {
    return;
  }
  auto enabled_device_extensions =
      caps->GetEnabledDeviceExtensions(device_holder->physical_device);
  if (!enabled_device_extensions.has_value()) {
    return;
  }

  std::vector<const char*> enabled_device_extensions_c;
  for (const auto& ext : enabled_device_extensions.value()) {
    enabled_device_extensions_c.push_back(ext.c_str());
  }

  const auto queue_create_infos = GetQueueCreateInfos(
      {graphics_queue.value(), compute_queue.value(), transfer_queue.value()});
  const auto enabled_features =
      caps->GetEnabledDeviceFeatures(device_holder->physical_device);
  if (!enabled_features.has_value()) {
    return;
  }

  vk::DeviceCreateInfo device_info;

  device_info.setPNext(&enabled_features.value().get());
  device_info.setQueueCreateInfos(queue_create_infos);
  device_info.setPEnabledExtensionNames(enabled_device_extensions_c);
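// Only pNext (the enabled-features chain), the queue create infos, and the
// device extensions are set on the vk::DeviceCreateInfo above; device layers
// are deprecated in Vulkan and are not supplied.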
  if (!settings.embedder_data.has_value()) {
    auto device_result =
        device_holder->physical_device.createDeviceUnique(device_info);
    if (device_result.result != vk::Result::eSuccess) {
      return;
    }
    device_holder->device = std::move(device_result.value);
  } else {
    device_holder->device.reset(settings.embedder_data->device);
  }

  if (!caps->SetPhysicalDevice(device_holder->physical_device,
                               *enabled_features)) {
    return;
  }
  auto allocator = std::shared_ptr<AllocatorVK>(
      new AllocatorVK(/* ... */
                      application_info.apiVersion,
                      device_holder->physical_device,
                      /* ... */
                      device_holder->instance.get(),
                      /* ... */));

  if (!allocator->IsValid()) {
    return;
  }
  auto pipeline_library = std::shared_ptr<PipelineLibraryVK>(
      new PipelineLibraryVK(device_holder,
                            /* ... */
                            std::move(settings.cache_directory),
                            raster_message_loop_->GetTaskRunner()));

  if (!pipeline_library->IsValid()) {
    return;
  }
  auto sampler_library =
      std::shared_ptr<SamplerLibraryVK>(new SamplerLibraryVK(device_holder));

  auto shader_library = std::shared_ptr<ShaderLibraryVK>(
      new ShaderLibraryVK(device_holder, settings.shader_libraries_data));
  if (!shader_library->IsValid()) {
    return;
  }

  auto fence_waiter =
      std::shared_ptr<FenceWaiterVK>(new FenceWaiterVK(device_holder));
  auto resource_manager = ResourceManagerVK::Create();
  if (!resource_manager) {
    return;
  }

  auto command_pool_recycler =
      std::make_shared<CommandPoolRecyclerVK>(shared_from_this());
  if (!command_pool_recycler) {
    return;
  }

  auto descriptor_pool_recycler =
      std::make_shared<DescriptorPoolRecyclerVK>(weak_from_this());
  if (!descriptor_pool_recycler) {
    return;
  }
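// Both recyclers hold references back to this context (shared_from_this() for
// the command pool recycler, weak_from_this() for the descriptor pool
// recycler). The command pool recycler keeps per-thread pools, which is why
// the destructor calls DestroyThreadLocalPools() and
// DisposeThreadLocalCachedResources() calls Dispose().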
  QueuesVK queues;
  if (!settings.embedder_data.has_value()) {
    queues = QueuesVK::FromQueueIndices(device_holder->device.get(),
                                        graphics_queue.value(),
                                        compute_queue.value(),
                                        transfer_queue.value());
  } else {
    queues = QueuesVK::FromEmbedderQueue(
        /* ... */
        settings.embedder_data->queue_family_index);
  }
  if (!queues.IsValid()) {
    return;
  }
  VkPhysicalDeviceProperties physical_device_properties;
  dispatcher.vkGetPhysicalDeviceProperties(device_holder->physical_device,
                                           &physical_device_properties);

  auto driver_info =
      std::make_unique<DriverInfoVK>(device_holder->physical_device);
  workarounds_ = GetWorkaroundsFromDriverInfo(*driver_info);
  caps->ApplyWorkarounds(workarounds_);
  sampler_library->ApplyWorkarounds(workarounds_);
  device_holder_ = std::move(device_holder);
  idle_waiter_vk_ = std::make_shared<IdleWaiterVK>(device_holder_);
  driver_info_ = std::move(driver_info);
  debug_report_ = std::move(debug_report);
  allocator_ = std::move(allocator);
  shader_library_ = std::move(shader_library);
  sampler_library_ = std::move(sampler_library);
  pipeline_library_ = std::move(pipeline_library);
  yuv_conversion_library_ = std::shared_ptr<YUVConversionLibraryVK>(
      new YUVConversionLibraryVK(device_holder_));
  queues_ = std::move(queues);
  device_capabilities_ = std::move(caps);
  fence_waiter_ = std::move(fence_waiter);
  resource_manager_ = std::move(resource_manager);
  command_pool_recycler_ = std::move(command_pool_recycler);
  descriptor_pool_recycler_ = std::move(descriptor_pool_recycler);
  device_name_ = std::string(physical_device_properties.deviceName);
  command_queue_vk_ = std::make_shared<CommandQueueVK>(weak_from_this());
  should_enable_surface_control_ = settings.enable_surface_control;

  gpu_tracer_ = std::make_shared<GPUTracerVK>(weak_from_this(),
                                              settings.enable_gpu_tracing);
  gpu_tracer_->InitializeQueryPool(*this);

  // ...
}
std::shared_ptr<ShaderLibrary> ContextVK::GetShaderLibrary() const {
  return shader_library_;
}

std::shared_ptr<SamplerLibrary> ContextVK::GetSamplerLibrary() const {
  return sampler_library_;
}

std::shared_ptr<PipelineLibrary> ContextVK::GetPipelineLibrary() const {
  return pipeline_library_;
}
std::shared_ptr<CommandBuffer> ContextVK::CreateCommandBuffer() const {
  // ...
  auto tls_pool = recycler->Get();
  // ...
  std::shared_ptr<DescriptorPoolVK> descriptor_pool;
  {
    Lock lock(desc_pool_mutex_);
    DescriptorPoolMap::iterator current_pool =
        cached_descriptor_pool_.find(std::this_thread::get_id());
    if (current_pool == cached_descriptor_pool_.end()) {
      descriptor_pool = (cached_descriptor_pool_[std::this_thread::get_id()] =
                             descriptor_pool_recycler_->GetDescriptorPool());
    } else {
      descriptor_pool = current_pool->second;
    }
  }

  auto tracked_objects = std::make_shared<TrackedObjectsVK>(
      weak_from_this(), std::move(tls_pool), std::move(descriptor_pool),
      /* ... */);
  if (!tracked_objects || !tracked_objects->IsValid() || !queue) {
    return nullptr;
  }

  vk::CommandBufferBeginInfo begin_info;
  begin_info.flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit;
  if (tracked_objects->GetCommandBuffer().begin(begin_info) !=
      vk::Result::eSuccess) {
    return nullptr;
  }

  tracked_objects->GetGPUProbe().RecordCmdBufferStart(
      tracked_objects->GetCommandBuffer());

  // ...
  return std::shared_ptr<CommandBufferVK>(
      new CommandBufferVK(/* ... */ std::move(tracked_objects)));
}
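// Descriptor pools are cached per encoding thread, keyed by
// std::this_thread::get_id(); this is why DisposeThreadLocalCachedResources()
// further down erases only the calling thread's entry before disposing the
// command pool recycler's thread-local state.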
vk::Instance ContextVK::GetInstance() const {
  return *device_holder_->instance;
}

const vk::Device& ContextVK::GetDevice() const {
  return device_holder_->device.get();
}

const std::shared_ptr<fml::ConcurrentTaskRunner>
ContextVK::GetConcurrentWorkerTaskRunner() const {
  return raster_message_loop_->GetTaskRunner();
}
void ContextVK::Shutdown() {
  fence_waiter_->Terminate();
  resource_manager_.reset();

  raster_message_loop_->Terminate();
}

std::shared_ptr<SurfaceContextVK> ContextVK::CreateSurfaceContext() {
  return std::make_shared<SurfaceContextVK>(shared_from_this());
}
const std::shared_ptr<const Capabilities>& ContextVK::GetCapabilities() const {
  return device_capabilities_;
}

vk::PhysicalDevice ContextVK::GetPhysicalDevice() const {
  return device_holder_->physical_device;
}

std::shared_ptr<FenceWaiterVK> ContextVK::GetFenceWaiter() const {
  return fence_waiter_;
}

std::shared_ptr<ResourceManagerVK> ContextVK::GetResourceManager() const {
  return resource_manager_;
}

std::shared_ptr<CommandPoolRecyclerVK> ContextVK::GetCommandPoolRecycler()
    const {
  return command_pool_recycler_;
}

std::shared_ptr<DescriptorPoolRecyclerVK> ContextVK::GetDescriptorPoolRecycler()
    const {
  return descriptor_pool_recycler_;
}

std::shared_ptr<CommandQueue> ContextVK::GetCommandQueue() const {
  return command_queue_vk_;
}
bool ContextVK::EnqueueCommandBuffer(
    std::shared_ptr<CommandBuffer> command_buffer) {
  if (should_batch_cmd_buffers_) {
    pending_command_buffers_.push_back(std::move(command_buffer));
    return true;
  }
  // ...
}

bool ContextVK::FlushCommandBuffers() {
  // ...
  if (pending_command_buffers_.empty()) {
    return true;
  }
  if (should_batch_cmd_buffers_) {
    bool result = GetCommandQueue()->Submit(pending_command_buffers_).ok();
    pending_command_buffers_.clear();
    return result;
  }
  // ...
}
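// When batching is enabled, encoded command buffers accumulate in
// pending_command_buffers_ and are submitted together via
// GetCommandQueue()->Submit() when FlushCommandBuffers() runs; the non-batching
// path (elided above) hands buffers to the queue as they arrive. The
// should_batch_cmd_buffers_ switch is presumably derived from the
// batch_submit_command_buffer_timeout driver workaround.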
  // (Heavily elided: per-attachment format, sample-count, load, and store
  // values are forwarded from the render target's color, depth, and stencil
  // attachments to the render pass builder.)
        // ...
        attachment.texture->GetTextureDescriptor().format,
        attachment.texture->GetTextureDescriptor().sample_count,
        // ...
        depth->texture->GetTextureDescriptor().format,
        depth->texture->GetTextureDescriptor().sample_count,
        // ...
  if (/* ... */ stencil.has_value()) {
        // ...
        stencil->texture->GetTextureDescriptor().format,
        stencil->texture->GetTextureDescriptor().sample_count,
        stencil->load_action,
        stencil->store_action
        // ...
void ContextVK::DisposeThreadLocalCachedResources() {
  {
    Lock lock(desc_pool_mutex_);
    cached_descriptor_pool_.erase(std::this_thread::get_id());
  }
  command_pool_recycler_->Dispose();
}
const std::shared_ptr<YUVConversionLibraryVK>&
ContextVK::GetYUVConversionLibrary() const {
  return yuv_conversion_library_;
}
bool ContextVK::GetShouldEnableSurfaceControlSwapchain() const {
  return should_enable_surface_control_ && /* ... */;
}