Diffstat (limited to 'drivers')
 -rw-r--r--  drivers/gles3/rasterizer_scene_gles3.cpp    | 182
 -rw-r--r--  drivers/gles3/shaders/scene.glsl            |  18
 -rw-r--r--  drivers/gles3/storage/material_storage.cpp  |   2
 -rw-r--r--  drivers/gles3/storage/mesh_storage.cpp      |   9
 -rw-r--r--  drivers/gles3/storage/texture_storage.cpp   |   3
 -rw-r--r--  drivers/gles3/storage/texture_storage.h     |  34
 -rw-r--r--  drivers/png/resource_saver_png.cpp          |   2
 -rw-r--r--  drivers/unix/file_access_unix.cpp           |   5
 -rw-r--r--  drivers/unix/net_socket_posix.cpp           |  20
 -rw-r--r--  drivers/unix/net_socket_posix.h             |   1
 -rw-r--r--  drivers/vulkan/rendering_device_vulkan.cpp  | 126
 -rw-r--r--  drivers/vulkan/rendering_device_vulkan.h    |   1
 -rw-r--r--  drivers/vulkan/vulkan_context.cpp           |   2
13 files changed, 263 insertions(+), 142 deletions(-)
diff --git a/drivers/gles3/rasterizer_scene_gles3.cpp b/drivers/gles3/rasterizer_scene_gles3.cpp index e6e7dd6f17..d8ddfe5c32 100644 --- a/drivers/gles3/rasterizer_scene_gles3.cpp +++ b/drivers/gles3/rasterizer_scene_gles3.cpp @@ -608,18 +608,20 @@ void RasterizerSceneGLES3::_setup_sky(const RenderDataGLES3 *p_render_data, cons material = nullptr; } } + } - if (!material) { - sky_material = sky_globals.default_material; - material = static_cast<GLES3::SkyMaterialData *>(material_storage->material_get_data(sky_material, RS::SHADER_SKY)); - } + if (!material) { + sky_material = sky_globals.default_material; + material = static_cast<GLES3::SkyMaterialData *>(material_storage->material_get_data(sky_material, RS::SHADER_SKY)); + } - ERR_FAIL_COND(!material); + ERR_FAIL_COND(!material); - shader_data = material->shader_data; + shader_data = material->shader_data; - ERR_FAIL_COND(!shader_data); + ERR_FAIL_COND(!shader_data); + if (sky) { if (shader_data->uses_time && time - sky->prev_time > 0.00001) { sky->prev_time = time; sky->reflection_dirty = true; @@ -640,111 +642,113 @@ void RasterizerSceneGLES3::_setup_sky(const RenderDataGLES3 *p_render_data, cons sky->prev_position = p_transform.origin; sky->reflection_dirty = true; } + } - glBindBufferBase(GL_UNIFORM_BUFFER, SKY_DIRECTIONAL_LIGHT_UNIFORM_LOCATION, sky_globals.directional_light_buffer); - if (shader_data->uses_light) { - sky_globals.directional_light_count = 0; - for (int i = 0; i < (int)p_lights.size(); i++) { - GLES3::LightInstance *li = GLES3::LightStorage::get_singleton()->get_light_instance(p_lights[i]); - if (!li) { - continue; - } - RID base = li->light; + glBindBufferBase(GL_UNIFORM_BUFFER, SKY_DIRECTIONAL_LIGHT_UNIFORM_LOCATION, sky_globals.directional_light_buffer); + if (shader_data->uses_light) { + sky_globals.directional_light_count = 0; + for (int i = 0; i < (int)p_lights.size(); i++) { + GLES3::LightInstance *li = GLES3::LightStorage::get_singleton()->get_light_instance(p_lights[i]); + if (!li) { + continue; + } + RID base = li->light; - ERR_CONTINUE(base.is_null()); + ERR_CONTINUE(base.is_null()); - RS::LightType type = light_storage->light_get_type(base); - if (type == RS::LIGHT_DIRECTIONAL && light_storage->light_directional_get_sky_mode(base) != RS::LIGHT_DIRECTIONAL_SKY_MODE_LIGHT_ONLY) { - DirectionalLightData &sky_light_data = sky_globals.directional_lights[sky_globals.directional_light_count]; - Transform3D light_transform = li->transform; - Vector3 world_direction = light_transform.basis.xform(Vector3(0, 0, 1)).normalized(); + RS::LightType type = light_storage->light_get_type(base); + if (type == RS::LIGHT_DIRECTIONAL && light_storage->light_directional_get_sky_mode(base) != RS::LIGHT_DIRECTIONAL_SKY_MODE_LIGHT_ONLY) { + DirectionalLightData &sky_light_data = sky_globals.directional_lights[sky_globals.directional_light_count]; + Transform3D light_transform = li->transform; + Vector3 world_direction = light_transform.basis.xform(Vector3(0, 0, 1)).normalized(); - sky_light_data.direction[0] = world_direction.x; - sky_light_data.direction[1] = world_direction.y; - sky_light_data.direction[2] = world_direction.z; + sky_light_data.direction[0] = world_direction.x; + sky_light_data.direction[1] = world_direction.y; + sky_light_data.direction[2] = world_direction.z; - float sign = light_storage->light_is_negative(base) ? -1 : 1; - sky_light_data.energy = sign * light_storage->light_get_param(base, RS::LIGHT_PARAM_ENERGY); + float sign = light_storage->light_is_negative(base) ? 
-1 : 1; + sky_light_data.energy = sign * light_storage->light_get_param(base, RS::LIGHT_PARAM_ENERGY); - if (is_using_physical_light_units()) { - sky_light_data.energy *= light_storage->light_get_param(base, RS::LIGHT_PARAM_INTENSITY); - } + if (is_using_physical_light_units()) { + sky_light_data.energy *= light_storage->light_get_param(base, RS::LIGHT_PARAM_INTENSITY); + } - if (p_render_data->camera_attributes.is_valid()) { - sky_light_data.energy *= RSG::camera_attributes->camera_attributes_get_exposure_normalization_factor(p_render_data->camera_attributes); - } + if (p_render_data->camera_attributes.is_valid()) { + sky_light_data.energy *= RSG::camera_attributes->camera_attributes_get_exposure_normalization_factor(p_render_data->camera_attributes); + } - Color linear_col = light_storage->light_get_color(base); - sky_light_data.color[0] = linear_col.r; - sky_light_data.color[1] = linear_col.g; - sky_light_data.color[2] = linear_col.b; + Color linear_col = light_storage->light_get_color(base); + sky_light_data.color[0] = linear_col.r; + sky_light_data.color[1] = linear_col.g; + sky_light_data.color[2] = linear_col.b; - sky_light_data.enabled = true; + sky_light_data.enabled = true; - float angular_diameter = light_storage->light_get_param(base, RS::LIGHT_PARAM_SIZE); - if (angular_diameter > 0.0) { - angular_diameter = Math::tan(Math::deg_to_rad(angular_diameter)); - } else { - angular_diameter = 0.0; - } - sky_light_data.size = angular_diameter; - sky_globals.directional_light_count++; - if (sky_globals.directional_light_count >= sky_globals.max_directional_lights) { - break; - } + float angular_diameter = light_storage->light_get_param(base, RS::LIGHT_PARAM_SIZE); + if (angular_diameter > 0.0) { + angular_diameter = Math::tan(Math::deg_to_rad(angular_diameter)); + } else { + angular_diameter = 0.0; } - } - // Check whether the directional_light_buffer changes - bool light_data_dirty = false; - - // Light buffer is dirty if we have fewer or more lights - // If we have fewer lights, make sure that old lights are disabled - if (sky_globals.directional_light_count != sky_globals.last_frame_directional_light_count) { - light_data_dirty = true; - for (uint32_t i = sky_globals.directional_light_count; i < sky_globals.max_directional_lights; i++) { - sky_globals.directional_lights[i].enabled = false; - sky_globals.last_frame_directional_lights[i].enabled = false; + sky_light_data.size = angular_diameter; + sky_globals.directional_light_count++; + if (sky_globals.directional_light_count >= sky_globals.max_directional_lights) { + break; } } + } + // Check whether the directional_light_buffer changes + bool light_data_dirty = false; + + // Light buffer is dirty if we have fewer or more lights + // If we have fewer lights, make sure that old lights are disabled + if (sky_globals.directional_light_count != sky_globals.last_frame_directional_light_count) { + light_data_dirty = true; + for (uint32_t i = sky_globals.directional_light_count; i < sky_globals.max_directional_lights; i++) { + sky_globals.directional_lights[i].enabled = false; + sky_globals.last_frame_directional_lights[i].enabled = false; + } + } - if (!light_data_dirty) { - for (uint32_t i = 0; i < sky_globals.directional_light_count; i++) { - if (sky_globals.directional_lights[i].direction[0] != sky_globals.last_frame_directional_lights[i].direction[0] || - sky_globals.directional_lights[i].direction[1] != sky_globals.last_frame_directional_lights[i].direction[1] || - sky_globals.directional_lights[i].direction[2] != 
sky_globals.last_frame_directional_lights[i].direction[2] || - sky_globals.directional_lights[i].energy != sky_globals.last_frame_directional_lights[i].energy || - sky_globals.directional_lights[i].color[0] != sky_globals.last_frame_directional_lights[i].color[0] || - sky_globals.directional_lights[i].color[1] != sky_globals.last_frame_directional_lights[i].color[1] || - sky_globals.directional_lights[i].color[2] != sky_globals.last_frame_directional_lights[i].color[2] || - sky_globals.directional_lights[i].enabled != sky_globals.last_frame_directional_lights[i].enabled || - sky_globals.directional_lights[i].size != sky_globals.last_frame_directional_lights[i].size) { - light_data_dirty = true; - break; - } + if (!light_data_dirty) { + for (uint32_t i = 0; i < sky_globals.directional_light_count; i++) { + if (sky_globals.directional_lights[i].direction[0] != sky_globals.last_frame_directional_lights[i].direction[0] || + sky_globals.directional_lights[i].direction[1] != sky_globals.last_frame_directional_lights[i].direction[1] || + sky_globals.directional_lights[i].direction[2] != sky_globals.last_frame_directional_lights[i].direction[2] || + sky_globals.directional_lights[i].energy != sky_globals.last_frame_directional_lights[i].energy || + sky_globals.directional_lights[i].color[0] != sky_globals.last_frame_directional_lights[i].color[0] || + sky_globals.directional_lights[i].color[1] != sky_globals.last_frame_directional_lights[i].color[1] || + sky_globals.directional_lights[i].color[2] != sky_globals.last_frame_directional_lights[i].color[2] || + sky_globals.directional_lights[i].enabled != sky_globals.last_frame_directional_lights[i].enabled || + sky_globals.directional_lights[i].size != sky_globals.last_frame_directional_lights[i].size) { + light_data_dirty = true; + break; } } + } - if (light_data_dirty) { - glBufferData(GL_UNIFORM_BUFFER, sizeof(DirectionalLightData) * sky_globals.max_directional_lights, sky_globals.directional_lights, GL_STREAM_DRAW); - glBindBuffer(GL_UNIFORM_BUFFER, 0); + if (light_data_dirty) { + glBufferData(GL_UNIFORM_BUFFER, sizeof(DirectionalLightData) * sky_globals.max_directional_lights, sky_globals.directional_lights, GL_STREAM_DRAW); + glBindBuffer(GL_UNIFORM_BUFFER, 0); - DirectionalLightData *temp = sky_globals.last_frame_directional_lights; - sky_globals.last_frame_directional_lights = sky_globals.directional_lights; - sky_globals.directional_lights = temp; - sky_globals.last_frame_directional_light_count = sky_globals.directional_light_count; + DirectionalLightData *temp = sky_globals.last_frame_directional_lights; + sky_globals.last_frame_directional_lights = sky_globals.directional_lights; + sky_globals.directional_lights = temp; + sky_globals.last_frame_directional_light_count = sky_globals.directional_light_count; + if (sky) { sky->reflection_dirty = true; } } + } - if (p_render_data->view_count > 1) { - glBindBufferBase(GL_UNIFORM_BUFFER, SKY_MULTIVIEW_UNIFORM_LOCATION, scene_state.multiview_buffer); - glBindBuffer(GL_UNIFORM_BUFFER, 0); - } + if (p_render_data->view_count > 1) { + glBindBufferBase(GL_UNIFORM_BUFFER, SKY_MULTIVIEW_UNIFORM_LOCATION, scene_state.multiview_buffer); + glBindBuffer(GL_UNIFORM_BUFFER, 0); + } - if (!sky->radiance) { - _invalidate_sky(sky); - _update_dirty_skys(); - } + if (sky && !sky->radiance) { + _invalidate_sky(sky); + _update_dirty_skys(); } } diff --git a/drivers/gles3/shaders/scene.glsl b/drivers/gles3/shaders/scene.glsl index aa68febec8..7cba77be2f 100644 --- a/drivers/gles3/shaders/scene.glsl +++ 
b/drivers/gles3/shaders/scene.glsl @@ -1039,17 +1039,16 @@ void main() { if (alpha < alpha_scissor_threshold) { discard; } -#endif // ALPHA_SCISSOR_USED - +#else +#ifdef MODE_RENDER_DEPTH #ifdef USE_OPAQUE_PREPASS -#if !defined(ALPHA_SCISSOR_USED) if (alpha < opaque_prepass_threshold) { discard; } - -#endif // not ALPHA_SCISSOR_USED #endif // USE_OPAQUE_PREPASS +#endif // MODE_RENDER_DEPTH +#endif // !ALPHA_SCISSOR_USED #endif // !USE_SHADOW_TO_OPACITY @@ -1270,17 +1269,16 @@ void main() { if (alpha < alpha_scissor) { discard; } -#endif // ALPHA_SCISSOR_USED - +#else +#ifdef MODE_RENDER_DEPTH #ifdef USE_OPAQUE_PREPASS -#if !defined(ALPHA_SCISSOR_USED) if (alpha < opaque_prepass_threshold) { discard; } - -#endif // not ALPHA_SCISSOR_USED #endif // USE_OPAQUE_PREPASS +#endif // MODE_RENDER_DEPTH +#endif // !ALPHA_SCISSOR_USED #endif // USE_SHADOW_TO_OPACITY diff --git a/drivers/gles3/storage/material_storage.cpp b/drivers/gles3/storage/material_storage.cpp index d0746a8fc8..aa6319f0ef 100644 --- a/drivers/gles3/storage/material_storage.cpp +++ b/drivers/gles3/storage/material_storage.cpp @@ -1338,7 +1338,7 @@ MaterialStorage::MaterialStorage() { actions.render_mode_defines["cull_front"] = "#define DO_SIDE_CHECK\n"; actions.render_mode_defines["cull_disabled"] = "#define DO_SIDE_CHECK\n"; actions.render_mode_defines["particle_trails"] = "#define USE_PARTICLE_TRAILS\n"; - actions.render_mode_defines["depth_draw_opaque"] = "#define USE_OPAQUE_PREPASS\n"; + actions.render_mode_defines["depth_prepass_alpha"] = "#define USE_OPAQUE_PREPASS\n"; bool force_lambert = GLOBAL_GET("rendering/shading/overrides/force_lambert_over_burley"); diff --git a/drivers/gles3/storage/mesh_storage.cpp b/drivers/gles3/storage/mesh_storage.cpp index ad4bdae272..381cf1a7c6 100644 --- a/drivers/gles3/storage/mesh_storage.cpp +++ b/drivers/gles3/storage/mesh_storage.cpp @@ -1075,6 +1075,7 @@ void MeshStorage::update_mesh_instances() { } glEnable(GL_RASTERIZER_DISCARD); + glBindFramebuffer(GL_FRAMEBUFFER, 0); // Process skeletons and blend shapes using transform feedback while (dirty_mesh_instance_arrays.first()) { MeshInstance *mi = dirty_mesh_instance_arrays.first()->self(); @@ -1284,13 +1285,17 @@ void MeshStorage::multimesh_allocate_data(RID p_multimesh, int p_instances, RS:: multimesh->data_cache_used_dirty_regions = 0; } + // If we have either color or custom data, reserve space for both to make data handling logic simpler. + // This way we can always treat them both as a single, compressed uvec4. + int color_and_custom_strides = (p_use_colors || p_use_custom_data) ? 2 : 0; + multimesh->instances = p_instances; multimesh->xform_format = p_transform_format; multimesh->uses_colors = p_use_colors; multimesh->color_offset_cache = p_transform_format == RS::MULTIMESH_TRANSFORM_2D ? 8 : 12; multimesh->uses_custom_data = p_use_custom_data; - multimesh->custom_data_offset_cache = multimesh->color_offset_cache + (p_use_colors ? 2 : 0); - multimesh->stride_cache = multimesh->custom_data_offset_cache + (p_use_custom_data ? 
2 : 0); + multimesh->custom_data_offset_cache = multimesh->color_offset_cache + color_and_custom_strides; + multimesh->stride_cache = multimesh->custom_data_offset_cache + color_and_custom_strides; multimesh->buffer_set = false; multimesh->data_cache = Vector<float>(); diff --git a/drivers/gles3/storage/texture_storage.cpp b/drivers/gles3/storage/texture_storage.cpp index 649123cdca..a3f230f9e2 100644 --- a/drivers/gles3/storage/texture_storage.cpp +++ b/drivers/gles3/storage/texture_storage.cpp @@ -1192,6 +1192,9 @@ Size2 TextureStorage::texture_size_with_proxy(RID p_texture) { } } +void TextureStorage::texture_rd_initialize(RID p_texture, const RID &p_rd_texture, const RS::TextureLayeredType p_layer_type) { +} + RID TextureStorage::texture_get_rd_texture(RID p_texture, bool p_srgb) const { return RID(); } diff --git a/drivers/gles3/storage/texture_storage.h b/drivers/gles3/storage/texture_storage.h index fefcd56570..bad2b31a31 100644 --- a/drivers/gles3/storage/texture_storage.h +++ b/drivers/gles3/storage/texture_storage.h @@ -252,10 +252,10 @@ struct Texture { } Config *config = Config::get_singleton(); state_filter = p_filter; - GLenum pmin = GL_NEAREST; // param min - GLenum pmag = GL_NEAREST; // param mag - GLint max_lod = 1000; - bool use_anisotropy = false; + GLenum pmin = GL_NEAREST; + GLenum pmag = GL_NEAREST; + GLint max_lod = 0; + GLfloat anisotropy = 1.0f; switch (state_filter) { case RS::CANVAS_ITEM_TEXTURE_FILTER_NEAREST: { pmin = GL_NEAREST; @@ -268,7 +268,7 @@ struct Texture { max_lod = 0; } break; case RS::CANVAS_ITEM_TEXTURE_FILTER_NEAREST_WITH_MIPMAPS_ANISOTROPIC: { - use_anisotropy = true; + anisotropy = config->anisotropic_level; }; [[fallthrough]]; case RS::CANVAS_ITEM_TEXTURE_FILTER_NEAREST_WITH_MIPMAPS: { @@ -278,12 +278,14 @@ struct Texture { max_lod = 0; } else if (config->use_nearest_mip_filter) { pmin = GL_NEAREST_MIPMAP_NEAREST; + max_lod = 1000; } else { pmin = GL_NEAREST_MIPMAP_LINEAR; + max_lod = 1000; } } break; case RS::CANVAS_ITEM_TEXTURE_FILTER_LINEAR_WITH_MIPMAPS_ANISOTROPIC: { - use_anisotropy = true; + anisotropy = config->anisotropic_level; }; [[fallthrough]]; case RS::CANVAS_ITEM_TEXTURE_FILTER_LINEAR_WITH_MIPMAPS: { @@ -293,19 +295,22 @@ struct Texture { max_lod = 0; } else if (config->use_nearest_mip_filter) { pmin = GL_LINEAR_MIPMAP_NEAREST; + max_lod = 1000; } else { pmin = GL_LINEAR_MIPMAP_LINEAR; + max_lod = 1000; } } break; default: { + return; } break; } glTexParameteri(target, GL_TEXTURE_MIN_FILTER, pmin); glTexParameteri(target, GL_TEXTURE_MAG_FILTER, pmag); glTexParameteri(target, GL_TEXTURE_BASE_LEVEL, 0); glTexParameteri(target, GL_TEXTURE_MAX_LEVEL, max_lod); - if (config->support_anisotropic_filter && use_anisotropy) { - glTexParameterf(target, _GL_TEXTURE_MAX_ANISOTROPY_EXT, config->anisotropic_level); + if (config->support_anisotropic_filter) { + glTexParameterf(target, _GL_TEXTURE_MAX_ANISOTROPY_EXT, anisotropy); } } void gl_set_repeat(RS::CanvasItemTextureRepeat p_repeat) { @@ -313,8 +318,11 @@ struct Texture { return; } state_repeat = p_repeat; - GLenum prep = GL_CLAMP_TO_EDGE; // parameter repeat + GLenum prep = GL_CLAMP_TO_EDGE; switch (state_repeat) { + case RS::CANVAS_ITEM_TEXTURE_REPEAT_DISABLED: { + prep = GL_CLAMP_TO_EDGE; + } break; case RS::CANVAS_ITEM_TEXTURE_REPEAT_ENABLED: { prep = GL_REPEAT; } break; @@ -322,6 +330,7 @@ struct Texture { prep = GL_MIRRORED_REPEAT; } break; default: { + return; } break; } glTexParameteri(target, GL_TEXTURE_WRAP_T, prep); @@ -330,8 +339,8 @@ struct Texture { } private: - 
RS::CanvasItemTextureFilter state_filter = RS::CANVAS_ITEM_TEXTURE_FILTER_LINEAR; - RS::CanvasItemTextureRepeat state_repeat = RS::CANVAS_ITEM_TEXTURE_REPEAT_DISABLED; + RS::CanvasItemTextureFilter state_filter = RS::CANVAS_ITEM_TEXTURE_FILTER_MAX; + RS::CanvasItemTextureRepeat state_repeat = RS::CANVAS_ITEM_TEXTURE_REPEAT_MAX; }; struct RenderTarget { @@ -536,11 +545,12 @@ public: virtual Size2 texture_size_with_proxy(RID p_proxy) override; + virtual void texture_rd_initialize(RID p_texture, const RID &p_rd_texture, const RS::TextureLayeredType p_layer_type = RS::TEXTURE_LAYERED_2D_ARRAY) override; virtual RID texture_get_rd_texture(RID p_texture, bool p_srgb = false) const override; virtual uint64_t texture_get_native_handle(RID p_texture, bool p_srgb = false) const override; void texture_set_data(RID p_texture, const Ref<Image> &p_image, int p_layer = 0); - Image::Format texture_get_format(RID p_texture) const; + virtual Image::Format texture_get_format(RID p_texture) const override; uint32_t texture_get_texid(RID p_texture) const; uint32_t texture_get_width(RID p_texture) const; uint32_t texture_get_height(RID p_texture) const; diff --git a/drivers/png/resource_saver_png.cpp b/drivers/png/resource_saver_png.cpp index ab0ff32514..0df6b2ba21 100644 --- a/drivers/png/resource_saver_png.cpp +++ b/drivers/png/resource_saver_png.cpp @@ -33,7 +33,7 @@ #include "core/io/file_access.h" #include "core/io/image.h" #include "drivers/png/png_driver_common.h" -#include "scene/resources/texture.h" +#include "scene/resources/image_texture.h" Error ResourceSaverPNG::save(const Ref<Resource> &p_resource, const String &p_path, uint32_t p_flags) { Ref<ImageTexture> texture = p_resource; diff --git a/drivers/unix/file_access_unix.cpp b/drivers/unix/file_access_unix.cpp index 45f9f14dab..a80b7d449e 100644 --- a/drivers/unix/file_access_unix.cpp +++ b/drivers/unix/file_access_unix.cpp @@ -108,10 +108,7 @@ Error FileAccessUnix::open_internal(const String &p_path, int p_mode_flags) { last_error = ERR_FILE_CANT_OPEN; return last_error; } - // Fix temporary file permissions (defaults to 0600 instead of 0666 & ~umask). 
- mode_t mask = umask(022); - umask(mask); - fchmod(fd, 0666 & ~mask); + fchmod(fd, 0666); path = String::utf8(cs.ptr()); f = fdopen(fd, mode_string); diff --git a/drivers/unix/net_socket_posix.cpp b/drivers/unix/net_socket_posix.cpp index b46af012f1..a8074aa3f6 100644 --- a/drivers/unix/net_socket_posix.cpp +++ b/drivers/unix/net_socket_posix.cpp @@ -204,6 +204,9 @@ NetSocketPosix::NetError NetSocketPosix::_get_socket_error() const { if (err == WSAEACCES) { return ERR_NET_UNAUTHORIZED; } + if (err == WSAEMSGSIZE || err == WSAENOBUFS) { + return ERR_NET_BUFFER_TOO_SMALL; + } print_verbose("Socket error: " + itos(err)); return ERR_NET_OTHER; #else @@ -222,6 +225,9 @@ NetSocketPosix::NetError NetSocketPosix::_get_socket_error() const { if (errno == EACCES) { return ERR_NET_UNAUTHORIZED; } + if (errno == ENOBUFS) { + return ERR_NET_BUFFER_TOO_SMALL; + } print_verbose("Socket error: " + itos(errno)); return ERR_NET_OTHER; #endif @@ -550,6 +556,10 @@ Error NetSocketPosix::recv(uint8_t *p_buffer, int p_len, int &r_read) { return ERR_BUSY; } + if (err == ERR_NET_BUFFER_TOO_SMALL) { + return ERR_OUT_OF_MEMORY; + } + return FAILED; } @@ -571,6 +581,10 @@ Error NetSocketPosix::recvfrom(uint8_t *p_buffer, int p_len, int &r_read, IPAddr return ERR_BUSY; } + if (err == ERR_NET_BUFFER_TOO_SMALL) { + return ERR_OUT_OF_MEMORY; + } + return FAILED; } @@ -606,6 +620,9 @@ Error NetSocketPosix::send(const uint8_t *p_buffer, int p_len, int &r_sent) { if (err == ERR_NET_WOULD_BLOCK) { return ERR_BUSY; } + if (err == ERR_NET_BUFFER_TOO_SMALL) { + return ERR_OUT_OF_MEMORY; + } return FAILED; } @@ -625,6 +642,9 @@ Error NetSocketPosix::sendto(const uint8_t *p_buffer, int p_len, int &r_sent, IP if (err == ERR_NET_WOULD_BLOCK) { return ERR_BUSY; } + if (err == ERR_NET_BUFFER_TOO_SMALL) { + return ERR_OUT_OF_MEMORY; + } return FAILED; } diff --git a/drivers/unix/net_socket_posix.h b/drivers/unix/net_socket_posix.h index bd2088b4f9..2682530e15 100644 --- a/drivers/unix/net_socket_posix.h +++ b/drivers/unix/net_socket_posix.h @@ -56,6 +56,7 @@ private: ERR_NET_IN_PROGRESS, ERR_NET_ADDRESS_INVALID_OR_UNAVAILABLE, ERR_NET_UNAUTHORIZED, + ERR_NET_BUFFER_TOO_SMALL, ERR_NET_OTHER, }; diff --git a/drivers/vulkan/rendering_device_vulkan.cpp b/drivers/vulkan/rendering_device_vulkan.cpp index 23c6919854..be6f8f3580 100644 --- a/drivers/vulkan/rendering_device_vulkan.cpp +++ b/drivers/vulkan/rendering_device_vulkan.cpp @@ -54,9 +54,13 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; r_access_mask |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT; if (buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) { - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; - r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; + r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; @@ -68,8 +72,11 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID r_access_mask |= VK_ACCESS_INDEX_READ_BIT; buffer = index_buffer_owner.get_or_null(p_buffer); } else if 
(uniform_buffer_owner.owns(p_buffer)) { - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; @@ -77,8 +84,12 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID r_access_mask |= VK_ACCESS_UNIFORM_READ_BIT; buffer = uniform_buffer_owner.get_or_null(p_buffer); } else if (texture_buffer_owner.owns(p_buffer)) { - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + r_access_mask |= VK_ACCESS_SHADER_READ_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; r_access_mask |= VK_ACCESS_SHADER_READ_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { @@ -89,8 +100,12 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID buffer = &texture_buffer_owner.get_or_null(p_buffer)->buffer; } else if (storage_buffer_owner.owns(p_buffer)) { buffer = storage_buffer_owner.get_or_null(p_buffer); - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { @@ -2625,8 +2640,12 @@ Error RenderingDeviceVulkan::_texture_update(RID p_texture, uint32_t p_layer, co barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { @@ -2874,6 +2893,29 @@ bool RenderingDeviceVulkan::texture_is_valid(RID p_texture) { return texture_owner.owns(p_texture); } +RD::TextureFormat RenderingDeviceVulkan::texture_get_format(RID p_texture) { + _THREAD_SAFE_METHOD_ + + Texture *tex = texture_owner.get_or_null(p_texture); + ERR_FAIL_COND_V(!tex, TextureFormat()); + + TextureFormat tf; + + tf.format = tex->format; + tf.width = tex->width; + tf.height = tex->height; + tf.depth = tex->depth; + tf.array_layers = tex->layers; + tf.mipmaps = tex->mipmaps; + tf.texture_type = tex->type; + tf.samples = tex->samples; + tf.usage_bits = 
tex->usage_flags; + tf.shareable_formats = tex->allowed_shared_formats; + tf.is_resolve_buffer = tex->is_resolve_buffer; + + return tf; +} + Size2i RenderingDeviceVulkan::texture_size(RID p_texture) { _THREAD_SAFE_METHOD_ @@ -3020,8 +3062,12 @@ Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { @@ -3198,8 +3244,12 @@ Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { @@ -3334,8 +3384,12 @@ Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; + access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; + } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { @@ -7651,10 +7705,14 @@ void RenderingDeviceVulkan::draw_list_end(BitField<BarrierMask> p_post_barrier) barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT /*| VK_ACCESS_INDIRECT_COMMAND_READ_BIT*/; } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + 
barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/; + access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT /*| VK_ACCESS_INDIRECT_COMMAND_READ_BIT*/; + } if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT; @@ -7731,6 +7789,8 @@ void RenderingDeviceVulkan::draw_list_end(BitField<BarrierMask> p_post_barrier) /***********************/ RenderingDevice::ComputeListID RenderingDeviceVulkan::compute_list_begin(bool p_allow_draw_overlap) { + _THREAD_SAFE_METHOD_ + ERR_FAIL_COND_V_MSG(!p_allow_draw_overlap && draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time."); ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time."); @@ -7745,6 +7805,8 @@ RenderingDevice::ComputeListID RenderingDeviceVulkan::compute_list_begin(bool p_ } void RenderingDeviceVulkan::compute_list_bind_compute_pipeline(ComputeListID p_list, RID p_compute_pipeline) { + // Must be called within a compute list, the class mutex is locked during that time + ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST); ERR_FAIL_COND(!compute_list); @@ -7809,6 +7871,8 @@ void RenderingDeviceVulkan::compute_list_bind_compute_pipeline(ComputeListID p_l } void RenderingDeviceVulkan::compute_list_bind_uniform_set(ComputeListID p_list, RID p_uniform_set, uint32_t p_index) { + // Must be called within a compute list, the class mutex is locked during that time + ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST); ERR_FAIL_COND(!compute_list); @@ -7983,6 +8047,8 @@ void RenderingDeviceVulkan::compute_list_set_push_constant(ComputeListID p_list, } void RenderingDeviceVulkan::compute_list_dispatch(ComputeListID p_list, uint32_t p_x_groups, uint32_t p_y_groups, uint32_t p_z_groups) { + // Must be called within a compute list, the class mutex is locked during that time + ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST); ERR_FAIL_COND(!compute_list); @@ -8126,6 +8192,8 @@ void RenderingDeviceVulkan::compute_list_dispatch_indirect(ComputeListID p_list, } void RenderingDeviceVulkan::compute_list_add_barrier(ComputeListID p_list) { + // Must be called within a compute list, the class mutex is locked during that time + uint32_t barrier_flags = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; uint32_t access_flags = VK_ACCESS_SHADER_READ_BIT; _compute_list_add_barrier(BARRIER_MASK_COMPUTE, barrier_flags, access_flags); @@ -8199,10 +8267,14 @@ void RenderingDeviceVulkan::compute_list_end(BitField<BarrierMask> p_post_barrie barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { - barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; + if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) { + barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT; } + if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) { + barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; + access_flags |= 
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT; + } if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT; @@ -8227,7 +8299,7 @@ void RenderingDeviceVulkan::barrier(BitField<BarrierMask> p_from, BitField<Barri src_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; src_access_flags |= VK_ACCESS_SHADER_WRITE_BIT; } - if (p_from.has_flag(BARRIER_MASK_RASTER)) { + if (p_from.has_flag(BARRIER_MASK_FRAGMENT)) { src_barrier_flags |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; src_access_flags |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; } @@ -8247,10 +8319,14 @@ void RenderingDeviceVulkan::barrier(BitField<BarrierMask> p_from, BitField<Barri dst_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_to.has_flag(BARRIER_MASK_RASTER)) { - dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; + if (p_to.has_flag(BARRIER_MASK_VERTEX)) { + dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT; } + if (p_to.has_flag(BARRIER_MASK_FRAGMENT)) { + dst_barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; + dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT; + } if (p_to.has_flag(BARRIER_MASK_TRANSFER)) { dst_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; dst_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT; @@ -8606,6 +8682,8 @@ void RenderingDeviceVulkan::swap_buffers() { } void RenderingDeviceVulkan::submit() { + _THREAD_SAFE_METHOD_ + ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync."); ERR_FAIL_COND_MSG(local_device_processing, "device already submitted, call sync to wait until done."); @@ -8617,6 +8695,8 @@ void RenderingDeviceVulkan::submit() { } void RenderingDeviceVulkan::sync() { + _THREAD_SAFE_METHOD_ + ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync."); ERR_FAIL_COND_MSG(!local_device_processing, "sync can only be called after a submit"); diff --git a/drivers/vulkan/rendering_device_vulkan.h b/drivers/vulkan/rendering_device_vulkan.h index 9c621c1d44..010f7c9337 100644 --- a/drivers/vulkan/rendering_device_vulkan.h +++ b/drivers/vulkan/rendering_device_vulkan.h @@ -1082,6 +1082,7 @@ public: virtual bool texture_is_format_supported_for_usage(DataFormat p_format, BitField<RenderingDevice::TextureUsageBits> p_usage) const; virtual bool texture_is_shared(RID p_texture); virtual bool texture_is_valid(RID p_texture); + virtual TextureFormat texture_get_format(RID p_texture); virtual Size2i texture_size(RID p_texture); virtual uint64_t texture_get_native_handle(RID p_texture); diff --git a/drivers/vulkan/vulkan_context.cpp b/drivers/vulkan/vulkan_context.cpp index 7c52447e44..3a1330b331 100644 --- a/drivers/vulkan/vulkan_context.cpp +++ b/drivers/vulkan/vulkan_context.cpp @@ -1775,6 +1775,7 @@ 
Error VulkanContext::_clean_up_swap_chain(Window *window) {
 	fpDestroySwapchainKHR(device, window->swapchain, nullptr);
 	window->swapchain = VK_NULL_HANDLE;
 	vkDestroyRenderPass(device, window->render_pass, nullptr);
+	window->render_pass = VK_NULL_HANDLE;
 	if (window->swapchain_image_resources) {
 		for (uint32_t i = 0; i < swapchainImageCount; i++) {
 			vkDestroyImageView(device, window->swapchain_image_resources[i].view, nullptr);
@@ -1783,6 +1784,7 @@ Error VulkanContext::_clean_up_swap_chain(Window *window) {
 		free(window->swapchain_image_resources);
 		window->swapchain_image_resources = nullptr;
+		swapchainImageCount = 0;
 	}
 	if (separate_present_queue) {
 		vkDestroyCommandPool(device, window->present_cmd_pool, nullptr);
