summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorRémi Verschelde <rverschelde@gmail.com>2024-03-10 21:13:18 +0100
committerRémi Verschelde <rverschelde@gmail.com>2024-03-10 21:13:18 +0100
commit53701a02341eef7ec3ebca69b673d31d58760e45 (patch)
tree0a8f0d6c04955b6d66c088d4a501075bd8bfd385
parentaf527e53c450eb957bfa6a5446a095b190ebcae9 (diff)
parentfb299d0fb134c603eafe7737bab8d22ec0b1cd59 (diff)
downloadredot-engine-53701a02341eef7ec3ebca69b673d31d58760e45.tar.gz
Merge pull request #89361 from Repiteo/scons/with-statement
SCons: Ensure `with` statement where applicable
-rw-r--r--core/core_builders.py172
-rw-r--r--core/extension/make_interface_dumper.py40
-rw-r--r--core/input/input_builders.py94
-rwxr-xr-xdoc/tools/make_rst.py878
-rw-r--r--editor/SCsub20
-rw-r--r--editor/editor_builders.py185
-rw-r--r--editor/icons/editor_icons_builders.py133
-rw-r--r--editor/template_builders.py62
-rw-r--r--editor/themes/editor_theme_builders.py33
-rw-r--r--gles3_builders.py1007
-rw-r--r--glsl_builders.py144
-rw-r--r--methods.py106
-rwxr-xr-xmisc/scripts/check_ci_log.py4
-rwxr-xr-xmisc/scripts/copyright_headers.py37
-rwxr-xr-xmodules/mono/build_scripts/build_assemblies.py2
-rw-r--r--modules/text_server_adv/SCsub41
-rw-r--r--modules/text_server_adv/gdextension_build/methods.py92
-rw-r--r--modules/text_server_fb/gdextension_build/methods.py92
-rw-r--r--platform_methods.py16
-rw-r--r--scene/theme/default_theme_builders.py33
-rw-r--r--scene/theme/icons/default_theme_icons_builders.py79
21 files changed, 1610 insertions, 1660 deletions
diff --git a/core/core_builders.py b/core/core_builders.py
index b941d6a272..2f92d8474c 100644
--- a/core/core_builders.py
+++ b/core/core_builders.py
@@ -31,35 +31,31 @@ def escape_string(s):
def make_certs_header(target, source, env):
src = source[0]
dst = target[0]
- f = open(src, "rb")
- g = open(dst, "w", encoding="utf-8", newline="\n")
- buf = f.read()
- decomp_size = len(buf)
-
- # Use maximum zlib compression level to further reduce file size
- # (at the cost of initial build times).
- buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
-
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#ifndef CERTS_COMPRESSED_GEN_H\n")
- g.write("#define CERTS_COMPRESSED_GEN_H\n")
-
- # System certs path. Editor will use them if defined. (for package maintainers)
- path = env["system_certs_path"]
- g.write('#define _SYSTEM_CERTS_PATH "%s"\n' % str(path))
- if env["builtin_certs"]:
- # Defined here and not in env so changing it does not trigger a full rebuild.
- g.write("#define BUILTIN_CERTS_ENABLED\n")
- g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n")
- g.write("static const int _certs_uncompressed_size = " + str(decomp_size) + ";\n")
- g.write("static const unsigned char _certs_compressed[] = {\n")
- for i in range(len(buf)):
- g.write("\t" + str(buf[i]) + ",\n")
- g.write("};\n")
- g.write("#endif // CERTS_COMPRESSED_GEN_H")
-
- g.close()
- f.close()
+ with open(src, "rb") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+ buf = f.read()
+ decomp_size = len(buf)
+
+ # Use maximum zlib compression level to further reduce file size
+ # (at the cost of initial build times).
+ buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
+
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("#ifndef CERTS_COMPRESSED_GEN_H\n")
+ g.write("#define CERTS_COMPRESSED_GEN_H\n")
+
+ # System certs path. Editor will use them if defined. (for package maintainers)
+ path = env["system_certs_path"]
+ g.write('#define _SYSTEM_CERTS_PATH "%s"\n' % str(path))
+ if env["builtin_certs"]:
+ # Defined here and not in env so changing it does not trigger a full rebuild.
+ g.write("#define BUILTIN_CERTS_ENABLED\n")
+ g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n")
+ g.write("static const int _certs_uncompressed_size = " + str(decomp_size) + ";\n")
+ g.write("static const unsigned char _certs_compressed[] = {\n")
+ for i in range(len(buf)):
+ g.write("\t" + str(buf[i]) + ",\n")
+ g.write("};\n")
+ g.write("#endif // CERTS_COMPRESSED_GEN_H")
def make_authors_header(target, source, env):
@@ -78,42 +74,37 @@ def make_authors_header(target, source, env):
src = source[0]
dst = target[0]
- f = open(src, "r", encoding="utf-8")
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(src, "r", encoding="utf-8") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("#ifndef AUTHORS_GEN_H\n")
+ g.write("#define AUTHORS_GEN_H\n")
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#ifndef AUTHORS_GEN_H\n")
- g.write("#define AUTHORS_GEN_H\n")
+ reading = False
- reading = False
+ def close_section():
+ g.write("\t0\n")
+ g.write("};\n")
- def close_section():
- g.write("\t0\n")
- g.write("};\n")
-
- for line in f:
- if reading:
- if line.startswith(" "):
- g.write('\t"' + escape_string(line.strip()) + '",\n')
- continue
- if line.startswith("## "):
+ for line in f:
if reading:
- close_section()
- reading = False
- for section, section_id in zip(sections, sections_id):
- if line.strip().endswith(section):
- current_section = escape_string(section_id)
- reading = True
- g.write("const char *const " + current_section + "[] = {\n")
- break
+ if line.startswith(" "):
+ g.write('\t"' + escape_string(line.strip()) + '",\n')
+ continue
+ if line.startswith("## "):
+ if reading:
+ close_section()
+ reading = False
+ for section, section_id in zip(sections, sections_id):
+ if line.strip().endswith(section):
+ current_section = escape_string(section_id)
+ reading = True
+ g.write("const char *const " + current_section + "[] = {\n")
+ break
- if reading:
- close_section()
-
- g.write("#endif // AUTHORS_GEN_H\n")
+ if reading:
+ close_section()
- g.close()
- f.close()
+ g.write("#endif // AUTHORS_GEN_H\n")
def make_donors_header(target, source, env):
@@ -140,42 +131,37 @@ def make_donors_header(target, source, env):
src = source[0]
dst = target[0]
- f = open(src, "r", encoding="utf-8")
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(src, "r", encoding="utf-8") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("#ifndef DONORS_GEN_H\n")
+ g.write("#define DONORS_GEN_H\n")
+
+ reading = False
+
+ def close_section():
+ g.write("\t0\n")
+ g.write("};\n")
+
+ for line in f:
+ if reading >= 0:
+ if line.startswith(" "):
+ g.write('\t"' + escape_string(line.strip()) + '",\n')
+ continue
+ if line.startswith("## "):
+ if reading:
+ close_section()
+ reading = False
+ for section, section_id in zip(sections, sections_id):
+ if line.strip().endswith(section):
+ current_section = escape_string(section_id)
+ reading = True
+ g.write("const char *const " + current_section + "[] = {\n")
+ break
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#ifndef DONORS_GEN_H\n")
- g.write("#define DONORS_GEN_H\n")
-
- reading = False
-
- def close_section():
- g.write("\t0\n")
- g.write("};\n")
+ if reading:
+ close_section()
- for line in f:
- if reading >= 0:
- if line.startswith(" "):
- g.write('\t"' + escape_string(line.strip()) + '",\n')
- continue
- if line.startswith("## "):
- if reading:
- close_section()
- reading = False
- for section, section_id in zip(sections, sections_id):
- if line.strip().endswith(section):
- current_section = escape_string(section_id)
- reading = True
- g.write("const char *const " + current_section + "[] = {\n")
- break
-
- if reading:
- close_section()
-
- g.write("#endif // DONORS_GEN_H\n")
-
- g.close()
- f.close()
+ g.write("#endif // DONORS_GEN_H\n")
def make_license_header(target, source, env):
diff --git a/core/extension/make_interface_dumper.py b/core/extension/make_interface_dumper.py
index f5662bdbbb..87f9a71522 100644
--- a/core/extension/make_interface_dumper.py
+++ b/core/extension/make_interface_dumper.py
@@ -4,18 +4,16 @@ import zlib
def run(target, source, env):
src = source[0]
dst = target[0]
- f = open(src, "rb")
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(src, "rb") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+ buf = f.read()
+ decomp_size = len(buf)
- buf = f.read()
- decomp_size = len(buf)
+ # Use maximum zlib compression level to further reduce file size
+ # (at the cost of initial build times).
+ buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
- # Use maximum zlib compression level to further reduce file size
- # (at the cost of initial build times).
- buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
-
- g.write(
- """/* THIS FILE IS GENERATED DO NOT EDIT */
+ g.write(
+ """/* THIS FILE IS GENERATED DO NOT EDIT */
#ifndef GDEXTENSION_INTERFACE_DUMP_H
#define GDEXTENSION_INTERFACE_DUMP_H
@@ -26,17 +24,17 @@ def run(target, source, env):
#include "core/string/ustring.h"
"""
- )
+ )
- g.write("static const int _gdextension_interface_data_compressed_size = " + str(len(buf)) + ";\n")
- g.write("static const int _gdextension_interface_data_uncompressed_size = " + str(decomp_size) + ";\n")
- g.write("static const unsigned char _gdextension_interface_data_compressed[] = {\n")
- for i in range(len(buf)):
- g.write("\t" + str(buf[i]) + ",\n")
- g.write("};\n")
+ g.write("static const int _gdextension_interface_data_compressed_size = " + str(len(buf)) + ";\n")
+ g.write("static const int _gdextension_interface_data_uncompressed_size = " + str(decomp_size) + ";\n")
+ g.write("static const unsigned char _gdextension_interface_data_compressed[] = {\n")
+ for i in range(len(buf)):
+ g.write("\t" + str(buf[i]) + ",\n")
+ g.write("};\n")
- g.write(
- """
+ g.write(
+ """
class GDExtensionInterfaceDump {
public:
static void generate_gdextension_interface_file(const String &p_path) {
@@ -54,9 +52,7 @@ class GDExtensionInterfaceDump {
#endif // GDEXTENSION_INTERFACE_DUMP_H
"""
- )
- g.close()
- f.close()
+ )
if __name__ == "__main__":
diff --git a/core/input/input_builders.py b/core/input/input_builders.py
index 71238d6003..cc5e85897d 100644
--- a/core/input/input_builders.py
+++ b/core/input/input_builders.py
@@ -9,60 +9,58 @@ from collections import OrderedDict
def make_default_controller_mappings(target, source, env):
dst = target[0]
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write('#include "core/typedefs.h"\n')
+ g.write('#include "core/input/default_controller_mappings.h"\n')
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write('#include "core/typedefs.h"\n')
- g.write('#include "core/input/default_controller_mappings.h"\n')
+ # ensure mappings have a consistent order
+ platform_mappings: dict = OrderedDict()
+ for src_path in source:
+ with open(src_path, "r") as f:
+ # read mapping file and skip header
+ mapping_file_lines = f.readlines()[2:]
- # ensure mappings have a consistent order
- platform_mappings: dict = OrderedDict()
- for src_path in source:
- with open(src_path, "r") as f:
- # read mapping file and skip header
- mapping_file_lines = f.readlines()[2:]
-
- current_platform = None
- for line in mapping_file_lines:
- if not line:
- continue
- line = line.strip()
- if len(line) == 0:
- continue
- if line[0] == "#":
- current_platform = line[1:].strip()
- if current_platform not in platform_mappings:
- platform_mappings[current_platform] = {}
- elif current_platform:
- line_parts = line.split(",")
- guid = line_parts[0]
- if guid in platform_mappings[current_platform]:
- g.write(
- "// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(
- src_path, current_platform, platform_mappings[current_platform][guid]
+ current_platform = None
+ for line in mapping_file_lines:
+ if not line:
+ continue
+ line = line.strip()
+ if len(line) == 0:
+ continue
+ if line[0] == "#":
+ current_platform = line[1:].strip()
+ if current_platform not in platform_mappings:
+ platform_mappings[current_platform] = {}
+ elif current_platform:
+ line_parts = line.split(",")
+ guid = line_parts[0]
+ if guid in platform_mappings[current_platform]:
+ g.write(
+ "// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(
+ src_path, current_platform, platform_mappings[current_platform][guid]
+ )
)
- )
- platform_mappings[current_platform][guid] = line
+ platform_mappings[current_platform][guid] = line
- platform_variables = {
- "Linux": "#ifdef LINUXBSD_ENABLED",
- "Windows": "#ifdef WINDOWS_ENABLED",
- "Mac OS X": "#ifdef MACOS_ENABLED",
- "Android": "#ifdef ANDROID_ENABLED",
- "iOS": "#ifdef IOS_ENABLED",
- "Web": "#ifdef WEB_ENABLED",
- }
+ platform_variables = {
+ "Linux": "#ifdef LINUXBSD_ENABLED",
+ "Windows": "#ifdef WINDOWS_ENABLED",
+ "Mac OS X": "#ifdef MACOS_ENABLED",
+ "Android": "#ifdef ANDROID_ENABLED",
+ "iOS": "#ifdef IOS_ENABLED",
+ "Web": "#ifdef WEB_ENABLED",
+ }
- g.write("const char* DefaultControllerMappings::mappings[] = {\n")
- for platform, mappings in platform_mappings.items():
- variable = platform_variables[platform]
- g.write("{}\n".format(variable))
- for mapping in mappings.values():
- g.write('\t"{}",\n'.format(mapping))
- g.write("#endif\n")
+ g.write("const char* DefaultControllerMappings::mappings[] = {\n")
+ for platform, mappings in platform_mappings.items():
+ variable = platform_variables[platform]
+ g.write("{}\n".format(variable))
+ for mapping in mappings.values():
+ g.write('\t"{}",\n'.format(mapping))
+ g.write("#endif\n")
- g.write("\tnullptr\n};\n")
- g.close()
+ g.write("\tnullptr\n};\n")
if __name__ == "__main__":
diff --git a/doc/tools/make_rst.py b/doc/tools/make_rst.py
index 51d67d3456..e10bc3477b 100755
--- a/doc/tools/make_rst.py
+++ b/doc/tools/make_rst.py
@@ -889,568 +889,570 @@ def get_git_branch() -> str:
def make_rst_class(class_def: ClassDef, state: State, dry_run: bool, output_dir: str) -> None:
class_name = class_def.name
+ with open(
+ os.devnull if dry_run else os.path.join(output_dir, f"class_{class_name.lower()}.rst"),
+ "w",
+ encoding="utf-8",
+ newline="\n",
+ ) as f:
+ # Remove the "Edit on Github" button from the online docs page.
+ f.write(":github_url: hide\n\n")
+
+ # Add keywords metadata.
+ if class_def.keywords is not None and class_def.keywords != "":
+ f.write(f".. meta::\n\t:keywords: {class_def.keywords}\n\n")
+
+ # Warn contributors not to edit this file directly.
+ # Also provide links to the source files for reference.
+
+ git_branch = get_git_branch()
+ source_xml_path = os.path.relpath(class_def.filepath, root_directory).replace("\\", "/")
+ source_github_url = f"https://github.com/godotengine/godot/tree/{git_branch}/{source_xml_path}"
+ generator_github_url = f"https://github.com/godotengine/godot/tree/{git_branch}/doc/tools/make_rst.py"
+
+ f.write(".. DO NOT EDIT THIS FILE!!!\n")
+ f.write(".. Generated automatically from Godot engine sources.\n")
+ f.write(f".. Generator: {generator_github_url}.\n")
+ f.write(f".. XML source: {source_github_url}.\n\n")
+
+ # Document reference id and header.
+ f.write(f".. _class_{class_name}:\n\n")
+ f.write(make_heading(class_name, "=", False))
+
+ f.write(make_deprecated_experimental(class_def, state))
+
+ ### INHERITANCE TREE ###
+
+ # Ascendants
+ if class_def.inherits:
+ inherits = class_def.inherits.strip()
+ f.write(f'**{translate("Inherits:")}** ')
+ first = True
+ while inherits in state.classes:
+ if not first:
+ f.write(" **<** ")
+ else:
+ first = False
- if dry_run:
- f = open(os.devnull, "w", encoding="utf-8", newline="\n")
- else:
- f = open(os.path.join(output_dir, f"class_{class_name.lower()}.rst"), "w", encoding="utf-8", newline="\n")
-
- # Remove the "Edit on Github" button from the online docs page.
- f.write(":github_url: hide\n\n")
-
- # Add keywords metadata.
- if class_def.keywords is not None and class_def.keywords != "":
- f.write(f".. meta::\n\t:keywords: {class_def.keywords}\n\n")
+ f.write(make_type(inherits, state))
+ inode = state.classes[inherits].inherits
+ if inode:
+ inherits = inode.strip()
+ else:
+ break
+ f.write("\n\n")
- # Warn contributors not to edit this file directly.
- # Also provide links to the source files for reference.
+ # Descendants
+ inherited: List[str] = []
+ for c in state.classes.values():
+ if c.inherits and c.inherits.strip() == class_name:
+ inherited.append(c.name)
+
+ if len(inherited):
+ f.write(f'**{translate("Inherited By:")}** ')
+ for i, child in enumerate(inherited):
+ if i > 0:
+ f.write(", ")
+ f.write(make_type(child, state))
+ f.write("\n\n")
- git_branch = get_git_branch()
- source_xml_path = os.path.relpath(class_def.filepath, root_directory).replace("\\", "/")
- source_github_url = f"https://github.com/godotengine/godot/tree/{git_branch}/{source_xml_path}"
- generator_github_url = f"https://github.com/godotengine/godot/tree/{git_branch}/doc/tools/make_rst.py"
+ ### INTRODUCTION ###
- f.write(".. DO NOT EDIT THIS FILE!!!\n")
- f.write(".. Generated automatically from Godot engine sources.\n")
- f.write(f".. Generator: {generator_github_url}.\n")
- f.write(f".. XML source: {source_github_url}.\n\n")
+ has_description = False
- # Document reference id and header.
- f.write(f".. _class_{class_name}:\n\n")
- f.write(make_heading(class_name, "=", False))
+ # Brief description
+ if class_def.brief_description is not None and class_def.brief_description.strip() != "":
+ has_description = True
- f.write(make_deprecated_experimental(class_def, state))
+ f.write(f"{format_text_block(class_def.brief_description.strip(), class_def, state)}\n\n")
- ### INHERITANCE TREE ###
+ # Class description
+ if class_def.description is not None and class_def.description.strip() != "":
+ has_description = True
- # Ascendants
- if class_def.inherits:
- inherits = class_def.inherits.strip()
- f.write(f'**{translate("Inherits:")}** ')
- first = True
- while inherits in state.classes:
- if not first:
- f.write(" **<** ")
- else:
- first = False
+ f.write(".. rst-class:: classref-introduction-group\n\n")
+ f.write(make_heading("Description", "-"))
- f.write(make_type(inherits, state))
- inode = state.classes[inherits].inherits
- if inode:
- inherits = inode.strip()
- else:
- break
- f.write("\n\n")
+ f.write(f"{format_text_block(class_def.description.strip(), class_def, state)}\n\n")
- # Descendants
- inherited: List[str] = []
- for c in state.classes.values():
- if c.inherits and c.inherits.strip() == class_name:
- inherited.append(c.name)
+ if not has_description:
+ f.write(".. container:: contribute\n\n\t")
+ f.write(
+ translate(
+ "There is currently no description for this class. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ )
+ + "\n\n"
+ )
- if len(inherited):
- f.write(f'**{translate("Inherited By:")}** ')
- for i, child in enumerate(inherited):
- if i > 0:
- f.write(", ")
- f.write(make_type(child, state))
- f.write("\n\n")
+ if class_def.name in CLASSES_WITH_CSHARP_DIFFERENCES:
+ f.write(".. note::\n\n\t")
+ f.write(
+ translate(
+ "There are notable differences when using this API with C#. See :ref:`doc_c_sharp_differences` for more information."
+ )
+ + "\n\n"
+ )
- ### INTRODUCTION ###
+ # Online tutorials
+ if len(class_def.tutorials) > 0:
+ f.write(".. rst-class:: classref-introduction-group\n\n")
+ f.write(make_heading("Tutorials", "-"))
- has_description = False
+ for url, title in class_def.tutorials:
+ f.write(f"- {make_link(url, title)}\n\n")
- # Brief description
- if class_def.brief_description is not None and class_def.brief_description.strip() != "":
- has_description = True
+ ### REFERENCE TABLES ###
- f.write(f"{format_text_block(class_def.brief_description.strip(), class_def, state)}\n\n")
+ # Reused container for reference tables.
+ ml: List[Tuple[Optional[str], ...]] = []
- # Class description
- if class_def.description is not None and class_def.description.strip() != "":
- has_description = True
+ # Properties reference table
+ if len(class_def.properties) > 0:
+ f.write(".. rst-class:: classref-reftable-group\n\n")
+ f.write(make_heading("Properties", "-"))
- f.write(".. rst-class:: classref-introduction-group\n\n")
- f.write(make_heading("Description", "-"))
+ ml = []
+ for property_def in class_def.properties.values():
+ type_rst = property_def.type_name.to_rst(state)
+ default = property_def.default_value
+ if default is not None and property_def.overrides:
+ ref = (
+ f":ref:`{property_def.overrides}<class_{property_def.overrides}_property_{property_def.name}>`"
+ )
+ # Not using translate() for now as it breaks table formatting.
+ ml.append((type_rst, property_def.name, f"{default} (overrides {ref})"))
+ else:
+ ref = f":ref:`{property_def.name}<class_{class_name}_property_{property_def.name}>`"
+ ml.append((type_rst, ref, default))
- f.write(f"{format_text_block(class_def.description.strip(), class_def, state)}\n\n")
+ format_table(f, ml, True)
- if not has_description:
- f.write(".. container:: contribute\n\n\t")
- f.write(
- translate(
- "There is currently no description for this class. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
- )
- + "\n\n"
- )
+ # Constructors, Methods, Operators reference tables
+ if len(class_def.constructors) > 0:
+ f.write(".. rst-class:: classref-reftable-group\n\n")
+ f.write(make_heading("Constructors", "-"))
- if class_def.name in CLASSES_WITH_CSHARP_DIFFERENCES:
- f.write(".. note::\n\n\t")
- f.write(
- translate(
- "There are notable differences when using this API with C#. See :ref:`doc_c_sharp_differences` for more information."
- )
- + "\n\n"
- )
+ ml = []
+ for method_list in class_def.constructors.values():
+ for m in method_list:
+ ml.append(make_method_signature(class_def, m, "constructor", state))
- # Online tutorials
- if len(class_def.tutorials) > 0:
- f.write(".. rst-class:: classref-introduction-group\n\n")
- f.write(make_heading("Tutorials", "-"))
+ format_table(f, ml)
- for url, title in class_def.tutorials:
- f.write(f"- {make_link(url, title)}\n\n")
+ if len(class_def.methods) > 0:
+ f.write(".. rst-class:: classref-reftable-group\n\n")
+ f.write(make_heading("Methods", "-"))
- ### REFERENCE TABLES ###
+ ml = []
+ for method_list in class_def.methods.values():
+ for m in method_list:
+ ml.append(make_method_signature(class_def, m, "method", state))
- # Reused container for reference tables.
- ml: List[Tuple[Optional[str], ...]] = []
+ format_table(f, ml)
- # Properties reference table
- if len(class_def.properties) > 0:
- f.write(".. rst-class:: classref-reftable-group\n\n")
- f.write(make_heading("Properties", "-"))
+ if len(class_def.operators) > 0:
+ f.write(".. rst-class:: classref-reftable-group\n\n")
+ f.write(make_heading("Operators", "-"))
- ml = []
- for property_def in class_def.properties.values():
- type_rst = property_def.type_name.to_rst(state)
- default = property_def.default_value
- if default is not None and property_def.overrides:
- ref = f":ref:`{property_def.overrides}<class_{property_def.overrides}_property_{property_def.name}>`"
- # Not using translate() for now as it breaks table formatting.
- ml.append((type_rst, property_def.name, f"{default} (overrides {ref})"))
- else:
- ref = f":ref:`{property_def.name}<class_{class_name}_property_{property_def.name}>`"
- ml.append((type_rst, ref, default))
+ ml = []
+ for method_list in class_def.operators.values():
+ for m in method_list:
+ ml.append(make_method_signature(class_def, m, "operator", state))
- format_table(f, ml, True)
+ format_table(f, ml)
- # Constructors, Methods, Operators reference tables
- if len(class_def.constructors) > 0:
- f.write(".. rst-class:: classref-reftable-group\n\n")
- f.write(make_heading("Constructors", "-"))
+ # Theme properties reference table
+ if len(class_def.theme_items) > 0:
+ f.write(".. rst-class:: classref-reftable-group\n\n")
+ f.write(make_heading("Theme Properties", "-"))
- ml = []
- for method_list in class_def.constructors.values():
- for m in method_list:
- ml.append(make_method_signature(class_def, m, "constructor", state))
+ ml = []
+ for theme_item_def in class_def.theme_items.values():
+ ref = f":ref:`{theme_item_def.name}<class_{class_name}_theme_{theme_item_def.data_name}_{theme_item_def.name}>`"
+ ml.append((theme_item_def.type_name.to_rst(state), ref, theme_item_def.default_value))
- format_table(f, ml)
+ format_table(f, ml, True)
- if len(class_def.methods) > 0:
- f.write(".. rst-class:: classref-reftable-group\n\n")
- f.write(make_heading("Methods", "-"))
+ ### DETAILED DESCRIPTIONS ###
- ml = []
- for method_list in class_def.methods.values():
- for m in method_list:
- ml.append(make_method_signature(class_def, m, "method", state))
+ # Signal descriptions
+ if len(class_def.signals) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Signals", "-"))
- format_table(f, ml)
+ index = 0
- if len(class_def.operators) > 0:
- f.write(".. rst-class:: classref-reftable-group\n\n")
- f.write(make_heading("Operators", "-"))
+ for signal in class_def.signals.values():
+ if index != 0:
+ f.write(make_separator())
- ml = []
- for method_list in class_def.operators.values():
- for m in method_list:
- ml.append(make_method_signature(class_def, m, "operator", state))
+ # Create signal signature and anchor point.
- format_table(f, ml)
+ f.write(f".. _class_{class_name}_signal_{signal.name}:\n\n")
+ f.write(".. rst-class:: classref-signal\n\n")
- # Theme properties reference table
- if len(class_def.theme_items) > 0:
- f.write(".. rst-class:: classref-reftable-group\n\n")
- f.write(make_heading("Theme Properties", "-"))
+ _, signature = make_method_signature(class_def, signal, "", state)
+ f.write(f"{signature}\n\n")
- ml = []
- for theme_item_def in class_def.theme_items.values():
- ref = f":ref:`{theme_item_def.name}<class_{class_name}_theme_{theme_item_def.data_name}_{theme_item_def.name}>`"
- ml.append((theme_item_def.type_name.to_rst(state), ref, theme_item_def.default_value))
+ # Add signal description, or a call to action if it's missing.
- format_table(f, ml, True)
+ f.write(make_deprecated_experimental(signal, state))
- ### DETAILED DESCRIPTIONS ###
+ if signal.description is not None and signal.description.strip() != "":
+ f.write(f"{format_text_block(signal.description.strip(), signal, state)}\n\n")
+ elif signal.deprecated is None and signal.experimental is None:
+ f.write(".. container:: contribute\n\n\t")
+ f.write(
+ translate(
+ "There is currently no description for this signal. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ )
+ + "\n\n"
+ )
- # Signal descriptions
- if len(class_def.signals) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Signals", "-"))
+ index += 1
- index = 0
+ # Enumeration descriptions
+ if len(class_def.enums) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Enumerations", "-"))
- for signal in class_def.signals.values():
- if index != 0:
- f.write(make_separator())
+ index = 0
- # Create signal signature and anchor point.
+ for e in class_def.enums.values():
+ if index != 0:
+ f.write(make_separator())
- f.write(f".. _class_{class_name}_signal_{signal.name}:\n\n")
- f.write(".. rst-class:: classref-signal\n\n")
+ # Create enumeration signature and anchor point.
- _, signature = make_method_signature(class_def, signal, "", state)
- f.write(f"{signature}\n\n")
+ f.write(f".. _enum_{class_name}_{e.name}:\n\n")
+ f.write(".. rst-class:: classref-enumeration\n\n")
- # Add signal description, or a call to action if it's missing.
+ if e.is_bitfield:
+ f.write(f"flags **{e.name}**:\n\n")
+ else:
+ f.write(f"enum **{e.name}**:\n\n")
- f.write(make_deprecated_experimental(signal, state))
+ for value in e.values.values():
+ # Also create signature and anchor point for each enum constant.
- if signal.description is not None and signal.description.strip() != "":
- f.write(f"{format_text_block(signal.description.strip(), signal, state)}\n\n")
- elif signal.deprecated is None and signal.experimental is None:
- f.write(".. container:: contribute\n\n\t")
- f.write(
- translate(
- "There is currently no description for this signal. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
- )
- + "\n\n"
- )
+ f.write(f".. _class_{class_name}_constant_{value.name}:\n\n")
+ f.write(".. rst-class:: classref-enumeration-constant\n\n")
- index += 1
+ f.write(f"{e.type_name.to_rst(state)} **{value.name}** = ``{value.value}``\n\n")
- # Enumeration descriptions
- if len(class_def.enums) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Enumerations", "-"))
+ # Add enum constant description.
- index = 0
+ f.write(make_deprecated_experimental(value, state))
- for e in class_def.enums.values():
- if index != 0:
- f.write(make_separator())
+ if value.text is not None and value.text.strip() != "":
+ f.write(f"{format_text_block(value.text.strip(), value, state)}")
+ elif value.deprecated is None and value.experimental is None:
+ f.write(".. container:: contribute\n\n\t")
+ f.write(
+ translate(
+ "There is currently no description for this enum. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ )
+ + "\n\n"
+ )
- # Create enumeration signature and anchor point.
+ f.write("\n\n")
- f.write(f".. _enum_{class_name}_{e.name}:\n\n")
- f.write(".. rst-class:: classref-enumeration\n\n")
+ index += 1
- if e.is_bitfield:
- f.write(f"flags **{e.name}**:\n\n")
- else:
- f.write(f"enum **{e.name}**:\n\n")
+ # Constant descriptions
+ if len(class_def.constants) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Constants", "-"))
- for value in e.values.values():
- # Also create signature and anchor point for each enum constant.
+ for constant in class_def.constants.values():
+ # Create constant signature and anchor point.
- f.write(f".. _class_{class_name}_constant_{value.name}:\n\n")
- f.write(".. rst-class:: classref-enumeration-constant\n\n")
+ f.write(f".. _class_{class_name}_constant_{constant.name}:\n\n")
+ f.write(".. rst-class:: classref-constant\n\n")
- f.write(f"{e.type_name.to_rst(state)} **{value.name}** = ``{value.value}``\n\n")
+ f.write(f"**{constant.name}** = ``{constant.value}``\n\n")
- # Add enum constant description.
+ # Add constant description.
- f.write(make_deprecated_experimental(value, state))
+ f.write(make_deprecated_experimental(constant, state))
- if value.text is not None and value.text.strip() != "":
- f.write(f"{format_text_block(value.text.strip(), value, state)}")
- elif value.deprecated is None and value.experimental is None:
+ if constant.text is not None and constant.text.strip() != "":
+ f.write(f"{format_text_block(constant.text.strip(), constant, state)}")
+ elif constant.deprecated is None and constant.experimental is None:
f.write(".. container:: contribute\n\n\t")
f.write(
translate(
- "There is currently no description for this enum. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ "There is currently no description for this constant. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
)
+ "\n\n"
)
f.write("\n\n")
- index += 1
+ # Annotation descriptions
+ if len(class_def.annotations) > 0:
+ f.write(make_separator(True))
+ f.write(make_heading("Annotations", "-"))
- # Constant descriptions
- if len(class_def.constants) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Constants", "-"))
+ index = 0
- for constant in class_def.constants.values():
- # Create constant signature and anchor point.
+ for method_list in class_def.annotations.values(): # type: ignore
+ for i, m in enumerate(method_list):
+ if index != 0:
+ f.write(make_separator())
- f.write(f".. _class_{class_name}_constant_{constant.name}:\n\n")
- f.write(".. rst-class:: classref-constant\n\n")
+ # Create annotation signature and anchor point.
- f.write(f"**{constant.name}** = ``{constant.value}``\n\n")
+ if i == 0:
+ f.write(f".. _class_{class_name}_annotation_{m.name}:\n\n")
- # Add constant description.
+ f.write(".. rst-class:: classref-annotation\n\n")
- f.write(make_deprecated_experimental(constant, state))
+ _, signature = make_method_signature(class_def, m, "", state)
+ f.write(f"{signature}\n\n")
- if constant.text is not None and constant.text.strip() != "":
- f.write(f"{format_text_block(constant.text.strip(), constant, state)}")
- elif constant.deprecated is None and constant.experimental is None:
- f.write(".. container:: contribute\n\n\t")
- f.write(
- translate(
- "There is currently no description for this constant. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
- )
- + "\n\n"
- )
+ # Add annotation description, or a call to action if it's missing.
- f.write("\n\n")
+ if m.description is not None and m.description.strip() != "":
+ f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
+ else:
+ f.write(".. container:: contribute\n\n\t")
+ f.write(
+ translate(
+ "There is currently no description for this annotation. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ )
+ + "\n\n"
+ )
+
+ index += 1
- # Annotation descriptions
- if len(class_def.annotations) > 0:
- f.write(make_separator(True))
- f.write(make_heading("Annotations", "-"))
+ # Property descriptions
+ if any(not p.overrides for p in class_def.properties.values()) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Property Descriptions", "-"))
- index = 0
+ index = 0
+
+ for property_def in class_def.properties.values():
+ if property_def.overrides:
+ continue
- for method_list in class_def.annotations.values(): # type: ignore
- for i, m in enumerate(method_list):
if index != 0:
f.write(make_separator())
- # Create annotation signature and anchor point.
+ # Create property signature and anchor point.
- if i == 0:
- f.write(f".. _class_{class_name}_annotation_{m.name}:\n\n")
+ f.write(f".. _class_{class_name}_property_{property_def.name}:\n\n")
+ f.write(".. rst-class:: classref-property\n\n")
- f.write(".. rst-class:: classref-annotation\n\n")
+ property_default = ""
+ if property_def.default_value is not None:
+ property_default = f" = {property_def.default_value}"
+ f.write(f"{property_def.type_name.to_rst(state)} **{property_def.name}**{property_default}\n\n")
- _, signature = make_method_signature(class_def, m, "", state)
- f.write(f"{signature}\n\n")
+ # Create property setter and getter records.
- # Add annotation description, or a call to action if it's missing.
+ property_setget = ""
- if m.description is not None and m.description.strip() != "":
- f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
- else:
+ if property_def.setter is not None and not property_def.setter.startswith("_"):
+ property_setter = make_setter_signature(class_def, property_def, state)
+ property_setget += f"- {property_setter}\n"
+
+ if property_def.getter is not None and not property_def.getter.startswith("_"):
+ property_getter = make_getter_signature(class_def, property_def, state)
+ property_setget += f"- {property_getter}\n"
+
+ if property_setget != "":
+ f.write(".. rst-class:: classref-property-setget\n\n")
+ f.write(property_setget)
+ f.write("\n")
+
+ # Add property description, or a call to action if it's missing.
+
+ f.write(make_deprecated_experimental(property_def, state))
+
+ if property_def.text is not None and property_def.text.strip() != "":
+ f.write(f"{format_text_block(property_def.text.strip(), property_def, state)}\n\n")
+ elif property_def.deprecated is None and property_def.experimental is None:
f.write(".. container:: contribute\n\n\t")
f.write(
translate(
- "There is currently no description for this annotation. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ "There is currently no description for this property. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
)
+ "\n\n"
)
index += 1
- # Property descriptions
- if any(not p.overrides for p in class_def.properties.values()) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Property Descriptions", "-"))
+ # Constructor, Method, Operator descriptions
+ if len(class_def.constructors) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Constructor Descriptions", "-"))
- index = 0
+ index = 0
- for property_def in class_def.properties.values():
- if property_def.overrides:
- continue
+ for method_list in class_def.constructors.values():
+ for i, m in enumerate(method_list):
+ if index != 0:
+ f.write(make_separator())
- if index != 0:
- f.write(make_separator())
+ # Create constructor signature and anchor point.
- # Create property signature and anchor point.
+ if i == 0:
+ f.write(f".. _class_{class_name}_constructor_{m.name}:\n\n")
- f.write(f".. _class_{class_name}_property_{property_def.name}:\n\n")
- f.write(".. rst-class:: classref-property\n\n")
+ f.write(".. rst-class:: classref-constructor\n\n")
- property_default = ""
- if property_def.default_value is not None:
- property_default = f" = {property_def.default_value}"
- f.write(f"{property_def.type_name.to_rst(state)} **{property_def.name}**{property_default}\n\n")
+ ret_type, signature = make_method_signature(class_def, m, "", state)
+ f.write(f"{ret_type} {signature}\n\n")
- # Create property setter and getter records.
+ # Add constructor description, or a call to action if it's missing.
- property_setget = ""
+ f.write(make_deprecated_experimental(m, state))
- if property_def.setter is not None and not property_def.setter.startswith("_"):
- property_setter = make_setter_signature(class_def, property_def, state)
- property_setget += f"- {property_setter}\n"
+ if m.description is not None and m.description.strip() != "":
+ f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
+ elif m.deprecated is None and m.experimental is None:
+ f.write(".. container:: contribute\n\n\t")
+ f.write(
+ translate(
+ "There is currently no description for this constructor. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ )
+ + "\n\n"
+ )
- if property_def.getter is not None and not property_def.getter.startswith("_"):
- property_getter = make_getter_signature(class_def, property_def, state)
- property_setget += f"- {property_getter}\n"
+ index += 1
- if property_setget != "":
- f.write(".. rst-class:: classref-property-setget\n\n")
- f.write(property_setget)
- f.write("\n")
+ if len(class_def.methods) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Method Descriptions", "-"))
- # Add property description, or a call to action if it's missing.
+ index = 0
- f.write(make_deprecated_experimental(property_def, state))
+ for method_list in class_def.methods.values():
+ for i, m in enumerate(method_list):
+ if index != 0:
+ f.write(make_separator())
- if property_def.text is not None and property_def.text.strip() != "":
- f.write(f"{format_text_block(property_def.text.strip(), property_def, state)}\n\n")
- elif property_def.deprecated is None and property_def.experimental is None:
- f.write(".. container:: contribute\n\n\t")
- f.write(
- translate(
- "There is currently no description for this property. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
- )
- + "\n\n"
- )
+ # Create method signature and anchor point.
- index += 1
+ if i == 0:
+ method_qualifier = ""
+ if m.name.startswith("_"):
+ method_qualifier = "private_"
- # Constructor, Method, Operator descriptions
- if len(class_def.constructors) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Constructor Descriptions", "-"))
+ f.write(f".. _class_{class_name}_{method_qualifier}method_{m.name}:\n\n")
- index = 0
+ f.write(".. rst-class:: classref-method\n\n")
- for method_list in class_def.constructors.values():
- for i, m in enumerate(method_list):
- if index != 0:
- f.write(make_separator())
+ ret_type, signature = make_method_signature(class_def, m, "", state)
+ f.write(f"{ret_type} {signature}\n\n")
- # Create constructor signature and anchor point.
+ # Add method description, or a call to action if it's missing.
- if i == 0:
- f.write(f".. _class_{class_name}_constructor_{m.name}:\n\n")
+ f.write(make_deprecated_experimental(m, state))
- f.write(".. rst-class:: classref-constructor\n\n")
-
- ret_type, signature = make_method_signature(class_def, m, "", state)
- f.write(f"{ret_type} {signature}\n\n")
-
- # Add constructor description, or a call to action if it's missing.
-
- f.write(make_deprecated_experimental(m, state))
-
- if m.description is not None and m.description.strip() != "":
- f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
- elif m.deprecated is None and m.experimental is None:
- f.write(".. container:: contribute\n\n\t")
- f.write(
- translate(
- "There is currently no description for this constructor. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ if m.description is not None and m.description.strip() != "":
+ f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
+ elif m.deprecated is None and m.experimental is None:
+ f.write(".. container:: contribute\n\n\t")
+ f.write(
+ translate(
+ "There is currently no description for this method. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ )
+ + "\n\n"
)
- + "\n\n"
- )
- index += 1
-
- if len(class_def.methods) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Method Descriptions", "-"))
+ index += 1
- index = 0
+ if len(class_def.operators) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Operator Descriptions", "-"))
- for method_list in class_def.methods.values():
- for i, m in enumerate(method_list):
- if index != 0:
- f.write(make_separator())
+ index = 0
- # Create method signature and anchor point.
+ for method_list in class_def.operators.values():
+ for i, m in enumerate(method_list):
+ if index != 0:
+ f.write(make_separator())
- if i == 0:
- method_qualifier = ""
- if m.name.startswith("_"):
- method_qualifier = "private_"
+ # Create operator signature and anchor point.
- f.write(f".. _class_{class_name}_{method_qualifier}method_{m.name}:\n\n")
+ operator_anchor = f".. _class_{class_name}_operator_{sanitize_operator_name(m.name, state)}"
+ for parameter in m.parameters:
+ operator_anchor += f"_{parameter.type_name.type_name}"
+ operator_anchor += f":\n\n"
+ f.write(operator_anchor)
- f.write(".. rst-class:: classref-method\n\n")
+ f.write(".. rst-class:: classref-operator\n\n")
- ret_type, signature = make_method_signature(class_def, m, "", state)
- f.write(f"{ret_type} {signature}\n\n")
+ ret_type, signature = make_method_signature(class_def, m, "", state)
+ f.write(f"{ret_type} {signature}\n\n")
- # Add method description, or a call to action if it's missing.
+ # Add operator description, or a call to action if it's missing.
- f.write(make_deprecated_experimental(m, state))
+ f.write(make_deprecated_experimental(m, state))
- if m.description is not None and m.description.strip() != "":
- f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
- elif m.deprecated is None and m.experimental is None:
- f.write(".. container:: contribute\n\n\t")
- f.write(
- translate(
- "There is currently no description for this method. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ if m.description is not None and m.description.strip() != "":
+ f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
+ elif m.deprecated is None and m.experimental is None:
+ f.write(".. container:: contribute\n\n\t")
+ f.write(
+ translate(
+ "There is currently no description for this operator. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ )
+ + "\n\n"
)
- + "\n\n"
- )
- index += 1
+ index += 1
- if len(class_def.operators) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Operator Descriptions", "-"))
+ # Theme property descriptions
+ if len(class_def.theme_items) > 0:
+ f.write(make_separator(True))
+ f.write(".. rst-class:: classref-descriptions-group\n\n")
+ f.write(make_heading("Theme Property Descriptions", "-"))
- index = 0
+ index = 0
- for method_list in class_def.operators.values():
- for i, m in enumerate(method_list):
+ for theme_item_def in class_def.theme_items.values():
if index != 0:
f.write(make_separator())
- # Create operator signature and anchor point.
-
- operator_anchor = f".. _class_{class_name}_operator_{sanitize_operator_name(m.name, state)}"
- for parameter in m.parameters:
- operator_anchor += f"_{parameter.type_name.type_name}"
- operator_anchor += f":\n\n"
- f.write(operator_anchor)
+ # Create theme property signature and anchor point.
- f.write(".. rst-class:: classref-operator\n\n")
+ f.write(f".. _class_{class_name}_theme_{theme_item_def.data_name}_{theme_item_def.name}:\n\n")
+ f.write(".. rst-class:: classref-themeproperty\n\n")
- ret_type, signature = make_method_signature(class_def, m, "", state)
- f.write(f"{ret_type} {signature}\n\n")
+ theme_item_default = ""
+ if theme_item_def.default_value is not None:
+ theme_item_default = f" = {theme_item_def.default_value}"
+ f.write(f"{theme_item_def.type_name.to_rst(state)} **{theme_item_def.name}**{theme_item_default}\n\n")
- # Add operator description, or a call to action if it's missing.
+ # Add theme property description, or a call to action if it's missing.
- f.write(make_deprecated_experimental(m, state))
+ f.write(make_deprecated_experimental(theme_item_def, state))
- if m.description is not None and m.description.strip() != "":
- f.write(f"{format_text_block(m.description.strip(), m, state)}\n\n")
- elif m.deprecated is None and m.experimental is None:
+ if theme_item_def.text is not None and theme_item_def.text.strip() != "":
+ f.write(f"{format_text_block(theme_item_def.text.strip(), theme_item_def, state)}\n\n")
+ elif theme_item_def.deprecated is None and theme_item_def.experimental is None:
f.write(".. container:: contribute\n\n\t")
f.write(
translate(
- "There is currently no description for this operator. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
+ "There is currently no description for this theme property. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
)
+ "\n\n"
)
index += 1
- # Theme property descriptions
- if len(class_def.theme_items) > 0:
- f.write(make_separator(True))
- f.write(".. rst-class:: classref-descriptions-group\n\n")
- f.write(make_heading("Theme Property Descriptions", "-"))
-
- index = 0
-
- for theme_item_def in class_def.theme_items.values():
- if index != 0:
- f.write(make_separator())
-
- # Create theme property signature and anchor point.
-
- f.write(f".. _class_{class_name}_theme_{theme_item_def.data_name}_{theme_item_def.name}:\n\n")
- f.write(".. rst-class:: classref-themeproperty\n\n")
-
- theme_item_default = ""
- if theme_item_def.default_value is not None:
- theme_item_default = f" = {theme_item_def.default_value}"
- f.write(f"{theme_item_def.type_name.to_rst(state)} **{theme_item_def.name}**{theme_item_default}\n\n")
-
- # Add theme property description, or a call to action if it's missing.
-
- f.write(make_deprecated_experimental(theme_item_def, state))
-
- if theme_item_def.text is not None and theme_item_def.text.strip() != "":
- f.write(f"{format_text_block(theme_item_def.text.strip(), theme_item_def, state)}\n\n")
- elif theme_item_def.deprecated is None and theme_item_def.experimental is None:
- f.write(".. container:: contribute\n\n\t")
- f.write(
- translate(
- "There is currently no description for this theme property. Please help us by :ref:`contributing one <doc_updating_the_class_reference>`!"
- )
- + "\n\n"
- )
-
- index += 1
-
- f.write(make_footer())
+ f.write(make_footer())
def make_type(klass: str, state: State) -> str:
@@ -1690,48 +1692,46 @@ def make_link(url: str, title: str) -> str:
def make_rst_index(grouped_classes: Dict[str, List[str]], dry_run: bool, output_dir: str) -> None:
- if dry_run:
- f = open(os.devnull, "w", encoding="utf-8", newline="\n")
- else:
- f = open(os.path.join(output_dir, "index.rst"), "w", encoding="utf-8", newline="\n")
-
- # Remove the "Edit on Github" button from the online docs page, and disallow user-contributed notes
- # on the index page. User-contributed notes are allowed on individual class pages.
- f.write(":github_url: hide\n:allow_comments: False\n\n")
+ with open(
+ os.devnull if dry_run else os.path.join(output_dir, "index.rst"), "w", encoding="utf-8", newline="\n"
+ ) as f:
+ # Remove the "Edit on Github" button from the online docs page, and disallow user-contributed notes
+ # on the index page. User-contributed notes are allowed on individual class pages.
+ f.write(":github_url: hide\n:allow_comments: False\n\n")
- # Warn contributors not to edit this file directly.
- # Also provide links to the source files for reference.
+ # Warn contributors not to edit this file directly.
+ # Also provide links to the source files for reference.
- git_branch = get_git_branch()
- generator_github_url = f"https://github.com/godotengine/godot/tree/{git_branch}/doc/tools/make_rst.py"
+ git_branch = get_git_branch()
+ generator_github_url = f"https://github.com/godotengine/godot/tree/{git_branch}/doc/tools/make_rst.py"
- f.write(".. DO NOT EDIT THIS FILE!!!\n")
- f.write(".. Generated automatically from Godot engine sources.\n")
- f.write(f".. Generator: {generator_github_url}.\n\n")
+ f.write(".. DO NOT EDIT THIS FILE!!!\n")
+ f.write(".. Generated automatically from Godot engine sources.\n")
+ f.write(f".. Generator: {generator_github_url}.\n\n")
- f.write(".. _doc_class_reference:\n\n")
+ f.write(".. _doc_class_reference:\n\n")
- f.write(make_heading("All classes", "="))
+ f.write(make_heading("All classes", "="))
- for group_name in CLASS_GROUPS:
- if group_name in grouped_classes:
- f.write(make_heading(CLASS_GROUPS[group_name], "="))
+ for group_name in CLASS_GROUPS:
+ if group_name in grouped_classes:
+ f.write(make_heading(CLASS_GROUPS[group_name], "="))
- f.write(".. toctree::\n")
- f.write(" :maxdepth: 1\n")
- f.write(f" :name: toc-class-ref-{group_name}s\n")
- f.write("\n")
+ f.write(".. toctree::\n")
+ f.write(" :maxdepth: 1\n")
+ f.write(f" :name: toc-class-ref-{group_name}s\n")
+ f.write("\n")
- if group_name in CLASS_GROUPS_BASE:
- f.write(f" class_{CLASS_GROUPS_BASE[group_name].lower()}\n")
+ if group_name in CLASS_GROUPS_BASE:
+ f.write(f" class_{CLASS_GROUPS_BASE[group_name].lower()}\n")
- for class_name in grouped_classes[group_name]:
- if group_name in CLASS_GROUPS_BASE and CLASS_GROUPS_BASE[group_name].lower() == class_name.lower():
- continue
+ for class_name in grouped_classes[group_name]:
+ if group_name in CLASS_GROUPS_BASE and CLASS_GROUPS_BASE[group_name].lower() == class_name.lower():
+ continue
- f.write(f" class_{class_name.lower()}\n")
+ f.write(f" class_{class_name.lower()}\n")
- f.write("\n")
+ f.write("\n")
# Formatting helpers.
diff --git a/editor/SCsub b/editor/SCsub
index 67ded244cf..442d0a3b75 100644
--- a/editor/SCsub
+++ b/editor/SCsub
@@ -11,17 +11,15 @@ import editor_builders
def _make_doc_data_class_path(to_path):
# NOTE: It is safe to generate this file here, since this is still executed serially
- g = open(os.path.join(to_path, "doc_data_class_path.gen.h"), "w", encoding="utf-8", newline="\n")
- g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
- g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
-
- g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
- for c in sorted(env.doc_class_path):
- g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
- g.write("\t{nullptr, nullptr}\n")
- g.write("};\n")
-
- g.close()
+ with open(os.path.join(to_path, "doc_data_class_path.gen.h"), "w", encoding="utf-8", newline="\n") as g:
+ g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
+ g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
+
+ g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
+ for c in sorted(env.doc_class_path):
+ g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
+ g.write("\t{nullptr, nullptr}\n")
+ g.write("};\n")
if env.editor_build:
diff --git a/editor/editor_builders.py b/editor/editor_builders.py
index 7cac984129..0189d7e9d4 100644
--- a/editor/editor_builders.py
+++ b/editor/editor_builders.py
@@ -16,116 +16,113 @@ from platform_methods import subprocess_main
def make_doc_header(target, source, env):
dst = target[0]
- g = open(dst, "w", encoding="utf-8", newline="\n")
- buf = ""
- docbegin = ""
- docend = ""
- for src in source:
- if not src.endswith(".xml"):
- continue
- with open(src, "r", encoding="utf-8") as f:
- content = f.read()
- buf += content
-
- buf = (docbegin + buf + docend).encode("utf-8")
- decomp_size = len(buf)
-
- # Use maximum zlib compression level to further reduce file size
- # (at the cost of initial build times).
- buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
-
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#ifndef _DOC_DATA_RAW_H\n")
- g.write("#define _DOC_DATA_RAW_H\n")
- g.write('static const char *_doc_data_hash = "' + str(hash(buf)) + '";\n')
- g.write("static const int _doc_data_compressed_size = " + str(len(buf)) + ";\n")
- g.write("static const int _doc_data_uncompressed_size = " + str(decomp_size) + ";\n")
- g.write("static const unsigned char _doc_data_compressed[] = {\n")
- for i in range(len(buf)):
- g.write("\t" + str(buf[i]) + ",\n")
- g.write("};\n")
-
- g.write("#endif")
-
- g.close()
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ buf = ""
+ docbegin = ""
+ docend = ""
+ for src in source:
+ if not src.endswith(".xml"):
+ continue
+ with open(src, "r", encoding="utf-8") as f:
+ content = f.read()
+ buf += content
+
+ buf = (docbegin + buf + docend).encode("utf-8")
+ decomp_size = len(buf)
+
+ # Use maximum zlib compression level to further reduce file size
+ # (at the cost of initial build times).
+ buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
+
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("#ifndef _DOC_DATA_RAW_H\n")
+ g.write("#define _DOC_DATA_RAW_H\n")
+ g.write('static const char *_doc_data_hash = "' + str(hash(buf)) + '";\n')
+ g.write("static const int _doc_data_compressed_size = " + str(len(buf)) + ";\n")
+ g.write("static const int _doc_data_uncompressed_size = " + str(decomp_size) + ";\n")
+ g.write("static const unsigned char _doc_data_compressed[] = {\n")
+ for i in range(len(buf)):
+ g.write("\t" + str(buf[i]) + ",\n")
+ g.write("};\n")
+
+ g.write("#endif")
def make_translations_header(target, source, env, category):
dst = target[0]
- g = open(dst, "w", encoding="utf-8", newline="\n")
-
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#ifndef _{}_TRANSLATIONS_H\n".format(category.upper()))
- g.write("#define _{}_TRANSLATIONS_H\n".format(category.upper()))
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("#ifndef _{}_TRANSLATIONS_H\n".format(category.upper()))
+ g.write("#define _{}_TRANSLATIONS_H\n".format(category.upper()))
- sorted_paths = sorted(source, key=lambda path: os.path.splitext(os.path.basename(path))[0])
+ sorted_paths = sorted(source, key=lambda path: os.path.splitext(os.path.basename(path))[0])
- msgfmt_available = shutil.which("msgfmt") is not None
+ msgfmt_available = shutil.which("msgfmt") is not None
- if not msgfmt_available:
- print("WARNING: msgfmt is not found, using .po files instead of .mo")
+ if not msgfmt_available:
+ print("WARNING: msgfmt is not found, using .po files instead of .mo")
- xl_names = []
- for i in range(len(sorted_paths)):
- if msgfmt_available:
- mo_path = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex + ".mo")
- cmd = "msgfmt " + sorted_paths[i] + " --no-hash -o " + mo_path
- try:
- subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE).communicate()
- with open(mo_path, "rb") as f:
- buf = f.read()
- except OSError as e:
- print(
- "WARNING: msgfmt execution failed, using .po file instead of .mo: path=%r; [%s] %s"
- % (sorted_paths[i], e.__class__.__name__, e)
- )
- with open(sorted_paths[i], "rb") as f:
- buf = f.read()
- finally:
+ xl_names = []
+ for i in range(len(sorted_paths)):
+ if msgfmt_available:
+ mo_path = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex + ".mo")
+ cmd = "msgfmt " + sorted_paths[i] + " --no-hash -o " + mo_path
try:
- os.remove(mo_path)
+ subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE).communicate()
+ with open(mo_path, "rb") as f:
+ buf = f.read()
except OSError as e:
- # Do not fail the entire build if it cannot delete a temporary file
print(
- "WARNING: Could not delete temporary .mo file: path=%r; [%s] %s"
- % (mo_path, e.__class__.__name__, e)
+ "WARNING: msgfmt execution failed, using .po file instead of .mo: path=%r; [%s] %s"
+ % (sorted_paths[i], e.__class__.__name__, e)
)
- else:
- with open(sorted_paths[i], "rb") as f:
- buf = f.read()
-
- decomp_size = len(buf)
- # Use maximum zlib compression level to further reduce file size
- # (at the cost of initial build times).
- buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
- name = os.path.splitext(os.path.basename(sorted_paths[i]))[0]
-
- g.write("static const unsigned char _{}_translation_{}_compressed[] = {{\n".format(category, name))
- for j in range(len(buf)):
- g.write("\t" + str(buf[j]) + ",\n")
+ with open(sorted_paths[i], "rb") as f:
+ buf = f.read()
+ finally:
+ try:
+ os.remove(mo_path)
+ except OSError as e:
+ # Do not fail the entire build if it cannot delete a temporary file
+ print(
+ "WARNING: Could not delete temporary .mo file: path=%r; [%s] %s"
+ % (mo_path, e.__class__.__name__, e)
+ )
+ else:
+ with open(sorted_paths[i], "rb") as f:
+ buf = f.read()
+ decomp_size = len(buf)
+ # Use maximum zlib compression level to further reduce file size
+ # (at the cost of initial build times).
+ buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
+ name = os.path.splitext(os.path.basename(sorted_paths[i]))[0]
+
+ g.write("static const unsigned char _{}_translation_{}_compressed[] = {{\n".format(category, name))
+ for j in range(len(buf)):
+ g.write("\t" + str(buf[j]) + ",\n")
+
+ g.write("};\n")
+
+ xl_names.append([name, len(buf), str(decomp_size)])
+
+ g.write("struct {}TranslationList {{\n".format(category.capitalize()))
+ g.write("\tconst char* lang;\n")
+ g.write("\tint comp_size;\n")
+ g.write("\tint uncomp_size;\n")
+ g.write("\tconst unsigned char* data;\n")
+ g.write("};\n\n")
+ g.write("static {}TranslationList _{}_translations[] = {{\n".format(category.capitalize(), category))
+ for x in xl_names:
+ g.write(
+ '\t{{ "{}", {}, {}, _{}_translation_{}_compressed }},\n'.format(
+ x[0], str(x[1]), str(x[2]), category, x[0]
+ )
+ )
+ g.write("\t{nullptr, 0, 0, nullptr}\n")
g.write("};\n")
- xl_names.append([name, len(buf), str(decomp_size)])
-
- g.write("struct {}TranslationList {{\n".format(category.capitalize()))
- g.write("\tconst char* lang;\n")
- g.write("\tint comp_size;\n")
- g.write("\tint uncomp_size;\n")
- g.write("\tconst unsigned char* data;\n")
- g.write("};\n\n")
- g.write("static {}TranslationList _{}_translations[] = {{\n".format(category.capitalize(), category))
- for x in xl_names:
- g.write(
- '\t{{ "{}", {}, {}, _{}_translation_{}_compressed }},\n'.format(x[0], str(x[1]), str(x[2]), category, x[0])
- )
- g.write("\t{nullptr, 0, 0, nullptr}\n")
- g.write("};\n")
-
- g.write("#endif")
-
- g.close()
+ g.write("#endif")
def make_editor_translations_header(target, source, env):
diff --git a/editor/icons/editor_icons_builders.py b/editor/icons/editor_icons_builders.py
index 4fe74881ed..3b2d8714d8 100644
--- a/editor/icons/editor_icons_builders.py
+++ b/editor/icons/editor_icons_builders.py
@@ -15,81 +15,76 @@ def make_editor_icons_action(target, source, env):
dst = target[0]
svg_icons = source
- icons_string = StringIO()
+ with StringIO() as icons_string, StringIO() as s:
+ for f in svg_icons:
+ fname = str(f)
- for f in svg_icons:
- fname = str(f)
+ icons_string.write('\t"')
- icons_string.write('\t"')
-
- with open(fname, "rb") as svgf:
- b = svgf.read(1)
- while len(b) == 1:
- icons_string.write("\\" + str(hex(ord(b)))[1:])
+ with open(fname, "rb") as svgf:
b = svgf.read(1)
+ while len(b) == 1:
+ icons_string.write("\\" + str(hex(ord(b)))[1:])
+ b = svgf.read(1)
+
+ icons_string.write('"')
+ if fname != svg_icons[-1]:
+ icons_string.write(",")
+ icons_string.write("\n")
+
+ s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ s.write("#ifndef _EDITOR_ICONS_H\n")
+ s.write("#define _EDITOR_ICONS_H\n")
+ s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
+ s.write("static const char *editor_icons_sources[] = {\n")
+ s.write(icons_string.getvalue())
+ s.write("};\n\n")
+ s.write("static const char *editor_icons_names[] = {\n")
+
+ # this is used to store the indices of thumbnail icons
+ thumb_medium_indices = []
+ thumb_big_indices = []
+ index = 0
+ for f in svg_icons:
+ fname = str(f)
+
+ # Trim the `.svg` extension from the string.
+ icon_name = os.path.basename(fname)[:-4]
+ # some special cases
+ if icon_name.endswith("MediumThumb"): # don't know a better way to handle this
+ thumb_medium_indices.append(str(index))
+ if icon_name.endswith("BigThumb"): # don't know a better way to handle this
+ thumb_big_indices.append(str(index))
+ if icon_name.endswith("GodotFile"): # don't know a better way to handle this
+ thumb_big_indices.append(str(index))
+
+ s.write('\t"{0}"'.format(icon_name))
+
+ if fname != svg_icons[-1]:
+ s.write(",")
+ s.write("\n")
+
+ index += 1
- icons_string.write('"')
- if fname != svg_icons[-1]:
- icons_string.write(",")
- icons_string.write("\n")
-
- s = StringIO()
- s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- s.write("#ifndef _EDITOR_ICONS_H\n")
- s.write("#define _EDITOR_ICONS_H\n")
- s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
- s.write("static const char *editor_icons_sources[] = {\n")
- s.write(icons_string.getvalue())
- s.write("};\n\n")
- s.write("static const char *editor_icons_names[] = {\n")
-
- # this is used to store the indices of thumbnail icons
- thumb_medium_indices = []
- thumb_big_indices = []
- index = 0
- for f in svg_icons:
- fname = str(f)
-
- # Trim the `.svg` extension from the string.
- icon_name = os.path.basename(fname)[:-4]
- # some special cases
- if icon_name.endswith("MediumThumb"): # don't know a better way to handle this
- thumb_medium_indices.append(str(index))
- if icon_name.endswith("BigThumb"): # don't know a better way to handle this
- thumb_big_indices.append(str(index))
- if icon_name.endswith("GodotFile"): # don't know a better way to handle this
- thumb_big_indices.append(str(index))
-
- s.write('\t"{0}"'.format(icon_name))
-
- if fname != svg_icons[-1]:
- s.write(",")
- s.write("\n")
-
- index += 1
-
- s.write("};\n")
-
- if thumb_medium_indices:
- s.write("\n\n")
- s.write("static const int editor_md_thumbs_count = {};\n".format(len(thumb_medium_indices)))
- s.write("static const int editor_md_thumbs_indices[] = {")
- s.write(", ".join(thumb_medium_indices))
- s.write("};\n")
- if thumb_big_indices:
- s.write("\n\n")
- s.write("static const int editor_bg_thumbs_count = {};\n".format(len(thumb_big_indices)))
- s.write("static const int editor_bg_thumbs_indices[] = {")
- s.write(", ".join(thumb_big_indices))
s.write("};\n")
- s.write("#endif\n")
-
- with open(dst, "w", encoding="utf-8", newline="\n") as f:
- f.write(s.getvalue())
-
- s.close()
- icons_string.close()
+ if thumb_medium_indices:
+ s.write("\n\n")
+ s.write("static const int editor_md_thumbs_count = {};\n".format(len(thumb_medium_indices)))
+ s.write("static const int editor_md_thumbs_indices[] = {")
+ s.write(", ".join(thumb_medium_indices))
+ s.write("};\n")
+ if thumb_big_indices:
+ s.write("\n\n")
+ s.write("static const int editor_bg_thumbs_count = {};\n".format(len(thumb_big_indices)))
+ s.write("static const int editor_bg_thumbs_indices[] = {")
+ s.write(", ".join(thumb_big_indices))
+ s.write("};\n")
+
+ s.write("#endif\n")
+
+ with open(dst, "w", encoding="utf-8", newline="\n") as f:
+ f.write(s.getvalue())
if __name__ == "__main__":
diff --git a/editor/template_builders.py b/editor/template_builders.py
index d5932a08fe..c79c9bd8af 100644
--- a/editor/template_builders.py
+++ b/editor/template_builders.py
@@ -54,41 +54,41 @@ def parse_template(inherits, source, delimiter):
def make_templates(target, source, env):
dst = target[0]
- s = StringIO()
- s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
- s.write("#ifndef _CODE_TEMPLATES_H\n")
- s.write("#define _CODE_TEMPLATES_H\n\n")
- s.write('#include "core/object/object.h"\n')
- s.write('#include "core/object/script_language.h"\n')
-
- delimiter = "#" # GDScript single line comment delimiter by default.
- if source:
- ext = os.path.splitext(source[0])[1]
- if ext == ".cs":
- delimiter = "//"
-
- parsed_template_string = ""
- number_of_templates = 0
-
- for filepath in source:
- node_name = os.path.basename(os.path.dirname(filepath))
- parsed_template = parse_template(node_name, filepath, delimiter)
- parsed_template_string += "\t" + parsed_template
- number_of_templates += 1
-
- s.write("\nstatic const int TEMPLATES_ARRAY_SIZE = " + str(number_of_templates) + ";\n")
- s.write("\nstatic const struct ScriptLanguage::ScriptTemplate TEMPLATES[" + str(number_of_templates) + "] = {\n")
-
- s.write(parsed_template_string)
+ with StringIO() as s:
+ s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
+ s.write("#ifndef _CODE_TEMPLATES_H\n")
+ s.write("#define _CODE_TEMPLATES_H\n\n")
+ s.write('#include "core/object/object.h"\n')
+ s.write('#include "core/object/script_language.h"\n')
+
+ delimiter = "#" # GDScript single line comment delimiter by default.
+ if source:
+ ext = os.path.splitext(source[0])[1]
+ if ext == ".cs":
+ delimiter = "//"
+
+ parsed_template_string = ""
+ number_of_templates = 0
+
+ for filepath in source:
+ node_name = os.path.basename(os.path.dirname(filepath))
+ parsed_template = parse_template(node_name, filepath, delimiter)
+ parsed_template_string += "\t" + parsed_template
+ number_of_templates += 1
+
+ s.write("\nstatic const int TEMPLATES_ARRAY_SIZE = " + str(number_of_templates) + ";\n")
+ s.write(
+ "\nstatic const struct ScriptLanguage::ScriptTemplate TEMPLATES[" + str(number_of_templates) + "] = {\n"
+ )
- s.write("};\n")
+ s.write(parsed_template_string)
- s.write("\n#endif\n")
+ s.write("};\n")
- with open(dst, "w", encoding="utf-8", newline="\n") as f:
- f.write(s.getvalue())
+ s.write("\n#endif\n")
- s.close()
+ with open(dst, "w", encoding="utf-8", newline="\n") as f:
+ f.write(s.getvalue())
if __name__ == "__main__":
diff --git a/editor/themes/editor_theme_builders.py b/editor/themes/editor_theme_builders.py
index b503c37c4b..399ff16a1d 100644
--- a/editor/themes/editor_theme_builders.py
+++ b/editor/themes/editor_theme_builders.py
@@ -12,29 +12,26 @@ from platform_methods import subprocess_main
def make_fonts_header(target, source, env):
dst = target[0]
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("#ifndef _EDITOR_FONTS_H\n")
+ g.write("#define _EDITOR_FONTS_H\n")
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#ifndef _EDITOR_FONTS_H\n")
- g.write("#define _EDITOR_FONTS_H\n")
+ # Saving uncompressed, since FreeType will reference from memory pointer.
+ for i in range(len(source)):
+ with open(source[i], "rb") as f:
+ buf = f.read()
- # Saving uncompressed, since FreeType will reference from memory pointer.
- for i in range(len(source)):
- with open(source[i], "rb") as f:
- buf = f.read()
+ name = os.path.splitext(os.path.basename(source[i]))[0]
- name = os.path.splitext(os.path.basename(source[i]))[0]
+ g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
+ g.write("static const unsigned char _font_" + name + "[] = {\n")
+ for j in range(len(buf)):
+ g.write("\t" + str(buf[j]) + ",\n")
- g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
- g.write("static const unsigned char _font_" + name + "[] = {\n")
- for j in range(len(buf)):
- g.write("\t" + str(buf[j]) + ",\n")
+ g.write("};\n")
- g.write("};\n")
-
- g.write("#endif")
-
- g.close()
+ g.write("#endif")
if __name__ == "__main__":
diff --git a/gles3_builders.py b/gles3_builders.py
index 9280d3f0dd..9577ded544 100644
--- a/gles3_builders.py
+++ b/gles3_builders.py
@@ -37,161 +37,159 @@ class GLES3HeaderStruct:
def include_file_in_gles3_header(filename: str, header_data: GLES3HeaderStruct, depth: int):
- fs = open(filename, "r")
- line = fs.readline()
-
- while line:
- if line.find("=") != -1 and header_data.reading == "":
- # Mode
- eqpos = line.find("=")
- defname = line[:eqpos].strip().upper()
- define = line[eqpos + 1 :].strip()
- header_data.variant_names.append(defname)
- header_data.variant_defines.append(define)
- line = fs.readline()
- header_data.line_offset += 1
- header_data.vertex_offset = header_data.line_offset
- continue
-
- if line.find("=") != -1 and header_data.reading == "specializations":
- # Specialization
- eqpos = line.find("=")
- specname = line[:eqpos].strip()
- specvalue = line[eqpos + 1 :]
- header_data.specialization_names.append(specname)
- header_data.specialization_values.append(specvalue)
- line = fs.readline()
- header_data.line_offset += 1
- header_data.vertex_offset = header_data.line_offset
- continue
-
- if line.find("#[modes]") != -1:
- # Nothing really, just skip
- line = fs.readline()
- header_data.line_offset += 1
- header_data.vertex_offset = header_data.line_offset
- continue
-
- if line.find("#[specializations]") != -1:
- header_data.reading = "specializations"
- line = fs.readline()
- header_data.line_offset += 1
- header_data.vertex_offset = header_data.line_offset
- continue
+ with open(filename, "r") as fs:
+ line = fs.readline()
- if line.find("#[vertex]") != -1:
- header_data.reading = "vertex"
- line = fs.readline()
- header_data.line_offset += 1
- header_data.vertex_offset = header_data.line_offset
- continue
+ while line:
+ if line.find("=") != -1 and header_data.reading == "":
+ # Mode
+ eqpos = line.find("=")
+ defname = line[:eqpos].strip().upper()
+ define = line[eqpos + 1 :].strip()
+ header_data.variant_names.append(defname)
+ header_data.variant_defines.append(define)
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.vertex_offset = header_data.line_offset
+ continue
+
+ if line.find("=") != -1 and header_data.reading == "specializations":
+ # Specialization
+ eqpos = line.find("=")
+ specname = line[:eqpos].strip()
+ specvalue = line[eqpos + 1 :]
+ header_data.specialization_names.append(specname)
+ header_data.specialization_values.append(specvalue)
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.vertex_offset = header_data.line_offset
+ continue
+
+ if line.find("#[modes]") != -1:
+ # Nothing really, just skip
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.vertex_offset = header_data.line_offset
+ continue
+
+ if line.find("#[specializations]") != -1:
+ header_data.reading = "specializations"
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.vertex_offset = header_data.line_offset
+ continue
+
+ if line.find("#[vertex]") != -1:
+ header_data.reading = "vertex"
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.vertex_offset = header_data.line_offset
+ continue
+
+ if line.find("#[fragment]") != -1:
+ header_data.reading = "fragment"
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.fragment_offset = header_data.line_offset
+ continue
+
+ while line.find("#include ") != -1:
+ includeline = line.replace("#include ", "").strip()[1:-1]
+
+ included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
+ if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
+ header_data.vertex_included_files += [included_file]
+ if include_file_in_gles3_header(included_file, header_data, depth + 1) is None:
+ print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+ elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
+ header_data.fragment_included_files += [included_file]
+ if include_file_in_gles3_header(included_file, header_data, depth + 1) is None:
+ print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+
+ line = fs.readline()
+
+ if line.find("uniform") != -1 and line.lower().find("texunit:") != -1:
+ # texture unit
+ texunitstr = line[line.find(":") + 1 :].strip()
+ if texunitstr == "auto":
+ texunit = "-1"
+ else:
+ texunit = str(int(texunitstr))
+ uline = line[: line.lower().find("//")]
+ uline = uline.replace("uniform", "")
+ uline = uline.replace("highp", "")
+ uline = uline.replace(";", "")
+ lines = uline.split(",")
+ for x in lines:
+ x = x.strip()
+ x = x[x.rfind(" ") + 1 :]
+ if x.find("[") != -1:
+ # uniform array
+ x = x[: x.find("[")]
+
+ if not x in header_data.texunit_names:
+ header_data.texunits += [(x, texunit)]
+ header_data.texunit_names += [x]
+
+ elif line.find("uniform") != -1 and line.lower().find("ubo:") != -1:
+ # uniform buffer object
+ ubostr = line[line.find(":") + 1 :].strip()
+ ubo = str(int(ubostr))
+ uline = line[: line.lower().find("//")]
+ uline = uline[uline.find("uniform") + len("uniform") :]
+ uline = uline.replace("highp", "")
+ uline = uline.replace(";", "")
+ uline = uline.replace("{", "").strip()
+ lines = uline.split(",")
+ for x in lines:
+ x = x.strip()
+ x = x[x.rfind(" ") + 1 :]
+ if x.find("[") != -1:
+ # uniform array
+ x = x[: x.find("[")]
+
+ if not x in header_data.ubo_names:
+ header_data.ubos += [(x, ubo)]
+ header_data.ubo_names += [x]
+
+ elif line.find("uniform") != -1 and line.find("{") == -1 and line.find(";") != -1:
+ uline = line.replace("uniform", "")
+ uline = uline.replace(";", "")
+ lines = uline.split(",")
+ for x in lines:
+ x = x.strip()
+ x = x[x.rfind(" ") + 1 :]
+ if x.find("[") != -1:
+ # uniform array
+ x = x[: x.find("[")]
+
+ if not x in header_data.uniforms:
+ header_data.uniforms += [x]
+
+ if (line.strip().find("out ") == 0 or line.strip().find("flat ") == 0) and line.find("tfb:") != -1:
+ uline = line.replace("flat ", "")
+ uline = uline.replace("out ", "")
+ uline = uline.replace("highp ", "")
+ uline = uline.replace(";", "")
+ uline = uline[uline.find(" ") :].strip()
+
+ if uline.find("//") != -1:
+ name, bind = uline.split("//")
+ if bind.find("tfb:") != -1:
+ name = name.strip()
+ bind = bind.replace("tfb:", "").strip()
+ header_data.feedbacks += [(name, bind)]
+
+ line = line.replace("\r", "")
+ line = line.replace("\n", "")
+
+ if header_data.reading == "vertex":
+ header_data.vertex_lines += [line]
+ if header_data.reading == "fragment":
+ header_data.fragment_lines += [line]
- if line.find("#[fragment]") != -1:
- header_data.reading = "fragment"
line = fs.readline()
header_data.line_offset += 1
- header_data.fragment_offset = header_data.line_offset
- continue
-
- while line.find("#include ") != -1:
- includeline = line.replace("#include ", "").strip()[1:-1]
-
- included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
- if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
- header_data.vertex_included_files += [included_file]
- if include_file_in_gles3_header(included_file, header_data, depth + 1) is None:
- print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
- elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
- header_data.fragment_included_files += [included_file]
- if include_file_in_gles3_header(included_file, header_data, depth + 1) is None:
- print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
-
- line = fs.readline()
-
- if line.find("uniform") != -1 and line.lower().find("texunit:") != -1:
- # texture unit
- texunitstr = line[line.find(":") + 1 :].strip()
- if texunitstr == "auto":
- texunit = "-1"
- else:
- texunit = str(int(texunitstr))
- uline = line[: line.lower().find("//")]
- uline = uline.replace("uniform", "")
- uline = uline.replace("highp", "")
- uline = uline.replace(";", "")
- lines = uline.split(",")
- for x in lines:
- x = x.strip()
- x = x[x.rfind(" ") + 1 :]
- if x.find("[") != -1:
- # unfiorm array
- x = x[: x.find("[")]
-
- if not x in header_data.texunit_names:
- header_data.texunits += [(x, texunit)]
- header_data.texunit_names += [x]
-
- elif line.find("uniform") != -1 and line.lower().find("ubo:") != -1:
- # uniform buffer object
- ubostr = line[line.find(":") + 1 :].strip()
- ubo = str(int(ubostr))
- uline = line[: line.lower().find("//")]
- uline = uline[uline.find("uniform") + len("uniform") :]
- uline = uline.replace("highp", "")
- uline = uline.replace(";", "")
- uline = uline.replace("{", "").strip()
- lines = uline.split(",")
- for x in lines:
- x = x.strip()
- x = x[x.rfind(" ") + 1 :]
- if x.find("[") != -1:
- # unfiorm array
- x = x[: x.find("[")]
-
- if not x in header_data.ubo_names:
- header_data.ubos += [(x, ubo)]
- header_data.ubo_names += [x]
-
- elif line.find("uniform") != -1 and line.find("{") == -1 and line.find(";") != -1:
- uline = line.replace("uniform", "")
- uline = uline.replace(";", "")
- lines = uline.split(",")
- for x in lines:
- x = x.strip()
- x = x[x.rfind(" ") + 1 :]
- if x.find("[") != -1:
- # unfiorm array
- x = x[: x.find("[")]
-
- if not x in header_data.uniforms:
- header_data.uniforms += [x]
-
- if (line.strip().find("out ") == 0 or line.strip().find("flat ") == 0) and line.find("tfb:") != -1:
- uline = line.replace("flat ", "")
- uline = uline.replace("out ", "")
- uline = uline.replace("highp ", "")
- uline = uline.replace(";", "")
- uline = uline[uline.find(" ") :].strip()
-
- if uline.find("//") != -1:
- name, bind = uline.split("//")
- if bind.find("tfb:") != -1:
- name = name.strip()
- bind = bind.replace("tfb:", "").strip()
- header_data.feedbacks += [(name, bind)]
-
- line = line.replace("\r", "")
- line = line.replace("\n", "")
-
- if header_data.reading == "vertex":
- header_data.vertex_lines += [line]
- if header_data.reading == "fragment":
- header_data.fragment_lines += [line]
-
- line = fs.readline()
- header_data.line_offset += 1
-
- fs.close()
return header_data
@@ -211,393 +209,392 @@ def build_gles3_header(
else:
out_file = optional_output_filename
- fd = open(out_file, "w", encoding="utf-8", newline="\n")
- defspec = 0
- defvariant = ""
-
- fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
-
- out_file_base = out_file
- out_file_base = out_file_base[out_file_base.rfind("/") + 1 :]
- out_file_base = out_file_base[out_file_base.rfind("\\") + 1 :]
- out_file_ifdef = out_file_base.replace(".", "_").upper()
- fd.write("#ifndef " + out_file_ifdef + class_suffix + "_GLES3\n")
- fd.write("#define " + out_file_ifdef + class_suffix + "_GLES3\n")
-
- out_file_class = (
- out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix
- )
- fd.write("\n\n")
- fd.write('#include "' + include + '"\n\n\n')
- fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n")
-
- fd.write("public:\n\n")
-
- if header_data.uniforms:
- fd.write("\tenum Uniforms {\n")
- for x in header_data.uniforms:
- fd.write("\t\t" + x.upper() + ",\n")
- fd.write("\t};\n\n")
-
- if header_data.variant_names:
- fd.write("\tenum ShaderVariant {\n")
- for x in header_data.variant_names:
- fd.write("\t\t" + x + ",\n")
- fd.write("\t};\n\n")
- else:
- fd.write("\tenum ShaderVariant { DEFAULT };\n\n")
- defvariant = "=DEFAULT"
-
- if header_data.specialization_names:
- fd.write("\tenum Specializations {\n")
- counter = 0
- for x in header_data.specialization_names:
- fd.write("\t\t" + x.upper() + "=" + str(1 << counter) + ",\n")
- counter += 1
- fd.write("\t};\n\n")
-
- for i in range(len(header_data.specialization_names)):
- defval = header_data.specialization_values[i].strip()
- if defval.upper() == "TRUE" or defval == "1":
- defspec |= 1 << i
-
- fd.write(
- "\t_FORCE_INLINE_ bool version_bind_shader(RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { return _version_bind_shader(p_version,p_variant,p_specialization); }\n\n"
- )
-
- if header_data.uniforms:
- fd.write(
- "\t_FORCE_INLINE_ int version_get_uniform(Uniforms p_uniform,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { return _version_get_uniform(p_uniform,p_version,p_variant,p_specialization); }\n\n"
- )
+ with open(out_file, "w", encoding="utf-8", newline="\n") as fd:
+ defspec = 0
+ defvariant = ""
- fd.write(
- "\t#define _FU if (version_get_uniform(p_uniform,p_version,p_variant,p_specialization)<0) return; \n\n "
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, double p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, uint8_t p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1ui(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, int8_t p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1i(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, uint16_t p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1ui(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, int16_t p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1i(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, uint32_t p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1ui(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, int32_t p_value,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform1i(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Color& p_color,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,col); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Vector2& p_vec2,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU GLfloat vec2[2]={float(p_vec2.x),float(p_vec2.y)}; glUniform2fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec2); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Size2i& p_vec2,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU GLint vec2[2]={GLint(p_vec2.x),GLint(p_vec2.y)}; glUniform2iv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec2); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Vector3& p_vec3,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU GLfloat vec3[3]={float(p_vec3.x),float(p_vec3.y),float(p_vec3.z)}; glUniform3fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec3); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Vector4& p_vec4,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU GLfloat vec4[4]={float(p_vec4.x),float(p_vec4.y),float(p_vec4.z),float(p_vec4.w)}; glUniform4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec4); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_a, float p_b,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform2f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_a,p_b); }\n\n"
- )
- fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c,RID p_version,ShaderVariant p_variant"
- + defvariant
- + ",uint64_t p_specialization="
- + str(defspec)
- + ") { _FU glUniform3f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_a,p_b,p_c); }\n\n"
+ fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
+
+ out_file_base = out_file
+ out_file_base = out_file_base[out_file_base.rfind("/") + 1 :]
+ out_file_base = out_file_base[out_file_base.rfind("\\") + 1 :]
+ out_file_ifdef = out_file_base.replace(".", "_").upper()
+ fd.write("#ifndef " + out_file_ifdef + class_suffix + "_GLES3\n")
+ fd.write("#define " + out_file_ifdef + class_suffix + "_GLES3\n")
+
+ out_file_class = (
+ out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix
)
+ fd.write("\n\n")
+ fd.write('#include "' + include + '"\n\n\n')
+ fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n")
+
+ fd.write("public:\n\n")
+
+ if header_data.uniforms:
+ fd.write("\tenum Uniforms {\n")
+ for x in header_data.uniforms:
+ fd.write("\t\t" + x.upper() + ",\n")
+ fd.write("\t};\n\n")
+
+ if header_data.variant_names:
+ fd.write("\tenum ShaderVariant {\n")
+ for x in header_data.variant_names:
+ fd.write("\t\t" + x + ",\n")
+ fd.write("\t};\n\n")
+ else:
+ fd.write("\tenum ShaderVariant { DEFAULT };\n\n")
+ defvariant = "=DEFAULT"
+
+ if header_data.specialization_names:
+ fd.write("\tenum Specializations {\n")
+ counter = 0
+ for x in header_data.specialization_names:
+ fd.write("\t\t" + x.upper() + "=" + str(1 << counter) + ",\n")
+ counter += 1
+ fd.write("\t};\n\n")
+
+ for i in range(len(header_data.specialization_names)):
+ defval = header_data.specialization_values[i].strip()
+ if defval.upper() == "TRUE" or defval == "1":
+ defspec |= 1 << i
+
fd.write(
- "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d,RID p_version,ShaderVariant p_variant"
+ "\t_FORCE_INLINE_ bool version_bind_shader(RID p_version,ShaderVariant p_variant"
+ defvariant
+ ",uint64_t p_specialization="
+ str(defspec)
- + ") { _FU glUniform4f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_a,p_b,p_c,p_d); }\n\n"
+ + ") { return _version_bind_shader(p_version,p_variant,p_specialization); }\n\n"
)
- fd.write(
- """\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Transform3D& p_transform,RID p_version,ShaderVariant p_variant"""
- + defvariant
- + """,uint64_t p_specialization="""
- + str(defspec)
- + """) { _FU
+ if header_data.uniforms:
+ fd.write(
+ "\t_FORCE_INLINE_ int version_get_uniform(Uniforms p_uniform,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { return _version_get_uniform(p_uniform,p_version,p_variant,p_specialization); }\n\n"
+ )
+
+ fd.write(
+ "\t#define _FU if (version_get_uniform(p_uniform,p_version,p_variant,p_specialization)<0) return; \n\n "
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, double p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, uint8_t p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1ui(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, int8_t p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1i(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, uint16_t p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1ui(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, int16_t p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1i(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, uint32_t p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1ui(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, int32_t p_value,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform1i(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Color& p_color,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,col); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Vector2& p_vec2,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU GLfloat vec2[2]={float(p_vec2.x),float(p_vec2.y)}; glUniform2fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec2); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Size2i& p_vec2,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU GLint vec2[2]={GLint(p_vec2.x),GLint(p_vec2.y)}; glUniform2iv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec2); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Vector3& p_vec3,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU GLfloat vec3[3]={float(p_vec3.x),float(p_vec3.y),float(p_vec3.z)}; glUniform3fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec3); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Vector4& p_vec4,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU GLfloat vec4[4]={float(p_vec4.x),float(p_vec4.y),float(p_vec4.z),float(p_vec4.w)}; glUniform4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,vec4); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_a, float p_b,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform2f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_a,p_b); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform3f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_a,p_b,p_c); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d,RID p_version,ShaderVariant p_variant"
+ + defvariant
+ + ",uint64_t p_specialization="
+ + str(defspec)
+ + ") { _FU glUniform4f(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),p_a,p_b,p_c,p_d); }\n\n"
+ )
+
+ fd.write(
+ """\t_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Transform3D& p_transform,RID p_version,ShaderVariant p_variant"""
+ + defvariant
+ + """,uint64_t p_specialization="""
+ + str(defspec)
+ + """) { _FU
+
+ const Transform3D &tr = p_transform;
+
+ GLfloat matrix[16]={ /* build a 16x16 matrix */
+ (GLfloat)tr.basis.rows[0][0],
+ (GLfloat)tr.basis.rows[1][0],
+ (GLfloat)tr.basis.rows[2][0],
+ (GLfloat)0,
+ (GLfloat)tr.basis.rows[0][1],
+ (GLfloat)tr.basis.rows[1][1],
+ (GLfloat)tr.basis.rows[2][1],
+ (GLfloat)0,
+ (GLfloat)tr.basis.rows[0][2],
+ (GLfloat)tr.basis.rows[1][2],
+ (GLfloat)tr.basis.rows[2][2],
+ (GLfloat)0,
+ (GLfloat)tr.origin.x,
+ (GLfloat)tr.origin.y,
+ (GLfloat)tr.origin.z,
+ (GLfloat)1
+ };
+
+ glUniformMatrix4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,false,matrix);
- const Transform3D &tr = p_transform;
+ }
+
+ """
+ )
+
+ fd.write(
+ """_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Transform2D& p_transform,RID p_version,ShaderVariant p_variant"""
+ + defvariant
+ + """,uint64_t p_specialization="""
+ + str(defspec)
+ + """) { _FU
+
+ const Transform2D &tr = p_transform;
GLfloat matrix[16]={ /* build a 16x16 matrix */
- (GLfloat)tr.basis.rows[0][0],
- (GLfloat)tr.basis.rows[1][0],
- (GLfloat)tr.basis.rows[2][0],
+ (GLfloat)tr.columns[0][0],
+ (GLfloat)tr.columns[0][1],
+ (GLfloat)0,
+ (GLfloat)0,
+ (GLfloat)tr.columns[1][0],
+ (GLfloat)tr.columns[1][1],
(GLfloat)0,
- (GLfloat)tr.basis.rows[0][1],
- (GLfloat)tr.basis.rows[1][1],
- (GLfloat)tr.basis.rows[2][1],
(GLfloat)0,
- (GLfloat)tr.basis.rows[0][2],
- (GLfloat)tr.basis.rows[1][2],
- (GLfloat)tr.basis.rows[2][2],
(GLfloat)0,
- (GLfloat)tr.origin.x,
- (GLfloat)tr.origin.y,
- (GLfloat)tr.origin.z,
+ (GLfloat)0,
+ (GLfloat)1,
+ (GLfloat)0,
+ (GLfloat)tr.columns[2][0],
+ (GLfloat)tr.columns[2][1],
+ (GLfloat)0,
(GLfloat)1
};
- glUniformMatrix4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,false,matrix);
+ glUniformMatrix4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,false,matrix);
- }
-
- """
- )
+ }
- fd.write(
- """_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Transform2D& p_transform,RID p_version,ShaderVariant p_variant"""
- + defvariant
- + """,uint64_t p_specialization="""
- + str(defspec)
- + """) { _FU
-
- const Transform2D &tr = p_transform;
-
- GLfloat matrix[16]={ /* build a 16x16 matrix */
- (GLfloat)tr.columns[0][0],
- (GLfloat)tr.columns[0][1],
- (GLfloat)0,
- (GLfloat)0,
- (GLfloat)tr.columns[1][0],
- (GLfloat)tr.columns[1][1],
- (GLfloat)0,
- (GLfloat)0,
- (GLfloat)0,
- (GLfloat)0,
- (GLfloat)1,
- (GLfloat)0,
- (GLfloat)tr.columns[2][0],
- (GLfloat)tr.columns[2][1],
- (GLfloat)0,
- (GLfloat)1
- };
-
- glUniformMatrix4fv(version_get_uniform(p_uniform,p_version,p_variant,p_specialization),1,false,matrix);
-
- }
-
- """
- )
+ """
+ )
- fd.write(
- """_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Projection& p_matrix, RID p_version, ShaderVariant p_variant"""
- + defvariant
- + """,uint64_t p_specialization="""
- + str(defspec)
- + """) { _FU
+ fd.write(
+ """_FORCE_INLINE_ void version_set_uniform(Uniforms p_uniform, const Projection& p_matrix, RID p_version, ShaderVariant p_variant"""
+ + defvariant
+ + """,uint64_t p_specialization="""
+ + str(defspec)
+ + """) { _FU
- GLfloat matrix[16];
+ GLfloat matrix[16];
- for (int i = 0; i < 4; i++) {
- for (int j = 0; j < 4; j++) {
- matrix[i * 4 + j] = p_matrix.columns[i][j];
+ for (int i = 0; i < 4; i++) {
+ for (int j = 0; j < 4; j++) {
+ matrix[i * 4 + j] = p_matrix.columns[i][j];
+ }
}
- }
- glUniformMatrix4fv(version_get_uniform(p_uniform, p_version, p_variant, p_specialization), 1, false, matrix);
- }"""
- )
+ glUniformMatrix4fv(version_get_uniform(p_uniform, p_version, p_variant, p_specialization), 1, false, matrix);
+ }"""
+ )
- fd.write("\n\n#undef _FU\n\n\n")
+ fd.write("\n\n#undef _FU\n\n\n")
- fd.write("protected:\n\n")
+ fd.write("protected:\n\n")
- fd.write("\tvirtual void _init() override {\n\n")
+ fd.write("\tvirtual void _init() override {\n\n")
- if header_data.uniforms:
- fd.write("\t\tstatic const char* _uniform_strings[]={\n")
if header_data.uniforms:
- for x in header_data.uniforms:
+ fd.write("\t\tstatic const char* _uniform_strings[]={\n")
+ if header_data.uniforms:
+ for x in header_data.uniforms:
+ fd.write('\t\t\t"' + x + '",\n')
+ fd.write("\t\t};\n\n")
+ else:
+ fd.write("\t\tstatic const char **_uniform_strings=nullptr;\n")
+
+ variant_count = 1
+ if len(header_data.variant_defines) > 0:
+ fd.write("\t\tstatic const char* _variant_defines[]={\n")
+ for x in header_data.variant_defines:
fd.write('\t\t\t"' + x + '",\n')
- fd.write("\t\t};\n\n")
- else:
- fd.write("\t\tstatic const char **_uniform_strings=nullptr;\n")
-
- variant_count = 1
- if len(header_data.variant_defines) > 0:
- fd.write("\t\tstatic const char* _variant_defines[]={\n")
- for x in header_data.variant_defines:
- fd.write('\t\t\t"' + x + '",\n')
- fd.write("\t\t};\n\n")
- variant_count = len(header_data.variant_defines)
- else:
- fd.write("\t\tstatic const char **_variant_defines[]={" "};\n")
-
- if header_data.texunits:
- fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n")
- for x in header_data.texunits:
- fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
- fd.write("\t\t};\n\n")
- else:
- fd.write("\t\tstatic TexUnitPair *_texunit_pairs=nullptr;\n")
-
- if header_data.ubos:
- fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n")
- for x in header_data.ubos:
- fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
- fd.write("\t\t};\n\n")
- else:
- fd.write("\t\tstatic UBOPair *_ubo_pairs=nullptr;\n")
+ fd.write("\t\t};\n\n")
+ variant_count = len(header_data.variant_defines)
+ else:
+ fd.write("\t\tstatic const char **_variant_defines[]={" "};\n")
+
+ if header_data.texunits:
+ fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n")
+ for x in header_data.texunits:
+ fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
+ fd.write("\t\t};\n\n")
+ else:
+ fd.write("\t\tstatic TexUnitPair *_texunit_pairs=nullptr;\n")
+
+ if header_data.ubos:
+ fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n")
+ for x in header_data.ubos:
+ fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
+ fd.write("\t\t};\n\n")
+ else:
+ fd.write("\t\tstatic UBOPair *_ubo_pairs=nullptr;\n")
+
+ specializations_found = []
+
+ if header_data.specialization_names:
+ fd.write("\t\tstatic Specialization _spec_pairs[]={\n")
+ for i in range(len(header_data.specialization_names)):
+ defval = header_data.specialization_values[i].strip()
+ if defval.upper() == "TRUE" or defval == "1":
+ defval = "true"
+ else:
+ defval = "false"
+
+ fd.write('\t\t\t{"' + header_data.specialization_names[i] + '",' + defval + "},\n")
+ specializations_found.append(header_data.specialization_names[i])
+ fd.write("\t\t};\n\n")
+ else:
+ fd.write("\t\tstatic Specialization *_spec_pairs=nullptr;\n")
+
+ feedback_count = 0
+
+ if header_data.feedbacks:
+ fd.write("\t\tstatic const Feedback _feedbacks[]={\n")
+ for x in header_data.feedbacks:
+ name = x[0]
+ spec = x[1]
+ if spec in specializations_found:
+ fd.write('\t\t\t{"' + name + '",' + str(1 << specializations_found.index(spec)) + "},\n")
+ else:
+ fd.write('\t\t\t{"' + name + '",0},\n')
+
+ feedback_count += 1
+
+ fd.write("\t\t};\n\n")
+ else:
+ fd.write("\t\tstatic const Feedback* _feedbacks=nullptr;\n")
+
+ fd.write("\t\tstatic const char _vertex_code[]={\n")
+ for x in header_data.vertex_lines:
+ for c in x:
+ fd.write(str(ord(c)) + ",")
+
+ fd.write(str(ord("\n")) + ",")
+ fd.write("\t\t0};\n\n")
+
+ fd.write("\t\tstatic const char _fragment_code[]={\n")
+ for x in header_data.fragment_lines:
+ for c in x:
+ fd.write(str(ord(c)) + ",")
+
+ fd.write(str(ord("\n")) + ",")
+ fd.write("\t\t0};\n\n")
- specializations_found = []
-
- if header_data.specialization_names:
- fd.write("\t\tstatic Specialization _spec_pairs[]={\n")
- for i in range(len(header_data.specialization_names)):
- defval = header_data.specialization_values[i].strip()
- if defval.upper() == "TRUE" or defval == "1":
- defval = "true"
- else:
- defval = "false"
-
- fd.write('\t\t\t{"' + header_data.specialization_names[i] + '",' + defval + "},\n")
- specializations_found.append(header_data.specialization_names[i])
- fd.write("\t\t};\n\n")
- else:
- fd.write("\t\tstatic Specialization *_spec_pairs=nullptr;\n")
-
- feedback_count = 0
-
- if header_data.feedbacks:
- fd.write("\t\tstatic const Feedback _feedbacks[]={\n")
- for x in header_data.feedbacks:
- name = x[0]
- spec = x[1]
- if spec in specializations_found:
- fd.write('\t\t\t{"' + name + '",' + str(1 << specializations_found.index(spec)) + "},\n")
- else:
- fd.write('\t\t\t{"' + name + '",0},\n')
+ fd.write(
+ '\t\t_setup(_vertex_code,_fragment_code,"'
+ + out_file_class
+ + '",'
+ + str(len(header_data.uniforms))
+ + ",_uniform_strings,"
+ + str(len(header_data.ubos))
+ + ",_ubo_pairs,"
+ + str(feedback_count)
+ + ",_feedbacks,"
+ + str(len(header_data.texunits))
+ + ",_texunit_pairs,"
+ + str(len(header_data.specialization_names))
+ + ",_spec_pairs,"
+ + str(variant_count)
+ + ",_variant_defines);\n"
+ )
- feedback_count += 1
+ fd.write("\t}\n\n")
- fd.write("\t\t};\n\n")
- else:
- fd.write("\t\tstatic const Feedback* _feedbacks=nullptr;\n")
-
- fd.write("\t\tstatic const char _vertex_code[]={\n")
- for x in header_data.vertex_lines:
- for c in x:
- fd.write(str(ord(c)) + ",")
-
- fd.write(str(ord("\n")) + ",")
- fd.write("\t\t0};\n\n")
-
- fd.write("\t\tstatic const char _fragment_code[]={\n")
- for x in header_data.fragment_lines:
- for c in x:
- fd.write(str(ord(c)) + ",")
-
- fd.write(str(ord("\n")) + ",")
- fd.write("\t\t0};\n\n")
-
- fd.write(
- '\t\t_setup(_vertex_code,_fragment_code,"'
- + out_file_class
- + '",'
- + str(len(header_data.uniforms))
- + ",_uniform_strings,"
- + str(len(header_data.ubos))
- + ",_ubo_pairs,"
- + str(feedback_count)
- + ",_feedbacks,"
- + str(len(header_data.texunits))
- + ",_texunit_pairs,"
- + str(len(header_data.specialization_names))
- + ",_spec_pairs,"
- + str(variant_count)
- + ",_variant_defines);\n"
- )
-
- fd.write("\t}\n\n")
-
- fd.write("};\n\n")
- fd.write("#endif\n\n")
- fd.close()
+ fd.write("};\n\n")
+ fd.write("#endif\n\n")
def build_gles3_headers(target, source, env):
diff --git a/glsl_builders.py b/glsl_builders.py
index 406677ac9e..7eb79b8b32 100644
--- a/glsl_builders.py
+++ b/glsl_builders.py
@@ -44,72 +44,70 @@ class RDHeaderStruct:
def include_file_in_rd_header(filename: str, header_data: RDHeaderStruct, depth: int) -> RDHeaderStruct:
- fs = open(filename, "r")
- line = fs.readline()
-
- while line:
- index = line.find("//")
- if index != -1:
- line = line[:index]
-
- if line.find("#[vertex]") != -1:
- header_data.reading = "vertex"
- line = fs.readline()
- header_data.line_offset += 1
- header_data.vertex_offset = header_data.line_offset
- continue
+ with open(filename, "r") as fs:
+ line = fs.readline()
- if line.find("#[fragment]") != -1:
- header_data.reading = "fragment"
- line = fs.readline()
- header_data.line_offset += 1
- header_data.fragment_offset = header_data.line_offset
- continue
+ while line:
+ index = line.find("//")
+ if index != -1:
+ line = line[:index]
+
+ if line.find("#[vertex]") != -1:
+ header_data.reading = "vertex"
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.vertex_offset = header_data.line_offset
+ continue
+
+ if line.find("#[fragment]") != -1:
+ header_data.reading = "fragment"
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.fragment_offset = header_data.line_offset
+ continue
+
+ if line.find("#[compute]") != -1:
+ header_data.reading = "compute"
+ line = fs.readline()
+ header_data.line_offset += 1
+ header_data.compute_offset = header_data.line_offset
+ continue
+
+ while line.find("#include ") != -1:
+ includeline = line.replace("#include ", "").strip()[1:-1]
+
+ if includeline.startswith("thirdparty/"):
+ included_file = os.path.relpath(includeline)
+
+ else:
+ included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
+
+ if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
+ header_data.vertex_included_files += [included_file]
+ if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
+ print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+ elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
+ header_data.fragment_included_files += [included_file]
+ if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
+ print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+ elif not included_file in header_data.compute_included_files and header_data.reading == "compute":
+ header_data.compute_included_files += [included_file]
+ if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
+ print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+
+ line = fs.readline()
+
+ line = line.replace("\r", "").replace("\n", "")
+
+ if header_data.reading == "vertex":
+ header_data.vertex_lines += [line]
+ if header_data.reading == "fragment":
+ header_data.fragment_lines += [line]
+ if header_data.reading == "compute":
+ header_data.compute_lines += [line]
- if line.find("#[compute]") != -1:
- header_data.reading = "compute"
line = fs.readline()
header_data.line_offset += 1
- header_data.compute_offset = header_data.line_offset
- continue
-
- while line.find("#include ") != -1:
- includeline = line.replace("#include ", "").strip()[1:-1]
-
- if includeline.startswith("thirdparty/"):
- included_file = os.path.relpath(includeline)
-
- else:
- included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
-
- if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
- header_data.vertex_included_files += [included_file]
- if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
- print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
- elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
- header_data.fragment_included_files += [included_file]
- if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
- print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
- elif not included_file in header_data.compute_included_files and header_data.reading == "compute":
- header_data.compute_included_files += [included_file]
- if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
- print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
-
- line = fs.readline()
-
- line = line.replace("\r", "").replace("\n", "")
-
- if header_data.reading == "vertex":
- header_data.vertex_lines += [line]
- if header_data.reading == "fragment":
- header_data.fragment_lines += [line]
- if header_data.reading == "compute":
- header_data.compute_lines += [line]
-
- line = fs.readline()
- header_data.line_offset += 1
-
- fs.close()
return header_data
@@ -180,22 +178,20 @@ class RAWHeaderStruct:
def include_file_in_raw_header(filename: str, header_data: RAWHeaderStruct, depth: int) -> None:
- fs = open(filename, "r")
- line = fs.readline()
-
- while line:
- while line.find("#include ") != -1:
- includeline = line.replace("#include ", "").strip()[1:-1]
+ with open(filename, "r") as fs:
+ line = fs.readline()
- included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
- include_file_in_raw_header(included_file, header_data, depth + 1)
+ while line:
+ while line.find("#include ") != -1:
+ includeline = line.replace("#include ", "").strip()[1:-1]
- line = fs.readline()
+ included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
+ include_file_in_raw_header(included_file, header_data, depth + 1)
- header_data.code += line
- line = fs.readline()
+ line = fs.readline()
- fs.close()
+ header_data.code += line
+ line = fs.readline()
def build_raw_header(
diff --git a/methods.py b/methods.py
index 81eda7a3f1..948574e18a 100644
--- a/methods.py
+++ b/methods.py
@@ -179,12 +179,14 @@ def get_version_info(module_version_string="", silent=False):
gitfolder = ".git"
if os.path.isfile(".git"):
- module_folder = open(".git", "r").readline().strip()
+ with open(".git", "r") as file:
+ module_folder = file.readline().strip()
if module_folder.startswith("gitdir: "):
gitfolder = module_folder[8:]
if os.path.isfile(os.path.join(gitfolder, "HEAD")):
- head = open(os.path.join(gitfolder, "HEAD"), "r", encoding="utf8").readline().strip()
+ with open(os.path.join(gitfolder, "HEAD"), "r", encoding="utf8") as file:
+ head = file.readline().strip()
if head.startswith("ref: "):
ref = head[5:]
# If this directory is a Git worktree instead of a root clone.
@@ -194,7 +196,8 @@ def get_version_info(module_version_string="", silent=False):
head = os.path.join(gitfolder, ref)
packedrefs = os.path.join(gitfolder, "packed-refs")
if os.path.isfile(head):
- githash = open(head, "r").readline().strip()
+ with open(head, "r") as file:
+ githash = file.readline().strip()
elif os.path.isfile(packedrefs):
# Git may pack refs into a single file. This code searches .git/packed-refs file for the current ref's hash.
# https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-pack-refs.html
@@ -230,9 +233,10 @@ def generate_version_header(module_version_string=""):
# NOTE: It is safe to generate these files here, since this is still executed serially.
- f = open("core/version_generated.gen.h", "w", encoding="utf-8", newline="\n")
- f.write(
- """/* THIS FILE IS GENERATED DO NOT EDIT */
+ with open("core/version_generated.gen.h", "w", encoding="utf-8", newline="\n") as f:
+ f.write(
+ """\
+/* THIS FILE IS GENERATED DO NOT EDIT */
#ifndef VERSION_GENERATED_GEN_H
#define VERSION_GENERATED_GEN_H
#define VERSION_SHORT_NAME "{short_name}"
@@ -248,52 +252,49 @@ def generate_version_header(module_version_string=""):
#define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH
#endif // VERSION_GENERATED_GEN_H
""".format(
- **version_info
+ **version_info
+ )
)
- )
- f.close()
- fhash = open("core/version_hash.gen.cpp", "w", encoding="utf-8", newline="\n")
- fhash.write(
- """/* THIS FILE IS GENERATED DO NOT EDIT */
+ with open("core/version_hash.gen.cpp", "w", encoding="utf-8", newline="\n") as fhash:
+ fhash.write(
+ """\
+/* THIS FILE IS GENERATED DO NOT EDIT */
#include "core/version.h"
const char *const VERSION_HASH = "{git_hash}";
const uint64_t VERSION_TIMESTAMP = {git_timestamp};
""".format(
- **version_info
+ **version_info
+ )
)
- )
- fhash.close()
def parse_cg_file(fname, uniforms, sizes, conditionals):
- fs = open(fname, "r")
- line = fs.readline()
+ with open(fname, "r") as fs:
+ line = fs.readline()
- while line:
- if re.match(r"^\s*uniform", line):
- res = re.match(r"uniform ([\d\w]*) ([\d\w]*)")
- type = res.groups(1)
- name = res.groups(2)
+ while line:
+ if re.match(r"^\s*uniform", line):
+ res = re.match(r"uniform ([\d\w]*) ([\d\w]*)")
+ type = res.groups(1)
+ name = res.groups(2)
- uniforms.append(name)
+ uniforms.append(name)
- if type.find("texobj") != -1:
- sizes.append(1)
- else:
- t = re.match(r"float(\d)x(\d)", type)
- if t:
- sizes.append(int(t.groups(1)) * int(t.groups(2)))
+ if type.find("texobj") != -1:
+ sizes.append(1)
else:
- t = re.match(r"float(\d)", type)
- sizes.append(int(t.groups(1)))
+ t = re.match(r"float(\d)x(\d)", type)
+ if t:
+ sizes.append(int(t.groups(1)) * int(t.groups(2)))
+ else:
+ t = re.match(r"float(\d)", type)
+ sizes.append(int(t.groups(1)))
- if line.find("[branch]") != -1:
- conditionals.append(name)
-
- line = fs.readline()
+ if line.find("[branch]") != -1:
+ conditionals.append(name)
- fs.close()
+ line = fs.readline()
def get_cmdline_bool(option, default):
@@ -384,15 +385,15 @@ def is_module(path):
def write_disabled_classes(class_list):
- f = open("core/disabled_classes.gen.h", "w", encoding="utf-8", newline="\n")
- f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- f.write("#ifndef DISABLED_CLASSES_GEN_H\n")
- f.write("#define DISABLED_CLASSES_GEN_H\n\n")
- for c in class_list:
- cs = c.strip()
- if cs != "":
- f.write("#define ClassDB_Disable_" + cs + " 1\n")
- f.write("\n#endif\n")
+ with open("core/disabled_classes.gen.h", "w", encoding="utf-8", newline="\n") as f:
+ f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ f.write("#ifndef DISABLED_CLASSES_GEN_H\n")
+ f.write("#define DISABLED_CLASSES_GEN_H\n\n")
+ for c in class_list:
+ cs = c.strip()
+ if cs != "":
+ f.write("#define ClassDB_Disable_" + cs + " 1\n")
+ f.write("\n#endif\n")
def write_modules(modules):
@@ -1246,7 +1247,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
).hexdigest()
if os.path.exists(f"{project_name}.vcxproj.filters"):
- existing_filters = open(f"{project_name}.vcxproj.filters", "r").read()
+ with open(f"{project_name}.vcxproj.filters", "r") as file:
+ existing_filters = file.read()
match = re.search(r"(?ms)^<!-- CHECKSUM$.([0-9a-f]{32})", existing_filters)
if match is not None and md5 == match.group(1):
skip_filters = True
@@ -1257,7 +1259,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
if not skip_filters:
print(f"Regenerating {project_name}.vcxproj.filters")
- filters_template = open("misc/msvs/vcxproj.filters.template", "r").read()
+ with open("misc/msvs/vcxproj.filters.template", "r") as file:
+ filters_template = file.read()
for i in range(1, 10):
filters_template = filters_template.replace(f"%%UUID{i}%%", str(uuid.uuid4()))
@@ -1410,7 +1413,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
)
output = f'bin\\godot{env["PROGSUFFIX"]}'
- props_template = open("misc/msvs/props.template", "r").read()
+ with open("misc/msvs/props.template", "r") as file:
+ props_template = file.read()
props_template = props_template.replace("%%VSCONF%%", vsconf)
props_template = props_template.replace("%%CONDITION%%", condition)
@@ -1567,7 +1571,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
section2 = sorted(section2)
if not get_bool(original_args, "vsproj_props_only", False):
- proj_template = open("misc/msvs/vcxproj.template", "r").read()
+ with open("misc/msvs/vcxproj.template", "r") as file:
+ proj_template = file.read()
proj_template = proj_template.replace("%%UUID%%", proj_uuid)
proj_template = proj_template.replace("%%CONFS%%", "\n ".join(configurations))
proj_template = proj_template.replace("%%IMPORTS%%", "\n ".join(imports))
@@ -1578,7 +1583,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
f.write(proj_template)
if not get_bool(original_args, "vsproj_props_only", False):
- sln_template = open("misc/msvs/sln.template", "r").read()
+ with open("misc/msvs/sln.template", "r") as file:
+ sln_template = file.read()
sln_template = sln_template.replace("%%NAME%%", project_name)
sln_template = sln_template.replace("%%UUID%%", proj_uuid)
sln_template = sln_template.replace("%%SLNUUID%%", sln_uuid)
diff --git a/misc/scripts/check_ci_log.py b/misc/scripts/check_ci_log.py
index 1e5a12eeb4..d979d373de 100755
--- a/misc/scripts/check_ci_log.py
+++ b/misc/scripts/check_ci_log.py
@@ -9,8 +9,8 @@ if len(sys.argv) < 2:
fname = sys.argv[1]
-fileread = open(fname.strip(), "r")
-file_contents = fileread.read()
+with open(fname.strip(), "r") as fileread:
+ file_contents = fileread.read()
# If find "ERROR: AddressSanitizer:", then happens invalid read or write
# This is critical bug, so we need to fix this as fast as possible
diff --git a/misc/scripts/copyright_headers.py b/misc/scripts/copyright_headers.py
index 8fb793976c..b60eb32289 100755
--- a/misc/scripts/copyright_headers.py
+++ b/misc/scripts/copyright_headers.py
@@ -65,31 +65,28 @@ text += "\n"
# In a second pass, we skip all consecutive comment lines starting with "/*",
# then we can append the rest (step 2).
-fileread = open(fname.strip(), "r")
-line = fileread.readline()
-header_done = False
-
-while line.strip() == "": # Skip empty lines at the top
+with open(fname.strip(), "r") as fileread:
line = fileread.readline()
+ header_done = False
-if line.find("/**********") == -1: # Godot header starts this way
- # Maybe starting with a non-Godot comment, abort header magic
- header_done = True
+ while line.strip() == "": # Skip empty lines at the top
+ line = fileread.readline()
-while not header_done: # Handle header now
- if line.find("/*") != 0: # No more starting with a comment
+ if line.find("/**********") == -1: # Godot header starts this way
+ # Maybe starting with a non-Godot comment, abort header magic
header_done = True
- if line.strip() != "":
- text += line
- line = fileread.readline()
-while line != "": # Dump everything until EOF
- text += line
- line = fileread.readline()
+ while not header_done: # Handle header now
+ if line.find("/*") != 0: # No more starting with a comment
+ header_done = True
+ if line.strip() != "":
+ text += line
+ line = fileread.readline()
-fileread.close()
+ while line != "": # Dump everything until EOF
+ text += line
+ line = fileread.readline()
# Write
-filewrite = open(fname.strip(), "w", encoding="utf-8", newline="\n")
-filewrite.write(text)
-filewrite.close()
+with open(fname.strip(), "w", encoding="utf-8", newline="\n") as filewrite:
+ filewrite.write(text)
diff --git a/modules/mono/build_scripts/build_assemblies.py b/modules/mono/build_scripts/build_assemblies.py
index 9ed87c7a8c..b3f583b1b8 100755
--- a/modules/mono/build_scripts/build_assemblies.py
+++ b/modules/mono/build_scripts/build_assemblies.py
@@ -314,7 +314,6 @@ def generate_sdk_package_versions():
# We write in ../SdkPackageVersions.props.
with open(os.path.join(dirname(script_path), "SdkPackageVersions.props"), "w", encoding="utf-8", newline="\n") as f:
f.write(props)
- f.close()
# Also write the versioned docs URL to a constant for the Source Generators.
@@ -342,7 +341,6 @@ def generate_sdk_package_versions():
with open(os.path.join(generators_dir, "Common.Constants.cs"), "w", encoding="utf-8", newline="\n") as f:
f.write(constants)
- f.close()
def build_all(msbuild_tool, module_dir, output_dir, godot_platform, dev_debug, push_nupkgs_local, precision):
diff --git a/modules/text_server_adv/SCsub b/modules/text_server_adv/SCsub
index 79950eaac3..6b53353ec9 100644
--- a/modules/text_server_adv/SCsub
+++ b/modules/text_server_adv/SCsub
@@ -9,27 +9,26 @@ env_text_server_adv = env_modules.Clone()
def make_icu_data(target, source, env):
dst = target[0].srcnode().abspath
- g = open(dst, "w", encoding="utf-8", newline="\n")
-
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
- g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
- g.write("#ifndef _ICU_DATA_H\n")
- g.write("#define _ICU_DATA_H\n")
- g.write('#include "unicode/utypes.h"\n')
- g.write('#include "unicode/udata.h"\n')
- g.write('#include "unicode/uversion.h"\n')
-
- f = open(source[0].srcnode().abspath, "rb")
- buf = f.read()
-
- g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
- g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
- for i in range(len(buf)):
- g.write("\t" + str(buf[i]) + ",\n")
-
- g.write("};\n")
- g.write("#endif")
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
+ g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
+ g.write("#ifndef _ICU_DATA_H\n")
+ g.write("#define _ICU_DATA_H\n")
+ g.write('#include "unicode/utypes.h"\n')
+ g.write('#include "unicode/udata.h"\n')
+ g.write('#include "unicode/uversion.h"\n')
+
+ with open(source[0].srcnode().abspath, "rb") as f:
+ buf = f.read()
+
+ g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
+ g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
+ for i in range(len(buf)):
+ g.write("\t" + str(buf[i]) + ",\n")
+
+ g.write("};\n")
+ g.write("#endif")
# Thirdparty source files
diff --git a/modules/text_server_adv/gdextension_build/methods.py b/modules/text_server_adv/gdextension_build/methods.py
index 8456149973..32dbc59fd4 100644
--- a/modules/text_server_adv/gdextension_build/methods.py
+++ b/modules/text_server_adv/gdextension_build/methods.py
@@ -83,56 +83,56 @@ def disable_warnings(self):
def make_icu_data(target, source, env):
dst = target[0].srcnode().abspath
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
+ g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
+ g.write("#ifndef _ICU_DATA_H\n")
+ g.write("#define _ICU_DATA_H\n")
+ g.write('#include "unicode/utypes.h"\n')
+ g.write('#include "unicode/udata.h"\n')
+ g.write('#include "unicode/uversion.h"\n')
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
- g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
- g.write("#ifndef _ICU_DATA_H\n")
- g.write("#define _ICU_DATA_H\n")
- g.write('#include "unicode/utypes.h"\n')
- g.write('#include "unicode/udata.h"\n')
- g.write('#include "unicode/uversion.h"\n')
+ with open(source[0].srcnode().abspath, "rb") as f:
+ buf = f.read()
- f = open(source[0].srcnode().abspath, "rb")
- buf = f.read()
+ g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
+ g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
+ for i in range(len(buf)):
+ g.write("\t" + str(buf[i]) + ",\n")
- g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
- g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
- for i in range(len(buf)):
- g.write("\t" + str(buf[i]) + ",\n")
-
- g.write("};\n")
- g.write("#endif")
+ g.write("};\n")
+ g.write("#endif")
def write_macos_plist(target, binary_name, identifier, name):
os.makedirs(f"{target}/Resource/", exist_ok=True)
- f = open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n")
-
- f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
- f.write(f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n')
- f.write(f'<plist version="1.0">\n')
- f.write(f"<dict>\n")
- f.write(f"\t<key>CFBundleExecutable</key>\n")
- f.write(f"\t<string>{binary_name}</string>\n")
- f.write(f"\t<key>CFBundleIdentifier</key>\n")
- f.write(f"\t<string>{identifier}</string>\n")
- f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
- f.write(f"\t<string>6.0</string>\n")
- f.write(f"\t<key>CFBundleName</key>\n")
- f.write(f"\t<string>{name}</string>\n")
- f.write(f"\t<key>CFBundlePackageType</key>\n")
- f.write(f"\t<string>FMWK</string>\n")
- f.write(f"\t<key>CFBundleShortVersionString</key>\n")
- f.write(f"\t<string>1.0.0</string>\n")
- f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
- f.write(f"\t<array>\n")
- f.write(f"\t\t<string>MacOSX</string>\n")
- f.write(f"\t</array>\n")
- f.write(f"\t<key>CFBundleVersion</key>\n")
- f.write(f"\t<string>1.0.0</string>\n")
- f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
- f.write(f"\t<string>10.14</string>\n")
- f.write(f"</dict>\n")
- f.write(f"</plist>\n")
+ with open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n") as f:
+ f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
+ f.write(
+ f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n'
+ )
+ f.write(f'<plist version="1.0">\n')
+ f.write(f"<dict>\n")
+ f.write(f"\t<key>CFBundleExecutable</key>\n")
+ f.write(f"\t<string>{binary_name}</string>\n")
+ f.write(f"\t<key>CFBundleIdentifier</key>\n")
+ f.write(f"\t<string>{identifier}</string>\n")
+ f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
+ f.write(f"\t<string>6.0</string>\n")
+ f.write(f"\t<key>CFBundleName</key>\n")
+ f.write(f"\t<string>{name}</string>\n")
+ f.write(f"\t<key>CFBundlePackageType</key>\n")
+ f.write(f"\t<string>FMWK</string>\n")
+ f.write(f"\t<key>CFBundleShortVersionString</key>\n")
+ f.write(f"\t<string>1.0.0</string>\n")
+ f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
+ f.write(f"\t<array>\n")
+ f.write(f"\t\t<string>MacOSX</string>\n")
+ f.write(f"\t</array>\n")
+ f.write(f"\t<key>CFBundleVersion</key>\n")
+ f.write(f"\t<string>1.0.0</string>\n")
+ f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
+ f.write(f"\t<string>10.14</string>\n")
+ f.write(f"</dict>\n")
+ f.write(f"</plist>\n")
diff --git a/modules/text_server_fb/gdextension_build/methods.py b/modules/text_server_fb/gdextension_build/methods.py
index 8456149973..32dbc59fd4 100644
--- a/modules/text_server_fb/gdextension_build/methods.py
+++ b/modules/text_server_fb/gdextension_build/methods.py
@@ -83,56 +83,56 @@ def disable_warnings(self):
def make_icu_data(target, source, env):
dst = target[0].srcnode().abspath
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
+ g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
+ g.write("#ifndef _ICU_DATA_H\n")
+ g.write("#define _ICU_DATA_H\n")
+ g.write('#include "unicode/utypes.h"\n')
+ g.write('#include "unicode/udata.h"\n')
+ g.write('#include "unicode/uversion.h"\n')
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
- g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
- g.write("#ifndef _ICU_DATA_H\n")
- g.write("#define _ICU_DATA_H\n")
- g.write('#include "unicode/utypes.h"\n')
- g.write('#include "unicode/udata.h"\n')
- g.write('#include "unicode/uversion.h"\n')
+ with open(source[0].srcnode().abspath, "rb") as f:
+ buf = f.read()
- f = open(source[0].srcnode().abspath, "rb")
- buf = f.read()
+ g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
+ g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
+ for i in range(len(buf)):
+ g.write("\t" + str(buf[i]) + ",\n")
- g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
- g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
- for i in range(len(buf)):
- g.write("\t" + str(buf[i]) + ",\n")
-
- g.write("};\n")
- g.write("#endif")
+ g.write("};\n")
+ g.write("#endif")
def write_macos_plist(target, binary_name, identifier, name):
os.makedirs(f"{target}/Resource/", exist_ok=True)
- f = open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n")
-
- f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
- f.write(f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n')
- f.write(f'<plist version="1.0">\n')
- f.write(f"<dict>\n")
- f.write(f"\t<key>CFBundleExecutable</key>\n")
- f.write(f"\t<string>{binary_name}</string>\n")
- f.write(f"\t<key>CFBundleIdentifier</key>\n")
- f.write(f"\t<string>{identifier}</string>\n")
- f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
- f.write(f"\t<string>6.0</string>\n")
- f.write(f"\t<key>CFBundleName</key>\n")
- f.write(f"\t<string>{name}</string>\n")
- f.write(f"\t<key>CFBundlePackageType</key>\n")
- f.write(f"\t<string>FMWK</string>\n")
- f.write(f"\t<key>CFBundleShortVersionString</key>\n")
- f.write(f"\t<string>1.0.0</string>\n")
- f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
- f.write(f"\t<array>\n")
- f.write(f"\t\t<string>MacOSX</string>\n")
- f.write(f"\t</array>\n")
- f.write(f"\t<key>CFBundleVersion</key>\n")
- f.write(f"\t<string>1.0.0</string>\n")
- f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
- f.write(f"\t<string>10.14</string>\n")
- f.write(f"</dict>\n")
- f.write(f"</plist>\n")
+ with open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n") as f:
+ f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
+ f.write(
+ f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n'
+ )
+ f.write(f'<plist version="1.0">\n')
+ f.write(f"<dict>\n")
+ f.write(f"\t<key>CFBundleExecutable</key>\n")
+ f.write(f"\t<string>{binary_name}</string>\n")
+ f.write(f"\t<key>CFBundleIdentifier</key>\n")
+ f.write(f"\t<string>{identifier}</string>\n")
+ f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
+ f.write(f"\t<string>6.0</string>\n")
+ f.write(f"\t<key>CFBundleName</key>\n")
+ f.write(f"\t<string>{name}</string>\n")
+ f.write(f"\t<key>CFBundlePackageType</key>\n")
+ f.write(f"\t<string>FMWK</string>\n")
+ f.write(f"\t<key>CFBundleShortVersionString</key>\n")
+ f.write(f"\t<string>1.0.0</string>\n")
+ f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
+ f.write(f"\t<array>\n")
+ f.write(f"\t\t<string>MacOSX</string>\n")
+ f.write(f"\t</array>\n")
+ f.write(f"\t<key>CFBundleVersion</key>\n")
+ f.write(f"\t<string>1.0.0</string>\n")
+ f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
+ f.write(f"\t<string>10.14</string>\n")
+ f.write(f"</dict>\n")
+ f.write(f"</plist>\n")
diff --git a/platform_methods.py b/platform_methods.py
index 91c1388288..43e6e4f799 100644
--- a/platform_methods.py
+++ b/platform_methods.py
@@ -124,17 +124,15 @@ def generate_export_icons(platform_path, platform_name):
svg_names.append("run_icon")
for name in svg_names:
- svgf = open(export_path + "/" + name + ".svg", "rb")
- b = svgf.read(1)
- svg_str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
- svg_str += " static const char *_" + platform_name + "_" + name + '_svg = "'
- while len(b) == 1:
- svg_str += "\\" + hex(ord(b))[1:]
+ with open(export_path + "/" + name + ".svg", "rb") as svgf:
b = svgf.read(1)
+ svg_str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
+ svg_str += " static const char *_" + platform_name + "_" + name + '_svg = "'
+ while len(b) == 1:
+ svg_str += "\\" + hex(ord(b))[1:]
+ b = svgf.read(1)
- svg_str += '";\n'
-
- svgf.close()
+ svg_str += '";\n'
# NOTE: It is safe to generate this file here, since this is still executed serially.
wf = export_path + "/" + name + "_svg.gen.h"
diff --git a/scene/theme/default_theme_builders.py b/scene/theme/default_theme_builders.py
index 02466f52df..3def1054b6 100644
--- a/scene/theme/default_theme_builders.py
+++ b/scene/theme/default_theme_builders.py
@@ -13,29 +13,26 @@ from platform_methods import subprocess_main
def make_fonts_header(target, source, env):
dst = target[0]
- g = open(dst, "w", encoding="utf-8", newline="\n")
+ with open(dst, "w", encoding="utf-8", newline="\n") as g:
+ g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ g.write("#ifndef _DEFAULT_FONTS_H\n")
+ g.write("#define _DEFAULT_FONTS_H\n")
- g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#ifndef _DEFAULT_FONTS_H\n")
- g.write("#define _DEFAULT_FONTS_H\n")
+ # Saving uncompressed, since FreeType will reference from memory pointer.
+ for i in range(len(source)):
+ with open(source[i], "rb") as f:
+ buf = f.read()
- # Saving uncompressed, since FreeType will reference from memory pointer.
- for i in range(len(source)):
- with open(source[i], "rb") as f:
- buf = f.read()
+ name = os.path.splitext(os.path.basename(source[i]))[0]
- name = os.path.splitext(os.path.basename(source[i]))[0]
+ g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
+ g.write("static const unsigned char _font_" + name + "[] = {\n")
+ for j in range(len(buf)):
+ g.write("\t" + str(buf[j]) + ",\n")
- g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
- g.write("static const unsigned char _font_" + name + "[] = {\n")
- for j in range(len(buf)):
- g.write("\t" + str(buf[j]) + ",\n")
+ g.write("};\n")
- g.write("};\n")
-
- g.write("#endif")
-
- g.close()
+ g.write("#endif")
if __name__ == "__main__":
diff --git a/scene/theme/icons/default_theme_icons_builders.py b/scene/theme/icons/default_theme_icons_builders.py
index 12347cc58b..ecce0eea61 100644
--- a/scene/theme/icons/default_theme_icons_builders.py
+++ b/scene/theme/icons/default_theme_icons_builders.py
@@ -15,61 +15,56 @@ def make_default_theme_icons_action(target, source, env):
dst = target[0]
svg_icons = source
- icons_string = StringIO()
+ with StringIO() as icons_string, StringIO() as s:
+ for f in svg_icons:
+ fname = str(f)
- for f in svg_icons:
- fname = str(f)
+ icons_string.write('\t"')
- icons_string.write('\t"')
-
- with open(fname, "rb") as svgf:
- b = svgf.read(1)
- while len(b) == 1:
- icons_string.write("\\" + str(hex(ord(b)))[1:])
+ with open(fname, "rb") as svgf:
b = svgf.read(1)
+ while len(b) == 1:
+ icons_string.write("\\" + str(hex(ord(b)))[1:])
+ b = svgf.read(1)
- icons_string.write('"')
- if fname != svg_icons[-1]:
- icons_string.write(",")
- icons_string.write("\n")
-
- s = StringIO()
- s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
- s.write('#include "modules/modules_enabled.gen.h"\n\n')
- s.write("#ifndef _DEFAULT_THEME_ICONS_H\n")
- s.write("#define _DEFAULT_THEME_ICONS_H\n")
- s.write("static const int default_theme_icons_count = {};\n\n".format(len(svg_icons)))
- s.write("#ifdef MODULE_SVG_ENABLED\n")
- s.write("static const char *default_theme_icons_sources[] = {\n")
- s.write(icons_string.getvalue())
- s.write("};\n")
- s.write("#endif // MODULE_SVG_ENABLED\n\n")
- s.write("static const char *default_theme_icons_names[] = {\n")
+ icons_string.write('"')
+ if fname != svg_icons[-1]:
+ icons_string.write(",")
+ icons_string.write("\n")
- index = 0
- for f in svg_icons:
- fname = str(f)
+ s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
+ s.write('#include "modules/modules_enabled.gen.h"\n\n')
+ s.write("#ifndef _DEFAULT_THEME_ICONS_H\n")
+ s.write("#define _DEFAULT_THEME_ICONS_H\n")
+ s.write("static const int default_theme_icons_count = {};\n\n".format(len(svg_icons)))
+ s.write("#ifdef MODULE_SVG_ENABLED\n")
+ s.write("static const char *default_theme_icons_sources[] = {\n")
+ s.write(icons_string.getvalue())
+ s.write("};\n")
+ s.write("#endif // MODULE_SVG_ENABLED\n\n")
+ s.write("static const char *default_theme_icons_names[] = {\n")
- # Trim the `.svg` extension from the string.
- icon_name = os.path.basename(fname)[:-4]
+ index = 0
+ for f in svg_icons:
+ fname = str(f)
- s.write('\t"{0}"'.format(icon_name))
+ # Trim the `.svg` extension from the string.
+ icon_name = os.path.basename(fname)[:-4]
- if fname != svg_icons[-1]:
- s.write(",")
- s.write("\n")
+ s.write('\t"{0}"'.format(icon_name))
- index += 1
+ if fname != svg_icons[-1]:
+ s.write(",")
+ s.write("\n")
- s.write("};\n")
+ index += 1
- s.write("#endif\n")
+ s.write("};\n")
- with open(dst, "w", encoding="utf-8", newline="\n") as f:
- f.write(s.getvalue())
+ s.write("#endif\n")
- s.close()
- icons_string.close()
+ with open(dst, "w", encoding="utf-8", newline="\n") as f:
+ f.write(s.getvalue())
if __name__ == "__main__":