From a5cf92664db9633b9fb1307a801d99de70d09aba Mon Sep 17 00:00:00 2001
From: Riteo
Date: Wed, 13 Mar 2024 17:54:14 +0100
Subject: [PATCH 1/2] SCons: unify code generation routines and minimize
 timestamp changes

Previously, the code generation routines would rewrite their output files
on every build, even when nothing in them had changed.

This became a problem with the advent of the experimental ninja backend for
SCons, which can be trivially enabled with a few lines of code but relies on
timestamp changes, so the constant rewrites made it impractical.
---
 core/SCsub          |  9 ++++---
 editor/SCsub        | 28 +++++++++++----------
 methods.py          | 60 +++++++++++++++++++++++++--------------------
 platform/SCsub      |  9 +++----
 platform_methods.py |  5 ++--
 scu_builders.py     | 53 +++++++++++++++++++++++++--------------
 6 files changed, 96 insertions(+), 68 deletions(-)

diff --git a/core/SCsub b/core/SCsub
index 7edf8ea88d8..f7b733a2212 100644
--- a/core/SCsub
+++ b/core/SCsub
@@ -3,6 +3,7 @@
 Import("env")

 import core_builders
+import methods

 env.core_sources = []

@@ -35,10 +36,12 @@ if "SCRIPT_AES256_ENCRYPTION_KEY" in os.environ:
         )
         Exit(255)

-# NOTE: It is safe to generate this file here, since this is still executed serially
-with open("script_encryption_key.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-    f.write('#include "core/config/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n")
+script_encryption_key_contents = (
+    '#include "core/config/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n"
+)
+
+methods.write_file_if_needed("script_encryption_key.gen.cpp", script_encryption_key_contents)

 # Add required thirdparty code.

diff --git a/editor/SCsub b/editor/SCsub
index 442d0a3b753..f6d2f58d8eb 100644
--- a/editor/SCsub
+++ b/editor/SCsub
@@ -7,19 +7,24 @@ env.editor_sources = []
 import os
 import glob
 import editor_builders
+import methods


 def _make_doc_data_class_path(to_path):
-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open(os.path.join(to_path, "doc_data_class_path.gen.h"), "w", encoding="utf-8", newline="\n") as g:
-        g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
-        g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
+    file_path = os.path.join(to_path, "doc_data_class_path.gen.h")

-        g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
-        for c in sorted(env.doc_class_path):
-            g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
-        g.write("\t{nullptr, nullptr}\n")
-        g.write("};\n")
+    class_path_data = ""
+    class_path_data += "static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n"
+    class_path_data += "struct _DocDataClassPath { const char* name; const char* path; };\n"
+    class_path_data += (
+        "static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n"
+    )
+    for c in sorted(env.doc_class_path):
+        class_path_data += '\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n'
+    class_path_data += "\t{nullptr, nullptr}\n"
+    class_path_data += "};\n"
+
+    methods.write_file_if_needed(file_path, class_path_data)


 if env.editor_build:
@@ -38,10 +43,7 @@ if env.editor_build:
         reg_exporters += "\tregister_" + e + "_exporter_types();\n"
     reg_exporters += "}\n"

-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open("register_exporters.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-        f.write(reg_exporters_inc)
-        f.write(reg_exporters)
+    methods.write_file_if_needed("register_exporters.gen.cpp", reg_exporters_inc + reg_exporters)

 # Core API documentation.
 docs = []
diff --git a/methods.py b/methods.py
index 5aa34888eb0..6cd944c6b0b 100644
--- a/methods.py
+++ b/methods.py
@@ -228,14 +228,22 @@ def get_version_info(module_version_string="", silent=False):
     return version_info


+def write_file_if_needed(path, string):
+    try:
+        with open(path, "r", encoding="utf-8", newline="\n") as f:
+            if f.read() == string:
+                return
+    except FileNotFoundError:
+        pass
+
+    with open(path, "w", encoding="utf-8", newline="\n") as f:
+        f.write(string)
+
+
 def generate_version_header(module_version_string=""):
     version_info = get_version_info(module_version_string)

-    # NOTE: It is safe to generate these files here, since this is still executed serially.
-
-    with open("core/version_generated.gen.h", "w", encoding="utf-8", newline="\n") as f:
-        f.write(
-            """\
+    version_info_header = """\
 /* THIS FILE IS GENERATED DO NOT EDIT */
 #ifndef VERSION_GENERATED_GEN_H
 #define VERSION_GENERATED_GEN_H
@@ -252,21 +260,20 @@ def generate_version_header(module_version_string=""):
 #define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH

 #endif // VERSION_GENERATED_GEN_H
 """.format(
-                **version_info
-            )
-        )
+        **version_info
+    )

-    with open("core/version_hash.gen.cpp", "w", encoding="utf-8", newline="\n") as fhash:
-        fhash.write(
-            """\
+    version_hash_data = """\
 /* THIS FILE IS GENERATED DO NOT EDIT */
 #include "core/version.h"
 const char *const VERSION_HASH = "{git_hash}";
 const uint64_t VERSION_TIMESTAMP = {git_timestamp};
 """.format(
-                **version_info
-            )
-        )
+        **version_info
+    )
+
+    write_file_if_needed("core/version_generated.gen.h", version_info_header)
+    write_file_if_needed("core/version_hash.gen.cpp", version_hash_data)


 def parse_cg_file(fname, uniforms, sizes, conditionals):
@@ -385,15 +392,18 @@ def is_module(path):


 def write_disabled_classes(class_list):
-    with open("core/disabled_classes.gen.h", "w", encoding="utf-8", newline="\n") as f:
-        f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-        f.write("#ifndef DISABLED_CLASSES_GEN_H\n")
-        f.write("#define DISABLED_CLASSES_GEN_H\n\n")
-        for c in class_list:
-            cs = c.strip()
-            if cs != "":
-                f.write("#define ClassDB_Disable_" + cs + " 1\n")
-        f.write("\n#endif\n")
+    file_contents = ""
+
+    file_contents += "/* THIS FILE IS GENERATED DO NOT EDIT */\n"
+    file_contents += "#ifndef DISABLED_CLASSES_GEN_H\n"
+    file_contents += "#define DISABLED_CLASSES_GEN_H\n\n"
+    for c in class_list:
+        cs = c.strip()
+        if cs != "":
+            file_contents += "#define ClassDB_Disable_" + cs + " 1\n"
+    file_contents += "\n#endif\n"
+
+    write_file_if_needed("core/disabled_classes.gen.h", file_contents)


 def write_modules(modules):
@@ -435,9 +445,7 @@ void uninitialize_modules(ModuleInitializationLevel p_level) {
         uninitialize_cpp,
     )

-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open("modules/register_module_types.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-        f.write(modules_cpp)
+    write_file_if_needed("modules/register_module_types.gen.cpp", modules_cpp)


 def convert_custom_modules_path(path):
diff --git a/platform/SCsub b/platform/SCsub
index e432cebd48b..ca282e3e684 100644
--- a/platform/SCsub
+++ b/platform/SCsub
@@ -1,5 +1,7 @@
 #!/usr/bin/env python

+import methods
+
 Import("env")

 env.platform_sources = []
@@ -18,12 +20,7 @@ reg_apis_inc += "\n"
 reg_apis += "}\n\n"
 unreg_apis += "}\n"

-# NOTE: It is safe to generate this file here, since this is still execute serially
-with open("register_platform_apis.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-    f.write(reg_apis_inc)
-    f.write(reg_apis)
-    f.write(unreg_apis)
-
+methods.write_file_if_needed("register_platform_apis.gen.cpp", reg_apis_inc + reg_apis + unreg_apis)
 env.add_source_files(env.platform_sources, "register_platform_apis.gen.cpp")

 lib = env.add_library("platform", env.platform_sources)
diff --git a/platform_methods.py b/platform_methods.py
index 37fc8a83ed8..92aefcc648b 100644
--- a/platform_methods.py
+++ b/platform_methods.py
@@ -5,6 +5,7 @@ import platform
 import uuid
 import functools
 import subprocess
+import methods

 # NOTE: The multiprocessing module is not compatible with SCons due to conflict on cPickle

@@ -67,8 +68,8 @@ def generate_export_icons(platform_path, platform_name):

     # NOTE: It is safe to generate this file here, since this is still executed serially.
     wf = export_path + "/" + name + "_svg.gen.h"
-    with open(wf, "w", encoding="utf-8", newline="\n") as svgw:
-        svgw.write(svg_str)
+
+    methods.write_file_if_needed(wf, svg_str)


 def get_build_version(short):
diff --git a/scu_builders.py b/scu_builders.py
index 0435c0a4f55..b180cbc8647 100644
--- a/scu_builders.py
+++ b/scu_builders.py
@@ -13,9 +13,9 @@ _scu_folders = set()
 _max_includes_per_scu = 1024


-def clear_out_existing_files(output_folder, extension):
+def clear_out_stale_files(output_folder, extension, fresh_files):
     output_folder = os.path.abspath(output_folder)
-    # print("clear_out_existing_files from folder: " + output_folder)
+    # print("clear_out_stale_files from folder: " + output_folder)

     if not os.path.isdir(output_folder):
         # folder does not exist or has not been created yet,
@@ -23,8 +23,9 @@
         return

     for file in glob.glob(output_folder + "/*." + extension):
-        # print("removed pre-existing file: " + file)
-        os.remove(file)
+        if not file in fresh_files:
+            # print("removed stale file: " + file)
+            os.remove(file)


 def folder_not_found(folder):
@@ -87,11 +88,16 @@ def write_output_file(file_count, include_list, start_line, end_line, output_fol

     short_filename = output_filename_prefix + num_string + ".gen." + extension
     output_filename = output_folder + "/" + short_filename

-    if _verbose:
-        print("SCU: Generating: %s" % short_filename)
-
     output_path = Path(output_filename)
-    output_path.write_text(file_text, encoding="utf8")
+
+    if not output_path.exists() or output_path.read_text() != file_text:
+        if _verbose:
+            print("SCU: Generating: %s" % short_filename)
+        output_path.write_text(file_text, encoding="utf8")
+    elif _verbose:
+        print("SCU: Generation not needed for: " + short_filename)
+
+    return output_filename


 def write_exception_output_file(file_count, exception_string, output_folder, output_filename_prefix, extension):
@@ -109,11 +115,16 @@ def write_exception_output_file(file_count, exception_string, output_folder, out

     short_filename = output_filename_prefix + "_exception" + num_string + ".gen." + extension
     output_filename = output_folder + "/" + short_filename

-    if _verbose:
-        print("SCU: Generating: " + short_filename)
-
     output_path = Path(output_filename)
-    output_path.write_text(file_text, encoding="utf8")
+
+    if not output_path.exists() or output_path.read_text() != file_text:
+        if _verbose:
+            print("SCU: Generating: " + short_filename)
+        output_path.write_text(file_text, encoding="utf8")
+    elif _verbose:
+        print("SCU: Generation not needed for: " + short_filename)
+
+    return output_filename


 def find_section_name(sub_folder):
@@ -214,10 +225,7 @@ def process_folder(folders, sought_exceptions=[], includes_per_scu=0, extension=
     output_folder = abs_main_folder + "/scu/"
     output_filename_prefix = "scu_" + out_filename

-    # Clear out any existing files (usually we will be overwriting,
-    # but we want to remove any that are pre-existing that will not be
-    # overwritten, so as to not compile anything stale)
-    clear_out_existing_files(output_folder, extension)
+    fresh_files = set()

     for file_count in range(0, num_output_files):
         end_line = start_line + lines_per_file
@@ -226,19 +234,28 @@ def process_folder(folders, sought_exceptions=[], includes_per_scu=0, extension=
         if file_count == (num_output_files - 1):
             end_line = len(found_includes)

-        write_output_file(
+        fresh_file = write_output_file(
             file_count, found_includes, start_line, end_line, output_folder, output_filename_prefix, extension
         )

+        fresh_files.add(fresh_file)
+
         start_line = end_line

     # Write the exceptions each in their own scu gen file,
     # so they can effectively compile in "old style / normal build".
     for exception_count in range(len(found_exceptions)):
-        write_exception_output_file(
+        fresh_file = write_exception_output_file(
             exception_count, found_exceptions[exception_count], output_folder, output_filename_prefix, extension
         )

+        fresh_files.add(fresh_file)
+
+    # Clear out any stale files (usually we will be overwriting if necessary,
+    # but we want to remove any that are pre-existing that will not be
+    # overwritten, so as to not compile anything stale).
+    clear_out_stale_files(output_folder, extension, fresh_files)
+

 def generate_scu_files(max_includes_per_scu):
     print("=============================")
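
The first patch works because skipping the write when the generated contents are identical also leaves the file's modification time untouched, which is exactly what a timestamp-driven backend such as ninja keys on. Below is a small standalone sketch of that behaviour, reusing the same write_file_if_needed pattern added to methods.py above; it is not part of the patch, and the file name example.gen.h is just a placeholder.

# Standalone sketch: an unchanged generated file keeps its mtime, so a
# timestamp-based backend sees nothing to rebuild. Not part of the patch.
import os
import time


def write_file_if_needed(path, string):
    # Same pattern as the helper added to methods.py in the patch above.
    try:
        with open(path, "r", encoding="utf-8", newline="\n") as f:
            if f.read() == string:
                return
    except FileNotFoundError:
        pass

    with open(path, "w", encoding="utf-8", newline="\n") as f:
        f.write(string)


if __name__ == "__main__":
    write_file_if_needed("example.gen.h", "#define EXAMPLE 1\n")
    first_mtime = os.stat("example.gen.h").st_mtime_ns
    time.sleep(0.05)
    # Identical contents: the helper returns early and never opens for writing.
    write_file_if_needed("example.gen.h", "#define EXAMPLE 1\n")
    assert os.stat("example.gen.h").st_mtime_ns == first_mtime
    os.remove("example.gen.h")

Since the second call leaves st_mtime_ns unchanged, any build.ninja edge depending on example.gen.h would be considered up to date.
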
From 55558fb17574ddcbf0dcbba3f90a1aa880907f28 Mon Sep 17 00:00:00 2001
From: Riteo
Date: Tue, 12 Mar 2024 22:51:19 +0100
Subject: [PATCH 2/2] SCons: Add an option to enable the experimental ninja
 build backend

With this option turned on, if properly set up, SCons generates a
`build.ninja` file and quits. To actually build the engine, the user then
calls `ninja` with whatever options they prefer (note that not every build
setting is transferred to the generated file yet).

Ideally, SCons itself should never need to be invoked again by hand, as
ninja automatically detects any change to the SCons build scripts and runs
the commands required to regenerate its own build file.

This approach speeds up incremental builds considerably, as it limits SCons
to code generation and relies on ninja's extremely fast timestamp-based
file change detection.
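
As a rough illustration of the workflow described above, here is a minimal standalone SConstruct sketch. It is an assumption-laden example rather than Godot's actual build script: it presumes SCons 4.2 or later (where the experimental ninja tool ships) and uses a placeholder hello.c target, but it mirrors the options the patch sets.

# Minimal SConstruct sketch, not Godot's build script (assumes SCons >= 4.2).
env = Environment()

# Opt in to SCons' experimental ninja generator.
SetOption("experimental", "ninja")

# Only emit build.ninja; the user runs `ninja` afterwards instead of letting
# SCons drive it, which the commit message notes is much slower.
SetOption("disable_execute_ninja", True)

env.Tool("ninja")

# Placeholder target; any builder works, ninja just records its actions.
env.Program("hello", ["hello.c"])

With something like this in place, running scons once writes build.ninja and exits, and day-to-day rebuilds go through ninja, which re-runs SCons only when the build scripts themselves change.
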
---
 .gitignore          |  4 ++++
 SConstruct          | 23 ++++++++++++++++++++---
 platform_methods.py |  1 -
 3 files changed, 24 insertions(+), 4 deletions(-)

diff --git a/.gitignore b/.gitignore
index d40f4d39486..46dcf84b43a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -35,6 +35,10 @@ bin
 compile_commands.json
 platform/windows/godot_res.res

+# Ninja build files
+build.ninja
+.ninja
+
 # Generated by Godot binary
 .import/
 /gdextension_interface.h
diff --git a/SConstruct b/SConstruct
index 753cea40e33..73ef420a0dc 100644
--- a/SConstruct
+++ b/SConstruct
@@ -203,6 +203,7 @@ opts.Add(BoolVariable("custom_modules_recursive", "Detect custom modules recursi
 opts.Add(BoolVariable("dev_mode", "Alias for dev options: verbose=yes warnings=extra werror=yes tests=yes", False))
 opts.Add(BoolVariable("tests", "Build the unit tests", False))
 opts.Add(BoolVariable("fast_unsafe", "Enable unsafe options for faster rebuilds", False))
+opts.Add(BoolVariable("ninja", "Use the ninja backend for faster rebuilds", False))
 opts.Add(BoolVariable("compiledb", "Generate compilation DB (`compile_commands.json`) for external tools", False))
 opts.Add(BoolVariable("verbose", "Enable verbose output for the compilation", False))
 opts.Add(BoolVariable("progress", "Show a progress indicator during compilation", True))
@@ -956,7 +957,8 @@ if selected_platform in platform_list:
         env.vs_incs = []
         env.vs_srcs = []

-    # CompileDB
+    # CompileDB and Ninja are only available in recent SCons versions that
+    # not everybody might have yet, so we have to check.
     from SCons import __version__ as scons_raw_version

     scons_ver = env._get_major_minor_revision(scons_raw_version)
@@ -968,6 +970,20 @@ if selected_platform in platform_list:
         env.Tool("compilation_db")
         env.Alias("compiledb", env.CompilationDatabase())

+    if env["ninja"]:
+        if scons_ver < (4, 2, 0):
+            print("The `ninja=yes` option requires SCons 4.2 or later, but your version is %s." % scons_raw_version)
+            Exit(255)
+
+        SetOption("experimental", "ninja")
+
+        # By setting this we let the user run ninja themselves with whatever
+        # flags they need, as running it automatically from SCons appears
+        # to be much slower.
+        SetOption("disable_execute_ninja", True)
+
+        env.Tool("ninja")
+
     # Threads
     if env["threads"]:
         env.Append(CPPDEFINES=["THREADS_ENABLED"])
@@ -1041,9 +1057,10 @@ atexit.register(print_elapsed_time)


 def purge_flaky_files():
+    paths_to_keep = ["build.ninja"]
     for build_failure in GetBuildFailures():
-        path = build_failure.node.abspath
-        if os.path.isfile(path):
+        path = build_failure.node.path
+        if os.path.isfile(path) and path not in paths_to_keep:
             os.remove(path)


diff --git a/platform_methods.py b/platform_methods.py
index 92aefcc648b..56115db4a49 100644
--- a/platform_methods.py
+++ b/platform_methods.py
@@ -66,7 +66,6 @@ def generate_export_icons(platform_path, platform_name):

         svg_str += '";\n'

-    # NOTE: It is safe to generate this file here, since this is still executed serially.
     wf = export_path + "/" + name + "_svg.gen.h"

     methods.write_file_if_needed(wf, svg_str)
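
For reference, the stale-output handling introduced in scu_builders.py by the first patch boils down to the pattern sketched below: record every path written during the current run, then delete only the leftovers. This is a standalone illustration rather than code from the patch, and the scu_demo_*.gen.cpp names are made up.

# Standalone sketch of the "collect fresh outputs, then delete the leftovers"
# pattern that scu_builders.py now follows; not code from the patch itself.
import glob
import os


def clear_out_stale_files(output_folder, extension, fresh_files):
    # Delete only generated files that were not produced by the current run.
    output_folder = os.path.abspath(output_folder)
    if not os.path.isdir(output_folder):
        return
    for file in glob.glob(output_folder + "/*." + extension):
        if file not in fresh_files:
            os.remove(file)


def generate(output_folder):
    os.makedirs(output_folder, exist_ok=True)
    fresh_files = set()
    for name, text in [("scu_demo_0.gen.cpp", "// part 0\n"), ("scu_demo_1.gen.cpp", "// part 1\n")]:
        path = os.path.abspath(os.path.join(output_folder, name))
        # A real generator would also skip the write when the contents match,
        # as write_file_if_needed does, so the timestamp stays untouched.
        with open(path, "w", encoding="utf-8", newline="\n") as f:
            f.write(text)
        fresh_files.add(path)
    # Anything left over from a previous, larger run is removed here.
    clear_out_stale_files(output_folder, "cpp", fresh_files)

Deleting only what the current run did not produce keeps up-to-date outputs, and their timestamps, intact, which pairs with the write-if-changed helper from the first patch.
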