SCons: unify code generation routines and minimize timestamp changes

Previously, all of the code generation routines would unconditionally rewrite
their output files on every build, even when the contents had not changed.

This became a problem with the advent of the experimental ninja backend for
SCons, which can be enabled with just a few lines of code but relies on
timestamps to detect changes, so the constant rewrites made it impractical
to use.
Riteo 2024-03-13 17:54:14 +01:00
parent 89f70e98d2
commit a5cf92664d
6 changed files with 96 additions and 68 deletions
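
The fix is to compare the prospective contents with what is already on disk and skip the write when they match, so a generated file's timestamp only moves when its contents actually change. Below is a minimal sketch of the idiom: write_file_if_needed mirrors the helper this commit adds to methods.py, while the example file name and contents are made up for illustration.

import os

def write_file_if_needed(path, string):
    # Rewrite the file only when its contents would actually change;
    # otherwise leave it (and its timestamp) untouched.
    try:
        with open(path, "r", encoding="utf-8", newline="\n") as f:
            if f.read() == string:
                return
    except FileNotFoundError:
        pass

    with open(path, "w", encoding="utf-8", newline="\n") as f:
        f.write(string)

# A second write with identical contents does not bump the mtime,
# so a timestamp-based backend like ninja sees the file as up to date.
write_file_if_needed("example.gen.h", "#define EXAMPLE 1\n")
before = os.path.getmtime("example.gen.h")
write_file_if_needed("example.gen.h", "#define EXAMPLE 1\n")
assert os.path.getmtime("example.gen.h") == before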

View File

@@ -3,6 +3,7 @@
 Import("env")

 import core_builders
+import methods

 env.core_sources = []
@@ -35,10 +36,12 @@ if "SCRIPT_AES256_ENCRYPTION_KEY" in os.environ:
         )
         Exit(255)

-# NOTE: It is safe to generate this file here, since this is still executed serially
-with open("script_encryption_key.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-    f.write('#include "core/config/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n")
+script_encryption_key_contents = (
+    '#include "core/config/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n"
+)
+
+methods.write_file_if_needed("script_encryption_key.gen.cpp", script_encryption_key_contents)

 # Add required thirdparty code.

View File

@@ -7,19 +7,24 @@ env.editor_sources = []
 import os
 import glob
 import editor_builders
+import methods


 def _make_doc_data_class_path(to_path):
-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open(os.path.join(to_path, "doc_data_class_path.gen.h"), "w", encoding="utf-8", newline="\n") as g:
-        g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
-        g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
-        g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
-        for c in sorted(env.doc_class_path):
-            g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
-        g.write("\t{nullptr, nullptr}\n")
-        g.write("};\n")
+    file_path = os.path.join(to_path, "doc_data_class_path.gen.h")
+
+    class_path_data = ""
+    class_path_data += "static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n"
+    class_path_data += "struct _DocDataClassPath { const char* name; const char* path; };\n"
+    class_path_data += (
+        "static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n"
+    )
+    for c in sorted(env.doc_class_path):
+        class_path_data += '\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n'
+    class_path_data += "\t{nullptr, nullptr}\n"
+    class_path_data += "};\n"
+
+    methods.write_file_if_needed(file_path, class_path_data)


 if env.editor_build:
@@ -38,10 +43,7 @@ if env.editor_build:
         reg_exporters += "\tregister_" + e + "_exporter_types();\n"
     reg_exporters += "}\n"

-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open("register_exporters.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-        f.write(reg_exporters_inc)
-        f.write(reg_exporters)
+    methods.write_file_if_needed("register_exporters.gen.cpp", reg_exporters_inc + reg_exporters)

     # Core API documentation.
     docs = []

View File

@@ -228,14 +228,22 @@ def get_version_info(module_version_string="", silent=False):
     return version_info


+def write_file_if_needed(path, string):
+    try:
+        with open(path, "r", encoding="utf-8", newline="\n") as f:
+            if f.read() == string:
+                return
+    except FileNotFoundError:
+        pass
+    with open(path, "w", encoding="utf-8", newline="\n") as f:
+        f.write(string)
+
+
 def generate_version_header(module_version_string=""):
     version_info = get_version_info(module_version_string)

-    # NOTE: It is safe to generate these files here, since this is still executed serially.
-    with open("core/version_generated.gen.h", "w", encoding="utf-8", newline="\n") as f:
-        f.write(
-            """\
+    version_info_header = """\
 /* THIS FILE IS GENERATED DO NOT EDIT */
 #ifndef VERSION_GENERATED_GEN_H
 #define VERSION_GENERATED_GEN_H
@@ -254,11 +262,8 @@ def generate_version_header(module_version_string=""):
 """.format(
         **version_info
     )
-    )

-    with open("core/version_hash.gen.cpp", "w", encoding="utf-8", newline="\n") as fhash:
-        fhash.write(
-            """\
+    version_hash_data = """\
 /* THIS FILE IS GENERATED DO NOT EDIT */
 #include "core/version.h"
 const char *const VERSION_HASH = "{git_hash}";
@@ -266,7 +271,9 @@ const uint64_t VERSION_TIMESTAMP = {git_timestamp};
 """.format(
         **version_info
     )
-    )
+
+    write_file_if_needed("core/version_generated.gen.h", version_info_header)
+    write_file_if_needed("core/version_hash.gen.cpp", version_hash_data)


 def parse_cg_file(fname, uniforms, sizes, conditionals):
@@ -385,15 +392,18 @@ def is_module(path):

 def write_disabled_classes(class_list):
-    with open("core/disabled_classes.gen.h", "w", encoding="utf-8", newline="\n") as f:
-        f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-        f.write("#ifndef DISABLED_CLASSES_GEN_H\n")
-        f.write("#define DISABLED_CLASSES_GEN_H\n\n")
-        for c in class_list:
-            cs = c.strip()
-            if cs != "":
-                f.write("#define ClassDB_Disable_" + cs + " 1\n")
-        f.write("\n#endif\n")
+    file_contents = ""
+
+    file_contents += "/* THIS FILE IS GENERATED DO NOT EDIT */\n"
+    file_contents += "#ifndef DISABLED_CLASSES_GEN_H\n"
+    file_contents += "#define DISABLED_CLASSES_GEN_H\n\n"
+    for c in class_list:
+        cs = c.strip()
+        if cs != "":
+            file_contents += "#define ClassDB_Disable_" + cs + " 1\n"
+    file_contents += "\n#endif\n"
+
+    write_file_if_needed("core/disabled_classes.gen.h", file_contents)


 def write_modules(modules):
@@ -435,9 +445,7 @@ void uninitialize_modules(ModuleInitializationLevel p_level) {
         uninitialize_cpp,
     )

-    # NOTE: It is safe to generate this file here, since this is still executed serially
-    with open("modules/register_module_types.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-        f.write(modules_cpp)
+    write_file_if_needed("modules/register_module_types.gen.cpp", modules_cpp)


 def convert_custom_modules_path(path):

View File

@@ -1,5 +1,7 @@
 #!/usr/bin/env python

+import methods
+
 Import("env")

 env.platform_sources = []
@@ -18,12 +20,7 @@ reg_apis_inc += "\n"
 reg_apis += "}\n\n"
 unreg_apis += "}\n"

-# NOTE: It is safe to generate this file here, since this is still execute serially
-with open("register_platform_apis.gen.cpp", "w", encoding="utf-8", newline="\n") as f:
-    f.write(reg_apis_inc)
-    f.write(reg_apis)
-    f.write(unreg_apis)
+methods.write_file_if_needed("register_platform_apis.gen.cpp", reg_apis_inc + reg_apis + unreg_apis)

 env.add_source_files(env.platform_sources, "register_platform_apis.gen.cpp")
 lib = env.add_library("platform", env.platform_sources)

View File

@@ -5,6 +5,7 @@ import platform
 import uuid
 import functools
 import subprocess
+import methods

 # NOTE: The multiprocessing module is not compatible with SCons due to conflict on cPickle
@@ -67,8 +68,8 @@ def generate_export_icons(platform_path, platform_name):
         # NOTE: It is safe to generate this file here, since this is still executed serially.
         wf = export_path + "/" + name + "_svg.gen.h"
-        with open(wf, "w", encoding="utf-8", newline="\n") as svgw:
-            svgw.write(svg_str)
+
+        methods.write_file_if_needed(wf, svg_str)


 def get_build_version(short):

View File

@@ -13,9 +13,9 @@ _scu_folders = set()
 _max_includes_per_scu = 1024


-def clear_out_existing_files(output_folder, extension):
+def clear_out_stale_files(output_folder, extension, fresh_files):
     output_folder = os.path.abspath(output_folder)
-    # print("clear_out_existing_files from folder: " + output_folder)
+    # print("clear_out_stale_files from folder: " + output_folder)

     if not os.path.isdir(output_folder):
         # folder does not exist or has not been created yet,
@@ -23,7 +23,8 @@ def clear_out_stale_files(output_folder, extension, fresh_files):
         return

     for file in glob.glob(output_folder + "/*." + extension):
-        # print("removed pre-existing file: " + file)
-        os.remove(file)
+        if not file in fresh_files:
+            # print("removed stale file: " + file)
+            os.remove(file)
@@ -87,11 +88,16 @@ def write_output_file(file_count, include_list, start_line, end_line, output_fol
     short_filename = output_filename_prefix + num_string + ".gen." + extension
     output_filename = output_folder + "/" + short_filename

-    if _verbose:
-        print("SCU: Generating: %s" % short_filename)
-
-    output_path = Path(output_filename)
-    output_path.write_text(file_text, encoding="utf8")
+    output_path = Path(output_filename)
+    if not output_path.exists() or output_path.read_text() != file_text:
+        if _verbose:
+            print("SCU: Generating: %s" % short_filename)
+        output_path.write_text(file_text, encoding="utf8")
+    elif _verbose:
+        print("SCU: Generation not needed for: " + short_filename)
+
+    return output_filename
@@ -109,11 +115,16 @@ def write_exception_output_file(file_count, exception_string, output_folder, out
     short_filename = output_filename_prefix + "_exception" + num_string + ".gen." + extension
     output_filename = output_folder + "/" + short_filename

-    if _verbose:
-        print("SCU: Generating: " + short_filename)
-
-    output_path = Path(output_filename)
-    output_path.write_text(file_text, encoding="utf8")
+    output_path = Path(output_filename)
+    if not output_path.exists() or output_path.read_text() != file_text:
+        if _verbose:
+            print("SCU: Generating: " + short_filename)
+        output_path.write_text(file_text, encoding="utf8")
+    elif _verbose:
+        print("SCU: Generation not needed for: " + short_filename)
+
+    return output_filename
@@ -214,10 +225,7 @@ def process_folder(folders, sought_exceptions=[], includes_per_scu=0, extension=
     output_folder = abs_main_folder + "/scu/"
     output_filename_prefix = "scu_" + out_filename

-    # Clear out any existing files (usually we will be overwriting,
-    # but we want to remove any that are pre-existing that will not be
-    # overwritten, so as to not compile anything stale)
-    clear_out_existing_files(output_folder, extension)
+    fresh_files = set()

     for file_count in range(0, num_output_files):
         end_line = start_line + lines_per_file
@@ -226,19 +234,28 @@ def process_folder(folders, sought_exceptions=[], includes_per_scu=0, extension=
         if file_count == (num_output_files - 1):
             end_line = len(found_includes)

-        write_output_file(
+        fresh_file = write_output_file(
             file_count, found_includes, start_line, end_line, output_folder, output_filename_prefix, extension
         )
+        fresh_files.add(fresh_file)

         start_line = end_line

     # Write the exceptions each in their own scu gen file,
     # so they can effectively compile in "old style / normal build".
     for exception_count in range(len(found_exceptions)):
-        write_exception_output_file(
+        fresh_file = write_exception_output_file(
             exception_count, found_exceptions[exception_count], output_folder, output_filename_prefix, extension
         )
+        fresh_files.add(fresh_file)
+
+    # Clear out any stale files (usually we will be overwriting if necessary,
+    # but we want to remove any that are pre-existing that will not be
+    # overwritten, so as to not compile anything stale).
+    clear_out_stale_files(output_folder, extension, fresh_files)


 def generate_scu_files(max_includes_per_scu):
     print("=============================")