SCons: Generate all scripts natively
This commit is contained in:
parent 55b8724bd5
commit 34fb3f7730
SConstruct (13 lines changed)

@@ -61,7 +61,6 @@ _helper_module("platform_methods", "platform_methods.py")
 _helper_module("version", "version.py")
 _helper_module("core.core_builders", "core/core_builders.py")
 _helper_module("main.main_builders", "main/main_builders.py")
-_helper_module("modules.modules_builders", "modules/modules_builders.py")
 
 # Local
 import methods
@@ -69,7 +68,7 @@ import glsl_builders
 import gles3_builders
 import scu_builders
 from methods import print_warning, print_error
-from platform_methods import architectures, architecture_aliases, generate_export_icons
+from platform_methods import architectures, architecture_aliases
 
 if ARGUMENTS.get("target", "editor") == "editor":
     _helper_module("editor.editor_builders", "editor/editor_builders.py")
@@ -107,7 +106,6 @@ for x in sorted(glob.glob("platform/*")):
 
     if os.path.exists(x + "/export/export.cpp"):
         platform_exporters.append(platform_name)
-        generate_export_icons(x, platform_name)
     if os.path.exists(x + "/api/api.cpp"):
         platform_apis.append(platform_name)
     if detect.can_build():
@@ -428,7 +426,7 @@ for name, path in modules_detected.items():
     sys.path.remove(path)
     sys.modules.pop("config")
 
-methods.write_modules(modules_detected)
+env.modules_detected = modules_detected
 
 # Update the environment again after all the module options are added.
 opts.Update(env)
@@ -544,7 +542,7 @@ env.Append(CFLAGS=env.get("cflags", "").split())
 env.Append(LINKFLAGS=env.get("linkflags", "").split())
 
 # Feature build profile
-disabled_classes = []
+env.disabled_classes = []
 if env["build_profile"] != "":
     print('Using feature build profile: "{}"'.format(env["build_profile"]))
     import json
@@ -552,7 +550,7 @@ if env["build_profile"] != "":
     try:
         ft = json.load(open(env["build_profile"]))
         if "disabled_classes" in ft:
-            disabled_classes = ft["disabled_classes"]
+            env.disabled_classes = ft["disabled_classes"]
         if "disabled_build_options" in ft:
             dbo = ft["disabled_build_options"]
             for c in dbo:
@@ -560,7 +558,6 @@ if env["build_profile"] != "":
     except:
         print_error('Failed to open feature build profile: "{}"'.format(env["build_profile"]))
         Exit(255)
-methods.write_disabled_classes(disabled_classes)
 
 # Platform specific flags.
 # These can sometimes override default options.
@@ -926,7 +923,7 @@ if env.editor_build:
         print_error("Not all modules required by editor builds are enabled.")
         Exit(255)
 
-methods.generate_version_header(env.module_version_string)
+env.version_info = methods.get_version_info(env.module_version_string)
 
 env["PROGSUFFIX_WRAP"] = suffix + env.module_version_string + ".console" + env["PROGSUFFIX"]
 env["PROGSUFFIX"] = suffix + env.module_version_string + env["PROGSUFFIX"]
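Note: the SConstruct changes above stop writing generated files at configure time and instead stash plain data on the SCons environment (env.modules_detected, env.disabled_classes, env.version_info) for the per-directory SCsub builders below to consume. A minimal sketch of that pattern, using a hypothetical env.example_data attribute and example.gen.h target rather than anything from the commit:

    # Hypothetical SCsub sketch: the SConstruct stores plain data on the
    # environment, and a build-time builder turns it into a generated file.
    Import("env")

    import methods


    def example_builder(target, source, env):
        # source[0].read() returns the Python object wrapped by env.Value().
        with methods.generated_wrapper(target) as file:
            for item in source[0].read():
                file.write(f"#define EXAMPLE_{item.upper()}_ENABLED\n")


    env.CommandNoCache("example.gen.h", env.Value(env.example_data), env.Run(example_builder))

Wrapping the data in env.Value() means the target is rebuilt when the value itself changes, not on every SCons invocation.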
core/SCsub (129 lines changed)

@@ -4,44 +4,9 @@ Import("env")
 
 import core_builders
 import methods
 
-env.core_sources = []
-
-
-# Generate AES256 script encryption key
 import os
 
-txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0"
-if "SCRIPT_AES256_ENCRYPTION_KEY" in os.environ:
-    key = os.environ["SCRIPT_AES256_ENCRYPTION_KEY"]
-    ec_valid = True
-    if len(key) != 64:
-        ec_valid = False
-    else:
-        txt = ""
-        for i in range(len(key) >> 1):
-            if i > 0:
-                txt += ","
-            txts = "0x" + key[i * 2 : i * 2 + 2]
-            try:
-                int(txts, 16)
-            except Exception:
-                ec_valid = False
-            txt += txts
-    if not ec_valid:
-        methods.print_error(
-            f'Invalid AES256 encryption key, not 64 hexadecimal characters: "{key}".\n'
-            "Unset 'SCRIPT_AES256_ENCRYPTION_KEY' in your environment "
-            "or make sure that it contains exactly 64 hexadecimal characters."
-        )
-        Exit(255)
-
-
-script_encryption_key_contents = (
-    '#include "core/config/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n"
-)
-
-methods.write_file_if_needed("script_encryption_key.gen.cpp", script_encryption_key_contents)
-
+env.core_sources = []
 
 # Add required thirdparty code.
@@ -193,8 +158,96 @@ env.core_sources += thirdparty_obj
 # Godot source files
 
 env.add_source_files(env.core_sources, "*.cpp")
-env.add_source_files(env.core_sources, "script_encryption_key.gen.cpp")
-env.add_source_files(env.core_sources, "version_hash.gen.cpp")
+
+
+# Generate disabled classes
+def disabled_class_builder(target, source, env):
+    with methods.generated_wrapper(target) as file:
+        for c in source[0].read():
+            cs = c.strip()
+            if cs != "":
+                file.write(f"#define ClassDB_Disable_{cs} 1")
+
+
+env.CommandNoCache("disabled_classes.gen.h", env.Value(env.disabled_classes), env.Run(disabled_class_builder))
+
+
+# Generate version info
+def version_info_builder(target, source, env):
+    with methods.generated_wrapper(target) as file:
+        file.write(
+            """\
+#define VERSION_SHORT_NAME "{short_name}"
+#define VERSION_NAME "{name}"
+#define VERSION_MAJOR {major}
+#define VERSION_MINOR {minor}
+#define VERSION_PATCH {patch}
+#define VERSION_STATUS "{status}"
+#define VERSION_BUILD "{build}"
+#define VERSION_MODULE_CONFIG "{module_config}"
+#define VERSION_WEBSITE "{website}"
+#define VERSION_DOCS_BRANCH "{docs_branch}"
+#define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH
+""".format(
+                **env.version_info
+            )
+        )
+
+
+env.CommandNoCache("version_generated.gen.h", "#version.py", env.Run(version_info_builder))
+
+
+# Generate version hash
+def version_hash_builder(target, source, env):
+    with methods.generated_wrapper(target) as file:
+        file.write(
+            """\
+#include "core/version.h"
+
+const char *const VERSION_HASH = "{git_hash}";
+const uint64_t VERSION_TIMESTAMP = {git_timestamp};
+""".format(
+                **env.version_info
+            )
+        )
+
+
+gen_hash = env.CommandNoCache(
+    "version_hash.gen.cpp", env.Value(env.version_info["git_hash"]), env.Run(version_hash_builder)
+)
+env.add_source_files(env.core_sources, gen_hash)
+
+
+# Generate AES256 script encryption key
+def encryption_key_builder(target, source, env):
+    with methods.generated_wrapper(target) as file:
+        file.write(
+            f"""\
+#include "core/config/project_settings.h"
+
+uint8_t script_encryption_key[32] = {{
+	{source[0]}
+}};"""
+        )
+
+
+gdkey = os.environ.get("SCRIPT_AES256_ENCRYPTION_KEY", "0" * 64)
+ec_valid = len(gdkey) == 64
+if ec_valid:
+    try:
+        gdkey = ", ".join([str(int(f"{a}{b}", 16)) for a, b in zip(gdkey[0::2], gdkey[1::2])])
+    except Exception:
+        ec_valid = False
+if not ec_valid:
+    methods.print_error(
+        f'Invalid AES256 encryption key, not 64 hexadecimal characters: "{gdkey}".\n'
+        "Unset `SCRIPT_AES256_ENCRYPTION_KEY` in your environment "
+        "or make sure that it contains exactly 64 hexadecimal characters."
+    )
+    Exit(255)
+gen_encrypt = env.CommandNoCache("script_encryption_key.gen.cpp", env.Value(gdkey), env.Run(encryption_key_builder))
+env.add_source_files(env.core_sources, gen_encrypt)
+
 
 # Certificates
 env.Depends(
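For reference, the new encryption_key_builder path converts the 64-character hex key into the 32 decimal byte values embedded in script_encryption_key[32]. A standalone sketch of that conversion with a sample key (not part of the commit):

    # Pair up hex digits, parse each pair as a byte, and join the decimal values.
    key = "00112233445566778899aabbccddeeff" * 2  # sample 64-character hex string
    pairs = zip(key[0::2], key[1::2])  # ('0', '0'), ('1', '1'), ...
    as_bytes = ", ".join([str(int(f"{a}{b}", 16)) for a, b in pairs])
    print(as_bytes)  # 0, 17, 34, 51, ... (32 values total)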
editor/SCsub (83 lines changed)

@@ -10,40 +10,59 @@ import editor_builders
 import methods
 
 
-def _make_doc_data_class_path(to_path):
-    file_path = os.path.join(to_path, "doc_data_class_path.gen.h")
-
-    class_path_data = ""
-    class_path_data += "static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n"
-    class_path_data += "struct _DocDataClassPath { const char* name; const char* path; };\n"
-    class_path_data += (
-        "static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n"
-    )
-    for c in sorted(env.doc_class_path):
-        class_path_data += '\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n'
-    class_path_data += "\t{nullptr, nullptr}\n"
-    class_path_data += "};\n"
-
-    methods.write_file_if_needed(file_path, class_path_data)
-
-
 if env.editor_build:
+    # Generate doc data paths
+    def doc_data_class_path_builder(target, source, env):
+        paths = dict(sorted(source[0].read().items()))
+        data = "\n".join([f'\t{{"{key}", "{value}"}},' for key, value in paths.items()])
+        with methods.generated_wrapper(target) as file:
+            file.write(
+                f"""\
+static const int _doc_data_class_path_count = {len(paths)};
+
+struct _DocDataClassPath {{
+	const char *name;
+	const char *path;
+}};
+
+static const _DocDataClassPath _doc_data_class_paths[{len(env.doc_class_path) + 1}] = {{
+{data}
+	{{nullptr, nullptr}},
+}};
+"""
+            )
+
+    env.CommandNoCache("doc_data_class_path.gen.h", env.Value(env.doc_class_path), env.Run(doc_data_class_path_builder))
+
     # Register exporters
-    reg_exporters_inc = '#include "register_exporters.h"\n\n'
-    reg_exporters = "void register_exporters() {\n"
+    def register_exporters_builder(target, source, env):
+        platforms = source[0].read()
+        exp_inc = "\n".join([f'#include "platform/{p}/export/export.h"' for p in platforms])
+        exp_reg = "\n".join([f"\tregister_{p}_exporter();" for p in platforms])
+        exp_type = "\n".join([f"\tregister_{p}_exporter_types();" for p in platforms])
+        with methods.generated_wrapper(target) as file:
+            file.write(
+                f"""\
+#include "register_exporters.h"
+
+{exp_inc}
+
+void register_exporters() {{
+{exp_reg}
+}}
+
+void register_exporter_types() {{
+{exp_type}
+}}
+"""
+            )
+
+    gen_exporters = env.CommandNoCache(
+        "register_exporters.gen.cpp", env.Value(env.platform_exporters), env.Run(register_exporters_builder)
+    )
     for e in env.platform_exporters:
         # Add all .cpp files in export folder
-        env.add_source_files(env.editor_sources, "../platform/" + e + "/export/" + "*.cpp")
-
-        reg_exporters += "\tregister_" + e + "_exporter();\n"
-        reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n'
-    reg_exporters += "}\n\n"
-    reg_exporters += "void register_exporter_types() {\n"
-    for e in env.platform_exporters:
-        reg_exporters += "\tregister_" + e + "_exporter_types();\n"
-    reg_exporters += "}\n"
-
-    methods.write_file_if_needed("register_exporters.gen.cpp", reg_exporters_inc + reg_exporters)
+        env.add_source_files(env.editor_sources, f"../platform/{e}/export/*.cpp")
 
     # Core API documentation.
     docs = []
@@ -61,8 +80,6 @@ if env.editor_build:
         else:
             docs += Glob(d + "/*.xml")  # Custom.
 
-    _make_doc_data_class_path(env.Dir("#editor").abspath)
-
     docs = sorted(docs)
     env.Depends("#editor/doc_data_compressed.gen.h", docs)
     env.CommandNoCache(
@@ -115,7 +132,7 @@ if env.editor_build:
     )
 
     env.add_source_files(env.editor_sources, "*.cpp")
-    env.add_source_files(env.editor_sources, "register_exporters.gen.cpp")
+    env.add_source_files(env.editor_sources, gen_exporters)
 
     SConscript("debugger/SCsub")
     SConscript("export/SCsub")
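For reference, doc_data_class_path_builder receives env.doc_class_path back through source[0].read() and emits one table row per class. A standalone sketch of the row formatting with a hypothetical two-entry dictionary (not part of the commit):

    doc_class_path = {"Node": "scene/main", "Object": "core/object"}  # hypothetical contents
    paths = dict(sorted(doc_class_path.items()))
    data = "\n".join([f'\t{{"{key}", "{value}"}},' for key, value in paths.items()])
    print(data)
    # 	{"Node", "scene/main"},
    # 	{"Object", "core/object"},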
methods.py (244 lines changed)

@@ -3,13 +3,16 @@ import sys
 import re
 import glob
 import subprocess
+import contextlib
 from collections import OrderedDict
 from collections.abc import Mapping
 from enum import Enum
-from typing import Iterator
+from typing import Generator, Optional
+from io import TextIOWrapper, StringIO
 from pathlib import Path
 from os.path import normpath, basename
 
 
 # Get the "Godot" folder name ahead of time
 base_folder_path = str(os.path.abspath(Path(__file__).parent)) + "/"
 base_folder_only = os.path.basename(os.path.normpath(base_folder_path))
@@ -277,79 +280,6 @@ def get_version_info(module_version_string="", silent=False):
     return version_info
 
 
-_cleanup_env = None
-_cleanup_bool = False
-
-
-def write_file_if_needed(path, string):
-    """Generates a file only if it doesn't already exist or the content has changed.
-
-    Utilizes a dedicated SCons environment to ensure the files are properly removed
-    during cleanup; will not attempt to create files during cleanup.
-
-    - `path` - Path to the file in question; used to create cleanup logic.
-    - `string` - Content to compare against an existing file.
-    """
-    global _cleanup_env
-    global _cleanup_bool
-
-    if _cleanup_env is None:
-        from SCons.Environment import Environment
-
-        _cleanup_env = Environment()
-        _cleanup_bool = _cleanup_env.GetOption("clean")
-
-    _cleanup_env.Clean("#", path)
-    if _cleanup_bool:
-        return
-
-    try:
-        with open(path, "r", encoding="utf-8", newline="\n") as f:
-            if f.read() == string:
-                return
-    except FileNotFoundError:
-        pass
-
-    with open(path, "w", encoding="utf-8", newline="\n") as f:
-        f.write(string)
-
-
-def generate_version_header(module_version_string=""):
-    version_info = get_version_info(module_version_string)
-
-    version_info_header = """\
-/* THIS FILE IS GENERATED DO NOT EDIT */
-#ifndef VERSION_GENERATED_GEN_H
-#define VERSION_GENERATED_GEN_H
-#define VERSION_SHORT_NAME "{short_name}"
-#define VERSION_NAME "{name}"
-#define VERSION_MAJOR {major}
-#define VERSION_MINOR {minor}
-#define VERSION_PATCH {patch}
-#define VERSION_STATUS "{status}"
-#define VERSION_BUILD "{build}"
-#define VERSION_MODULE_CONFIG "{module_config}"
-#define VERSION_WEBSITE "{website}"
-#define VERSION_DOCS_BRANCH "{docs_branch}"
-#define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH
-#endif // VERSION_GENERATED_GEN_H
-""".format(
-        **version_info
-    )
-
-    version_hash_data = """\
-/* THIS FILE IS GENERATED DO NOT EDIT */
-#include "core/version.h"
-const char *const VERSION_HASH = "{git_hash}";
-const uint64_t VERSION_TIMESTAMP = {git_timestamp};
-""".format(
-        **version_info
-    )
-
-    write_file_if_needed("core/version_generated.gen.h", version_info_header)
-    write_file_if_needed("core/version_hash.gen.cpp", version_hash_data)
-
-
 def parse_cg_file(fname, uniforms, sizes, conditionals):
     with open(fname, "r", encoding="utf-8") as fs:
         line = fs.readline()
@@ -465,63 +395,6 @@ def is_module(path):
     return True
 
 
-def write_disabled_classes(class_list):
-    file_contents = ""
-
-    file_contents += "/* THIS FILE IS GENERATED DO NOT EDIT */\n"
-    file_contents += "#ifndef DISABLED_CLASSES_GEN_H\n"
-    file_contents += "#define DISABLED_CLASSES_GEN_H\n\n"
-    for c in class_list:
-        cs = c.strip()
-        if cs != "":
-            file_contents += "#define ClassDB_Disable_" + cs + " 1\n"
-    file_contents += "\n#endif\n"
-
-    write_file_if_needed("core/disabled_classes.gen.h", file_contents)
-
-
-def write_modules(modules):
-    includes_cpp = ""
-    initialize_cpp = ""
-    uninitialize_cpp = ""
-
-    for name, path in modules.items():
-        try:
-            with open(os.path.join(path, "register_types.h")):
-                includes_cpp += '#include "' + path + '/register_types.h"\n'
-                initialize_cpp += "#ifdef MODULE_" + name.upper() + "_ENABLED\n"
-                initialize_cpp += "\tinitialize_" + name + "_module(p_level);\n"
-                initialize_cpp += "#endif\n"
-                uninitialize_cpp += "#ifdef MODULE_" + name.upper() + "_ENABLED\n"
-                uninitialize_cpp += "\tuninitialize_" + name + "_module(p_level);\n"
-                uninitialize_cpp += "#endif\n"
-        except OSError:
-            pass
-
-    modules_cpp = """// register_module_types.gen.cpp
-/* THIS FILE IS GENERATED DO NOT EDIT */
-#include "register_module_types.h"
-
-#include "modules/modules_enabled.gen.h"
-
-%s
-
-void initialize_modules(ModuleInitializationLevel p_level) {
-%s
-}
-
-void uninitialize_modules(ModuleInitializationLevel p_level) {
-%s
-}
-""" % (
-        includes_cpp,
-        initialize_cpp,
-        uninitialize_cpp,
-    )
-
-    write_file_if_needed("modules/register_module_types.gen.cpp", modules_cpp)
-
-
 def convert_custom_modules_path(path):
     if not path:
         return path
@@ -1649,3 +1522,112 @@ def generate_vs_project(env, original_args, project_name="godot"):
 
     if get_bool(original_args, "vsproj_gen_only", True):
         sys.exit()
+
+
+def generate_copyright_header(filename: str) -> str:
+    MARGIN = 70
+    TEMPLATE = """\
+/**************************************************************************/
+/* %s*/
+/**************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/**************************************************************************/
+/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
+/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/**************************************************************************/
+"""
+    filename = filename.split("/")[-1].ljust(MARGIN)
+    if len(filename) > MARGIN:
+        print(f'WARNING: Filename "{filename}" too large for copyright header.')
+    return TEMPLATE % filename
+
+
+@contextlib.contextmanager
+def generated_wrapper(
+    path,  # FIXME: type with `Union[str, Node, List[Node]]` when pytest conflicts are resolved
+    guard: Optional[bool] = None,
+    prefix: str = "",
+    suffix: str = "",
+) -> Generator[TextIOWrapper, None, None]:
+    """
+    Wrapper class to automatically handle copyright headers and header guards
+    for generated scripts. Meant to be invoked via `with` statement similar to
+    creating a file.
+
+    - `path`: The path of the file to be created. Can be passed a raw string, an
+    isolated SCons target, or a full SCons target list. If a target list contains
+    multiple entries, produces a warning & only creates the first entry.
+    - `guard`: Optional bool to determine if a header guard should be added. If
+    unassigned, header guards are determined by the file extension.
+    - `prefix`: Custom prefix to prepend to a header guard. Produces a warning if
+    provided a value when `guard` evaluates to `False`.
+    - `suffix`: Custom suffix to append to a header guard. Produces a warning if
+    provided a value when `guard` evaluates to `False`.
+    """
+
+    # Handle unfiltered SCons target[s] passed as path.
+    if not isinstance(path, str):
+        if isinstance(path, list):
+            if len(path) > 1:
+                print_warning(
+                    "Attempting to use generated wrapper with multiple targets; "
+                    f"will only use first entry: {path[0]}"
+                )
+            path = path[0]
+        if not hasattr(path, "get_abspath"):
+            raise TypeError(f'Expected type "str", "Node" or "List[Node]"; was passed {type(path)}.')
+        path = path.get_abspath()
+
+    path = str(path).replace("\\", "/")
+    if guard is None:
+        guard = path.endswith((".h", ".hh", ".hpp", ".inc"))
+    if not guard and (prefix or suffix):
+        print_warning(f'Trying to assign header guard prefix/suffix while `guard` is disabled: "{path}".')
+
+    header_guard = ""
+    if guard:
+        if prefix:
+            prefix += "_"
+        if suffix:
+            suffix = f"_{suffix}"
+        split = path.split("/")[-1].split(".")
+        header_guard = (f"{prefix}{split[0]}{suffix}.{'.'.join(split[1:])}".upper()
+            .replace(".", "_").replace("-", "_").replace(" ", "_").replace("__", "_"))  # fmt: skip
+
+    with open(path, "wt", encoding="utf-8", newline="\n") as file:
+        file.write(generate_copyright_header(path))
+        file.write("\n/* THIS FILE IS GENERATED. EDITS WILL BE LOST. */\n\n")
+
+        if guard:
+            file.write(f"#ifndef {header_guard}\n")
+            file.write(f"#define {header_guard}\n\n")
+
+        with StringIO(newline="\n") as str_io:
+            yield str_io
+            file.write(str_io.getvalue().strip() or "/* NO CONTENT */")
+
+        if guard:
+            file.write(f"\n\n#endif // {header_guard}")
+
+        file.write("\n")
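For reference, methods.generated_wrapper() is the helper every builder in this commit funnels its output through: it writes the copyright banner, a generated-file notice, and (for headers) a guard derived from the file name, while the body is buffered in a StringIO and flushed on exit. A usage sketch, run from within the Godot build tree with a hypothetical example.gen.h target (not part of the commit):

    import methods

    # Writes example.gen.h with the banner and a DEMO_EXAMPLE_GEN_H header guard.
    with methods.generated_wrapper("example.gen.h", prefix="demo") as file:
        file.write("#define EXAMPLE_ENABLED 1\n")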
modules/SCsub

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-import modules_builders
+import methods
 import os
 
 Import("env")
@@ -12,15 +12,51 @@ env_modules.Append(CPPDEFINES=["GODOT_MODULE"])
 
 Export("env_modules")
 
-# Header with MODULE_*_ENABLED defines.
-env.Depends("modules_enabled.gen.h", Value(env.module_list))
-env.CommandNoCache(
-    "modules_enabled.gen.h",
-    Value(env.module_list),
-    env.Run(modules_builders.generate_modules_enabled),
-)
+
+def register_module_types_builder(target, source, env):
+    modules = source[0].read()
+    mod_inc = "\n".join([f'#include "{p}/register_types.h"' for p in modules.values()])
+    mod_init = "\n".join(
+        [f"#ifdef MODULE_{n.upper()}_ENABLED\n\tinitialize_{n}_module(p_level);\n#endif" for n in modules.keys()]
+    )
+    mod_uninit = "\n".join(
+        [f"#ifdef MODULE_{n.upper()}_ENABLED\n\tuninitialize_{n}_module(p_level);\n#endif" for n in modules.keys()]
+    )
+    with methods.generated_wrapper(target) as file:
+        file.write(
+            f"""\
+#include "register_module_types.h"
+
+#include "modules/modules_enabled.gen.h"
+
+{mod_inc}
+
+void initialize_modules(ModuleInitializationLevel p_level) {{
+{mod_init}
+}}
+
+void uninitialize_modules(ModuleInitializationLevel p_level) {{
+{mod_uninit}
+}}
+"""
+        )
+
+
+register_module_types = env.CommandNoCache(
+    "register_module_types.gen.cpp", env.Value(env.modules_detected), env.Run(register_module_types_builder)
+)
+
+
+# Header with MODULE_*_ENABLED defines.
+def modules_enabled_builder(target, source, env):
+    with methods.generated_wrapper(target) as file:
+        for module in source[0].read():
+            file.write(f"#define MODULE_{module.upper()}_ENABLED\n")
+
+
+env.CommandNoCache("modules_enabled.gen.h", env.Value(env.module_list), env.Run(modules_enabled_builder))
+
+
 vs_sources = []
 test_headers = []
 # libmodule_<name>.a for each active module.
@@ -47,18 +83,19 @@ for name, path in env.module_list.items():
 
 # Generate header to be included in `tests/test_main.cpp` to run module-specific tests.
 if env["tests"]:
-    env.Depends("modules_tests.gen.h", test_headers)
-    env.CommandNoCache(
-        "modules_tests.gen.h",
-        test_headers,
-        env.Run(modules_builders.generate_modules_tests),
-    )
+
+    def modules_tests_builder(target, source, env):
+        with methods.generated_wrapper(target) as file:
+            for header in source:
+                file.write('#include "{}"\n'.format(os.path.normpath(header.path).replace("\\", "/")))
+
+    env.CommandNoCache("modules_tests.gen.h", test_headers, env.Run(modules_tests_builder))
 
 # libmodules.a with only register_module_types.
 # Must be last so that all libmodule_<name>.a libraries are on the right side
 # in the linker command.
 env.modules_sources = []
-env_modules.add_source_files(env.modules_sources, "register_module_types.gen.cpp")
+env_modules.add_source_files(env.modules_sources, register_module_types)
 lib = env_modules.add_library("modules", env.modules_sources)
 env.Prepend(LIBS=[lib])
 if env["vsproj"]:
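For reference, register_module_types_builder derives its include and init/uninit blocks from the module dictionary passed in via env.Value(env.modules_detected). A standalone sketch with a hypothetical two-module dictionary (not part of the commit):

    modules = {"gdscript": "modules/gdscript", "svg": "modules/svg"}  # hypothetical contents
    mod_inc = "\n".join([f'#include "{p}/register_types.h"' for p in modules.values()])
    mod_init = "\n".join(
        [f"#ifdef MODULE_{n.upper()}_ENABLED\n\tinitialize_{n}_module(p_level);\n#endif" for n in modules.keys()]
    )
    print(mod_inc)   # #include "modules/gdscript/register_types.h" ...
    print(mod_init)  # #ifdef MODULE_GDSCRIPT_ENABLED / initialize_gdscript_module(p_level); / #endif ...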
modules/modules_builders.py (deleted)

@@ -1,15 +0,0 @@
-"""Functions used to generate source files during build time"""
-
-
-def generate_modules_enabled(target, source, env):
-    with open(target[0].path, "w", encoding="utf-8", newline="\n") as f:
-        for module in env.module_list:
-            f.write("#define %s\n" % ("MODULE_" + module.upper() + "_ENABLED"))
-
-
-def generate_modules_tests(target, source, env):
-    import os
-
-    with open(target[0].path, "w", encoding="utf-8", newline="\n") as f:
-        for header in source:
-            f.write('#include "%s"\n' % (os.path.normpath(header.path)))
platform/SCsub

@@ -1,27 +1,64 @@
 #!/usr/bin/env python
 
 import methods
+from glob import glob
+from pathlib import Path
 
 Import("env")
 
 env.platform_sources = []
 
-# Register platform-exclusive APIs
-reg_apis_inc = '#include "register_platform_apis.h"\n'
-reg_apis = "void register_platform_apis() {\n"
-unreg_apis = "void unregister_platform_apis() {\n"
-for platform in env.platform_apis:
-    platform_dir = env.Dir(platform)
-    env.add_source_files(env.platform_sources, platform + "/api/api.cpp")
-    reg_apis += "\tregister_" + platform + "_api();\n"
-    unreg_apis += "\tunregister_" + platform + "_api();\n"
-    reg_apis_inc += '#include "' + platform + '/api/api.h"\n'
-reg_apis_inc += "\n"
-reg_apis += "}\n\n"
-unreg_apis += "}\n"
 
-methods.write_file_if_needed("register_platform_apis.gen.cpp", reg_apis_inc + reg_apis + unreg_apis)
-env.add_source_files(env.platform_sources, "register_platform_apis.gen.cpp")
+# Generate export icons
+def export_icon_builder(target, source, env):
+    src_path = Path(str(source[0]))
+    src_name = src_path.stem
+    platform = src_path.parent.parent.stem
+
+    with open(str(source[0]), "rb") as file:
+        svg = "".join([f"\\{hex(x)[1:]}" for x in file.read()])
+
+    with methods.generated_wrapper(target, prefix=platform) as file:
+        file.write(
+            f"""\
+static const char *_{platform}_{src_name}_svg = "{svg}";
+"""
+        )
+
+
+for platform in env.platform_exporters:
+    for path in glob(f"{platform}/export/*.svg"):
+        env.CommandNoCache(path.replace(".svg", "_svg.gen.h"), path, env.Run(export_icon_builder))
+
+
+# Register platform-exclusive APIs
+def register_platform_apis_builder(target, source, env):
+    platforms = source[0].read()
+    api_inc = "\n".join([f'#include "{p}/api/api.h"' for p in platforms])
+    api_reg = "\n".join([f"\tregister_{p}_api();" for p in platforms])
+    api_unreg = "\n".join([f"\tunregister_{p}_api();" for p in platforms])
+    with methods.generated_wrapper(target) as file:
+        file.write(
+            f"""\
+#include "register_platform_apis.h"
+
+{api_inc}
+
+void register_platform_apis() {{
+{api_reg}
+}}
+
+void unregister_platform_apis() {{
+{api_unreg}
+}}
+"""
+        )
+
+
+register_platform_apis = env.CommandNoCache(
+    "register_platform_apis.gen.cpp", env.Value(env.platform_apis), env.Run(register_platform_apis_builder)
+)
+env.add_source_files(env.platform_sources, register_platform_apis)
+for platform in env.platform_apis:
+    env.add_source_files(env.platform_sources, f"{platform}/api/api.cpp")
 
 lib = env.add_library("platform", env.platform_sources)
 env.Prepend(LIBS=[lib])
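For reference, export_icon_builder replaces the removed generate_export_icons() helper by embedding each SVG as a C string of \xNN escapes. A standalone sketch of the escaping (not part of the commit):

    # hex(x) yields "0x3c"; dropping the leading "0" and prefixing a backslash
    # gives the "\x3c" escape written into the generated header.
    data = b"<svg/>"  # sample bytes
    svg = "".join([f"\\{hex(x)[1:]}" for x in data])
    print(svg)  # \x3c\x73\x76\x67\x2f\x3e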
platform_methods.py

@@ -43,33 +43,6 @@ def detect_arch():
         return "x86_64"
 
 
-def generate_export_icons(platform_path, platform_name):
-    """
-    Generate headers for logo and run icon for the export plugin.
-    """
-    export_path = platform_path + "/export"
-    svg_names = []
-    if os.path.isfile(export_path + "/logo.svg"):
-        svg_names.append("logo")
-    if os.path.isfile(export_path + "/run_icon.svg"):
-        svg_names.append("run_icon")
-
-    for name in svg_names:
-        with open(export_path + "/" + name + ".svg", "rb") as svgf:
-            b = svgf.read(1)
-            svg_str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
-            svg_str += " static const char *_" + platform_name + "_" + name + '_svg = "'
-            while len(b) == 1:
-                svg_str += "\\" + hex(ord(b))[1:]
-                b = svgf.read(1)
-
-            svg_str += '";\n'
-
-        wf = export_path + "/" + name + "_svg.gen.h"
-
-        methods.write_file_if_needed(wf, svg_str)
-
-
 def get_build_version(short):
     import version