diff --git a/SConstruct b/SConstruct
index 2a525295397..693137f77f8 100644
--- a/SConstruct
+++ b/SConstruct
@@ -26,48 +26,48 @@ platform_exporters = []
platform_apis = []
for x in sorted(glob.glob("platform/*")):
- if (not os.path.isdir(x) or not os.path.exists(x + "/detect.py")):
+ if not os.path.isdir(x) or not os.path.exists(x + "/detect.py"):
continue
tmppath = "./" + x
sys.path.insert(0, tmppath)
import detect
- if (os.path.exists(x + "/export/export.cpp")):
+ if os.path.exists(x + "/export/export.cpp"):
platform_exporters.append(x[9:])
- if (os.path.exists(x + "/api/api.cpp")):
+ if os.path.exists(x + "/api/api.cpp"):
platform_apis.append(x[9:])
- if (detect.is_active()):
+ if detect.is_active():
active_platforms.append(detect.get_name())
active_platform_ids.append(x)
- if (detect.can_build()):
+ if detect.can_build():
x = x.replace("platform/", "") # rest of world
x = x.replace("platform\\", "") # win32
platform_list += [x]
platform_opts[x] = detect.get_opts()
platform_flags[x] = detect.get_flags()
sys.path.remove(tmppath)
- sys.modules.pop('detect')
+ sys.modules.pop("detect")
module_list = methods.detect_modules()
methods.save_active_platforms(active_platforms, active_platform_ids)
-custom_tools = ['default']
+custom_tools = ["default"]
platform_arg = ARGUMENTS.get("platform", ARGUMENTS.get("p", False))
if os.name == "nt" and (platform_arg == "android" or ARGUMENTS.get("use_mingw", False)):
- custom_tools = ['mingw']
-elif platform_arg == 'javascript':
+ custom_tools = ["mingw"]
+elif platform_arg == "javascript":
# Use generic POSIX build toolchain for Emscripten.
- custom_tools = ['cc', 'c++', 'ar', 'link', 'textfile', 'zip']
+ custom_tools = ["cc", "c++", "ar", "link", "textfile", "zip"]
env_base = Environment(tools=custom_tools)
-if 'TERM' in os.environ:
- env_base['ENV']['TERM'] = os.environ['TERM']
-env_base.AppendENVPath('PATH', os.getenv('PATH'))
-env_base.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH'))
+if "TERM" in os.environ:
+ env_base["ENV"]["TERM"] = os.environ["TERM"]
+env_base.AppendENVPath("PATH", os.getenv("PATH"))
+env_base.AppendENVPath("PKG_CONFIG_PATH", os.getenv("PKG_CONFIG_PATH"))
env_base.disabled_modules = []
env_base.use_ptrcall = False
env_base.module_version_string = ""
@@ -94,7 +94,7 @@ env_base.SConsignFile(".sconsign{0}.dblite".format(pickle.HIGHEST_PROTOCOL))
# Build options
-customs = ['custom.py']
+customs = ["custom.py"]
profile = ARGUMENTS.get("profile", False)
if profile:
@@ -106,62 +106,62 @@ if profile:
opts = Variables(customs, ARGUMENTS)
# Target build options
-opts.Add('arch', "Platform-dependent architecture (arm/arm64/x86/x64/mips/...)", '')
-opts.Add(EnumVariable('bits', "Target platform bits", 'default', ('default', '32', '64')))
-opts.Add('p', "Platform (alias for 'platform')", '')
-opts.Add('platform', "Target platform (%s)" % ('|'.join(platform_list), ), '')
-opts.Add(EnumVariable('target', "Compilation target", 'debug', ('debug', 'release_debug', 'release')))
-opts.Add(EnumVariable('optimize', "Optimization type", 'speed', ('speed', 'size')))
+opts.Add("arch", "Platform-dependent architecture (arm/arm64/x86/x64/mips/...)", "")
+opts.Add(EnumVariable("bits", "Target platform bits", "default", ("default", "32", "64")))
+opts.Add("p", "Platform (alias for 'platform')", "")
+opts.Add("platform", "Target platform (%s)" % ("|".join(platform_list),), "")
+opts.Add(EnumVariable("target", "Compilation target", "debug", ("debug", "release_debug", "release")))
+opts.Add(EnumVariable("optimize", "Optimization type", "speed", ("speed", "size")))
-opts.Add(BoolVariable('tools', "Build the tools (a.k.a. the Godot editor)", True))
-opts.Add(BoolVariable('use_lto', 'Use link-time optimization', False))
-opts.Add(BoolVariable('use_precise_math_checks', 'Math checks use very precise epsilon (useful to debug the engine)', False))
+opts.Add(BoolVariable("tools", "Build the tools (a.k.a. the Godot editor)", True))
+opts.Add(BoolVariable("use_lto", "Use link-time optimization", False))
+opts.Add(BoolVariable("use_precise_math_checks", "Math checks use very precise epsilon (debug option)", False))
# Components
-opts.Add(BoolVariable('deprecated', "Enable deprecated features", True))
-opts.Add(BoolVariable('minizip', "Enable ZIP archive support using minizip", True))
-opts.Add(BoolVariable('xaudio2', "Enable the XAudio2 audio driver", False))
+opts.Add(BoolVariable("deprecated", "Enable deprecated features", True))
+opts.Add(BoolVariable("minizip", "Enable ZIP archive support using minizip", True))
+opts.Add(BoolVariable("xaudio2", "Enable the XAudio2 audio driver", False))
# Advanced options
-opts.Add(BoolVariable('verbose', "Enable verbose output for the compilation", False))
-opts.Add(BoolVariable('progress', "Show a progress indicator during compilation", True))
-opts.Add(EnumVariable('warnings', "Set the level of warnings emitted during compilation", 'all', ('extra', 'all', 'moderate', 'no')))
-opts.Add(BoolVariable('werror', "Treat compiler warnings as errors. Depends on the level of warnings set with 'warnings'", False))
-opts.Add(BoolVariable('dev', "If yes, alias for verbose=yes warnings=extra werror=yes", False))
-opts.Add('extra_suffix', "Custom extra suffix added to the base filename of all generated binary files", '')
-opts.Add(BoolVariable('vsproj', "Generate a Visual Studio solution", False))
-opts.Add(EnumVariable('macports_clang', "Build using Clang from MacPorts", 'no', ('no', '5.0', 'devel')))
-opts.Add(BoolVariable('disable_3d', "Disable 3D nodes for a smaller executable", False))
-opts.Add(BoolVariable('disable_advanced_gui', "Disable advanced GUI nodes and behaviors", False))
-opts.Add(BoolVariable('no_editor_splash', "Don't use the custom splash screen for the editor", False))
-opts.Add('system_certs_path', "Use this path as SSL certificates default for editor (for package maintainers)", '')
+opts.Add(BoolVariable("verbose", "Enable verbose output for the compilation", False))
+opts.Add(BoolVariable("progress", "Show a progress indicator during compilation", True))
+opts.Add(EnumVariable("warnings", "Level of compilation warnings", "all", ("extra", "all", "moderate", "no")))
+opts.Add(BoolVariable("werror", "Treat compiler warnings as errors", False))
+opts.Add(BoolVariable("dev", "If yes, alias for verbose=yes warnings=extra werror=yes", False))
+opts.Add("extra_suffix", "Custom extra suffix added to the base filename of all generated binary files", "")
+opts.Add(BoolVariable("vsproj", "Generate a Visual Studio solution", False))
+opts.Add(EnumVariable("macports_clang", "Build using Clang from MacPorts", "no", ("no", "5.0", "devel")))
+opts.Add(BoolVariable("disable_3d", "Disable 3D nodes for a smaller executable", False))
+opts.Add(BoolVariable("disable_advanced_gui", "Disable advanced GUI nodes and behaviors", False))
+opts.Add(BoolVariable("no_editor_splash", "Don't use the custom splash screen for the editor", False))
+opts.Add("system_certs_path", "Use this path as SSL certificates default for editor (for package maintainers)", "")
# Thirdparty libraries
-#opts.Add(BoolVariable('builtin_assimp', "Use the built-in Assimp library", True))
-opts.Add(BoolVariable('builtin_bullet', "Use the built-in Bullet library", True))
-opts.Add(BoolVariable('builtin_certs', "Bundle default SSL certificates to be used if you don't specify an override in the project settings", True))
-opts.Add(BoolVariable('builtin_enet', "Use the built-in ENet library", True))
-opts.Add(BoolVariable('builtin_freetype', "Use the built-in FreeType library", True))
-opts.Add(BoolVariable('builtin_glslang', "Use the built-in glslang library", True))
-opts.Add(BoolVariable('builtin_libogg', "Use the built-in libogg library", True))
-opts.Add(BoolVariable('builtin_libpng', "Use the built-in libpng library", True))
-opts.Add(BoolVariable('builtin_libtheora', "Use the built-in libtheora library", True))
-opts.Add(BoolVariable('builtin_libvorbis', "Use the built-in libvorbis library", True))
-opts.Add(BoolVariable('builtin_libvpx', "Use the built-in libvpx library", True))
-opts.Add(BoolVariable('builtin_libwebp', "Use the built-in libwebp library", True))
-opts.Add(BoolVariable('builtin_wslay', "Use the built-in wslay library", True))
-opts.Add(BoolVariable('builtin_mbedtls', "Use the built-in mbedTLS library", True))
-opts.Add(BoolVariable('builtin_miniupnpc', "Use the built-in miniupnpc library", True))
-opts.Add(BoolVariable('builtin_opus', "Use the built-in Opus library", True))
-opts.Add(BoolVariable('builtin_pcre2', "Use the built-in PCRE2 library", True))
-opts.Add(BoolVariable('builtin_pcre2_with_jit', "Use JIT compiler for the built-in PCRE2 library", True))
-opts.Add(BoolVariable('builtin_recast', "Use the built-in Recast library", True))
-opts.Add(BoolVariable('builtin_rvo2', "Use the built-in RVO2 library", True))
-opts.Add(BoolVariable('builtin_squish', "Use the built-in squish library", True))
-opts.Add(BoolVariable('builtin_vulkan', "Use the built-in Vulkan loader library and headers", True))
-opts.Add(BoolVariable('builtin_xatlas', "Use the built-in xatlas library", True))
-opts.Add(BoolVariable('builtin_zlib', "Use the built-in zlib library", True))
-opts.Add(BoolVariable('builtin_zstd', "Use the built-in Zstd library", True))
+# opts.Add(BoolVariable('builtin_assimp', "Use the built-in Assimp library", True))
+opts.Add(BoolVariable("builtin_bullet", "Use the built-in Bullet library", True))
+opts.Add(BoolVariable("builtin_certs", "Use the built-in SSL certificates bundles", True))
+opts.Add(BoolVariable("builtin_enet", "Use the built-in ENet library", True))
+opts.Add(BoolVariable("builtin_freetype", "Use the built-in FreeType library", True))
+opts.Add(BoolVariable("builtin_glslang", "Use the built-in glslang library", True))
+opts.Add(BoolVariable("builtin_libogg", "Use the built-in libogg library", True))
+opts.Add(BoolVariable("builtin_libpng", "Use the built-in libpng library", True))
+opts.Add(BoolVariable("builtin_libtheora", "Use the built-in libtheora library", True))
+opts.Add(BoolVariable("builtin_libvorbis", "Use the built-in libvorbis library", True))
+opts.Add(BoolVariable("builtin_libvpx", "Use the built-in libvpx library", True))
+opts.Add(BoolVariable("builtin_libwebp", "Use the built-in libwebp library", True))
+opts.Add(BoolVariable("builtin_wslay", "Use the built-in wslay library", True))
+opts.Add(BoolVariable("builtin_mbedtls", "Use the built-in mbedTLS library", True))
+opts.Add(BoolVariable("builtin_miniupnpc", "Use the built-in miniupnpc library", True))
+opts.Add(BoolVariable("builtin_opus", "Use the built-in Opus library", True))
+opts.Add(BoolVariable("builtin_pcre2", "Use the built-in PCRE2 library", True))
+opts.Add(BoolVariable("builtin_pcre2_with_jit", "Use JIT compiler for the built-in PCRE2 library", True))
+opts.Add(BoolVariable("builtin_recast", "Use the built-in Recast library", True))
+opts.Add(BoolVariable("builtin_rvo2", "Use the built-in RVO2 library", True))
+opts.Add(BoolVariable("builtin_squish", "Use the built-in squish library", True))
+opts.Add(BoolVariable("builtin_vulkan", "Use the built-in Vulkan loader library and headers", True))
+opts.Add(BoolVariable("builtin_xatlas", "Use the built-in xatlas library", True))
+opts.Add(BoolVariable("builtin_zlib", "Use the built-in zlib library", True))
+opts.Add(BoolVariable("builtin_zstd", "Use the built-in Zstd library", True))
# Compilation environment setup
opts.Add("CXX", "C++ compiler")
@@ -184,63 +184,64 @@ for x in module_list:
tmppath = "./modules/" + x
sys.path.insert(0, tmppath)
import config
+
enabled_attr = getattr(config, "is_enabled", None)
- if (callable(enabled_attr) and not config.is_enabled()):
+ if callable(enabled_attr) and not config.is_enabled():
module_enabled = False
sys.path.remove(tmppath)
- sys.modules.pop('config')
- opts.Add(BoolVariable('module_' + x + '_enabled', "Enable module '%s'" % (x, ), module_enabled))
+ sys.modules.pop("config")
+ opts.Add(BoolVariable("module_" + x + "_enabled", "Enable module '%s'" % (x,), module_enabled))
opts.Update(env_base) # update environment
Help(opts.GenerateHelpText(env_base)) # generate help
# add default include paths
-env_base.Prepend(CPPPATH=['#'])
+env_base.Prepend(CPPPATH=["#"])
# configure ENV for platform
env_base.platform_exporters = platform_exporters
env_base.platform_apis = platform_apis
-if (env_base["use_precise_math_checks"]):
- env_base.Append(CPPDEFINES=['PRECISE_MATH_CHECKS'])
+if env_base["use_precise_math_checks"]:
+ env_base.Append(CPPDEFINES=["PRECISE_MATH_CHECKS"])
-if (env_base['target'] == 'debug'):
- env_base.Append(CPPDEFINES=['DEBUG_MEMORY_ALLOC','DISABLE_FORCED_INLINE'])
+if env_base["target"] == "debug":
+ env_base.Append(CPPDEFINES=["DEBUG_MEMORY_ALLOC", "DISABLE_FORCED_INLINE"])
# The two options below speed up incremental builds, but reduce the certainty that all files
# will properly be rebuilt. As such, we only enable them for debug (dev) builds, not release.
# To decide whether to rebuild a file, use the MD5 sum only if the timestamp has changed.
# http://scons.org/doc/production/HTML/scons-user/ch06.html#idm139837621851792
- env_base.Decider('MD5-timestamp')
+ env_base.Decider("MD5-timestamp")
# Use cached implicit dependencies by default. Can be overridden by specifying `--implicit-deps-changed` in the command line.
# http://scons.org/doc/production/HTML/scons-user/ch06s04.html
- env_base.SetOption('implicit_cache', 1)
+ env_base.SetOption("implicit_cache", 1)
-if (env_base['no_editor_splash']):
- env_base.Append(CPPDEFINES=['NO_EDITOR_SPLASH'])
+if env_base["no_editor_splash"]:
+ env_base.Append(CPPDEFINES=["NO_EDITOR_SPLASH"])
-if not env_base['deprecated']:
- env_base.Append(CPPDEFINES=['DISABLE_DEPRECATED'])
+if not env_base["deprecated"]:
+ env_base.Append(CPPDEFINES=["DISABLE_DEPRECATED"])
env_base.platforms = {}
selected_platform = ""
-if env_base['platform'] != "":
- selected_platform = env_base['platform']
-elif env_base['p'] != "":
- selected_platform = env_base['p']
+if env_base["platform"] != "":
+ selected_platform = env_base["platform"]
+elif env_base["p"] != "":
+ selected_platform = env_base["p"]
env_base["platform"] = selected_platform
else:
# Missing `platform` argument, try to detect platform automatically
- if sys.platform.startswith('linux'):
- selected_platform = 'linuxbsd'
- elif sys.platform == 'darwin':
- selected_platform = 'osx'
- elif sys.platform == 'win32':
- selected_platform = 'windows'
+ if sys.platform.startswith("linux"):
+ selected_platform = "linuxbsd"
+ elif sys.platform == "darwin":
+ selected_platform = "osx"
+ elif sys.platform == "win32":
+ selected_platform = "windows"
else:
print("Could not detect platform automatically. Supported platforms:")
for x in platform_list:
@@ -254,8 +255,7 @@ else:
if selected_platform in ["linux", "bsd", "x11"]:
if selected_platform == "x11":
# Deprecated alias kept for compatibility.
- print('Platform "x11" has been renamed to "linuxbsd" in Godot 4.0. '
- 'Building for platform "linuxbsd".')
+ print('Platform "x11" has been renamed to "linuxbsd" in Godot 4.0. Building for platform "linuxbsd".')
# Alias for convenience.
selected_platform = "linuxbsd"
@@ -263,17 +263,18 @@ if selected_platform in platform_list:
tmppath = "./platform/" + selected_platform
sys.path.insert(0, tmppath)
import detect
+
if "create" in dir(detect):
env = detect.create(env_base)
else:
env = env_base.Clone()
- if env['dev']:
- env['verbose'] = True
- env['warnings'] = "extra"
- env['werror'] = True
+ if env["dev"]:
+ env["verbose"] = True
+ env["warnings"] = "extra"
+ env["werror"] = True
- if env['vsproj']:
+ if env["vsproj"]:
env.vs_incs = []
env.vs_srcs = []
@@ -286,7 +287,7 @@ if selected_platform in platform_list:
pieces = fname.split(".")
if len(pieces) > 0:
basename = pieces[0]
- basename = basename.replace('\\\\', '/')
+ basename = basename.replace("\\\\", "/")
if os.path.isfile(basename + ".h"):
env.vs_incs = env.vs_incs + [basename + ".h"]
elif os.path.isfile(basename + ".hpp"):
@@ -295,28 +296,29 @@ if selected_platform in platform_list:
env.vs_srcs = env.vs_srcs + [basename + ".c"]
elif os.path.isfile(basename + ".cpp"):
env.vs_srcs = env.vs_srcs + [basename + ".cpp"]
+
env.AddToVSProject = AddToVSProject
env.extra_suffix = ""
- if env["extra_suffix"] != '':
- env.extra_suffix += '.' + env["extra_suffix"]
+ if env["extra_suffix"] != "":
+ env.extra_suffix += "." + env["extra_suffix"]
# Environment flags
- CCFLAGS = env.get('CCFLAGS', '')
- env['CCFLAGS'] = ''
+ CCFLAGS = env.get("CCFLAGS", "")
+ env["CCFLAGS"] = ""
env.Append(CCFLAGS=str(CCFLAGS).split())
- CFLAGS = env.get('CFLAGS', '')
- env['CFLAGS'] = ''
+ CFLAGS = env.get("CFLAGS", "")
+ env["CFLAGS"] = ""
env.Append(CFLAGS=str(CFLAGS).split())
- CXXFLAGS = env.get('CXXFLAGS', '')
- env['CXXFLAGS'] = ''
+ CXXFLAGS = env.get("CXXFLAGS", "")
+ env["CXXFLAGS"] = ""
env.Append(CXXFLAGS=str(CXXFLAGS).split())
- LINKFLAGS = env.get('LINKFLAGS', '')
- env['LINKFLAGS'] = ''
+ LINKFLAGS = env.get("LINKFLAGS", "")
+ env["LINKFLAGS"] = ""
env.Append(LINKFLAGS=str(LINKFLAGS).split())
# Platform specific flags
@@ -335,12 +337,12 @@ if selected_platform in platform_list:
if not env.msvc:
# Specifying GNU extensions support explicitly, which are supported by
# both GCC and Clang. Both currently default to gnu11 and gnu++14.
- env.Prepend(CFLAGS=['-std=gnu11'])
- env.Prepend(CXXFLAGS=['-std=gnu++17'])
+ env.Prepend(CFLAGS=["-std=gnu11"])
+ env.Prepend(CXXFLAGS=["-std=gnu++17"])
else:
# MSVC doesn't have clear C standard support, /std only covers C++.
# We apply it to CCFLAGS (both C and C++ code) in case it impacts C features.
- env.Prepend(CCFLAGS=['/std:c++17'])
+ env.Prepend(CCFLAGS=["/std:c++17"])
# Enforce our minimal compiler version requirements
cc_version = methods.get_compiler_version(env) or [-1, -1]
@@ -351,16 +353,20 @@ if selected_platform in platform_list:
# GCC 8 before 8.4 has a regression in the support of guaranteed copy elision
# which causes a build failure: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86521
if cc_version_major == 8 and cc_version_minor < 4:
- print("Detected GCC 8 version < 8.4, which is not supported due to a "
- "regression in its C++17 guaranteed copy elision support. Use a "
- "newer GCC version, or Clang 6 or later by passing \"use_llvm=yes\" "
- "to the SCons command line.")
+ print(
+ "Detected GCC 8 version < 8.4, which is not supported due to a "
+ "regression in its C++17 guaranteed copy elision support. Use a "
+ 'newer GCC version, or Clang 6 or later by passing "use_llvm=yes" '
+ "to the SCons command line."
+ )
sys.exit(255)
elif cc_version_major < 7:
- print("Detected GCC version older than 7, which does not fully support "
- "C++17. Supported versions are GCC 7, 9 and later. Use a newer GCC "
- "version, or Clang 6 or later by passing \"use_llvm=yes\" to the "
- "SCons command line.")
+ print(
+ "Detected GCC version older than 7, which does not fully support "
+ "C++17. Supported versions are GCC 7, 9 and later. Use a newer GCC "
+ 'version, or Clang 6 or later by passing "use_llvm=yes" to the '
+ "SCons command line."
+ )
sys.exit(255)
elif methods.using_clang(env):
# Apple LLVM versions differ from upstream LLVM version \o/, compare
@@ -368,87 +374,98 @@ if selected_platform in platform_list:
if env["platform"] == "osx" or env["platform"] == "iphone":
vanilla = methods.is_vanilla_clang(env)
if vanilla and cc_version_major < 6:
- print("Detected Clang version older than 6, which does not fully support "
- "C++17. Supported versions are Clang 6 and later.")
+ print(
+ "Detected Clang version older than 6, which does not fully support "
+ "C++17. Supported versions are Clang 6 and later."
+ )
sys.exit(255)
elif not vanilla and cc_version_major < 10:
- print("Detected Apple Clang version older than 10, which does not fully "
- "support C++17. Supported versions are Apple Clang 10 and later.")
+ print(
+ "Detected Apple Clang version older than 10, which does not fully "
+ "support C++17. Supported versions are Apple Clang 10 and later."
+ )
sys.exit(255)
elif cc_version_major < 6:
- print("Detected Clang version older than 6, which does not fully support "
- "C++17. Supported versions are Clang 6 and later.")
+ print(
+ "Detected Clang version older than 6, which does not fully support "
+ "C++17. Supported versions are Clang 6 and later."
+ )
sys.exit(255)
# Configure compiler warnings
if env.msvc:
# Truncations, narrowing conversions, signed/unsigned comparisons...
- disable_nonessential_warnings = ['/wd4267', '/wd4244', '/wd4305', '/wd4018', '/wd4800']
- if (env["warnings"] == 'extra'):
- env.Append(CCFLAGS=['/Wall']) # Implies /W4
- elif (env["warnings"] == 'all'):
- env.Append(CCFLAGS=['/W3'] + disable_nonessential_warnings)
- elif (env["warnings"] == 'moderate'):
- env.Append(CCFLAGS=['/W2'] + disable_nonessential_warnings)
- else: # 'no'
- env.Append(CCFLAGS=['/w'])
+ disable_nonessential_warnings = ["/wd4267", "/wd4244", "/wd4305", "/wd4018", "/wd4800"]
+ if env["warnings"] == "extra":
+ env.Append(CCFLAGS=["/Wall"]) # Implies /W4
+ elif env["warnings"] == "all":
+ env.Append(CCFLAGS=["/W3"] + disable_nonessential_warnings)
+ elif env["warnings"] == "moderate":
+ env.Append(CCFLAGS=["/W2"] + disable_nonessential_warnings)
+ else: # 'no'
+ env.Append(CCFLAGS=["/w"])
# Set exception handling model to avoid warnings caused by Windows system headers.
- env.Append(CCFLAGS=['/EHsc'])
- if (env["werror"]):
- env.Append(CCFLAGS=['/WX'])
+ env.Append(CCFLAGS=["/EHsc"])
+ if env["werror"]:
+ env.Append(CCFLAGS=["/WX"])
# Force to use Unicode encoding
- env.Append(MSVC_FLAGS=['/utf8'])
- else: # Rest of the world
+ env.Append(MSVC_FLAGS=["/utf8"])
+ else: # Rest of the world
shadow_local_warning = []
- all_plus_warnings = ['-Wwrite-strings']
+ all_plus_warnings = ["-Wwrite-strings"]
if methods.using_gcc(env):
if cc_version_major >= 7:
- shadow_local_warning = ['-Wshadow-local']
+ shadow_local_warning = ["-Wshadow-local"]
- if (env["warnings"] == 'extra'):
- env.Append(CCFLAGS=['-Wall', '-Wextra', '-Wno-unused-parameter']
- + all_plus_warnings + shadow_local_warning)
- env.Append(CXXFLAGS=['-Wctor-dtor-privacy', '-Wnon-virtual-dtor'])
+ if env["warnings"] == "extra":
+ env.Append(CCFLAGS=["-Wall", "-Wextra", "-Wno-unused-parameter"] + all_plus_warnings + shadow_local_warning)
+ env.Append(CXXFLAGS=["-Wctor-dtor-privacy", "-Wnon-virtual-dtor"])
if methods.using_gcc(env):
- env.Append(CCFLAGS=['-Walloc-zero',
- '-Wduplicated-branches', '-Wduplicated-cond',
- '-Wstringop-overflow=4', '-Wlogical-op'])
+ env.Append(
+ CCFLAGS=[
+ "-Walloc-zero",
+ "-Wduplicated-branches",
+ "-Wduplicated-cond",
+ "-Wstringop-overflow=4",
+ "-Wlogical-op",
+ ]
+ )
# -Wnoexcept was removed temporarily due to GH-36325.
- env.Append(CXXFLAGS=['-Wplacement-new=1'])
+ env.Append(CXXFLAGS=["-Wplacement-new=1"])
if cc_version_major >= 9:
- env.Append(CCFLAGS=['-Wattribute-alias=2'])
+ env.Append(CCFLAGS=["-Wattribute-alias=2"])
if methods.using_clang(env):
- env.Append(CCFLAGS=['-Wimplicit-fallthrough'])
- elif (env["warnings"] == 'all'):
- env.Append(CCFLAGS=['-Wall'] + shadow_local_warning)
- elif (env["warnings"] == 'moderate'):
- env.Append(CCFLAGS=['-Wall', '-Wno-unused'] + shadow_local_warning)
- else: # 'no'
- env.Append(CCFLAGS=['-w'])
- if (env["werror"]):
- env.Append(CCFLAGS=['-Werror'])
+ env.Append(CCFLAGS=["-Wimplicit-fallthrough"])
+ elif env["warnings"] == "all":
+ env.Append(CCFLAGS=["-Wall"] + shadow_local_warning)
+ elif env["warnings"] == "moderate":
+ env.Append(CCFLAGS=["-Wall", "-Wno-unused"] + shadow_local_warning)
+ else: # 'no'
+ env.Append(CCFLAGS=["-w"])
+ if env["werror"]:
+ env.Append(CCFLAGS=["-Werror"])
# FIXME: Temporary workaround after the Vulkan merge, remove once warnings are fixed.
if methods.using_gcc(env):
- env.Append(CXXFLAGS=['-Wno-error=cpp'])
+ env.Append(CXXFLAGS=["-Wno-error=cpp"])
else:
- env.Append(CXXFLAGS=['-Wno-error=#warnings'])
- else: # always enable those errors
- env.Append(CCFLAGS=['-Werror=return-type'])
+ env.Append(CXXFLAGS=["-Wno-error=#warnings"])
+ else: # always enable those errors
+ env.Append(CCFLAGS=["-Werror=return-type"])
- if (hasattr(detect, 'get_program_suffix')):
+ if hasattr(detect, "get_program_suffix"):
suffix = "." + detect.get_program_suffix()
else:
suffix = "." + selected_platform
- if (env["target"] == "release"):
+ if env["target"] == "release":
if env["tools"]:
print("Tools can only be built with targets 'debug' and 'release_debug'.")
sys.exit(255)
suffix += ".opt"
- env.Append(CPPDEFINES=['NDEBUG'])
+ env.Append(CPPDEFINES=["NDEBUG"])
- elif (env["target"] == "release_debug"):
+ elif env["target"] == "release_debug":
if env["tools"]:
suffix += ".opt.tools"
else:
@@ -461,27 +478,28 @@ if selected_platform in platform_list:
if env["arch"] != "":
suffix += "." + env["arch"]
- elif (env["bits"] == "32"):
+ elif env["bits"] == "32":
suffix += ".32"
- elif (env["bits"] == "64"):
+ elif env["bits"] == "64":
suffix += ".64"
suffix += env.extra_suffix
sys.path.remove(tmppath)
- sys.modules.pop('detect')
+ sys.modules.pop("detect")
env.module_list = []
env.module_icons_paths = []
env.doc_class_path = {}
for x in sorted(module_list):
- if not env['module_' + x + '_enabled']:
+ if not env["module_" + x + "_enabled"]:
continue
tmppath = "./modules/" + x
sys.path.insert(0, tmppath)
env.current_module = x
import config
+
if config.can_build(env, selected_platform):
config.configure(env)
env.module_list.append(x)
@@ -503,7 +521,7 @@ if selected_platform in platform_list:
env.module_icons_paths.append("modules/" + x + "/" + "icons")
sys.path.remove(tmppath)
- sys.modules.pop('config')
+ sys.modules.pop("config")
methods.update_version(env.module_version_string)
@@ -522,45 +540,66 @@ if selected_platform in platform_list:
env["LIBSUFFIX"] = suffix + env["LIBSUFFIX"]
env["SHLIBSUFFIX"] = suffix + env["SHLIBSUFFIX"]
- if (env.use_ptrcall):
- env.Append(CPPDEFINES=['PTRCALL_ENABLED'])
- if env['tools']:
- env.Append(CPPDEFINES=['TOOLS_ENABLED'])
- if env['disable_3d']:
- if env['tools']:
- print("Build option 'disable_3d=yes' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).")
+ if env.use_ptrcall:
+ env.Append(CPPDEFINES=["PTRCALL_ENABLED"])
+ if env["tools"]:
+ env.Append(CPPDEFINES=["TOOLS_ENABLED"])
+ if env["disable_3d"]:
+ if env["tools"]:
+ print(
+ "Build option 'disable_3d=yes' cannot be used with 'tools=yes' (editor), "
+ "only with 'tools=no' (export template)."
+ )
sys.exit(255)
else:
- env.Append(CPPDEFINES=['_3D_DISABLED'])
- if env['disable_advanced_gui']:
- if env['tools']:
- print("Build option 'disable_advanced_gui=yes' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).")
+ env.Append(CPPDEFINES=["_3D_DISABLED"])
+ if env["disable_advanced_gui"]:
+ if env["tools"]:
+ print(
+ "Build option 'disable_advanced_gui=yes' cannot be used with 'tools=yes' (editor), "
+ "only with 'tools=no' (export template)."
+ )
sys.exit(255)
else:
- env.Append(CPPDEFINES=['ADVANCED_GUI_DISABLED'])
- if env['minizip']:
- env.Append(CPPDEFINES=['MINIZIP_ENABLED'])
+ env.Append(CPPDEFINES=["ADVANCED_GUI_DISABLED"])
+ if env["minizip"]:
+ env.Append(CPPDEFINES=["MINIZIP_ENABLED"])
- editor_module_list = ['regex']
+ editor_module_list = ["regex"]
for x in editor_module_list:
- if not env['module_' + x + '_enabled']:
- if env['tools']:
- print("Build option 'module_" + x + "_enabled=no' cannot be used with 'tools=yes' (editor), only with 'tools=no' (export template).")
+ if not env["module_" + x + "_enabled"]:
+ if env["tools"]:
+ print(
+ "Build option 'module_" + x + "_enabled=no' cannot be used with 'tools=yes' (editor), "
+ "only with 'tools=no' (export template)."
+ )
sys.exit(255)
- if not env['verbose']:
+ if not env["verbose"]:
methods.no_verbose(sys, env)
- if (not env["platform"] == "server"):
- env.Append(BUILDERS = { 'GLES2_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_gles2_headers), suffix='glsl.gen.h', src_suffix='.glsl')})
- env.Append(BUILDERS = { 'RD_GLSL' : env.Builder(action=run_in_subprocess(gles_builders.build_rd_headers), suffix='glsl.gen.h', src_suffix='.glsl')})
+ if not env["platform"] == "server":
+ env.Append(
+ BUILDERS={
+ "GLES2_GLSL": env.Builder(
+ action=run_in_subprocess(gles_builders.build_gles2_headers), suffix="glsl.gen.h", src_suffix=".glsl"
+ )
+ }
+ )
+ env.Append(
+ BUILDERS={
+ "RD_GLSL": env.Builder(
+ action=run_in_subprocess(gles_builders.build_rd_headers), suffix="glsl.gen.h", src_suffix=".glsl"
+ )
+ }
+ )
scons_cache_path = os.environ.get("SCONS_CACHE")
if scons_cache_path != None:
CacheDir(scons_cache_path)
print("Scons cache enabled... (path: '" + scons_cache_path + "')")
- Export('env')
+ Export("env")
# build subdirs, the build order is dependent on link order.
@@ -577,16 +616,16 @@ if selected_platform in platform_list:
SConscript("platform/" + selected_platform + "/SCsub") # build selected platform
# Microsoft Visual Studio Project Generation
- if env['vsproj']:
- env['CPPPATH'] = [Dir(path) for path in env['CPPPATH']]
+ if env["vsproj"]:
+ env["CPPPATH"] = [Dir(path) for path in env["CPPPATH"]]
methods.generate_vs_project(env, GetOption("num_jobs"))
methods.generate_cpp_hint_file("cpp.hint")
# Check for the existence of headers
conf = Configure(env)
- if ("check_c_headers" in env):
+ if "check_c_headers" in env:
for header in env["check_c_headers"]:
- if (conf.CheckCHeader(header[0])):
+ if conf.CheckCHeader(header[0]):
env.AppendUnique(CPPDEFINES=[header[1]])
elif selected_platform != "":
@@ -608,26 +647,30 @@ elif selected_platform != "":
sys.exit(255)
# The following only makes sense when the env is defined, and assumes it is
-if 'env' in locals():
+if "env" in locals():
screen = sys.stdout
# Progress reporting is not available in non-TTY environments since it
# messes with the output (for example, when writing to a file)
- show_progress = (env['progress'] and sys.stdout.isatty())
+ show_progress = env["progress"] and sys.stdout.isatty()
node_count = 0
node_count_max = 0
node_count_interval = 1
- node_count_fname = str(env.Dir('#')) + '/.scons_node_count'
+ node_count_fname = str(env.Dir("#")) + "/.scons_node_count"
import time, math
class cache_progress:
# The default is 1 GB cache and 12 hours half life
- def __init__(self, path = None, limit = 1073741824, half_life = 43200):
+ def __init__(self, path=None, limit=1073741824, half_life=43200):
self.path = path
self.limit = limit
self.exponent_scale = math.log(2) / half_life
- if env['verbose'] and path != None:
- screen.write('Current cache limit is ' + self.convert_size(limit) + ' (used: ' + self.convert_size(self.get_size(path)) + ')\n')
+ if env["verbose"] and path != None:
+ screen.write(
+ "Current cache limit is {} (used: {})\n".format(
+ self.convert_size(limit), self.convert_size(self.get_size(path))
+ )
+ )
self.delete(self.file_list())
def __call__(self, node, *args, **kw):
@@ -635,22 +678,22 @@ if 'env' in locals():
if show_progress:
# Print the progress percentage
node_count += node_count_interval
- if (node_count_max > 0 and node_count <= node_count_max):
- screen.write('\r[%3d%%] ' % (node_count * 100 / node_count_max))
+ if node_count_max > 0 and node_count <= node_count_max:
+ screen.write("\r[%3d%%] " % (node_count * 100 / node_count_max))
screen.flush()
- elif (node_count_max > 0 and node_count > node_count_max):
- screen.write('\r[100%] ')
+ elif node_count_max > 0 and node_count > node_count_max:
+ screen.write("\r[100%] ")
screen.flush()
else:
- screen.write('\r[Initial build] ')
+ screen.write("\r[Initial build] ")
screen.flush()
def delete(self, files):
if len(files) == 0:
return
- if env['verbose']:
+ if env["verbose"]:
# Utter something
- screen.write('\rPurging %d %s from cache...\n' % (len(files), len(files) > 1 and 'files' or 'file'))
+ screen.write("\rPurging %d %s from cache...\n" % (len(files), len(files) > 1 and "files" or "file"))
[os.remove(f) for f in files]
def file_list(self):
@@ -659,7 +702,7 @@ if 'env' in locals():
return []
# Gather a list of (filename, (size, atime)) within the
# cache directory
- file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, '*', '*'))]
+ file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))]
if file_stat == []:
# Nothing to do
return []
@@ -674,7 +717,7 @@ if 'env' in locals():
# Search for the first entry where the storage limit is
# reached
sum, mark = 0, None
- for i,x in enumerate(file_stat):
+ for i, x in enumerate(file_stat):
sum += x[1]
if sum > self.limit:
mark = i
@@ -693,7 +736,7 @@ if 'env' in locals():
s = round(size_bytes / p, 2)
return "%s %s" % (int(s) if i == 0 else s, size_name[i])
- def get_size(self, start_path = '.'):
+ def get_size(self, start_path="."):
total_size = 0
for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames:
@@ -703,8 +746,8 @@ if 'env' in locals():
def progress_finish(target, source, env):
global node_count, progressor
- with open(node_count_fname, 'w') as f:
- f.write('%d\n' % node_count)
+ with open(node_count_fname, "w") as f:
+ f.write("%d\n" % node_count)
progressor.delete(progressor.file_list())
try:
@@ -718,7 +761,7 @@ if 'env' in locals():
# cache directory to a size not larger than cache_limit.
cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024
progressor = cache_progress(cache_directory, cache_limit)
- Progress(progressor, interval = node_count_interval)
+ Progress(progressor, interval=node_count_interval)
- progress_finish_command = Command('progress_finish', [], progress_finish)
+ progress_finish_command = Command("progress_finish", [], progress_finish)
AlwaysBuild(progress_finish_command)
diff --git a/core/SCsub b/core/SCsub
index ca003ce931d..0ab4f11d877 100644
--- a/core/SCsub
+++ b/core/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
import core_builders
import make_binders
@@ -11,31 +11,32 @@ env.core_sources = []
# Generate AES256 script encryption key
import os
+
txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0"
-if ("SCRIPT_AES256_ENCRYPTION_KEY" in os.environ):
+if "SCRIPT_AES256_ENCRYPTION_KEY" in os.environ:
e = os.environ["SCRIPT_AES256_ENCRYPTION_KEY"]
txt = ""
ec_valid = True
- if (len(e) != 64):
+ if len(e) != 64:
ec_valid = False
else:
for i in range(len(e) >> 1):
- if (i > 0):
+ if i > 0:
txt += ","
- txts = "0x" + e[i * 2:i * 2 + 2]
+ txts = "0x" + e[i * 2 : i * 2 + 2]
try:
int(txts, 16)
except:
ec_valid = False
txt += txts
- if (not ec_valid):
+ if not ec_valid:
txt = "0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0"
print("Invalid AES256 encryption key, not 64 bits hex: " + e)
# NOTE: It is safe to generate this file here, since this is still executed serially
with open("script_encryption_key.gen.cpp", "w") as f:
- f.write("#include \"core/project_settings.h\"\nuint8_t script_encryption_key[32]={" + txt + "};\n")
+ f.write('#include "core/project_settings.h"\nuint8_t script_encryption_key[32]={' + txt + "};\n")
# Add required thirdparty code.
@@ -49,7 +50,6 @@ thirdparty_misc_sources = [
# C sources
"fastlz.c",
"smaz.c",
-
# C++ sources
"hq2x.cpp",
"pcg.cpp",
@@ -60,30 +60,30 @@ thirdparty_misc_sources = [thirdparty_misc_dir + file for file in thirdparty_mis
env_thirdparty.add_source_files(env.core_sources, thirdparty_misc_sources)
# Zlib library, can be unbundled
-if env['builtin_zlib']:
- thirdparty_zlib_dir = "#thirdparty/zlib/"
- thirdparty_zlib_sources = [
- "adler32.c",
- "compress.c",
- "crc32.c",
- "deflate.c",
- "infback.c",
- "inffast.c",
- "inflate.c",
- "inftrees.c",
- "trees.c",
- "uncompr.c",
- "zutil.c",
- ]
- thirdparty_zlib_sources = [thirdparty_zlib_dir + file for file in thirdparty_zlib_sources]
+if env["builtin_zlib"]:
+ thirdparty_zlib_dir = "#thirdparty/zlib/"
+ thirdparty_zlib_sources = [
+ "adler32.c",
+ "compress.c",
+ "crc32.c",
+ "deflate.c",
+ "infback.c",
+ "inffast.c",
+ "inflate.c",
+ "inftrees.c",
+ "trees.c",
+ "uncompr.c",
+ "zutil.c",
+ ]
+ thirdparty_zlib_sources = [thirdparty_zlib_dir + file for file in thirdparty_zlib_sources]
- env_thirdparty.Prepend(CPPPATH=[thirdparty_zlib_dir])
- # Needs to be available in main env too
- env.Prepend(CPPPATH=[thirdparty_zlib_dir])
- if (env['target'] == 'debug'):
- env_thirdparty.Append(CPPDEFINES=['ZLIB_DEBUG'])
+ env_thirdparty.Prepend(CPPPATH=[thirdparty_zlib_dir])
+ # Needs to be available in main env too
+ env.Prepend(CPPPATH=[thirdparty_zlib_dir])
+ if env["target"] == "debug":
+ env_thirdparty.Append(CPPDEFINES=["ZLIB_DEBUG"])
- env_thirdparty.add_source_files(env.core_sources, thirdparty_zlib_sources)
+ env_thirdparty.add_source_files(env.core_sources, thirdparty_zlib_sources)
# Minizip library, could be unbundled in theory
# However, our version has some custom modifications, so it won't compile with the system one
@@ -99,7 +99,7 @@ env_thirdparty.add_source_files(env.core_sources, thirdparty_minizip_sources)
# Zstd library, can be unbundled in theory
# though we currently use some private symbols
# https://github.com/godotengine/godot/issues/17374
-if env['builtin_zstd']:
+if env["builtin_zstd"]:
thirdparty_zstd_dir = "#thirdparty/zstd/"
thirdparty_zstd_sources = [
"common/debug.c",
@@ -142,32 +142,45 @@ if env['builtin_zstd']:
env.add_source_files(env.core_sources, "*.cpp")
# Certificates
-env.Depends("#core/io/certs_compressed.gen.h", ["#thirdparty/certs/ca-certificates.crt", env.Value(env['builtin_certs']), env.Value(env['system_certs_path'])])
-env.CommandNoCache("#core/io/certs_compressed.gen.h", "#thirdparty/certs/ca-certificates.crt", run_in_subprocess(core_builders.make_certs_header))
+env.Depends(
+ "#core/io/certs_compressed.gen.h",
+ ["#thirdparty/certs/ca-certificates.crt", env.Value(env["builtin_certs"]), env.Value(env["system_certs_path"])],
+)
+env.CommandNoCache(
+ "#core/io/certs_compressed.gen.h",
+ "#thirdparty/certs/ca-certificates.crt",
+ run_in_subprocess(core_builders.make_certs_header),
+)
# Make binders
-env.CommandNoCache(['method_bind.gen.inc', 'method_bind_ext.gen.inc', 'method_bind_free_func.gen.inc'], 'make_binders.py', run_in_subprocess(make_binders.run))
+env.CommandNoCache(
+ ["method_bind.gen.inc", "method_bind_ext.gen.inc", "method_bind_free_func.gen.inc"],
+ "make_binders.py",
+ run_in_subprocess(make_binders.run),
+)
# Authors
-env.Depends('#core/authors.gen.h', "../AUTHORS.md")
-env.CommandNoCache('#core/authors.gen.h', "../AUTHORS.md", run_in_subprocess(core_builders.make_authors_header))
+env.Depends("#core/authors.gen.h", "../AUTHORS.md")
+env.CommandNoCache("#core/authors.gen.h", "../AUTHORS.md", run_in_subprocess(core_builders.make_authors_header))
# Donors
-env.Depends('#core/donors.gen.h', "../DONORS.md")
-env.CommandNoCache('#core/donors.gen.h', "../DONORS.md", run_in_subprocess(core_builders.make_donors_header))
+env.Depends("#core/donors.gen.h", "../DONORS.md")
+env.CommandNoCache("#core/donors.gen.h", "../DONORS.md", run_in_subprocess(core_builders.make_donors_header))
# License
-env.Depends('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"])
-env.CommandNoCache('#core/license.gen.h', ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(core_builders.make_license_header))
+env.Depends("#core/license.gen.h", ["../COPYRIGHT.txt", "../LICENSE.txt"])
+env.CommandNoCache(
+ "#core/license.gen.h", ["../COPYRIGHT.txt", "../LICENSE.txt"], run_in_subprocess(core_builders.make_license_header)
+)
# Chain load SCsubs
-SConscript('os/SCsub')
-SConscript('math/SCsub')
-SConscript('crypto/SCsub')
-SConscript('io/SCsub')
-SConscript('debugger/SCsub')
-SConscript('input/SCsub')
-SConscript('bind/SCsub')
+SConscript("os/SCsub")
+SConscript("math/SCsub")
+SConscript("crypto/SCsub")
+SConscript("io/SCsub")
+SConscript("debugger/SCsub")
+SConscript("input/SCsub")
+SConscript("bind/SCsub")
# Build it all as a library
diff --git a/core/bind/SCsub b/core/bind/SCsub
index 1c5f954470d..19a65492252 100644
--- a/core/bind/SCsub
+++ b/core/bind/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.core_sources, "*.cpp")
diff --git a/core/core_builders.py b/core/core_builders.py
index a06b61cb9b2..d03874608e6 100644
--- a/core/core_builders.py
+++ b/core/core_builders.py
@@ -11,15 +11,15 @@ def escape_string(s):
rev_result = []
while c >= 256:
c, low = (c // 256, c % 256)
- rev_result.append('\\%03o' % low)
- rev_result.append('\\%03o' % c)
- return ''.join(reversed(rev_result))
+ rev_result.append("\\%03o" % low)
+ rev_result.append("\\%03o" % c)
+ return "".join(reversed(rev_result))
- result = ''
+ result = ""
if isinstance(s, str):
- s = s.encode('utf-8')
+ s = s.encode("utf-8")
for c in s:
- if not(32 <= c < 127) or c in (ord('\\'), ord('"')):
+ if not (32 <= c < 127) or c in (ord("\\"), ord('"')):
result += charcode_to_c_escapes(c)
else:
result += chr(c)
@@ -34,6 +34,7 @@ def make_certs_header(target, source, env):
buf = f.read()
decomp_size = len(buf)
import zlib
+
buf = zlib.compress(buf)
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
@@ -41,9 +42,9 @@ def make_certs_header(target, source, env):
g.write("#define CERTS_COMPRESSED_GEN_H\n")
# System certs path. Editor will use them if defined. (for package maintainers)
- path = env['system_certs_path']
- g.write("#define _SYSTEM_CERTS_PATH \"%s\"\n" % str(path))
- if env['builtin_certs']:
+ path = env["system_certs_path"]
+ g.write('#define _SYSTEM_CERTS_PATH "%s"\n' % str(path))
+ if env["builtin_certs"]:
# Defined here and not in env so changing it does not trigger a full rebuild.
g.write("#define BUILTIN_CERTS_ENABLED\n")
g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n")
@@ -59,8 +60,18 @@ def make_certs_header(target, source, env):
def make_authors_header(target, source, env):
- sections = ["Project Founders", "Lead Developer", "Project Manager", "Developers"]
- sections_id = ["AUTHORS_FOUNDERS", "AUTHORS_LEAD_DEVELOPERS", "AUTHORS_PROJECT_MANAGERS", "AUTHORS_DEVELOPERS"]
+ sections = [
+ "Project Founders",
+ "Lead Developer",
+ "Project Manager",
+ "Developers",
+ ]
+ sections_id = [
+ "AUTHORS_FOUNDERS",
+ "AUTHORS_LEAD_DEVELOPERS",
+ "AUTHORS_PROJECT_MANAGERS",
+ "AUTHORS_DEVELOPERS",
+ ]
src = source[0]
dst = target[0]
@@ -80,7 +91,7 @@ def make_authors_header(target, source, env):
for line in f:
if reading:
if line.startswith(" "):
- g.write("\t\"" + escape_string(line.strip()) + "\",\n")
+ g.write('\t"' + escape_string(line.strip()) + '",\n')
continue
if line.startswith("## "):
if reading:
@@ -103,10 +114,22 @@ def make_authors_header(target, source, env):
def make_donors_header(target, source, env):
- sections = ["Platinum sponsors", "Gold sponsors", "Mini sponsors",
- "Gold donors", "Silver donors", "Bronze donors"]
- sections_id = ["DONORS_SPONSOR_PLAT", "DONORS_SPONSOR_GOLD", "DONORS_SPONSOR_MINI",
- "DONORS_GOLD", "DONORS_SILVER", "DONORS_BRONZE"]
+ sections = [
+ "Platinum sponsors",
+ "Gold sponsors",
+ "Mini sponsors",
+ "Gold donors",
+ "Silver donors",
+ "Bronze donors",
+ ]
+ sections_id = [
+ "DONORS_SPONSOR_PLAT",
+ "DONORS_SPONSOR_GOLD",
+ "DONORS_SPONSOR_MINI",
+ "DONORS_GOLD",
+ "DONORS_SILVER",
+ "DONORS_BRONZE",
+ ]
src = source[0]
dst = target[0]
@@ -126,7 +149,7 @@ def make_donors_header(target, source, env):
for line in f:
if reading >= 0:
if line.startswith(" "):
- g.write("\t\"" + escape_string(line.strip()) + "\",\n")
+ g.write('\t"' + escape_string(line.strip()) + '",\n')
continue
if line.startswith("## "):
if reading:
@@ -169,8 +192,8 @@ def make_license_header(target, source, env):
return line
def next_tag(self):
- if not ':' in self.current:
- return ('', [])
+ if not ":" in self.current:
+ return ("", [])
tag, line = self.current.split(":", 1)
lines = [line.strip()]
while self.next_line() and self.current.startswith(" "):
@@ -178,6 +201,7 @@ def make_license_header(target, source, env):
return (tag, lines)
from collections import OrderedDict
+
projects = OrderedDict()
license_list = []
@@ -218,26 +242,30 @@ def make_license_header(target, source, env):
with open(src_license, "r", encoding="utf-8") as license_file:
for line in license_file:
escaped_string = escape_string(line.strip())
- f.write("\n\t\t\"" + escaped_string + "\\n\"")
+ f.write('\n\t\t"' + escaped_string + '\\n"')
f.write(";\n\n")
- f.write("struct ComponentCopyrightPart {\n"
- "\tconst char *license;\n"
- "\tconst char *const *files;\n"
- "\tconst char *const *copyright_statements;\n"
- "\tint file_count;\n"
- "\tint copyright_count;\n"
- "};\n\n")
+ f.write(
+ "struct ComponentCopyrightPart {\n"
+ "\tconst char *license;\n"
+ "\tconst char *const *files;\n"
+ "\tconst char *const *copyright_statements;\n"
+ "\tint file_count;\n"
+ "\tint copyright_count;\n"
+ "};\n\n"
+ )
- f.write("struct ComponentCopyright {\n"
- "\tconst char *name;\n"
- "\tconst ComponentCopyrightPart *parts;\n"
- "\tint part_count;\n"
- "};\n\n")
+ f.write(
+ "struct ComponentCopyright {\n"
+ "\tconst char *name;\n"
+ "\tconst ComponentCopyrightPart *parts;\n"
+ "\tint part_count;\n"
+ "};\n\n"
+ )
f.write("const char *const COPYRIGHT_INFO_DATA[] = {\n")
for line in data_list:
- f.write("\t\"" + escape_string(line) + "\",\n")
+ f.write('\t"' + escape_string(line) + '",\n')
f.write("};\n\n")
f.write("const ComponentCopyrightPart COPYRIGHT_PROJECT_PARTS[] = {\n")
@@ -246,11 +274,21 @@ def make_license_header(target, source, env):
for project_name, project in iter(projects.items()):
part_indexes[project_name] = part_index
for part in project:
- f.write("\t{ \"" + escape_string(part["License"][0]) + "\", "
-                    + "&COPYRIGHT_INFO_DATA[" + str(part["file_index"]) + "], "
-                    + "&COPYRIGHT_INFO_DATA[" + str(part["copyright_index"]) + "], "
- + str(len(part["Files"])) + ", "
- + str(len(part["Copyright"])) + " },\n")
+ f.write(
+ '\t{ "'
+ + escape_string(part["License"][0])
+ + '", '
+                + "&COPYRIGHT_INFO_DATA["
+ + str(part["file_index"])
+ + "], "
+                + "&COPYRIGHT_INFO_DATA["
+ + str(part["copyright_index"])
+ + "], "
+ + str(len(part["Files"]))
+ + ", "
+ + str(len(part["Copyright"]))
+ + " },\n"
+ )
part_index += 1
f.write("};\n\n")
@@ -258,30 +296,37 @@ def make_license_header(target, source, env):
f.write("const ComponentCopyright COPYRIGHT_INFO[] = {\n")
for project_name, project in iter(projects.items()):
- f.write("\t{ \"" + escape_string(project_name) + "\", "
-                + "&COPYRIGHT_PROJECT_PARTS[" + str(part_indexes[project_name]) + "], "
- + str(len(project)) + " },\n")
+ f.write(
+ '\t{ "'
+ + escape_string(project_name)
+ + '", '
+            + "&COPYRIGHT_PROJECT_PARTS["
+ + str(part_indexes[project_name])
+ + "], "
+ + str(len(project))
+ + " },\n"
+        )
f.write("};\n\n")
f.write("const int LICENSE_COUNT = " + str(len(license_list)) + ";\n")
f.write("const char *const LICENSE_NAMES[] = {\n")
for l in license_list:
- f.write("\t\"" + escape_string(l[0]) + "\",\n")
+ f.write('\t"' + escape_string(l[0]) + '",\n')
f.write("};\n\n")
f.write("const char *const LICENSE_BODIES[] = {\n\n")
for l in license_list:
for line in l[1:]:
if line == ".":
- f.write("\t\"\\n\"\n")
+ f.write('\t"\\n"\n')
else:
- f.write("\t\"" + escape_string(line) + "\\n\"\n")
- f.write("\t\"\",\n\n")
+ f.write('\t"' + escape_string(line) + '\\n"\n')
+ f.write('\t"",\n\n')
f.write("};\n\n")
f.write("#endif // LICENSE_GEN_H\n")
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/core/crypto/SCsub b/core/crypto/SCsub
index 0a3f05d87ae..da4a9c9381d 100644
--- a/core/crypto/SCsub
+++ b/core/crypto/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env_crypto = env.Clone()
@@ -22,7 +22,9 @@ if not has_module:
env_thirdparty = env_crypto.Clone()
env_thirdparty.disable_warnings()
# Custom config file
- env_thirdparty.Append(CPPDEFINES=[('MBEDTLS_CONFIG_FILE', '\\"thirdparty/mbedtls/include/godot_core_mbedtls_config.h\\"')])
+ env_thirdparty.Append(
+ CPPDEFINES=[("MBEDTLS_CONFIG_FILE", '\\"thirdparty/mbedtls/include/godot_core_mbedtls_config.h\\"')]
+ )
thirdparty_mbedtls_dir = "#thirdparty/mbedtls/library/"
thirdparty_mbedtls_sources = [
"aes.c",
@@ -30,7 +32,7 @@ if not has_module:
"md5.c",
"sha1.c",
"sha256.c",
- "godot_core_mbedtls_platform.c"
+ "godot_core_mbedtls_platform.c",
]
thirdparty_mbedtls_sources = [thirdparty_mbedtls_dir + file for file in thirdparty_mbedtls_sources]
env_thirdparty.add_source_files(env.core_sources, thirdparty_mbedtls_sources)
diff --git a/core/debugger/SCsub b/core/debugger/SCsub
index 1c5f954470d..19a65492252 100644
--- a/core/debugger/SCsub
+++ b/core/debugger/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.core_sources, "*.cpp")
diff --git a/core/input/SCsub b/core/input/SCsub
index f1660932e53..d46e52a347f 100644
--- a/core/input/SCsub
+++ b/core/input/SCsub
@@ -1,20 +1,28 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
from platform_methods import run_in_subprocess
import input_builders
# Order matters here. Higher index controller database files write on top of lower index database files.
-controller_databases = ["#core/input/gamecontrollerdb_204.txt", "#core/input/gamecontrollerdb_205.txt", "#core/input/gamecontrollerdb.txt", "#core/input/godotcontrollerdb.txt"]
+controller_databases = [
+ "#core/input/gamecontrollerdb_204.txt",
+ "#core/input/gamecontrollerdb_205.txt",
+ "#core/input/gamecontrollerdb.txt",
+ "#core/input/godotcontrollerdb.txt",
+]
env.Depends("#core/input/default_controller_mappings.gen.cpp", controller_databases)
-env.CommandNoCache("#core/input/default_controller_mappings.gen.cpp", controller_databases, run_in_subprocess(input_builders.make_default_controller_mappings))
+env.CommandNoCache(
+ "#core/input/default_controller_mappings.gen.cpp",
+ controller_databases,
+ run_in_subprocess(input_builders.make_default_controller_mappings),
+)
env.add_source_files(env.core_sources, "*.cpp")
# Don't warn about duplicate entry here, we need it registered manually for first build,
# even if later builds will pick it up twice due to above *.cpp globbing.
env.add_source_files(env.core_sources, "#core/input/default_controller_mappings.gen.cpp", warn_duplicates=False)
-
diff --git a/core/input/input_builders.py b/core/input/input_builders.py
index ca142c0c80f..6184c5debbd 100644
--- a/core/input/input_builders.py
+++ b/core/input/input_builders.py
@@ -12,8 +12,8 @@ def make_default_controller_mappings(target, source, env):
g = open(dst, "w")
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
- g.write("#include \"core/typedefs.h\"\n")
- g.write("#include \"core/input/default_controller_mappings.h\"\n")
+ g.write('#include "core/typedefs.h"\n')
+ g.write('#include "core/input/default_controller_mappings.h"\n')
# ensure mappings have a consistent order
platform_mappings = OrderedDict()
@@ -37,11 +37,19 @@ def make_default_controller_mappings(target, source, env):
line_parts = line.split(",")
guid = line_parts[0]
if guid in platform_mappings[current_platform]:
- g.write("// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(src_path, current_platform, platform_mappings[current_platform][guid]))
+ g.write(
+ "// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(
+ src_path, current_platform, platform_mappings[current_platform][guid]
+ )
+ )
valid_mapping = True
for input_map in line_parts[2:]:
if "+" in input_map or "-" in input_map or "~" in input_map:
- g.write("// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format(src_path, current_platform, line))
+ g.write(
+ "// WARNING - DISCARDED UNSUPPORTED MAPPING TYPE FROM DATABASE {}: {} {}\n".format(
+ src_path, current_platform, line
+ )
+ )
valid_mapping = False
break
if valid_mapping:
@@ -62,12 +70,12 @@ def make_default_controller_mappings(target, source, env):
variable = platform_variables[platform]
g.write("{}\n".format(variable))
for mapping in mappings.values():
- g.write("\t\"{}\",\n".format(mapping))
+ g.write('\t"{}",\n'.format(mapping))
g.write("#endif\n")
g.write("\tNULL\n};\n")
g.close()
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/core/io/SCsub b/core/io/SCsub
index 1c5f954470d..19a65492252 100644
--- a/core/io/SCsub
+++ b/core/io/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.core_sources, "*.cpp")
diff --git a/core/make_binders.py b/core/make_binders.py
index c42b91fbe55..94bee95bfb8 100644
--- a/core/make_binders.py
+++ b/core/make_binders.py
@@ -280,58 +280,57 @@ MethodBind* create_method_bind($ifret R$ $ifnoret void$ (*p_method)($ifconst con
"""
-
def make_version(template, nargs, argmax, const, ret):
intext = template
from_pos = 0
outtext = ""
- while(True):
+ while True:
to_pos = intext.find("$", from_pos)
- if (to_pos == -1):
+ if to_pos == -1:
outtext += intext[from_pos:]
break
else:
outtext += intext[from_pos:to_pos]
end = intext.find("$", to_pos + 1)
- if (end == -1):
+ if end == -1:
break # ignore
- macro = intext[to_pos + 1:end]
+ macro = intext[to_pos + 1 : end]
cmd = ""
data = ""
- if (macro.find(" ") != -1):
- cmd = macro[0:macro.find(" ")]
- data = macro[macro.find(" ") + 1:]
+ if macro.find(" ") != -1:
+ cmd = macro[0 : macro.find(" ")]
+ data = macro[macro.find(" ") + 1 :]
else:
cmd = macro
- if (cmd == "argc"):
+ if cmd == "argc":
outtext += str(nargs)
- if (cmd == "ifret" and ret):
+ if cmd == "ifret" and ret:
outtext += data
- if (cmd == "ifargs" and nargs):
+ if cmd == "ifargs" and nargs:
outtext += data
- if (cmd == "ifretargs" and nargs and ret):
+ if cmd == "ifretargs" and nargs and ret:
outtext += data
- if (cmd == "ifconst" and const):
+ if cmd == "ifconst" and const:
outtext += data
- elif (cmd == "ifnoconst" and not const):
+ elif cmd == "ifnoconst" and not const:
outtext += data
- elif (cmd == "ifnoret" and not ret):
+ elif cmd == "ifnoret" and not ret:
outtext += data
- elif (cmd == "iftempl" and (nargs > 0 or ret)):
+ elif cmd == "iftempl" and (nargs > 0 or ret):
outtext += data
- elif (cmd == "arg,"):
+ elif cmd == "arg,":
for i in range(1, nargs + 1):
- if (i > 1):
+ if i > 1:
outtext += ", "
outtext += data.replace("@", str(i))
- elif (cmd == "arg"):
+ elif cmd == "arg":
for i in range(1, nargs + 1):
outtext += data.replace("@", str(i))
- elif (cmd == "noarg"):
+ elif cmd == "noarg":
for i in range(nargs + 1, argmax + 1):
outtext += data.replace("@", str(i))
@@ -348,7 +347,9 @@ def run(target, source, env):
text_ext = ""
text_free_func = "#ifndef METHOD_BIND_FREE_FUNC_H\n#define METHOD_BIND_FREE_FUNC_H\n"
text_free_func += "\n//including this header file allows method binding to use free functions\n"
- text_free_func += "//note that the free function must have a pointer to an instance of the class as its first parameter\n"
+ text_free_func += (
+ "//note that the free function must have a pointer to an instance of the class as its first parameter\n"
+ )
for i in range(0, versions + 1):
@@ -361,7 +362,7 @@ def run(target, source, env):
t += make_version(template_typed, i, versions, True, False)
t += make_version(template, i, versions, True, True)
t += make_version(template_typed, i, versions, True, True)
- if (i >= versions_ext):
+ if i >= versions_ext:
text_ext += t
else:
text += t
@@ -383,6 +384,7 @@ def run(target, source, env):
f.write(text_free_func)
-if __name__ == '__main__':
+if __name__ == "__main__":
from platform_methods import subprocess_main
+
subprocess_main(globals())
diff --git a/core/math/SCsub b/core/math/SCsub
index be438fcfbea..c8fdac207ec 100644
--- a/core/math/SCsub
+++ b/core/math/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env_math = env.Clone()
diff --git a/core/os/SCsub b/core/os/SCsub
index 1c5f954470d..19a65492252 100644
--- a/core/os/SCsub
+++ b/core/os/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.core_sources, "*.cpp")
diff --git a/doc/tools/doc_merge.py b/doc/tools/doc_merge.py
index 496d5dcb74b..f6f52f5d66d 100755
--- a/doc/tools/doc_merge.py
+++ b/doc/tools/doc_merge.py
@@ -21,7 +21,7 @@ def write_string(_f, text, newline=True):
for t in range(tab):
_f.write("\t")
_f.write(text)
- if (newline):
+ if newline:
_f.write("\n")
@@ -30,7 +30,7 @@ def escape(ret):
     ret = ret.replace("<", "&gt;")
     ret = ret.replace(">", "&lt;")
     ret = ret.replace("'", "&apos;")
-    ret = ret.replace("\"", "&quot;")
+    ret = ret.replace('"', "&quot;")
return ret
@@ -43,25 +43,26 @@ def dec_tab():
global tab
tab -= 1
+
write_string(f, '')
write_string(f, '')
def get_tag(node, name):
tag = ""
- if (name in node.attrib):
- tag = ' ' + name + '="' + escape(node.attrib[name]) + '" '
+ if name in node.attrib:
+ tag = " " + name + '="' + escape(node.attrib[name]) + '" '
return tag
def find_method_descr(old_class, name):
methods = old_class.find("methods")
- if(methods != None and len(list(methods)) > 0):
+ if methods != None and len(list(methods)) > 0:
for m in list(methods):
- if (m.attrib["name"] == name):
+ if m.attrib["name"] == name:
description = m.find("description")
- if (description != None and description.text.strip() != ""):
+ if description != None and description.text.strip() != "":
return description.text
return None
@@ -70,11 +71,11 @@ def find_method_descr(old_class, name):
def find_signal_descr(old_class, name):
signals = old_class.find("signals")
- if(signals != None and len(list(signals)) > 0):
+ if signals != None and len(list(signals)) > 0:
for m in list(signals):
- if (m.attrib["name"] == name):
+ if m.attrib["name"] == name:
description = m.find("description")
- if (description != None and description.text.strip() != ""):
+ if description != None and description.text.strip() != "":
return description.text
return None
@@ -82,13 +83,13 @@ def find_signal_descr(old_class, name):
def find_constant_descr(old_class, name):
- if (old_class is None):
+ if old_class is None:
return None
constants = old_class.find("constants")
- if(constants != None and len(list(constants)) > 0):
+ if constants != None and len(list(constants)) > 0:
for m in list(constants):
- if (m.attrib["name"] == name):
- if (m.text.strip() != ""):
+ if m.attrib["name"] == name:
+ if m.text.strip() != "":
return m.text
return None
@@ -96,35 +97,35 @@ def find_constant_descr(old_class, name):
def write_class(c):
class_name = c.attrib["name"]
print("Parsing Class: " + class_name)
- if (class_name in old_classes):
+ if class_name in old_classes:
old_class = old_classes[class_name]
else:
old_class = None
category = get_tag(c, "category")
inherits = get_tag(c, "inherits")
-    write_string(f, '<class name="' + class_name + '" ' + category + inherits + '>')
+    write_string(f, '<class name="' + class_name + '" ' + category + inherits + ">")
inc_tab()
write_string(f, "")
- if (old_class != None):
+ if old_class != None:
old_brief_descr = old_class.find("brief_description")
- if (old_brief_descr != None):
+ if old_brief_descr != None:
write_string(f, escape(old_brief_descr.text.strip()))
write_string(f, "")
write_string(f, "")
- if (old_class != None):
+ if old_class != None:
old_descr = old_class.find("description")
- if (old_descr != None):
+ if old_descr != None:
write_string(f, escape(old_descr.text.strip()))
write_string(f, "")
methods = c.find("methods")
- if(methods != None and len(list(methods)) > 0):
+ if methods != None and len(list(methods)) > 0:
write_string(f, "")
inc_tab()
@@ -132,35 +133,46 @@ def write_class(c):
for m in list(methods):
qualifiers = get_tag(m, "qualifiers")
-        write_string(f, '<method name="' + escape(m.attrib["name"]) + '" ' + qualifiers + '>')
+        write_string(f, '<method name="' + escape(m.attrib["name"]) + '" ' + qualifiers + ">")
inc_tab()
for a in list(m):
- if (a.tag == "return"):
+ if a.tag == "return":
typ = get_tag(a, "type")
- write_string(f, '')
- write_string(f, '')
- elif (a.tag == "argument"):
+ write_string(f, "")
+ write_string(f, "")
+ elif a.tag == "argument":
default = get_tag(a, "default")
- write_string(f, '')
- write_string(f, '')
+ write_string(
+ f,
+ '",
+ )
+ write_string(f, "")
- write_string(f, '')
- if (old_class != None):
+ write_string(f, "")
+ if old_class != None:
old_method_descr = find_method_descr(old_class, m.attrib["name"])
- if (old_method_descr):
+ if old_method_descr:
write_string(f, escape(escape(old_method_descr.strip())))
- write_string(f, '')
+ write_string(f, "")
dec_tab()
write_string(f, "")
dec_tab()
write_string(f, "")
signals = c.find("signals")
- if(signals != None and len(list(signals)) > 0):
+ if signals != None and len(list(signals)) > 0:
write_string(f, "")
inc_tab()
@@ -171,24 +183,33 @@ def write_class(c):
inc_tab()
for a in list(m):
- if (a.tag == "argument"):
+ if a.tag == "argument":
- write_string(f, '')
- write_string(f, '')
+ write_string(
+ f,
+ '',
+ )
+ write_string(f, "")
- write_string(f, '')
- if (old_class != None):
+ write_string(f, "")
+ if old_class != None:
old_signal_descr = find_signal_descr(old_class, m.attrib["name"])
- if (old_signal_descr):
+ if old_signal_descr:
write_string(f, escape(old_signal_descr.strip()))
- write_string(f, '')
+ write_string(f, "")
dec_tab()
write_string(f, "")
dec_tab()
write_string(f, "")
constants = c.find("constants")
- if(constants != None and len(list(constants)) > 0):
+ if constants != None and len(list(constants)) > 0:
write_string(f, "")
inc_tab()
@@ -197,7 +218,7 @@ def write_class(c):
write_string(f, '')
old_constant_descr = find_constant_descr(old_class, m.attrib["name"])
- if (old_constant_descr):
+ if old_constant_descr:
write_string(f, escape(old_constant_descr.strip()))
write_string(f, "")
@@ -207,9 +228,10 @@ def write_class(c):
dec_tab()
write_string(f, "")
+
for c in list(old_doc):
old_classes[c.attrib["name"]] = c
for c in list(new_doc):
write_class(c)
-write_string(f, '\n')
+write_string(f, "\n")
diff --git a/doc/tools/doc_status.py b/doc/tools/doc_status.py
index e6e6d5f6066..629b5a032b8 100755
--- a/doc/tools/doc_status.py
+++ b/doc/tools/doc_status.py
@@ -13,75 +13,74 @@ import xml.etree.ElementTree as ET
################################################################################
flags = {
- 'c': platform.platform() != 'Windows', # Disable by default on windows, since we use ANSI escape codes
- 'b': False,
- 'g': False,
- 's': False,
- 'u': False,
- 'h': False,
- 'p': False,
- 'o': True,
- 'i': False,
- 'a': True,
- 'e': False,
+ "c": platform.platform() != "Windows", # Disable by default on windows, since we use ANSI escape codes
+ "b": False,
+ "g": False,
+ "s": False,
+ "u": False,
+ "h": False,
+ "p": False,
+ "o": True,
+ "i": False,
+ "a": True,
+ "e": False,
}
flag_descriptions = {
- 'c': 'Toggle colors when outputting.',
- 'b': 'Toggle showing only not fully described classes.',
- 'g': 'Toggle showing only completed classes.',
- 's': 'Toggle showing comments about the status.',
- 'u': 'Toggle URLs to docs.',
- 'h': 'Show help and exit.',
- 'p': 'Toggle showing percentage as well as counts.',
- 'o': 'Toggle overall column.',
- 'i': 'Toggle collapse of class items columns.',
- 'a': 'Toggle showing all items.',
- 'e': 'Toggle hiding empty items.',
+ "c": "Toggle colors when outputting.",
+ "b": "Toggle showing only not fully described classes.",
+ "g": "Toggle showing only completed classes.",
+ "s": "Toggle showing comments about the status.",
+ "u": "Toggle URLs to docs.",
+ "h": "Show help and exit.",
+ "p": "Toggle showing percentage as well as counts.",
+ "o": "Toggle overall column.",
+ "i": "Toggle collapse of class items columns.",
+ "a": "Toggle showing all items.",
+ "e": "Toggle hiding empty items.",
}
long_flags = {
- 'colors': 'c',
- 'use-colors': 'c',
-
- 'bad': 'b',
- 'only-bad': 'b',
-
- 'good': 'g',
- 'only-good': 'g',
-
- 'comments': 's',
- 'status': 's',
-
- 'urls': 'u',
- 'gen-url': 'u',
-
- 'help': 'h',
-
- 'percent': 'p',
- 'use-percentages': 'p',
-
- 'overall': 'o',
- 'use-overall': 'o',
-
- 'items': 'i',
- 'collapse': 'i',
-
- 'all': 'a',
-
- 'empty': 'e',
+ "colors": "c",
+ "use-colors": "c",
+ "bad": "b",
+ "only-bad": "b",
+ "good": "g",
+ "only-good": "g",
+ "comments": "s",
+ "status": "s",
+ "urls": "u",
+ "gen-url": "u",
+ "help": "h",
+ "percent": "p",
+ "use-percentages": "p",
+ "overall": "o",
+ "use-overall": "o",
+ "items": "i",
+ "collapse": "i",
+ "all": "a",
+ "empty": "e",
}
-table_columns = ['name', 'brief_description', 'description', 'methods', 'constants', 'members', 'signals', 'theme_items']
-table_column_names = ['Name', 'Brief Desc.', 'Desc.', 'Methods', 'Constants', 'Members', 'Signals', 'Theme Items']
+table_columns = [
+ "name",
+ "brief_description",
+ "description",
+ "methods",
+ "constants",
+ "members",
+ "signals",
+ "theme_items",
+]
+table_column_names = ["Name", "Brief Desc.", "Desc.", "Methods", "Constants", "Members", "Signals", "Theme Items"]
colors = {
- 'name': [36], # cyan
- 'part_big_problem': [4, 31], # underline, red
- 'part_problem': [31], # red
- 'part_mostly_good': [33], # yellow
- 'part_good': [32], # green
- 'url': [4, 34], # underline, blue
- 'section': [1, 4], # bold, underline
- 'state_off': [36], # cyan
- 'state_on': [1, 35], # bold, magenta/plum
- 'bold': [1], # bold
+ "name": [36], # cyan
+ "part_big_problem": [4, 31], # underline, red
+ "part_problem": [31], # red
+ "part_mostly_good": [33], # yellow
+ "part_good": [32], # green
+ "url": [4, 34], # underline, blue
+ "section": [1, 4], # bold, underline
+ "state_off": [36], # cyan
+ "state_on": [1, 35], # bold, magenta/plum
+ "bold": [1], # bold
}
overall_progress_description_weigth = 10
@@ -90,6 +89,7 @@ overall_progress_description_weigth = 10
# Utils #
################################################################################
+
def validate_tag(elem, tag):
if elem.tag != tag:
print('Tag mismatch, expected "' + tag + '", got ' + elem.tag)
@@ -97,36 +97,38 @@ def validate_tag(elem, tag):
def color(color, string):
- if flags['c'] and terminal_supports_color():
- color_format = ''
+ if flags["c"] and terminal_supports_color():
+ color_format = ""
for code in colors[color]:
- color_format += '\033[' + str(code) + 'm'
- return color_format + string + '\033[0m'
+ color_format += "\033[" + str(code) + "m"
+ return color_format + string + "\033[0m"
else:
return string
-ansi_escape = re.compile(r'\x1b[^m]*m')
+
+ansi_escape = re.compile(r"\x1b[^m]*m")
def nonescape_len(s):
- return len(ansi_escape.sub('', s))
+ return len(ansi_escape.sub("", s))
+
def terminal_supports_color():
p = sys.platform
- supported_platform = p != 'Pocket PC' and (p != 'win32' or
- 'ANSICON' in os.environ)
+ supported_platform = p != "Pocket PC" and (p != "win32" or "ANSICON" in os.environ)
- is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
+ is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
if not supported_platform or not is_a_tty:
return False
return True
+
################################################################################
# Classes #
################################################################################
 class ClassStatusProgress:
-
def __init__(self, described=0, total=0):
self.described = described
self.total = total
@@ -143,42 +145,41 @@ class ClassStatusProgress:
return self.described >= self.total
def to_configured_colored_string(self):
- if flags['p']:
- return self.to_colored_string('{percent}% ({has}/{total})', '{pad_percent}{pad_described}{s}{pad_total}')
+ if flags["p"]:
+ return self.to_colored_string("{percent}% ({has}/{total})", "{pad_percent}{pad_described}{s}{pad_total}")
else:
return self.to_colored_string()
- def to_colored_string(self, format='{has}/{total}', pad_format='{pad_described}{s}{pad_total}'):
+ def to_colored_string(self, format="{has}/{total}", pad_format="{pad_described}{s}{pad_total}"):
ratio = float(self.described) / float(self.total) if self.total != 0 else 1
percent = int(round(100 * ratio))
s = format.format(has=str(self.described), total=str(self.total), percent=str(percent))
if self.described >= self.total:
- s = color('part_good', s)
+ s = color("part_good", s)
elif self.described >= self.total / 4 * 3:
- s = color('part_mostly_good', s)
+ s = color("part_mostly_good", s)
elif self.described > 0:
- s = color('part_problem', s)
+ s = color("part_problem", s)
else:
- s = color('part_big_problem', s)
+ s = color("part_big_problem", s)
pad_size = max(len(str(self.described)), len(str(self.total)))
- pad_described = ''.ljust(pad_size - len(str(self.described)))
- pad_percent = ''.ljust(3 - len(str(percent)))
- pad_total = ''.ljust(pad_size - len(str(self.total)))
+ pad_described = "".ljust(pad_size - len(str(self.described)))
+ pad_percent = "".ljust(3 - len(str(percent)))
+ pad_total = "".ljust(pad_size - len(str(self.total)))
return pad_format.format(pad_described=pad_described, pad_total=pad_total, pad_percent=pad_percent, s=s)
class ClassStatus:
-
- def __init__(self, name=''):
+ def __init__(self, name=""):
self.name = name
self.has_brief_description = True
self.has_description = True
self.progresses = {
- 'methods': ClassStatusProgress(),
- 'constants': ClassStatusProgress(),
- 'members': ClassStatusProgress(),
- 'theme_items': ClassStatusProgress(),
- 'signals': ClassStatusProgress()
+ "methods": ClassStatusProgress(),
+ "constants": ClassStatusProgress(),
+ "members": ClassStatusProgress(),
+ "theme_items": ClassStatusProgress(),
+ "signals": ClassStatusProgress(),
}
def __add__(self, other):
@@ -208,66 +209,70 @@ class ClassStatus:
def make_output(self):
output = {}
- output['name'] = color('name', self.name)
+ output["name"] = color("name", self.name)
- ok_string = color('part_good', 'OK')
- missing_string = color('part_big_problem', 'MISSING')
+ ok_string = color("part_good", "OK")
+ missing_string = color("part_big_problem", "MISSING")
- output['brief_description'] = ok_string if self.has_brief_description else missing_string
- output['description'] = ok_string if self.has_description else missing_string
+ output["brief_description"] = ok_string if self.has_brief_description else missing_string
+ output["description"] = ok_string if self.has_description else missing_string
description_progress = ClassStatusProgress(
(self.has_brief_description + self.has_description) * overall_progress_description_weigth,
- 2 * overall_progress_description_weigth
+ 2 * overall_progress_description_weigth,
)
items_progress = ClassStatusProgress()
- for k in ['methods', 'constants', 'members', 'signals', 'theme_items']:
+ for k in ["methods", "constants", "members", "signals", "theme_items"]:
items_progress += self.progresses[k]
output[k] = self.progresses[k].to_configured_colored_string()
- output['items'] = items_progress.to_configured_colored_string()
+ output["items"] = items_progress.to_configured_colored_string()
- output['overall'] = (description_progress + items_progress).to_colored_string(color('bold', '{percent}%'), '{pad_percent}{s}')
+ output["overall"] = (description_progress + items_progress).to_colored_string(
+ color("bold", "{percent}%"), "{pad_percent}{s}"
+ )
- if self.name.startswith('Total'):
- output['url'] = color('url', 'https://docs.godotengine.org/en/latest/classes/')
- if flags['s']:
- output['comment'] = color('part_good', 'ALL OK')
+ if self.name.startswith("Total"):
+ output["url"] = color("url", "https://docs.godotengine.org/en/latest/classes/")
+ if flags["s"]:
+ output["comment"] = color("part_good", "ALL OK")
else:
- output['url'] = color('url', 'https://docs.godotengine.org/en/latest/classes/class_{name}.html'.format(name=self.name.lower()))
+ output["url"] = color(
+ "url", "https://docs.godotengine.org/en/latest/classes/class_{name}.html".format(name=self.name.lower())
+ )
- if flags['s'] and not flags['g'] and self.is_ok():
- output['comment'] = color('part_good', 'ALL OK')
+ if flags["s"] and not flags["g"] and self.is_ok():
+ output["comment"] = color("part_good", "ALL OK")
return output
@staticmethod
def generate_for_class(c):
status = ClassStatus()
- status.name = c.attrib['name']
+ status.name = c.attrib["name"]
for tag in list(c):
- if tag.tag == 'brief_description':
+ if tag.tag == "brief_description":
status.has_brief_description = len(tag.text.strip()) > 0
- elif tag.tag == 'description':
+ elif tag.tag == "description":
status.has_description = len(tag.text.strip()) > 0
- elif tag.tag in ['methods', 'signals']:
+ elif tag.tag in ["methods", "signals"]:
for sub_tag in list(tag):
- descr = sub_tag.find('description')
+ descr = sub_tag.find("description")
status.progresses[tag.tag].increment(len(descr.text.strip()) > 0)
- elif tag.tag in ['constants', 'members', 'theme_items']:
+ elif tag.tag in ["constants", "members", "theme_items"]:
for sub_tag in list(tag):
if not sub_tag.text is None:
status.progresses[tag.tag].increment(len(sub_tag.text.strip()) > 0)
- elif tag.tag in ['tutorials']:
+ elif tag.tag in ["tutorials"]:
pass # Ignore those tags for now
- elif tag.tag in ['theme_items']:
+ elif tag.tag in ["theme_items"]:
pass # Ignore those tags, since they seem to lack description at all
else:
@@ -286,63 +291,69 @@ merged_file = ""
for arg in sys.argv[1:]:
try:
- if arg.startswith('--'):
+ if arg.startswith("--"):
flags[long_flags[arg[2:]]] = not flags[long_flags[arg[2:]]]
- elif arg.startswith('-'):
+ elif arg.startswith("-"):
for f in arg[1:]:
flags[f] = not flags[f]
elif os.path.isdir(arg):
for f in os.listdir(arg):
- if f.endswith('.xml'):
- input_file_list.append(os.path.join(arg, f));
+ if f.endswith(".xml"):
+ input_file_list.append(os.path.join(arg, f))
else:
input_class_list.append(arg)
except KeyError:
print("Unknown command line flag: " + arg)
sys.exit(1)
-if flags['i']:
- for r in ['methods', 'constants', 'members', 'signals', 'theme_items']:
+if flags["i"]:
+ for r in ["methods", "constants", "members", "signals", "theme_items"]:
index = table_columns.index(r)
del table_column_names[index]
del table_columns[index]
- table_column_names.append('Items')
- table_columns.append('items')
+ table_column_names.append("Items")
+ table_columns.append("items")
-if flags['o'] == (not flags['i']):
- table_column_names.append(color('bold', 'Overall'))
- table_columns.append('overall')
+if flags["o"] == (not flags["i"]):
+ table_column_names.append(color("bold", "Overall"))
+ table_columns.append("overall")
-if flags['u']:
- table_column_names.append('Docs URL')
- table_columns.append('url')
+if flags["u"]:
+ table_column_names.append("Docs URL")
+ table_columns.append("url")
################################################################################
# Help #
################################################################################
-if len(input_file_list) < 1 or flags['h']:
- if not flags['h']:
- print(color('section', 'Invalid usage') + ': Please specify a classes directory')
-    print(color('section', 'Usage') + ': doc_status.py [flags] <classes_dir> [class names]')
- print('\t< and > signify required parameters, while [ and ] signify optional parameters.')
- print(color('section', 'Available flags') + ':')
+if len(input_file_list) < 1 or flags["h"]:
+ if not flags["h"]:
+ print(color("section", "Invalid usage") + ": Please specify a classes directory")
+ print(color("section", "Usage") + ": doc_status.py [flags] [class names]")
+ print("\t< and > signify required parameters, while [ and ] signify optional parameters.")
+ print(color("section", "Available flags") + ":")
possible_synonym_list = list(long_flags)
possible_synonym_list.sort()
flag_list = list(flags)
flag_list.sort()
for flag in flag_list:
- synonyms = [color('name', '-' + flag)]
+ synonyms = [color("name", "-" + flag)]
for synonym in possible_synonym_list:
if long_flags[synonym] == flag:
- synonyms.append(color('name', '--' + synonym))
+ synonyms.append(color("name", "--" + synonym))
- print(('{synonyms} (Currently ' + color('state_' + ('on' if flags[flag] else 'off'), '{value}') + ')\n\t{description}').format(
- synonyms=', '.join(synonyms),
- value=('on' if flags[flag] else 'off'),
- description=flag_descriptions[flag]
- ))
+ print(
+ (
+ "{synonyms} (Currently "
+ + color("state_" + ("on" if flags[flag] else "off"), "{value}")
+ + ")\n\t{description}"
+ ).format(
+ synonyms=", ".join(synonyms),
+ value=("on" if flags[flag] else "off"),
+ description=flag_descriptions[flag],
+ )
+ )
sys.exit(0)
@@ -357,21 +368,21 @@ for file in input_file_list:
tree = ET.parse(file)
doc = tree.getroot()
- if 'version' not in doc.attrib:
+ if "version" not in doc.attrib:
print('Version missing from "doc"')
sys.exit(255)
- version = doc.attrib['version']
+ version = doc.attrib["version"]
- if doc.attrib['name'] in class_names:
+ if doc.attrib["name"] in class_names:
continue
- class_names.append(doc.attrib['name'])
- classes[doc.attrib['name']] = doc
+ class_names.append(doc.attrib["name"])
+ classes[doc.attrib["name"]] = doc
class_names.sort()
if len(input_class_list) < 1:
- input_class_list = ['*']
+ input_class_list = ["*"]
filtered_classes = set()
for pattern in input_class_list:
@@ -384,23 +395,23 @@ filtered_classes.sort()
################################################################################
table = [table_column_names]
-table_row_chars = '| - '
-table_column_chars = '|'
+table_row_chars = "| - "
+table_column_chars = "|"
-total_status = ClassStatus('Total')
+total_status = ClassStatus("Total")
for cn in filtered_classes:
c = classes[cn]
- validate_tag(c, 'class')
+ validate_tag(c, "class")
status = ClassStatus.generate_for_class(c)
total_status = total_status + status
- if (flags['b'] and status.is_ok()) or (flags['g'] and not status.is_ok()) or (not flags['a']):
+ if (flags["b"] and status.is_ok()) or (flags["g"] and not status.is_ok()) or (not flags["a"]):
continue
- if flags['e'] and status.is_empty():
+ if flags["e"] and status.is_empty():
continue
out = status.make_output()
@@ -409,10 +420,10 @@ for cn in filtered_classes:
if column in out:
row.append(out[column])
else:
- row.append('')
+ row.append("")
- if 'comment' in out and out['comment'] != '':
- row.append(out['comment'])
+ if "comment" in out and out["comment"] != "":
+ row.append(out["comment"])
table.append(row)
@@ -421,22 +432,22 @@ for cn in filtered_classes:
# Print output table #
################################################################################
-if len(table) == 1 and flags['a']:
- print(color('part_big_problem', 'No classes suitable for printing!'))
+if len(table) == 1 and flags["a"]:
+ print(color("part_big_problem", "No classes suitable for printing!"))
sys.exit(0)
-if len(table) > 2 or not flags['a']:
- total_status.name = 'Total = {0}'.format(len(table) - 1)
+if len(table) > 2 or not flags["a"]:
+ total_status.name = "Total = {0}".format(len(table) - 1)
out = total_status.make_output()
row = []
for column in table_columns:
if column in out:
row.append(out[column])
else:
- row.append('')
+ row.append("")
table.append(row)
-if flags['a']:
+if flags["a"]:
# Duplicate the headers at the bottom of the table so they can be viewed
# without having to scroll back to the top.
table.append(table_column_names)
@@ -451,7 +462,9 @@ for row in table:
divider_string = table_row_chars[0]
for cell_i in range(len(table[0])):
- divider_string += table_row_chars[1] + table_row_chars[2] * (table_column_sizes[cell_i]) + table_row_chars[1] + table_row_chars[0]
+ divider_string += (
+ table_row_chars[1] + table_row_chars[2] * (table_column_sizes[cell_i]) + table_row_chars[1] + table_row_chars[0]
+ )
print(divider_string)
for row_i, row in enumerate(table):
@@ -461,7 +474,11 @@ for row_i, row in enumerate(table):
if cell_i == 0:
row_string += table_row_chars[3] + cell + table_row_chars[3] * (padding_needed - 1)
else:
- row_string += table_row_chars[3] * int(math.floor(float(padding_needed) / 2)) + cell + table_row_chars[3] * int(math.ceil(float(padding_needed) / 2))
+ row_string += (
+ table_row_chars[3] * int(math.floor(float(padding_needed) / 2))
+ + cell
+ + table_row_chars[3] * int(math.ceil(float(padding_needed) / 2))
+ )
row_string += table_column_chars
print(row_string)
@@ -474,5 +491,5 @@ for row_i, row in enumerate(table):
print(divider_string)
-if total_status.is_ok() and not flags['g']:
- print('All listed classes are ' + color('part_good', 'OK') + '!')
+if total_status.is_ok() and not flags["g"]:
+ print("All listed classes are " + color("part_good", "OK") + "!")
diff --git a/doc/tools/makerst.py b/doc/tools/makerst.py
index 9012de03b34..417fe592785 100755
--- a/doc/tools/makerst.py
+++ b/doc/tools/makerst.py
@@ -7,10 +7,12 @@ import xml.etree.ElementTree as ET
from collections import OrderedDict
# Uncomment to do type checks. I have it commented out so it works below Python 3.5
-#from typing import List, Dict, TextIO, Tuple, Iterable, Optional, DefaultDict, Any, Union
+# from typing import List, Dict, TextIO, Tuple, Iterable, Optional, DefaultDict, Any, Union
# http(s)://docs.godotengine.org/<langcode>/<tag>/path/to/page.html(#fragment-tag)
-GODOT_DOCS_PATTERN = re.compile(r'^http(?:s)?://docs\.godotengine\.org/(?:[a-zA-Z0-9.\-_]*)/(?:[a-zA-Z0-9.\-_]*)/(.*)\.html(#.*)?$')
+GODOT_DOCS_PATTERN = re.compile(
+ r"^http(?:s)?://docs\.godotengine\.org/(?:[a-zA-Z0-9.\-_]*)/(?:[a-zA-Z0-9.\-_]*)/(.*)\.html(#.*)?$"
+)
def print_error(error, state): # type: (str, State) -> None
@@ -37,7 +39,9 @@ class TypeName:
class PropertyDef:
- def __init__(self, name, type_name, setter, getter, text, default_value, overridden): # type: (str, TypeName, Optional[str], Optional[str], Optional[str], Optional[str], Optional[bool]) -> None
+ def __init__(
+ self, name, type_name, setter, getter, text, default_value, overridden
+ ): # type: (str, TypeName, Optional[str], Optional[str], Optional[str], Optional[str], Optional[bool]) -> None
self.name = name
self.type_name = type_name
self.setter = setter
@@ -46,6 +50,7 @@ class PropertyDef:
self.default_value = default_value
self.overridden = overridden
+
class ParameterDef:
def __init__(self, name, type_name, default_value): # type: (str, TypeName, Optional[str]) -> None
self.name = name
@@ -61,7 +66,9 @@ class SignalDef:
class MethodDef:
- def __init__(self, name, return_type, parameters, description, qualifiers): # type: (str, TypeName, List[ParameterDef], Optional[str], Optional[str]) -> None
+ def __init__(
+ self, name, return_type, parameters, description, qualifiers
+ ): # type: (str, TypeName, List[ParameterDef], Optional[str], Optional[str]) -> None
self.name = name
self.return_type = return_type
self.parameters = parameters
@@ -144,10 +151,12 @@ class State:
getter = property.get("getter") or None
default_value = property.get("default") or None
if default_value is not None:
- default_value = '``{}``'.format(default_value)
+ default_value = "``{}``".format(default_value)
overridden = property.get("override") or False
- property_def = PropertyDef(property_name, type_name, setter, getter, property.text, default_value, overridden)
+ property_def = PropertyDef(
+ property_name, type_name, setter, getter, property.text, default_value, overridden
+ )
class_def.properties[property_name] = property_def
methods = class_root.find("methods")
@@ -246,8 +255,6 @@ class State:
if link.text is not None:
class_def.tutorials.append(link.text)
-
-
def sort_classes(self): # type: () -> None
self.classes = OrderedDict(sorted(self.classes.items(), key=lambda t: t[0]))
@@ -273,7 +280,11 @@ def main(): # type: () -> None
parser.add_argument("path", nargs="+", help="A path to an XML file or a directory containing XML files to parse.")
group = parser.add_mutually_exclusive_group()
group.add_argument("--output", "-o", default=".", help="The directory to save output .rst files in.")
- group.add_argument("--dry-run", action="store_true", help="If passed, no output will be generated and XML files are only checked for errors.")
+ group.add_argument(
+ "--dry-run",
+ action="store_true",
+ help="If passed, no output will be generated and XML files are only checked for errors.",
+ )
args = parser.parse_args()
file_list = [] # type: List[str]
@@ -283,15 +294,15 @@ def main(): # type: () -> None
if path.endswith(os.sep):
path = path[:-1]
- if os.path.basename(path) == 'modules':
+ if os.path.basename(path) == "modules":
for subdir, dirs, _ in os.walk(path):
- if 'doc_classes' in dirs:
- doc_dir = os.path.join(subdir, 'doc_classes')
- class_file_names = (f for f in os.listdir(doc_dir) if f.endswith('.xml'))
+ if "doc_classes" in dirs:
+ doc_dir = os.path.join(subdir, "doc_classes")
+ class_file_names = (f for f in os.listdir(doc_dir) if f.endswith(".xml"))
file_list += (os.path.join(doc_dir, f) for f in class_file_names)
elif os.path.isdir(path):
- file_list += (os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xml'))
+ file_list += (os.path.join(path, f) for f in os.listdir(path) if f.endswith(".xml"))
elif os.path.isfile(path):
if not path.endswith(".xml"):
@@ -311,7 +322,7 @@ def main(): # type: () -> None
continue
doc = tree.getroot()
- if 'version' not in doc.attrib:
+ if "version" not in doc.attrib:
print_error("Version missing from 'doc', file: {}".format(cur_file), state)
continue
@@ -337,13 +348,14 @@ def main(): # type: () -> None
if state.errored:
exit(1)
+
def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, State, bool, str) -> None
class_name = class_def.name
if dry_run:
f = open(os.devnull, "w")
else:
- f = open(os.path.join(output_dir, "class_" + class_name.lower() + '.rst'), 'w', encoding='utf-8')
+ f = open(os.path.join(output_dir, "class_" + class_name.lower() + ".rst"), "w", encoding="utf-8")
# Warn contributors not to edit this file directly
f.write(":github_url: hide\n\n")
@@ -352,13 +364,13 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
f.write(".. The source is found in doc/classes or modules//doc_classes.\n\n")
f.write(".. _class_" + class_name + ":\n\n")
- f.write(make_heading(class_name, '='))
+ f.write(make_heading(class_name, "="))
# Inheritance tree
# Ascendants
if class_def.inherits:
inh = class_def.inherits.strip()
- f.write('**Inherits:** ')
+ f.write("**Inherits:** ")
first = True
while inh in state.classes:
if not first:
@@ -381,7 +393,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
inherited.append(c.name)
if len(inherited):
- f.write('**Inherited By:** ')
+ f.write("**Inherited By:** ")
for i, child in enumerate(inherited):
if i > 0:
f.write(", ")
@@ -393,20 +405,20 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
f.write(rstize_text(class_def.brief_description.strip(), state) + "\n\n")
# Class description
- if class_def.description is not None and class_def.description.strip() != '':
- f.write(make_heading('Description', '-'))
+ if class_def.description is not None and class_def.description.strip() != "":
+ f.write(make_heading("Description", "-"))
f.write(rstize_text(class_def.description.strip(), state) + "\n\n")
# Online tutorials
if len(class_def.tutorials) > 0:
- f.write(make_heading('Tutorials', '-'))
+ f.write(make_heading("Tutorials", "-"))
for t in class_def.tutorials:
link = t.strip()
f.write("- " + make_url(link) + "\n\n")
# Properties overview
if len(class_def.properties) > 0:
- f.write(make_heading('Properties', '-'))
+ f.write(make_heading("Properties", "-"))
ml = [] # type: List[Tuple[str, str, str]]
for property_def in class_def.properties.values():
type_rst = property_def.type_name.to_rst(state)
@@ -420,7 +432,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
# Methods overview
if len(class_def.methods) > 0:
- f.write(make_heading('Methods', '-'))
+ f.write(make_heading("Methods", "-"))
ml = []
for method_list in class_def.methods.values():
for m in method_list:
@@ -429,7 +441,7 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
# Theme properties
if class_def.theme_items is not None and len(class_def.theme_items) > 0:
- f.write(make_heading('Theme Properties', '-'))
+ f.write(make_heading("Theme Properties", "-"))
pl = []
for theme_item_list in class_def.theme_items.values():
for theme_item in theme_item_list:
@@ -438,30 +450,30 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
# Signals
if len(class_def.signals) > 0:
- f.write(make_heading('Signals', '-'))
+ f.write(make_heading("Signals", "-"))
index = 0
for signal in class_def.signals.values():
if index != 0:
- f.write('----\n\n')
+ f.write("----\n\n")
f.write(".. _class_{}_signal_{}:\n\n".format(class_name, signal.name))
_, signature = make_method_signature(class_def, signal, False, state)
f.write("- {}\n\n".format(signature))
- if signal.description is not None and signal.description.strip() != '':
- f.write(rstize_text(signal.description.strip(), state) + '\n\n')
+ if signal.description is not None and signal.description.strip() != "":
+ f.write(rstize_text(signal.description.strip(), state) + "\n\n")
index += 1
# Enums
if len(class_def.enums) > 0:
- f.write(make_heading('Enumerations', '-'))
+ f.write(make_heading("Enumerations", "-"))
index = 0
for e in class_def.enums.values():
if index != 0:
- f.write('----\n\n')
+ f.write("----\n\n")
f.write(".. _enum_{}_{}:\n\n".format(class_name, e.name))
# Sphinx seems to divide the bullet list into individual tags if we weave the labels into it.
@@ -474,16 +486,16 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
f.write("enum **{}**:\n\n".format(e.name))
for value in e.values.values():
f.write("- **{}** = **{}**".format(value.name, value.value))
- if value.text is not None and value.text.strip() != '':
- f.write(' --- ' + rstize_text(value.text.strip(), state))
+ if value.text is not None and value.text.strip() != "":
+ f.write(" --- " + rstize_text(value.text.strip(), state))
- f.write('\n\n')
+ f.write("\n\n")
index += 1
# Constants
if len(class_def.constants) > 0:
- f.write(make_heading('Constants', '-'))
+ f.write(make_heading("Constants", "-"))
# Sphinx seems to divide the bullet list into individual tags if we weave the labels into it.
# As such I'll put them all above the list. Won't be perfect but better than making the list visually broken.
for constant in class_def.constants.values():
@@ -491,14 +503,14 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
for constant in class_def.constants.values():
f.write("- **{}** = **{}**".format(constant.name, constant.value))
- if constant.text is not None and constant.text.strip() != '':
- f.write(' --- ' + rstize_text(constant.text.strip(), state))
+ if constant.text is not None and constant.text.strip() != "":
+ f.write(" --- " + rstize_text(constant.text.strip(), state))
- f.write('\n\n')
+ f.write("\n\n")
# Property descriptions
if any(not p.overridden for p in class_def.properties.values()) > 0:
- f.write(make_heading('Property Descriptions', '-'))
+ f.write(make_heading("Property Descriptions", "-"))
index = 0
for property_def in class_def.properties.values():
@@ -506,36 +518,36 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
continue
if index != 0:
- f.write('----\n\n')
+ f.write("----\n\n")
f.write(".. _class_{}_property_{}:\n\n".format(class_name, property_def.name))
- f.write('- {} **{}**\n\n'.format(property_def.type_name.to_rst(state), property_def.name))
+ f.write("- {} **{}**\n\n".format(property_def.type_name.to_rst(state), property_def.name))
info = []
if property_def.default_value is not None:
info.append(("*Default*", property_def.default_value))
if property_def.setter is not None and not property_def.setter.startswith("_"):
- info.append(("*Setter*", property_def.setter + '(value)'))
+ info.append(("*Setter*", property_def.setter + "(value)"))
if property_def.getter is not None and not property_def.getter.startswith("_"):
- info.append(('*Getter*', property_def.getter + '()'))
+ info.append(("*Getter*", property_def.getter + "()"))
if len(info) > 0:
format_table(f, info)
- if property_def.text is not None and property_def.text.strip() != '':
- f.write(rstize_text(property_def.text.strip(), state) + '\n\n')
+ if property_def.text is not None and property_def.text.strip() != "":
+ f.write(rstize_text(property_def.text.strip(), state) + "\n\n")
index += 1
# Method descriptions
if len(class_def.methods) > 0:
- f.write(make_heading('Method Descriptions', '-'))
+ f.write(make_heading("Method Descriptions", "-"))
index = 0
for method_list in class_def.methods.values():
for i, m in enumerate(method_list):
if index != 0:
- f.write('----\n\n')
+ f.write("----\n\n")
if i == 0:
f.write(".. _class_{}_method_{}:\n\n".format(class_name, m.name))
@@ -543,24 +555,24 @@ def make_rst_class(class_def, state, dry_run, output_dir): # type: (ClassDef, S
ret_type, signature = make_method_signature(class_def, m, False, state)
f.write("- {} {}\n\n".format(ret_type, signature))
- if m.description is not None and m.description.strip() != '':
- f.write(rstize_text(m.description.strip(), state) + '\n\n')
+ if m.description is not None and m.description.strip() != "":
+ f.write(rstize_text(m.description.strip(), state) + "\n\n")
index += 1
def make_class_list(class_list, columns): # type: (List[str], int) -> None
# This function is no longer used.
- f = open('class_list.rst', 'w', encoding='utf-8')
+ f = open("class_list.rst", "w", encoding="utf-8")
col_max = len(class_list) // columns + 1
- print(('col max is ', col_max))
+ print(("col max is ", col_max))
fit_columns = [] # type: List[List[str]]
for _ in range(0, columns):
fit_columns.append([])
indexers = [] # type List[str]
- last_initial = ''
+ last_initial = ""
for idx, name in enumerate(class_list):
col = idx // col_max
@@ -590,7 +602,7 @@ def make_class_list(class_list, columns): # type: (List[str], int) -> None
f.write("\n")
for r in range(0, row_max):
- s = '+ '
+ s = "+ "
for c in range(0, columns):
if r >= len(fit_columns[c]):
continue
@@ -598,13 +610,13 @@ def make_class_list(class_list, columns): # type: (List[str], int) -> None
classname = fit_columns[c][r]
initial = classname[0]
if classname in indexers:
- s += '**' + initial + '** | '
+ s += "**" + initial + "** | "
else:
- s += ' | '
+ s += " | "
- s += '[' + classname + '](class_' + classname.lower() + ') | '
+ s += "[" + classname + "](class_" + classname.lower() + ") | "
- s += '\n'
+ s += "\n"
f.write(s)
for n in range(0, columns):
@@ -618,29 +630,29 @@ def escape_rst(text, until_pos=-1): # type: (str) -> str
# Escape \ character, otherwise it ends up as an escape character in rst
pos = 0
while True:
- pos = text.find('\\', pos, until_pos)
+ pos = text.find("\\", pos, until_pos)
if pos == -1:
break
- text = text[:pos] + "\\\\" + text[pos + 1:]
+ text = text[:pos] + "\\\\" + text[pos + 1 :]
pos += 2
# Escape * character to avoid interpreting it as emphasis
pos = 0
while True:
- pos = text.find('*', pos, until_pos)
+ pos = text.find("*", pos, until_pos)
if pos == -1:
break
- text = text[:pos] + "\*" + text[pos + 1:]
+ text = text[:pos] + "\*" + text[pos + 1 :]
pos += 2
# Escape _ character at the end of a word to avoid interpreting it as an inline hyperlink
pos = 0
while True:
- pos = text.find('_', pos, until_pos)
+ pos = text.find("_", pos, until_pos)
if pos == -1:
break
if not text[pos + 1].isalnum(): # don't escape within a snake_case word
- text = text[:pos] + "\_" + text[pos + 1:]
+ text = text[:pos] + "\_" + text[pos + 1 :]
pos += 2
else:
pos += 1
@@ -652,16 +664,16 @@ def rstize_text(text, state): # type: (str, State) -> str
# Linebreak + tabs in the XML should become two line breaks unless in a "codeblock"
pos = 0
while True:
- pos = text.find('\n', pos)
+ pos = text.find("\n", pos)
if pos == -1:
break
pre_text = text[:pos]
indent_level = 0
- while text[pos + 1] == '\t':
+ while text[pos + 1] == "\t":
pos += 1
indent_level += 1
- post_text = text[pos + 1:]
+ post_text = text[pos + 1 :]
# Handle codeblocks
if post_text.startswith("[codeblock]"):
@@ -670,28 +682,33 @@ def rstize_text(text, state): # type: (str, State) -> str
print_error("[codeblock] without a closing tag, file: {}".format(state.current_class), state)
return ""
- code_text = post_text[len("[codeblock]"):end_pos]
+ code_text = post_text[len("[codeblock]") : end_pos]
post_text = post_text[end_pos:]
# Remove extraneous tabs
code_pos = 0
while True:
- code_pos = code_text.find('\n', code_pos)
+ code_pos = code_text.find("\n", code_pos)
if code_pos == -1:
break
to_skip = 0
- while code_pos + to_skip + 1 < len(code_text) and code_text[code_pos + to_skip + 1] == '\t':
+ while code_pos + to_skip + 1 < len(code_text) and code_text[code_pos + to_skip + 1] == "\t":
to_skip += 1
if to_skip > indent_level:
- print_error("Four spaces should be used for indentation within [codeblock], file: {}".format(state.current_class), state)
+ print_error(
+ "Four spaces should be used for indentation within [codeblock], file: {}".format(
+ state.current_class
+ ),
+ state,
+ )
- if len(code_text[code_pos + to_skip + 1:]) == 0:
+ if len(code_text[code_pos + to_skip + 1 :]) == 0:
code_text = code_text[:code_pos] + "\n"
code_pos += 1
else:
- code_text = code_text[:code_pos] + "\n " + code_text[code_pos + to_skip + 1:]
+ code_text = code_text[:code_pos] + "\n " + code_text[code_pos + to_skip + 1 :]
code_pos += 5 - to_skip
text = pre_text + "\n[codeblock]" + code_text + post_text
@@ -702,7 +719,7 @@ def rstize_text(text, state): # type: (str, State) -> str
text = pre_text + "\n\n" + post_text
pos += 2
- next_brac_pos = text.find('[')
+ next_brac_pos = text.find("[")
text = escape_rst(text, next_brac_pos)
# Handle [tags]
@@ -714,54 +731,59 @@ def rstize_text(text, state): # type: (str, State) -> str
tag_depth = 0
previous_pos = 0
while True:
- pos = text.find('[', pos)
+ pos = text.find("[", pos)
if inside_url and (pos > previous_pos):
url_has_name = True
if pos == -1:
break
- endq_pos = text.find(']', pos + 1)
+ endq_pos = text.find("]", pos + 1)
if endq_pos == -1:
break
pre_text = text[:pos]
- post_text = text[endq_pos + 1:]
- tag_text = text[pos + 1:endq_pos]
+ post_text = text[endq_pos + 1 :]
+ tag_text = text[pos + 1 : endq_pos]
escape_post = False
if tag_text in state.classes:
if tag_text == state.current_class:
# We don't want references to the same class
- tag_text = '``{}``'.format(tag_text)
+ tag_text = "``{}``".format(tag_text)
else:
tag_text = make_type(tag_text, state)
escape_post = True
else: # command
cmd = tag_text
- space_pos = tag_text.find(' ')
- if cmd == '/codeblock':
- tag_text = ''
+ space_pos = tag_text.find(" ")
+ if cmd == "/codeblock":
+ tag_text = ""
tag_depth -= 1
inside_code = False
# Strip newline if the tag was alone on one
- if pre_text[-1] == '\n':
+ if pre_text[-1] == "\n":
pre_text = pre_text[:-1]
- elif cmd == '/code':
- tag_text = '``'
+ elif cmd == "/code":
+ tag_text = "``"
tag_depth -= 1
inside_code = False
escape_post = True
elif inside_code:
- tag_text = '[' + tag_text + ']'
- elif cmd.find('html') == 0:
- param = tag_text[space_pos + 1:]
+ tag_text = "[" + tag_text + "]"
+ elif cmd.find("html") == 0:
+ param = tag_text[space_pos + 1 :]
tag_text = param
- elif cmd.startswith('method') or cmd.startswith('member') or cmd.startswith('signal') or cmd.startswith('constant'):
- param = tag_text[space_pos + 1:]
+ elif (
+ cmd.startswith("method")
+ or cmd.startswith("member")
+ or cmd.startswith("signal")
+ or cmd.startswith("constant")
+ ):
+ param = tag_text[space_pos + 1 :]
- if param.find('.') != -1:
- ss = param.split('.')
+ if param.find(".") != -1:
+ ss = param.split(".")
if len(ss) > 2:
print_error("Bad reference: '{}', file: {}".format(param, state.current_class), state)
class_param, method_param = ss
@@ -794,7 +816,7 @@ def rstize_text(text, state): # type: (str, State) -> str
# Search in the current class
search_class_defs = [class_def]
- if param.find('.') == -1:
+ if param.find(".") == -1:
# Also search in @GlobalScope as a last resort if no class was specified
search_class_defs.append(state.classes["@GlobalScope"])
@@ -815,66 +837,71 @@ def rstize_text(text, state): # type: (str, State) -> str
ref_type = "_constant"
else:
- print_error("Unresolved type reference '{}' in method reference '{}', file: {}".format(class_param, param, state.current_class), state)
+ print_error(
+ "Unresolved type reference '{}' in method reference '{}', file: {}".format(
+ class_param, param, state.current_class
+ ),
+ state,
+ )
repl_text = method_param
if class_param != state.current_class:
repl_text = "{}.{}".format(class_param, method_param)
-                    tag_text = ':ref:`{}<class_{}{}_{}>`'.format(repl_text, class_param, ref_type, method_param)
+                    tag_text = ":ref:`{}<class_{}{}_{}>`".format(repl_text, class_param, ref_type, method_param)
escape_post = True
- elif cmd.find('image=') == 0:
+ elif cmd.find("image=") == 0:
tag_text = "" # '![](' + cmd[6:] + ')'
- elif cmd.find('url=') == 0:
+ elif cmd.find("url=") == 0:
url_link = cmd[4:]
- tag_text = '`'
+ tag_text = "`"
tag_depth += 1
inside_url = True
url_has_name = False
- elif cmd == '/url':
- tag_text = ('' if url_has_name else url_link) + " <" + url_link + ">`_"
+ elif cmd == "/url":
+ tag_text = ("" if url_has_name else url_link) + " <" + url_link + ">`_"
tag_depth -= 1
escape_post = True
inside_url = False
url_has_name = False
- elif cmd == 'center':
+ elif cmd == "center":
tag_depth += 1
- tag_text = ''
- elif cmd == '/center':
+ tag_text = ""
+ elif cmd == "/center":
tag_depth -= 1
- tag_text = ''
- elif cmd == 'codeblock':
+ tag_text = ""
+ elif cmd == "codeblock":
tag_depth += 1
- tag_text = '\n::\n'
+ tag_text = "\n::\n"
inside_code = True
- elif cmd == 'br':
+ elif cmd == "br":
# Make a new paragraph instead of a linebreak, rst is not so linebreak friendly
- tag_text = '\n\n'
+ tag_text = "\n\n"
# Strip potential leading spaces
- while post_text[0] == ' ':
+ while post_text[0] == " ":
post_text = post_text[1:]
- elif cmd == 'i' or cmd == '/i':
+ elif cmd == "i" or cmd == "/i":
if cmd == "/i":
tag_depth -= 1
else:
tag_depth += 1
- tag_text = '*'
- elif cmd == 'b' or cmd == '/b':
+ tag_text = "*"
+ elif cmd == "b" or cmd == "/b":
if cmd == "/b":
tag_depth -= 1
else:
tag_depth += 1
- tag_text = '**'
- elif cmd == 'u' or cmd == '/u':
+ tag_text = "**"
+ elif cmd == "u" or cmd == "/u":
if cmd == "/u":
tag_depth -= 1
else:
tag_depth += 1
- tag_text = ''
- elif cmd == 'code':
- tag_text = '``'
+ tag_text = ""
+ elif cmd == "code":
+ tag_text = "``"
tag_depth += 1
inside_code = True
- elif cmd.startswith('enum '):
+ elif cmd.startswith("enum "):
tag_text = make_enum(cmd[5:], state)
escape_post = True
else:
@@ -883,24 +910,24 @@ def rstize_text(text, state): # type: (str, State) -> str
# Properly escape things like `[Node]s`
if escape_post and post_text and (post_text[0].isalnum() or post_text[0] == "("): # not punctuation, escape
- post_text = '\ ' + post_text
+ post_text = "\ " + post_text
- next_brac_pos = post_text.find('[', 0)
+ next_brac_pos = post_text.find("[", 0)
iter_pos = 0
while not inside_code:
- iter_pos = post_text.find('*', iter_pos, next_brac_pos)
+ iter_pos = post_text.find("*", iter_pos, next_brac_pos)
if iter_pos == -1:
break
- post_text = post_text[:iter_pos] + "\*" + post_text[iter_pos + 1:]
+ post_text = post_text[:iter_pos] + "\*" + post_text[iter_pos + 1 :]
iter_pos += 2
iter_pos = 0
while not inside_code:
- iter_pos = post_text.find('_', iter_pos, next_brac_pos)
+ iter_pos = post_text.find("_", iter_pos, next_brac_pos)
if iter_pos == -1:
break
if not post_text[iter_pos + 1].isalnum(): # don't escape within a snake_case word
- post_text = post_text[:iter_pos] + "\_" + post_text[iter_pos + 1:]
+ post_text = post_text[:iter_pos] + "\_" + post_text[iter_pos + 1 :]
iter_pos += 2
else:
iter_pos += 1
@@ -922,7 +949,7 @@ def format_table(f, data, remove_empty_columns=False): # type: (TextIO, Iterabl
column_sizes = [0] * len(data[0])
for row in data:
for i, text in enumerate(row):
- text_length = len(text or '')
+ text_length = len(text or "")
if text_length > column_sizes[i]:
column_sizes[i] = text_length
@@ -939,16 +966,16 @@ def format_table(f, data, remove_empty_columns=False): # type: (TextIO, Iterabl
for i, text in enumerate(row):
if column_sizes[i] == 0 and remove_empty_columns:
continue
- row_text += " " + (text or '').ljust(column_sizes[i]) + " |"
+ row_text += " " + (text or "").ljust(column_sizes[i]) + " |"
row_text += "\n"
f.write(row_text)
f.write(sep)
- f.write('\n')
+ f.write("\n")
def make_type(t, state): # type: (str, State) -> str
if t in state.classes:
-        return ':ref:`{0}<class_{0}>`'.format(t)
+        return ":ref:`{0}<class_{0}>`".format(t)
print_error("Unresolved type '{}', file: {}".format(t, state.current_class), state)
return t
@@ -957,7 +984,7 @@ def make_enum(t, state): # type: (str, State) -> str
p = t.find(".")
if p >= 0:
c = t[0:p]
- e = t[p + 1:]
+ e = t[p + 1 :]
# Variant enums live in GlobalScope but still use periods.
if c == "Variant":
c = "@GlobalScope"
@@ -969,7 +996,7 @@ def make_enum(t, state): # type: (str, State) -> str
c = "@GlobalScope"
if not c in state.classes and c.startswith("_"):
- c = c[1:] # Remove the underscore prefix
+ c = c[1:] # Remove the underscore prefix
if c in state.classes and e in state.classes[c].enums:
return ":ref:`{0}`".format(e, c)
@@ -981,7 +1008,9 @@ def make_enum(t, state): # type: (str, State) -> str
return t
-def make_method_signature(class_def, method_def, make_ref, state): # type: (ClassDef, Union[MethodDef, SignalDef], bool, State) -> Tuple[str, str]
+def make_method_signature(
+ class_def, method_def, make_ref, state
+): # type: (ClassDef, Union[MethodDef, SignalDef], bool, State) -> Tuple[str, str]
ret_type = " "
ref_type = "signal"
@@ -996,34 +1025,34 @@ def make_method_signature(class_def, method_def, make_ref, state): # type: (Cla
else:
out += "**{}** ".format(method_def.name)
- out += '**(**'
+ out += "**(**"
for i, arg in enumerate(method_def.parameters):
if i > 0:
- out += ', '
+ out += ", "
else:
- out += ' '
+ out += " "
out += "{} {}".format(arg.type_name.to_rst(state), arg.name)
if arg.default_value is not None:
- out += '=' + arg.default_value
+ out += "=" + arg.default_value
- if isinstance(method_def, MethodDef) and method_def.qualifiers is not None and 'vararg' in method_def.qualifiers:
+ if isinstance(method_def, MethodDef) and method_def.qualifiers is not None and "vararg" in method_def.qualifiers:
if len(method_def.parameters) > 0:
- out += ', ...'
+ out += ", ..."
else:
- out += ' ...'
+ out += " ..."
- out += ' **)**'
+ out += " **)**"
if isinstance(method_def, MethodDef) and method_def.qualifiers is not None:
- out += ' ' + method_def.qualifiers
+ out += " " + method_def.qualifiers
return ret_type, out
def make_heading(title, underline): # type: (str, str) -> str
- return title + '\n' + (underline * len(title)) + "\n\n"
+ return title + "\n" + (underline * len(title)) + "\n\n"
def make_url(link): # type: (str) -> str
@@ -1047,5 +1076,5 @@ def make_url(link): # type: (str) -> str
return "`" + link + " <" + link + ">`_"
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/doc/translations/extract.py b/doc/translations/extract.py
index cd06e13dda4..a65f942b924 100644
--- a/doc/translations/extract.py
+++ b/doc/translations/extract.py
@@ -7,7 +7,7 @@ import shutil
from collections import OrderedDict
EXTRACT_TAGS = ["description", "brief_description", "member", "constant", "theme_item", "link"]
-HEADER = '''\
+HEADER = """\
# LANGUAGE translation of the Godot Engine class reference.
# Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur.
# Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md).
@@ -24,7 +24,7 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8-bit\\n"
-'''
+"""
# Some strings used by makerst.py are normally part of the editor translations,
# so we need to include them manually here for the online docs.
BASE_STRINGS = [
@@ -42,7 +42,8 @@ BASE_STRINGS = [
##
import sys
-sys.modules['_elementtree'] = None
+
+sys.modules["_elementtree"] = None
import xml.etree.ElementTree as ET
## override the parser to get the line number
@@ -62,8 +63,11 @@ class LineNumberingParser(ET.XMLParser):
element._end_column_number = self.parser.CurrentColumnNumber
element._end_byte_index = self.parser.CurrentByteIndex
return element
+
+
##
+
class Desc:
def __init__(self, line_no, msg, desc_list=None):
## line_no : the line number where the desc is
@@ -73,6 +77,7 @@ class Desc:
self.msg = msg
self.desc_list = desc_list
+
class DescList:
def __init__(self, doc, path):
## doc : root xml element of the document
@@ -82,29 +87,32 @@ class DescList:
self.path = path
self.list = []
+
def print_error(error):
print("ERROR: {}".format(error))
+
## build classes with xml elements recursively
def _collect_classes_dir(path, classes):
if not os.path.isdir(path):
print_error("Invalid directory path: {}".format(path))
exit(1)
- for _dir in map(lambda dir : os.path.join(path, dir), os.listdir(path)):
+ for _dir in map(lambda dir: os.path.join(path, dir), os.listdir(path)):
if os.path.isdir(_dir):
_collect_classes_dir(_dir, classes)
elif os.path.isfile(_dir):
if not _dir.endswith(".xml"):
- #print("Got non-.xml file '{}', skipping.".format(path))
+ # print("Got non-.xml file '{}', skipping.".format(path))
continue
_collect_classes_file(_dir, classes)
+
## opens a file and parse xml add to classes
def _collect_classes_file(path, classes):
if not os.path.isfile(path) or not path.endswith(".xml"):
print_error("Invalid xml file path: {}".format(path))
exit(1)
- print('Collecting file: {}'.format(os.path.basename(path)))
+ print("Collecting file: {}".format(os.path.basename(path)))
try:
tree = ET.parse(path, parser=LineNumberingParser())
@@ -114,8 +122,8 @@ def _collect_classes_file(path, classes):
doc = tree.getroot()
- if 'name' in doc.attrib:
- if 'version' not in doc.attrib:
+ if "name" in doc.attrib:
+ if "version" not in doc.attrib:
print_error("Version missing from 'doc', file: {}".format(path))
name = doc.attrib["name"]
@@ -124,7 +132,7 @@ def _collect_classes_file(path, classes):
exit(1)
classes[name] = DescList(doc, path)
else:
- print_error('Unknown XML file {}, skipping'.format(path))
+ print_error("Unknown XML file {}, skipping".format(path))
## regions are list of tuples with size 3 (start_index, end_index, indent)
@@ -132,56 +140,64 @@ def _collect_classes_file(path, classes):
## if i inside the region returns the indent, else returns -1
def _get_xml_indent(i, regions):
for region in regions:
- if region[0] < i < region[1] :
+ if region[0] < i < region[1]:
return region[2]
return -1
+
## find and build all regions of codeblock which we need later
-def _make_codeblock_regions(desc, path=''):
+def _make_codeblock_regions(desc, path=""):
code_block_end = False
code_block_index = 0
code_block_regions = []
while not code_block_end:
code_block_index = desc.find("[codeblock]", code_block_index)
- if code_block_index < 0: break
- xml_indent=0
- while True :
+ if code_block_index < 0:
+ break
+ xml_indent = 0
+ while True:
## [codeblock] always have a trailing new line and some tabs
## those tabs are belongs to xml indentations not code indent
- if desc[code_block_index+len("[codeblock]\n")+xml_indent] == '\t':
- xml_indent+=1
- else: break
+ if desc[code_block_index + len("[codeblock]\n") + xml_indent] == "\t":
+ xml_indent += 1
+ else:
+ break
end_index = desc.find("[/codeblock]", code_block_index)
- if end_index < 0 :
- print_error('Non terminating codeblock: {}'.format(path))
+ if end_index < 0:
+ print_error("Non terminating codeblock: {}".format(path))
exit(1)
- code_block_regions.append( (code_block_index, end_index, xml_indent) )
+ code_block_regions.append((code_block_index, end_index, xml_indent))
code_block_index += 1
return code_block_regions
+
def _strip_and_split_desc(desc, code_block_regions):
- desc_strip = '' ## a stripped desc msg
+ desc_strip = "" ## a stripped desc msg
total_indent = 0 ## code indent = total indent - xml indent
for i in range(len(desc)):
c = desc[i]
- if c == '\n' : c = '\\n'
- if c == '"': c = '\\"'
- if c == '\\': c = '\\\\' ## is invalid for msgmerge
- if c == '\t':
+ if c == "\n":
+ c = "\\n"
+ if c == '"':
+ c = '\\"'
+ if c == "\\":
+ c = "\\\\" ## is invalid for msgmerge
+ if c == "\t":
xml_indent = _get_xml_indent(i, code_block_regions)
if xml_indent >= 0:
total_indent += 1
if xml_indent < total_indent:
- c = '\\t'
+ c = "\\t"
else:
continue
else:
continue
desc_strip += c
- if c == '\\n':
+ if c == "\\n":
total_indent = 0
return desc_strip
+
## make catalog strings from xml elements
def _make_translation_catalog(classes):
unique_msgs = OrderedDict()
@@ -189,8 +205,9 @@ def _make_translation_catalog(classes):
desc_list = classes[class_name]
for elem in desc_list.doc.iter():
if elem.tag in EXTRACT_TAGS:
- if not elem.text or len(elem.text) == 0 : continue
- line_no = elem._start_line_number if elem.text[0]!='\n' else elem._start_line_number+1
+ if not elem.text or len(elem.text) == 0:
+ continue
+ line_no = elem._start_line_number if elem.text[0] != "\n" else elem._start_line_number + 1
desc_str = elem.text.strip()
code_block_regions = _make_codeblock_regions(desc_str, desc_list.path)
desc_msg = _strip_and_split_desc(desc_str, code_block_regions)
@@ -203,44 +220,48 @@ def _make_translation_catalog(classes):
unique_msgs[desc_msg].append(desc_obj)
return unique_msgs
+
## generate the catalog file
def _generate_translation_catalog_file(unique_msgs, output):
- with open(output, 'w', encoding='utf8') as f:
+ with open(output, "w", encoding="utf8") as f:
f.write(HEADER)
for msg in BASE_STRINGS:
- f.write('#: doc/tools/makerst.py\n')
+ f.write("#: doc/tools/makerst.py\n")
f.write('msgid "{}"\n'.format(msg))
f.write('msgstr ""\n\n')
for msg in unique_msgs:
if len(msg) == 0 or msg in BASE_STRINGS:
continue
- f.write('#:')
+ f.write("#:")
desc_list = unique_msgs[msg]
for desc in desc_list:
- path = desc.desc_list.path.replace('\\', '/')
- if path.startswith('./'):
+ path = desc.desc_list.path.replace("\\", "/")
+ if path.startswith("./"):
path = path[2:]
- f.write(' {}:{}'.format(path, desc.line_no))
- f.write('\n')
+ f.write(" {}:{}".format(path, desc.line_no))
+ f.write("\n")
f.write('msgid "{}"\n'.format(msg))
f.write('msgstr ""\n\n')
## TODO: what if 'nt'?
- if (os.name == "posix"):
+ if os.name == "posix":
print("Wrapping template at 79 characters for compatibility with Weblate.")
os.system("msgmerge -w79 {0} {0} > {0}.wrap".format(output))
shutil.move("{}.wrap".format(output), output)
+
def main():
parser = argparse.ArgumentParser()
- parser.add_argument("--path", "-p", nargs="+", default=".", help="The directory or directories containing XML files to collect.")
+ parser.add_argument(
+ "--path", "-p", nargs="+", default=".", help="The directory or directories containing XML files to collect."
+ )
parser.add_argument("--output", "-o", default="translation_catalog.pot", help="The path to the output file.")
args = parser.parse_args()
output = os.path.abspath(args.output)
- if not os.path.isdir(os.path.dirname(output)) or not output.endswith('.pot'):
+ if not os.path.isdir(os.path.dirname(output)) or not output.endswith(".pot"):
print_error("Invalid output path: {}".format(output))
exit(1)
@@ -252,13 +273,14 @@ def main():
print("\nCurrent working dir: {}".format(path))
- path_classes = OrderedDict() ## dictionary of key=class_name, value=DescList objects
+ path_classes = OrderedDict() ## dictionary of key=class_name, value=DescList objects
_collect_classes_dir(path, path_classes)
classes.update(path_classes)
- classes = OrderedDict(sorted(classes.items(), key = lambda kv: kv[0].lower()))
+ classes = OrderedDict(sorted(classes.items(), key=lambda kv: kv[0].lower()))
unique_msgs = _make_translation_catalog(classes)
_generate_translation_catalog_file(unique_msgs, output)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
main()
diff --git a/drivers/SCsub b/drivers/SCsub
index 41c20d81ad3..cc7bcbc6400 100644
--- a/drivers/SCsub
+++ b/drivers/SCsub
@@ -1,41 +1,42 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.drivers_sources = []
# OS drivers
-SConscript('unix/SCsub')
-SConscript('windows/SCsub')
+SConscript("unix/SCsub")
+SConscript("windows/SCsub")
 # Sound drivers
-SConscript('alsa/SCsub')
-SConscript('coreaudio/SCsub')
-SConscript('pulseaudio/SCsub')
-if (env["platform"] == "windows"):
+SConscript("alsa/SCsub")
+SConscript("coreaudio/SCsub")
+SConscript("pulseaudio/SCsub")
+if env["platform"] == "windows":
SConscript("wasapi/SCsub")
-if env['xaudio2']:
+if env["xaudio2"]:
SConscript("xaudio2/SCsub")
# Midi drivers
-SConscript('alsamidi/SCsub')
-SConscript('coremidi/SCsub')
-SConscript('winmidi/SCsub')
+SConscript("alsamidi/SCsub")
+SConscript("coremidi/SCsub")
+SConscript("winmidi/SCsub")
# Graphics drivers
-if (env["platform"] != "server" and env["platform"] != "javascript"):
-# SConscript('gles2/SCsub')
- SConscript('vulkan/SCsub')
- SConscript('gl_context/SCsub')
+if env["platform"] != "server" and env["platform"] != "javascript":
+ # SConscript('gles2/SCsub')
+ SConscript("vulkan/SCsub")
+ SConscript("gl_context/SCsub")
else:
- SConscript('dummy/SCsub')
+ SConscript("dummy/SCsub")
# Core dependencies
SConscript("png/SCsub")
SConscript("spirv-reflect/SCsub")
-if env['vsproj']:
+if env["vsproj"]:
import os
+
path = os.getcwd()
# Change directory so the path resolves correctly in the function call.
os.chdir("..")
diff --git a/drivers/alsa/SCsub b/drivers/alsa/SCsub
index 28b315ae668..91e1140b750 100644
--- a/drivers/alsa/SCsub
+++ b/drivers/alsa/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/alsamidi/SCsub b/drivers/alsamidi/SCsub
index 4c249251920..4e1b5f2a362 100644
--- a/drivers/alsamidi/SCsub
+++ b/drivers/alsamidi/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
# Driver source files
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/coreaudio/SCsub b/drivers/coreaudio/SCsub
index 4c249251920..4e1b5f2a362 100644
--- a/drivers/coreaudio/SCsub
+++ b/drivers/coreaudio/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
# Driver source files
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/coremidi/SCsub b/drivers/coremidi/SCsub
index 4c249251920..4e1b5f2a362 100644
--- a/drivers/coremidi/SCsub
+++ b/drivers/coremidi/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
# Driver source files
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/dummy/SCsub b/drivers/dummy/SCsub
index 28b315ae668..91e1140b750 100644
--- a/drivers/dummy/SCsub
+++ b/drivers/dummy/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/gl_context/SCsub b/drivers/gl_context/SCsub
index d6945ee2eb1..ddeec6f4c6f 100644
--- a/drivers/gl_context/SCsub
+++ b/drivers/gl_context/SCsub
@@ -1,8 +1,8 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
-if (env["platform"] in ["haiku", "osx", "windows", "linuxbsd"]):
+if env["platform"] in ["haiku", "osx", "windows", "linuxbsd"]:
# Thirdparty source files
thirdparty_dir = "#thirdparty/glad/"
thirdparty_sources = [
@@ -12,8 +12,8 @@ if (env["platform"] in ["haiku", "osx", "windows", "linuxbsd"]):
env.Prepend(CPPPATH=[thirdparty_dir])
- env.Append(CPPDEFINES=['GLAD_ENABLED'])
- env.Append(CPPDEFINES=['GLES_OVER_GL'])
+ env.Append(CPPDEFINES=["GLAD_ENABLED"])
+ env.Append(CPPDEFINES=["GLES_OVER_GL"])
env_thirdparty = env.Clone()
env_thirdparty.disable_warnings()
diff --git a/drivers/gles2/SCsub b/drivers/gles2/SCsub
index 9923e52c733..987ddcd16ed 100644
--- a/drivers/gles2/SCsub
+++ b/drivers/gles2/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/gles2/shaders/SCsub b/drivers/gles2/shaders/SCsub
index d7ae0243e6d..bcd6ea79fbb 100644
--- a/drivers/gles2/shaders/SCsub
+++ b/drivers/gles2/shaders/SCsub
@@ -1,23 +1,23 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
-if 'GLES2_GLSL' in env['BUILDERS']:
- env.GLES2_GLSL('copy.glsl');
-# env.GLES2_GLSL('resolve.glsl');
- env.GLES2_GLSL('canvas.glsl');
- env.GLES2_GLSL('canvas_shadow.glsl');
- env.GLES2_GLSL('scene.glsl');
- env.GLES2_GLSL('cubemap_filter.glsl');
- env.GLES2_GLSL('cube_to_dp.glsl');
-# env.GLES2_GLSL('blend_shape.glsl');
-# env.GLES2_GLSL('screen_space_reflection.glsl');
- env.GLES2_GLSL('effect_blur.glsl');
-# env.GLES2_GLSL('subsurf_scattering.glsl');
-# env.GLES2_GLSL('ssao.glsl');
-# env.GLES2_GLSL('ssao_minify.glsl');
-# env.GLES2_GLSL('ssao_blur.glsl');
-# env.GLES2_GLSL('exposure.glsl');
- env.GLES2_GLSL('tonemap.glsl');
-# env.GLES2_GLSL('particles.glsl');
- env.GLES2_GLSL('lens_distorted.glsl');
+if "GLES2_GLSL" in env["BUILDERS"]:
+ env.GLES2_GLSL("copy.glsl")
+ # env.GLES2_GLSL('resolve.glsl');
+ env.GLES2_GLSL("canvas.glsl")
+ env.GLES2_GLSL("canvas_shadow.glsl")
+ env.GLES2_GLSL("scene.glsl")
+ env.GLES2_GLSL("cubemap_filter.glsl")
+ env.GLES2_GLSL("cube_to_dp.glsl")
+ # env.GLES2_GLSL('blend_shape.glsl');
+ # env.GLES2_GLSL('screen_space_reflection.glsl');
+ env.GLES2_GLSL("effect_blur.glsl")
+ # env.GLES2_GLSL('subsurf_scattering.glsl');
+ # env.GLES2_GLSL('ssao.glsl');
+ # env.GLES2_GLSL('ssao_minify.glsl');
+ # env.GLES2_GLSL('ssao_blur.glsl');
+ # env.GLES2_GLSL('exposure.glsl');
+ env.GLES2_GLSL("tonemap.glsl")
+ # env.GLES2_GLSL('particles.glsl');
+ env.GLES2_GLSL("lens_distorted.glsl")
diff --git a/drivers/png/SCsub b/drivers/png/SCsub
index 87b54cecaf4..db08be0c47c 100644
--- a/drivers/png/SCsub
+++ b/drivers/png/SCsub
@@ -1,11 +1,11 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env_png = env.Clone()
# Thirdparty source files
-if env['builtin_libpng']:
+if env["builtin_libpng"]:
thirdparty_dir = "#thirdparty/libpng/"
thirdparty_sources = [
"png.c",
@@ -32,6 +32,7 @@ if env['builtin_libpng']:
# Currently .ASM filter_neon.S does not compile on NT.
import os
+
use_neon = "neon_enabled" in env and env["neon_enabled"] and os.name != "nt"
if use_neon:
env_png.Append(CPPDEFINES=[("PNG_ARM_NEON_OPT", 2)])
@@ -45,7 +46,7 @@ if env['builtin_libpng']:
if use_neon:
env_neon = env_thirdparty.Clone()
if "S_compiler" in env:
- env_neon['CC'] = env['S_compiler']
+ env_neon["CC"] = env["S_compiler"]
neon_sources = []
neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/arm_init.c"))
neon_sources.append(env_neon.Object(thirdparty_dir + "/arm/filter_neon_intrinsics.c"))
@@ -56,4 +57,4 @@ if env['builtin_libpng']:
# Godot source files
env_png.add_source_files(env.drivers_sources, "*.cpp")
-Export('env')
+Export("env")
diff --git a/drivers/pulseaudio/SCsub b/drivers/pulseaudio/SCsub
index 28b315ae668..91e1140b750 100644
--- a/drivers/pulseaudio/SCsub
+++ b/drivers/pulseaudio/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/spirv-reflect/SCsub b/drivers/spirv-reflect/SCsub
index 8ff27da1140..d0ffaf068db 100644
--- a/drivers/spirv-reflect/SCsub
+++ b/drivers/spirv-reflect/SCsub
@@ -1,17 +1,17 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env_spirv_reflect = env.Clone()
env_spirv_reflect.disable_warnings()
thirdparty_dir = "#thirdparty/spirv-reflect/"
thirdparty_sources = [
- "spirv_reflect.c"
+ "spirv_reflect.c",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
env_spirv_reflect.add_source_files(env.drivers_sources, thirdparty_sources)
-Export('env')
+Export("env")
diff --git a/drivers/unix/SCsub b/drivers/unix/SCsub
index 4888f560993..91ef6135463 100644
--- a/drivers/unix/SCsub
+++ b/drivers/unix/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
-env["check_c_headers"] = [ [ "mntent.h", "HAVE_MNTENT" ] ]
+env["check_c_headers"] = [["mntent.h", "HAVE_MNTENT"]]
diff --git a/drivers/vulkan/SCsub b/drivers/vulkan/SCsub
index de776b19b22..7ffdac27d5f 100644
--- a/drivers/vulkan/SCsub
+++ b/drivers/vulkan/SCsub
@@ -1,10 +1,10 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
-if env['builtin_vulkan']:
+if env["builtin_vulkan"]:
# Use bundled Vulkan headers
thirdparty_dir = "#thirdparty/vulkan"
env.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "/include", thirdparty_dir + "/loader"])
@@ -27,48 +27,56 @@ if env['builtin_vulkan']:
]
vma_sources = [thirdparty_dir + "/vk_mem_alloc.cpp"]
- if env['platform'] == "windows":
+ if env["platform"] == "windows":
loader_sources.append("dirent_on_windows.c")
loader_sources.append("dxgi_loader.c")
- env_thirdparty.AppendUnique(CPPDEFINES=[
- 'VK_USE_PLATFORM_WIN32_KHR',
- 'VULKAN_NON_CMAKE_BUILD',
- 'WIN32_LEAN_AND_MEAN',
- 'API_NAME=\\"%s\\"' % 'Vulkan'
- ])
- if not env.msvc: # Windows 7+, missing in mingw headers
- env_thirdparty.AppendUnique(CPPDEFINES=[
- "CM_GETIDLIST_FILTER_CLASS=0x00000200",
- "CM_GETIDLIST_FILTER_PRESENT=0x00000100"
- ])
- elif env['platform'] == "osx":
- env_thirdparty.AppendUnique(CPPDEFINES=[
- 'VK_USE_PLATFORM_MACOS_MVK',
- 'VULKAN_NON_CMAKE_BUILD',
- 'SYSCONFDIR=\\"%s\\"' % '/etc',
- 'FALLBACK_DATA_DIRS=\\"%s\\"' % '/usr/local/share:/usr/share',
- 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % '/etc/xdg'
- ])
- elif env['platform'] == "iphone":
- env_thirdparty.AppendUnique(CPPDEFINES=[
- 'VK_USE_PLATFORM_IOS_MVK',
- 'VULKAN_NON_CMAKE_BUILD',
- 'SYSCONFDIR=\\"%s\\"' % '/etc',
- 'FALLBACK_DATA_DIRS=\\"%s\\"' % '/usr/local/share:/usr/share',
- 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % '/etc/xdg'
- ])
- elif env['platform'] == "linuxbsd":
- env_thirdparty.AppendUnique(CPPDEFINES=[
- 'VK_USE_PLATFORM_XLIB_KHR',
- 'VULKAN_NON_CMAKE_BUILD',
- 'SYSCONFDIR=\\"%s\\"' % '/etc',
- 'FALLBACK_DATA_DIRS=\\"%s\\"' % '/usr/local/share:/usr/share',
- 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % '/etc/xdg'
- ])
+ env_thirdparty.AppendUnique(
+ CPPDEFINES=[
+ "VK_USE_PLATFORM_WIN32_KHR",
+ "VULKAN_NON_CMAKE_BUILD",
+ "WIN32_LEAN_AND_MEAN",
+ 'API_NAME=\\"%s\\"' % "Vulkan",
+ ]
+ )
+ if not env.msvc: # Windows 7+, missing in mingw headers
+ env_thirdparty.AppendUnique(
+ CPPDEFINES=["CM_GETIDLIST_FILTER_CLASS=0x00000200", "CM_GETIDLIST_FILTER_PRESENT=0x00000100"]
+ )
+ elif env["platform"] == "osx":
+ env_thirdparty.AppendUnique(
+ CPPDEFINES=[
+ "VK_USE_PLATFORM_MACOS_MVK",
+ "VULKAN_NON_CMAKE_BUILD",
+ 'SYSCONFDIR=\\"%s\\"' % "/etc",
+ 'FALLBACK_DATA_DIRS=\\"%s\\"' % "/usr/local/share:/usr/share",
+ 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % "/etc/xdg",
+ ]
+ )
+ elif env["platform"] == "iphone":
+ env_thirdparty.AppendUnique(
+ CPPDEFINES=[
+ "VK_USE_PLATFORM_IOS_MVK",
+ "VULKAN_NON_CMAKE_BUILD",
+ 'SYSCONFDIR=\\"%s\\"' % "/etc",
+ 'FALLBACK_DATA_DIRS=\\"%s\\"' % "/usr/local/share:/usr/share",
+ 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % "/etc/xdg",
+ ]
+ )
+ elif env["platform"] == "linuxbsd":
+ env_thirdparty.AppendUnique(
+ CPPDEFINES=[
+ "VK_USE_PLATFORM_XLIB_KHR",
+ "VULKAN_NON_CMAKE_BUILD",
+ 'SYSCONFDIR=\\"%s\\"' % "/etc",
+ 'FALLBACK_DATA_DIRS=\\"%s\\"' % "/usr/local/share:/usr/share",
+ 'FALLBACK_CONFIG_DIRS=\\"%s\\"' % "/etc/xdg",
+ ]
+ )
import platform
- if (platform.system() == "Linux"):
+
+ if platform.system() == "Linux":
# In glibc since 2.17 and musl libc since 1.1.24. Used by loader.c.
- env_thirdparty.AppendUnique(CPPDEFINES=['HAVE_SECURE_GETENV'])
+ env_thirdparty.AppendUnique(CPPDEFINES=["HAVE_SECURE_GETENV"])
loader_sources = [thirdparty_dir + "/loader/" + file for file in loader_sources]
env_thirdparty.add_source_files(env.drivers_sources, loader_sources)
diff --git a/drivers/wasapi/SCsub b/drivers/wasapi/SCsub
index 4c249251920..4e1b5f2a362 100644
--- a/drivers/wasapi/SCsub
+++ b/drivers/wasapi/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
# Driver source files
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/windows/SCsub b/drivers/windows/SCsub
index 28b315ae668..91e1140b750 100644
--- a/drivers/windows/SCsub
+++ b/drivers/windows/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/winmidi/SCsub b/drivers/winmidi/SCsub
index 4c249251920..4e1b5f2a362 100644
--- a/drivers/winmidi/SCsub
+++ b/drivers/winmidi/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
# Driver source files
env.add_source_files(env.drivers_sources, "*.cpp")
diff --git a/drivers/xaudio2/SCsub b/drivers/xaudio2/SCsub
index de750525ab3..6778ad281e5 100644
--- a/drivers/xaudio2/SCsub
+++ b/drivers/xaudio2/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.drivers_sources, "*.cpp")
-env.Append(CPPDEFINES=['XAUDIO2_ENABLED'])
-env.Append(LINKFLAGS=['xaudio2_8.lib'])
+env.Append(CPPDEFINES=["XAUDIO2_ENABLED"])
+env.Append(LINKFLAGS=["xaudio2_8.lib"])
diff --git a/editor/SCsub b/editor/SCsub
index 61562d70d36..72a168efe27 100644
--- a/editor/SCsub
+++ b/editor/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.editor_sources = []
@@ -16,24 +16,24 @@ def _make_doc_data_class_path(to_path):
g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
- g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n");
+ g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
for c in sorted(env.doc_class_path):
- g.write("\t{\"" + c + "\", \"" + env.doc_class_path[c] + "\"},\n")
+ g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
g.write("\t{NULL, NULL}\n")
g.write("};\n")
g.close()
-if env['tools']:
+if env["tools"]:
# Register exporters
reg_exporters_inc = '#include "register_exporters.h"\n'
- reg_exporters = 'void register_exporters() {\n'
+ reg_exporters = "void register_exporters() {\n"
for e in env.platform_exporters:
env.add_source_files(env.editor_sources, "#platform/" + e + "/export/export.cpp")
- reg_exporters += '\tregister_' + e + '_exporter();\n'
+ reg_exporters += "\tregister_" + e + "_exporter();\n"
reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n'
- reg_exporters += '}\n'
+ reg_exporters += "}\n"
# NOTE: It is safe to generate this file here, since this is still executed serially
with open("register_exporters.gen.cpp", "w", encoding="utf-8") as f:
@@ -50,12 +50,12 @@ if env['tools']:
for d in doc_dirs:
try:
- for f in os.listdir(os.path.join(env.Dir('#').abspath, d)):
+ for f in os.listdir(os.path.join(env.Dir("#").abspath, d)):
docs.append("#" + os.path.join(d, f))
except OSError:
pass
- _make_doc_data_class_path(os.path.join(env.Dir('#').abspath, "editor"))
+ _make_doc_data_class_path(os.path.join(env.Dir("#").abspath, "editor"))
docs = sorted(docs)
env.Depends("#editor/doc_data_compressed.gen.h", docs)
@@ -63,32 +63,36 @@ if env['tools']:
import glob
- path = env.Dir('.').abspath
+ path = env.Dir(".").abspath
# Editor translations
tlist = glob.glob(path + "/translations/*.po")
- env.Depends('#editor/editor_translations.gen.h', tlist)
- env.CommandNoCache('#editor/editor_translations.gen.h', tlist, run_in_subprocess(editor_builders.make_editor_translations_header))
+ env.Depends("#editor/editor_translations.gen.h", tlist)
+ env.CommandNoCache(
+ "#editor/editor_translations.gen.h", tlist, run_in_subprocess(editor_builders.make_editor_translations_header)
+ )
# Documentation translations
tlist = glob.glob(env.Dir("#doc").abspath + "/translations/*.po")
- env.Depends('#editor/doc_translations.gen.h', tlist)
- env.CommandNoCache('#editor/doc_translations.gen.h', tlist, run_in_subprocess(editor_builders.make_doc_translations_header))
+ env.Depends("#editor/doc_translations.gen.h", tlist)
+ env.CommandNoCache(
+ "#editor/doc_translations.gen.h", tlist, run_in_subprocess(editor_builders.make_doc_translations_header)
+ )
# Fonts
flist = glob.glob(path + "/../thirdparty/fonts/*.ttf")
flist.extend(glob.glob(path + "/../thirdparty/fonts/*.otf"))
flist.sort()
- env.Depends('#editor/builtin_fonts.gen.h', flist)
- env.CommandNoCache('#editor/builtin_fonts.gen.h', flist, run_in_subprocess(editor_builders.make_fonts_header))
+ env.Depends("#editor/builtin_fonts.gen.h", flist)
+ env.CommandNoCache("#editor/builtin_fonts.gen.h", flist, run_in_subprocess(editor_builders.make_fonts_header))
env.add_source_files(env.editor_sources, "*.cpp")
- SConscript('debugger/SCsub')
- SConscript('fileserver/SCsub')
- SConscript('icons/SCsub')
- SConscript('import/SCsub')
- SConscript('plugins/SCsub')
+ SConscript("debugger/SCsub")
+ SConscript("fileserver/SCsub")
+ SConscript("icons/SCsub")
+ SConscript("import/SCsub")
+ SConscript("plugins/SCsub")
lib = env.add_library("editor", env.editor_sources)
env.Prepend(LIBS=[lib])
diff --git a/editor/debugger/SCsub b/editor/debugger/SCsub
index 2b1e889fb03..359d04e5df2 100644
--- a/editor/debugger/SCsub
+++ b/editor/debugger/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.editor_sources, "*.cpp")
diff --git a/editor/editor_builders.py b/editor/editor_builders.py
index 44c3e50dfc8..0c9cf91b2f1 100644
--- a/editor/editor_builders.py
+++ b/editor/editor_builders.py
@@ -25,6 +25,7 @@ def make_doc_header(target, source, env):
buf = (docbegin + buf + docend).encode("utf-8")
decomp_size = len(buf)
import zlib
+
buf = zlib.compress(buf)
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
@@ -55,7 +56,7 @@ def make_fonts_header(target, source, env):
# saving uncompressed, since freetype will reference from memory pointer
xl_names = []
for i in range(len(source)):
- with open(source[i], "rb")as f:
+ with open(source[i], "rb") as f:
buf = f.read()
name = os.path.splitext(os.path.basename(source[i]))[0]
@@ -111,7 +112,9 @@ def make_translations_header(target, source, env, category):
g.write("};\n\n")
g.write("static {}TranslationList _{}_translations[] = {{\n".format(category.capitalize(), category))
for x in xl_names:
- g.write("\t{{ \"{}\", {}, {}, _{}_translation_{}_compressed }},\n".format(x[0], str(x[1]), str(x[2]), category, x[0]))
+ g.write(
+ '\t{{ "{}", {}, {}, _{}_translation_{}_compressed }},\n'.format(x[0], str(x[1]), str(x[2]), category, x[0])
+ )
g.write("\t{NULL, 0, 0, NULL}\n")
g.write("};\n")
@@ -128,5 +131,5 @@ def make_doc_translations_header(target, source, env):
make_translations_header(target, source, env, "doc")
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/editor/fileserver/SCsub b/editor/fileserver/SCsub
index 2b1e889fb03..359d04e5df2 100644
--- a/editor/fileserver/SCsub
+++ b/editor/fileserver/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.editor_sources, "*.cpp")
diff --git a/editor/icons/SCsub b/editor/icons/SCsub
index b39c74c66ae..f0d51999f07 100644
--- a/editor/icons/SCsub
+++ b/editor/icons/SCsub
@@ -1,21 +1,21 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
from platform_methods import run_in_subprocess
import editor_icons_builders
-make_editor_icons_builder = Builder(action=run_in_subprocess(editor_icons_builders.make_editor_icons_action),
- suffix='.h',
- src_suffix='.svg')
+make_editor_icons_builder = Builder(
+ action=run_in_subprocess(editor_icons_builders.make_editor_icons_action), suffix=".h", src_suffix=".svg"
+)
-env['BUILDERS']['MakeEditorIconsBuilder'] = make_editor_icons_builder
+env["BUILDERS"]["MakeEditorIconsBuilder"] = make_editor_icons_builder
# Editor's own icons
icon_sources = Glob("*.svg")
# Module icons
for module_icons in env.module_icons_paths:
- icon_sources += Glob('#' + module_icons + "/*.svg")
+ icon_sources += Glob("#" + module_icons + "/*.svg")
-env.Alias('editor_icons', [env.MakeEditorIconsBuilder('#editor/editor_icons.gen.h', icon_sources)])
+env.Alias("editor_icons", [env.MakeEditorIconsBuilder("#editor/editor_icons.gen.h", icon_sources)])
diff --git a/editor/icons/editor_icons_builders.py b/editor/icons/editor_icons_builders.py
index a00f21c265b..d7145abe500 100644
--- a/editor/icons/editor_icons_builders.py
+++ b/editor/icons/editor_icons_builders.py
@@ -22,16 +22,16 @@ def make_editor_icons_action(target, source, env):
icons_string.write('\t"')
- with open(fname, 'rb') as svgf:
+ with open(fname, "rb") as svgf:
b = svgf.read(1)
- while(len(b) == 1):
+ while len(b) == 1:
icons_string.write("\\" + str(hex(ord(b)))[1:])
b = svgf.read(1)
icons_string.write('"')
if fname != svg_icons[-1]:
icons_string.write(",")
- icons_string.write('\n')
+ icons_string.write("\n")
s = StringIO()
s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
@@ -40,7 +40,7 @@ def make_editor_icons_action(target, source, env):
s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
s.write("static const char *editor_icons_sources[] = {\n")
s.write(icons_string.getvalue())
- s.write('};\n\n')
+ s.write("};\n\n")
s.write("static const char *editor_icons_names[] = {\n")
# this is used to store the indices of thumbnail icons
@@ -63,11 +63,11 @@ def make_editor_icons_action(target, source, env):
if fname != svg_icons[-1]:
s.write(",")
- s.write('\n')
+ s.write("\n")
index += 1
- s.write('};\n')
+ s.write("};\n")
if thumb_medium_indices:
s.write("\n\n")
@@ -91,5 +91,5 @@ def make_editor_icons_action(target, source, env):
icons_string.close()
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/editor/import/SCsub b/editor/import/SCsub
index 2b1e889fb03..359d04e5df2 100644
--- a/editor/import/SCsub
+++ b/editor/import/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.editor_sources, "*.cpp")
diff --git a/editor/plugins/SCsub b/editor/plugins/SCsub
index 2b1e889fb03..359d04e5df2 100644
--- a/editor/plugins/SCsub
+++ b/editor/plugins/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.editor_sources, "*.cpp")
diff --git a/editor/translations/extract.py b/editor/translations/extract.py
index 10859c7c90c..749bad5fff6 100755
--- a/editor/translations/extract.py
+++ b/editor/translations/extract.py
@@ -10,23 +10,23 @@ import sys
line_nb = False
for arg in sys.argv[1:]:
- if (arg == "--with-line-nb"):
+ if arg == "--with-line-nb":
print("Enabling line numbers in the context locations.")
line_nb = True
else:
os.sys.exit("Non supported argument '" + arg + "'. Aborting.")
-if (not os.path.exists("editor")):
+if not os.path.exists("editor"):
os.sys.exit("ERROR: This script should be started from the root of the git repo.")
matches = []
-for root, dirnames, filenames in os.walk('.'):
+for root, dirnames, filenames in os.walk("."):
dirnames[:] = [d for d in dirnames if d not in ["thirdparty"]]
- for filename in fnmatch.filter(filenames, '*.cpp'):
+ for filename in fnmatch.filter(filenames, "*.cpp"):
matches.append(os.path.join(root, filename))
- for filename in fnmatch.filter(filenames, '*.h'):
+ for filename in fnmatch.filter(filenames, "*.h"):
matches.append(os.path.join(root, filename))
matches.sort()
@@ -51,52 +51,54 @@ msgstr ""
"Content-Transfer-Encoding: 8-bit\\n"
"""
+
def process_file(f, fname):
global main_po, unique_str, unique_loc
l = f.readline()
lc = 1
- while (l):
+ while l:
- patterns = ['RTR(\"', 'TTR(\"', 'TTRC(\"']
+ patterns = ['RTR("', 'TTR("', 'TTRC("']
idx = 0
pos = 0
- while (pos >= 0):
+ while pos >= 0:
pos = l.find(patterns[idx], pos)
- if (pos == -1):
- if (idx < len(patterns) - 1):
+ if pos == -1:
+ if idx < len(patterns) - 1:
idx += 1
pos = 0
continue
pos += len(patterns[idx])
msg = ""
- while (pos < len(l) and (l[pos] != '"' or l[pos - 1] == '\\')):
+ while pos < len(l) and (l[pos] != '"' or l[pos - 1] == "\\"):
msg += l[pos]
pos += 1
- location = os.path.relpath(fname).replace('\\', '/')
- if (line_nb):
+ location = os.path.relpath(fname).replace("\\", "/")
+ if line_nb:
location += ":" + str(lc)
- if (not msg in unique_str):
+ if not msg in unique_str:
main_po += "\n#: " + location + "\n"
main_po += 'msgid "' + msg + '"\n'
main_po += 'msgstr ""\n'
unique_str.append(msg)
unique_loc[msg] = [location]
- elif (not location in unique_loc[msg]):
+ elif not location in unique_loc[msg]:
# Add additional location to previous occurrence too
msg_pos = main_po.find('\nmsgid "' + msg + '"')
- if (msg_pos == -1):
+ if msg_pos == -1:
print("Someone apparently thought writing Python was as easy as GDScript. Ping Akien.")
- main_po = main_po[:msg_pos] + ' ' + location + main_po[msg_pos:]
+ main_po = main_po[:msg_pos] + " " + location + main_po[msg_pos:]
unique_loc[msg].append(location)
l = f.readline()
lc += 1
+
print("Updating the editor.pot template...")
for fname in matches:
@@ -106,7 +108,7 @@ for fname in matches:
with open("editor.pot", "w") as f:
f.write(main_po)
-if (os.name == "posix"):
+if os.name == "posix":
print("Wrapping template at 79 characters for compatibility with Weblate.")
os.system("msgmerge -w79 editor.pot editor.pot > editor.pot.wrap")
shutil.move("editor.pot.wrap", "editor.pot")
@@ -114,7 +116,7 @@ if (os.name == "posix"):
shutil.move("editor.pot", "editor/translations/editor.pot")
# TODO: Make that in a portable way, if we care; if not, kudos to Unix users
-if (os.name == "posix"):
+if os.name == "posix":
added = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \+msgid | wc -l", shell=True)
removed = subprocess.check_output(r"git diff editor/translations/editor.pot | grep \\\-msgid | wc -l", shell=True)
print("\n# Template changes compared to the staged status:")
diff --git a/gles_builders.py b/gles_builders.py
index e4e5fafc85f..e8928728fa7 100644
--- a/gles_builders.py
+++ b/gles_builders.py
@@ -7,7 +7,6 @@ from platform_methods import subprocess_main
class LegacyGLHeaderStruct:
-
def __init__(self):
self.vertex_lines = []
self.fragment_lines = []
@@ -73,7 +72,7 @@ def include_file_in_legacygl_header(filename, header_data, depth):
ifdefline = line.replace("#ifdef ", "").strip()
if line.find("_EN_") != -1:
- enumbase = ifdefline[:ifdefline.find("_EN_")]
+ enumbase = ifdefline[: ifdefline.find("_EN_")]
ifdefline = ifdefline.replace("_EN_", "_")
line = line.replace("_EN_", "_")
if enumbase not in header_data.enums:
@@ -86,12 +85,12 @@ def include_file_in_legacygl_header(filename, header_data, depth):
if line.find("uniform") != -1 and line.lower().find("texunit:") != -1:
# texture unit
- texunitstr = line[line.find(":") + 1:].strip()
+ texunitstr = line[line.find(":") + 1 :].strip()
if texunitstr == "auto":
texunit = "-1"
else:
texunit = str(int(texunitstr))
- uline = line[:line.lower().find("//")]
+ uline = line[: line.lower().find("//")]
uline = uline.replace("uniform", "")
uline = uline.replace("highp", "")
uline = uline.replace(";", "")
@@ -99,10 +98,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
for x in lines:
x = x.strip()
- x = x[x.rfind(" ") + 1:]
+ x = x[x.rfind(" ") + 1 :]
if x.find("[") != -1:
                    # uniform array
- x = x[:x.find("[")]
+ x = x[: x.find("[")]
if not x in header_data.texunit_names:
header_data.texunits += [(x, texunit)]
@@ -110,10 +109,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
elif line.find("uniform") != -1 and line.lower().find("ubo:") != -1:
# uniform buffer object
- ubostr = line[line.find(":") + 1:].strip()
+ ubostr = line[line.find(":") + 1 :].strip()
ubo = str(int(ubostr))
- uline = line[:line.lower().find("//")]
- uline = uline[uline.find("uniform") + len("uniform"):]
+ uline = line[: line.lower().find("//")]
+ uline = uline[uline.find("uniform") + len("uniform") :]
uline = uline.replace("highp", "")
uline = uline.replace(";", "")
uline = uline.replace("{", "").strip()
@@ -121,10 +120,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
for x in lines:
x = x.strip()
- x = x[x.rfind(" ") + 1:]
+ x = x[x.rfind(" ") + 1 :]
if x.find("[") != -1:
                    # uniform array
- x = x[:x.find("[")]
+ x = x[: x.find("[")]
if not x in header_data.ubo_names:
header_data.ubos += [(x, ubo)]
@@ -137,10 +136,10 @@ def include_file_in_legacygl_header(filename, header_data, depth):
for x in lines:
x = x.strip()
- x = x[x.rfind(" ") + 1:]
+ x = x[x.rfind(" ") + 1 :]
if x.find("[") != -1:
                    # uniform array
- x = x[:x.find("[")]
+ x = x[: x.find("[")]
if not x in header_data.uniforms:
header_data.uniforms += [x]
@@ -150,7 +149,7 @@ def include_file_in_legacygl_header(filename, header_data, depth):
uline = uline.replace("attribute ", "")
uline = uline.replace("highp ", "")
uline = uline.replace(";", "")
- uline = uline[uline.find(" "):].strip()
+ uline = uline[uline.find(" ") :].strip()
if uline.find("//") != -1:
name, bind = uline.split("//")
@@ -163,7 +162,7 @@ def include_file_in_legacygl_header(filename, header_data, depth):
uline = line.replace("out ", "")
uline = uline.replace("highp ", "")
uline = uline.replace(";", "")
- uline = uline[uline.find(" "):].strip()
+ uline = uline[uline.find(" ") :].strip()
if uline.find("//") != -1:
name, bind = uline.split("//")
@@ -200,17 +199,19 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
out_file_base = out_file
- out_file_base = out_file_base[out_file_base.rfind("/") + 1:]
- out_file_base = out_file_base[out_file_base.rfind("\\") + 1:]
+ out_file_base = out_file_base[out_file_base.rfind("/") + 1 :]
+ out_file_base = out_file_base[out_file_base.rfind("\\") + 1 :]
out_file_ifdef = out_file_base.replace(".", "_").upper()
fd.write("#ifndef " + out_file_ifdef + class_suffix + "_120\n")
fd.write("#define " + out_file_ifdef + class_suffix + "_120\n")
- out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix
+ out_file_class = (
+ out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "Shader" + class_suffix
+ )
fd.write("\n\n")
- fd.write("#include \"" + include + "\"\n\n\n")
+ fd.write('#include "' + include + '"\n\n\n')
fd.write("class " + out_file_class + " : public Shader" + class_suffix + " {\n\n")
- fd.write("\t virtual String get_shader_name() const { return \"" + out_file_class + "\"; }\n")
+ fd.write('\t virtual String get_shader_name() const { return "' + out_file_class + '"; }\n')
fd.write("public:\n\n")
@@ -228,29 +229,64 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n")
if header_data.conditionals:
- fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n")
+ fd.write(
+ "\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n"
+ )
fd.write("\t#ifdef DEBUG_ENABLED\n ")
- fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; if (!is_version_valid()) return; ERR_FAIL_COND( get_active()!=this ); \n\n ")
+ fd.write(
+ "\t#define _FU if (get_uniform(p_uniform)<0) return; if (!is_version_valid()) return; ERR_FAIL_COND( get_active()!=this ); \n\n "
+ )
fd.write("\t#else\n ")
fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; \n\n ")
fd.write("\t#endif\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n")
- fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n")
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int8_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int16_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, uint32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, int32_t p_value) { _FU glUniform1i(get_uniform(p_uniform),p_value); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Color& p_color) { _FU GLfloat col[4]={p_color.r,p_color.g,p_color.b,p_color.a}; glUniform4fv(get_uniform(p_uniform),1,col); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector2& p_vec2) { _FU GLfloat vec2[2]={p_vec2.x,p_vec2.y}; glUniform2fv(get_uniform(p_uniform),1,vec2); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Size2i& p_vec2) { _FU GLint vec2[2]={p_vec2.x,p_vec2.y}; glUniform2iv(get_uniform(p_uniform),1,vec2); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Vector3& p_vec3) { _FU GLfloat vec3[3]={p_vec3.x,p_vec3.y,p_vec3.z}; glUniform3fv(get_uniform(p_uniform),1,vec3); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n"
+ )
+ fd.write(
+ "\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n"
+ )
- fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) { _FU
+ fd.write(
+ """\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) { _FU
const Transform &tr = p_transform;
@@ -276,9 +312,11 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
}
- """)
+ """
+ )
- fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) { _FU
+ fd.write(
+ """_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform2D& p_transform) { _FU
const Transform2D &tr = p_transform;
@@ -304,9 +342,11 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
}
- """)
+ """
+ )
- fd.write("""_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) { _FU
+ fd.write(
+ """_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) { _FU
GLfloat matrix[16];
@@ -320,7 +360,8 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix);
}
- """)
+ """
+ )
fd.write("\n\n#undef _FU\n\n\n")
@@ -340,21 +381,25 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
x = header_data.enums[xv]
bits = 1
amt = len(x)
- while (2 ** bits < amt):
+ while 2 ** bits < amt:
bits += 1
strs = "{"
for i in range(amt):
- strs += "\"#define " + x[i] + "\\n\","
+ strs += '"#define ' + x[i] + '\\n",'
c = {}
c["set_mask"] = "uint64_t(" + str(i) + ")<<" + str(bitofs)
- c["clear_mask"] = "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")"
+ c["clear_mask"] = (
+ "((uint64_t(1)<<40)-1) ^ (((uint64_t(1)<<" + str(bits) + ") - 1)<<" + str(bitofs) + ")"
+ )
enum_vals.append(c)
enum_constants.append(x[i])
strs += "NULL}"
- fd.write("\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n")
+ fd.write(
+ "\t\t\t{(uint64_t(1<<" + str(bits) + ")-1)<<" + str(bitofs) + "," + str(bitofs) + "," + strs + "},\n"
+ )
bitofs += bits
fd.write("\t\t};\n\n")
@@ -373,7 +418,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t\tstatic const char* _conditional_strings[]={\n")
if header_data.conditionals:
for x in header_data.conditionals:
- fd.write("\t\t\t\"#define " + x + "\\n\",\n")
+ fd.write('\t\t\t"#define ' + x + '\\n",\n')
conditionals_found.append(x)
fd.write("\t\t};\n\n")
else:
@@ -384,7 +429,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t\tstatic const char* _uniform_strings[]={\n")
if header_data.uniforms:
for x in header_data.uniforms:
- fd.write("\t\t\t\"" + x + "\",\n")
+ fd.write('\t\t\t"' + x + '",\n')
fd.write("\t\t};\n\n")
else:
fd.write("\t\tstatic const char **_uniform_strings=NULL;\n")
@@ -394,7 +439,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n")
for x in header_data.attributes:
- fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
+ fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
fd.write("\t\t};\n\n")
else:
fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n")
@@ -408,9 +453,9 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
name = x[0]
cond = x[1]
if cond in conditionals_found:
- fd.write("\t\t\t{\"" + name + "\"," + str(conditionals_found.index(cond)) + "},\n")
+ fd.write('\t\t\t{"' + name + '",' + str(conditionals_found.index(cond)) + "},\n")
else:
- fd.write("\t\t\t{\"" + name + "\",-1},\n")
+ fd.write('\t\t\t{"' + name + '",-1},\n')
feedback_count += 1
@@ -424,7 +469,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
if header_data.texunits:
fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n")
for x in header_data.texunits:
- fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
+ fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
fd.write("\t\t};\n\n")
else:
fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n")
@@ -432,7 +477,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
if not gles2 and header_data.ubos:
fd.write("\t\tstatic UBOPair _ubo_pairs[]={\n")
for x in header_data.ubos:
- fd.write("\t\t\t{\"" + x[0] + "\"," + x[1] + "},\n")
+ fd.write('\t\t\t{"' + x[0] + '",' + x[1] + "},\n")
fd.write("\t\t};\n\n")
else:
if gles2:
@@ -445,7 +490,7 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
for c in x:
fd.write(str(ord(c)) + ",")
- fd.write(str(ord('\n')) + ",")
+ fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n")
fd.write("\t\tstatic const int _vertex_code_start=" + str(header_data.vertex_offset) + ";\n")
@@ -455,28 +500,73 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
for c in x:
fd.write(str(ord(c)) + ",")
- fd.write(str(ord('\n')) + ",")
+ fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n")
fd.write("\t\tstatic const int _fragment_code_start=" + str(header_data.fragment_offset) + ";\n")
if output_attribs:
if gles2:
- fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str(
- len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+ fd.write(
+ "\t\tsetup(_conditional_strings,"
+ + str(len(header_data.conditionals))
+ + ",_uniform_strings,"
+ + str(len(header_data.uniforms))
+ + ",_attribute_pairs,"
+ + str(len(header_data.attributes))
+ + ", _texunit_pairs,"
+ + str(len(header_data.texunits))
+ + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
+ )
else:
- fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_attribute_pairs," + str(
- len(header_data.attributes)) + ", _texunit_pairs," + str(len(header_data.texunits)) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str(
- feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+ fd.write(
+ "\t\tsetup(_conditional_strings,"
+ + str(len(header_data.conditionals))
+ + ",_uniform_strings,"
+ + str(len(header_data.uniforms))
+ + ",_attribute_pairs,"
+ + str(len(header_data.attributes))
+ + ", _texunit_pairs,"
+ + str(len(header_data.texunits))
+ + ",_ubo_pairs,"
+ + str(len(header_data.ubos))
+ + ",_feedbacks,"
+ + str(feedback_count)
+ + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
+ )
else:
if gles2:
- fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str(
- len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(
- enum_value_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+ fd.write(
+ "\t\tsetup(_conditional_strings,"
+ + str(len(header_data.conditionals))
+ + ",_uniform_strings,"
+ + str(len(header_data.uniforms))
+ + ",_texunit_pairs,"
+ + str(len(header_data.texunits))
+ + ",_enums,"
+ + str(len(header_data.enums))
+ + ",_enum_values,"
+ + str(enum_value_count)
+ + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
+ )
else:
- fd.write("\t\tsetup(_conditional_strings," + str(len(header_data.conditionals)) + ",_uniform_strings," + str(len(header_data.uniforms)) + ",_texunit_pairs," + str(
- len(header_data.texunits)) + ",_enums," + str(len(header_data.enums)) + ",_enum_values," + str(enum_value_count) + ",_ubo_pairs," + str(len(header_data.ubos)) + ",_feedbacks," + str(
- feedback_count) + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n")
+ fd.write(
+ "\t\tsetup(_conditional_strings,"
+ + str(len(header_data.conditionals))
+ + ",_uniform_strings,"
+ + str(len(header_data.uniforms))
+ + ",_texunit_pairs,"
+ + str(len(header_data.texunits))
+ + ",_enums,"
+ + str(len(header_data.enums))
+ + ",_enum_values,"
+ + str(enum_value_count)
+ + ",_ubo_pairs,"
+ + str(len(header_data.ubos))
+ + ",_feedbacks,"
+ + str(feedback_count)
+ + ",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n"
+ )
fd.write("\t}\n\n")
@@ -495,12 +585,12 @@ def build_legacygl_header(filename, include, class_suffix, output_attribs, gles2
def build_gles2_headers(target, source, env):
for x in source:
- build_legacygl_header(str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True)
-
+ build_legacygl_header(
+ str(x), include="drivers/gles2/shader_gles2.h", class_suffix="GLES2", output_attribs=True, gles2=True
+ )
class RDHeaderStruct:
-
def __init__(self):
self.vertex_lines = []
self.fragment_lines = []
@@ -594,30 +684,30 @@ def build_rd_header(filename):
fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n")
out_file_base = out_file
- out_file_base = out_file_base[out_file_base.rfind("/") + 1:]
- out_file_base = out_file_base[out_file_base.rfind("\\") + 1:]
+ out_file_base = out_file_base[out_file_base.rfind("/") + 1 :]
+ out_file_base = out_file_base[out_file_base.rfind("\\") + 1 :]
out_file_ifdef = out_file_base.replace(".", "_").upper()
fd.write("#ifndef " + out_file_ifdef + "_RD\n")
fd.write("#define " + out_file_ifdef + "_RD\n")
out_file_class = out_file_base.replace(".glsl.gen.h", "").title().replace("_", "").replace(".", "") + "ShaderRD"
fd.write("\n")
- fd.write("#include \"servers/rendering/rasterizer_rd/shader_rd.h\"\n\n")
+ fd.write('#include "servers/rendering/rasterizer_rd/shader_rd.h"\n\n')
fd.write("class " + out_file_class + " : public ShaderRD {\n\n")
fd.write("public:\n\n")
fd.write("\t" + out_file_class + "() {\n\n")
- if (len(header_data.compute_lines)):
+ if len(header_data.compute_lines):
fd.write("\t\tstatic const char _compute_code[] = {\n")
for x in header_data.compute_lines:
for c in x:
fd.write(str(ord(c)) + ",")
- fd.write(str(ord('\n')) + ",")
+ fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n")
- fd.write("\t\tsetup(nullptr, nullptr, _compute_code, \"" + out_file_class + "\");\n")
+ fd.write('\t\tsetup(nullptr, nullptr, _compute_code, "' + out_file_class + '");\n')
fd.write("\t}\n")
else:
@@ -626,17 +716,17 @@ def build_rd_header(filename):
for x in header_data.vertex_lines:
for c in x:
fd.write(str(ord(c)) + ",")
- fd.write(str(ord('\n')) + ",")
+ fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n")
-
+
fd.write("\t\tstatic const char _fragment_code[]={\n")
for x in header_data.fragment_lines:
for c in x:
fd.write(str(ord(c)) + ",")
- fd.write(str(ord('\n')) + ",")
+ fd.write(str(ord("\n")) + ",")
fd.write("\t\t0};\n\n")
- fd.write("\t\tsetup(_vertex_code, _fragment_code, nullptr, \"" + out_file_class + "\");\n")
+ fd.write('\t\tsetup(_vertex_code, _fragment_code, nullptr, "' + out_file_class + '");\n')
fd.write("\t}\n")
fd.write("};\n\n")
@@ -650,5 +740,5 @@ def build_rd_headers(target, source, env):
build_rd_header(str(x))
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/main/SCsub b/main/SCsub
index 0262701d790..7a301b82bc5 100644
--- a/main/SCsub
+++ b/main/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
from platform_methods import run_in_subprocess
import main_builders
@@ -14,13 +14,15 @@ env.Depends("#main/splash.gen.h", "#main/splash.png")
env.CommandNoCache("#main/splash.gen.h", "#main/splash.png", run_in_subprocess(main_builders.make_splash))
env.Depends("#main/splash_editor.gen.h", "#main/splash_editor.png")
-env.CommandNoCache("#main/splash_editor.gen.h", "#main/splash_editor.png", run_in_subprocess(main_builders.make_splash_editor))
+env.CommandNoCache(
+ "#main/splash_editor.gen.h", "#main/splash_editor.png", run_in_subprocess(main_builders.make_splash_editor)
+)
env.Depends("#main/app_icon.gen.h", "#main/app_icon.png")
env.CommandNoCache("#main/app_icon.gen.h", "#main/app_icon.png", run_in_subprocess(main_builders.make_app_icon))
if env["tools"]:
- SConscript('tests/SCsub')
+ SConscript("tests/SCsub")
lib = env.add_library("main", env.main_sources)
env.Prepend(LIBS=[lib])
diff --git a/main/main_builders.py b/main/main_builders.py
index aebac2b0228..2ea774e3b4a 100644
--- a/main/main_builders.py
+++ b/main/main_builders.py
@@ -18,7 +18,7 @@ def make_splash(target, source, env):
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
g.write("#ifndef BOOT_SPLASH_H\n")
g.write("#define BOOT_SPLASH_H\n")
- g.write('static const Color boot_splash_bg_color = Color(0.14, 0.14, 0.14);\n')
+ g.write("static const Color boot_splash_bg_color = Color(0.14, 0.14, 0.14);\n")
g.write("static const unsigned char boot_splash_png[] = {\n")
for i in range(len(buf)):
g.write(str(buf[i]) + ",\n")
@@ -37,7 +37,7 @@ def make_splash_editor(target, source, env):
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
g.write("#ifndef BOOT_SPLASH_EDITOR_H\n")
g.write("#define BOOT_SPLASH_EDITOR_H\n")
- g.write('static const Color boot_splash_editor_bg_color = Color(0.14, 0.14, 0.14);\n')
+ g.write("static const Color boot_splash_editor_bg_color = Color(0.14, 0.14, 0.14);\n")
g.write("static const unsigned char boot_splash_editor_png[] = {\n")
for i in range(len(buf)):
g.write(str(buf[i]) + ",\n")
@@ -63,5 +63,5 @@ def make_app_icon(target, source, env):
g.write("#endif")
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/main/tests/SCsub b/main/tests/SCsub
index 437d9ed7779..cb1d35b12f0 100644
--- a/main/tests/SCsub
+++ b/main/tests/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/python
-Import('env')
+Import("env")
env.tests_sources = []
env.add_source_files(env.tests_sources, "*.cpp")
diff --git a/methods.py b/methods.py
index dc82c973618..89a229e4abb 100644
--- a/methods.py
+++ b/methods.py
@@ -8,13 +8,13 @@ def add_source_files(self, sources, files, warn_duplicates=True):
# Convert string to list of absolute paths (including expanding wildcard)
if isinstance(files, (str, bytes)):
        # Keep SCons project-absolute paths as they are (no wildcard support)
- if files.startswith('#'):
- if '*' in files:
+ if files.startswith("#"):
+ if "*" in files:
print("ERROR: Wildcards can't be expanded in SCons project-absolute path: '{}'".format(files))
return
files = [files]
else:
- dir_path = self.Dir('.').abspath
+ dir_path = self.Dir(".").abspath
files = sorted(glob.glob(dir_path + "/" + files))
# Add each path as compiled Object following environment (self) configuration
@@ -22,7 +22,7 @@ def add_source_files(self, sources, files, warn_duplicates=True):
obj = self.Object(path)
if obj in sources:
if warn_duplicates:
- print("WARNING: Object \"{}\" already included in environment sources.".format(obj))
+ print('WARNING: Object "{}" already included in environment sources.'.format(obj))
else:
continue
sources.append(obj)
@@ -33,20 +33,20 @@ def disable_warnings(self):
if self.msvc:
# We have to remove existing warning level defines before appending /w,
# otherwise we get: "warning D9025 : overriding '/W3' with '/w'"
- warn_flags = ['/Wall', '/W4', '/W3', '/W2', '/W1', '/WX']
- self.Append(CCFLAGS=['/w'])
- self.Append(CFLAGS=['/w'])
- self.Append(CXXFLAGS=['/w'])
- self['CCFLAGS'] = [x for x in self['CCFLAGS'] if not x in warn_flags]
- self['CFLAGS'] = [x for x in self['CFLAGS'] if not x in warn_flags]
- self['CXXFLAGS'] = [x for x in self['CXXFLAGS'] if not x in warn_flags]
+ warn_flags = ["/Wall", "/W4", "/W3", "/W2", "/W1", "/WX"]
+ self.Append(CCFLAGS=["/w"])
+ self.Append(CFLAGS=["/w"])
+ self.Append(CXXFLAGS=["/w"])
+ self["CCFLAGS"] = [x for x in self["CCFLAGS"] if not x in warn_flags]
+ self["CFLAGS"] = [x for x in self["CFLAGS"] if not x in warn_flags]
+ self["CXXFLAGS"] = [x for x in self["CXXFLAGS"] if not x in warn_flags]
else:
- self.Append(CCFLAGS=['-w'])
- self.Append(CFLAGS=['-w'])
- self.Append(CXXFLAGS=['-w'])
+ self.Append(CCFLAGS=["-w"])
+ self.Append(CFLAGS=["-w"])
+ self.Append(CXXFLAGS=["-w"])
-def add_module_version_string(self,s):
+def add_module_version_string(self, s):
self.module_version_string += "." + s
@@ -64,16 +64,16 @@ def update_version(module_version_string=""):
f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
f.write("#ifndef VERSION_GENERATED_GEN_H\n")
f.write("#define VERSION_GENERATED_GEN_H\n")
- f.write("#define VERSION_SHORT_NAME \"" + str(version.short_name) + "\"\n")
- f.write("#define VERSION_NAME \"" + str(version.name) + "\"\n")
+ f.write('#define VERSION_SHORT_NAME "' + str(version.short_name) + '"\n')
+ f.write('#define VERSION_NAME "' + str(version.name) + '"\n')
f.write("#define VERSION_MAJOR " + str(version.major) + "\n")
f.write("#define VERSION_MINOR " + str(version.minor) + "\n")
f.write("#define VERSION_PATCH " + str(version.patch) + "\n")
- f.write("#define VERSION_STATUS \"" + str(version.status) + "\"\n")
- f.write("#define VERSION_BUILD \"" + str(build_name) + "\"\n")
- f.write("#define VERSION_MODULE_CONFIG \"" + str(version.module_config) + module_version_string + "\"\n")
+ f.write('#define VERSION_STATUS "' + str(version.status) + '"\n')
+ f.write('#define VERSION_BUILD "' + str(build_name) + '"\n')
+ f.write('#define VERSION_MODULE_CONFIG "' + str(version.module_config) + module_version_string + '"\n')
f.write("#define VERSION_YEAR " + str(version.year) + "\n")
- f.write("#define VERSION_WEBSITE \"" + str(version.website) + "\"\n")
+ f.write('#define VERSION_WEBSITE "' + str(version.website) + '"\n')
f.write("#endif // VERSION_GENERATED_GEN_H\n")
f.close()
@@ -99,7 +99,7 @@ def update_version(module_version_string=""):
else:
githash = head
- fhash.write("#define VERSION_HASH \"" + githash + "\"\n")
+ fhash.write('#define VERSION_HASH "' + githash + '"\n')
fhash.write("#endif // VERSION_HASH_GEN_H\n")
fhash.close()
@@ -158,17 +158,17 @@ def detect_modules():
try:
with open("modules/" + x + "/register_types.h"):
includes_cpp += '#include "modules/' + x + '/register_types.h"\n'
- register_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n'
- register_cpp += '\tregister_' + x + '_types();\n'
- register_cpp += '#endif\n'
- preregister_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n'
- preregister_cpp += '#ifdef MODULE_' + x.upper() + '_HAS_PREREGISTER\n'
- preregister_cpp += '\tpreregister_' + x + '_types();\n'
- preregister_cpp += '#endif\n'
- preregister_cpp += '#endif\n'
- unregister_cpp += '#ifdef MODULE_' + x.upper() + '_ENABLED\n'
- unregister_cpp += '\tunregister_' + x + '_types();\n'
- unregister_cpp += '#endif\n'
+ register_cpp += "#ifdef MODULE_" + x.upper() + "_ENABLED\n"
+ register_cpp += "\tregister_" + x + "_types();\n"
+ register_cpp += "#endif\n"
+ preregister_cpp += "#ifdef MODULE_" + x.upper() + "_ENABLED\n"
+ preregister_cpp += "#ifdef MODULE_" + x.upper() + "_HAS_PREREGISTER\n"
+ preregister_cpp += "\tpreregister_" + x + "_types();\n"
+ preregister_cpp += "#endif\n"
+ preregister_cpp += "#endif\n"
+ unregister_cpp += "#ifdef MODULE_" + x.upper() + "_ENABLED\n"
+ unregister_cpp += "\tunregister_" + x + "_types();\n"
+ unregister_cpp += "#endif\n"
except IOError:
pass
@@ -191,7 +191,12 @@ void register_module_types() {
void unregister_module_types() {
%s
}
-""" % (includes_cpp, preregister_cpp, register_cpp, unregister_cpp)
+""" % (
+ includes_cpp,
+ preregister_cpp,
+ register_cpp,
+ unregister_cpp,
+ )
# NOTE: It is safe to generate this file here, since this is still executed serially
with open("modules/register_module_types.gen.cpp", "w") as f:
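# NOTE: Illustrative sketch, not part of the patch — how detect_modules()
# above assembles the per-module snippets that end up in
# register_module_types.gen.cpp. The module names here are hypothetical.
modules = ["bmp", "csg"]  # hypothetical module list
includes_cpp = ""
register_cpp = ""
for x in modules:
    includes_cpp += '#include "modules/' + x + '/register_types.h"\n'
    register_cpp += "#ifdef MODULE_" + x.upper() + "_ENABLED\n"
    register_cpp += "\tregister_" + x + "_types();\n"
    register_cpp += "#endif\n"
print(includes_cpp + register_cpp)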
@@ -206,7 +211,7 @@ def disable_module(self):
def use_windows_spawn_fix(self, platform=None):
- if (os.name != "nt"):
+ if os.name != "nt":
return # not needed, only for windows
# On Windows, due to the limited command line length, when creating a static library
@@ -217,14 +222,21 @@ def use_windows_spawn_fix(self, platform=None):
# got built correctly regardless the invocation strategy.
# Furthermore, since SCons will rebuild the library from scratch when an object file
# changes, no multiple versions of the same object file will be present.
- self.Replace(ARFLAGS='q')
+ self.Replace(ARFLAGS="q")
def mySubProcess(cmdline, env):
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env)
+ proc = subprocess.Popen(
+ cmdline,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ startupinfo=startupinfo,
+ shell=False,
+ env=env,
+ )
_, err = proc.communicate()
rv = proc.wait()
if rv:
@@ -235,7 +247,7 @@ def use_windows_spawn_fix(self, platform=None):
def mySpawn(sh, escape, cmd, args, env):
- newargs = ' '.join(args[1:])
+ newargs = " ".join(args[1:])
cmdline = cmd + " " + newargs
rv = 0
@@ -251,15 +263,15 @@ def use_windows_spawn_fix(self, platform=None):
return rv
- self['SPAWN'] = mySpawn
+ self["SPAWN"] = mySpawn
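# NOTE: Illustrative sketch, not part of the patch — the SPAWN override above
# works around the ~32k character limit of a Windows command line. A rough,
# generic illustration of batching long argument lists (hypothetical helper,
# not the code used above):
def batch_args(args, limit=8000):
    batch, length = [], 0
    for a in args:
        if batch and length + len(a) + 1 > limit:
            yield batch
            batch, length = [], 0
        batch.append(a)
        length += len(a) + 1
    if batch:
        yield batch

# e.g.: for chunk in batch_args(["obj%d.o" % i for i in range(10000)]): ...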
def save_active_platforms(apnames, ap):
for x in ap:
- names = ['logo']
+ names = ["logo"]
if os.path.isfile(x + "/run_icon.png"):
- names.append('run_icon')
+ names.append("run_icon")
for name in names:
pngf = open(x + "/" + name + ".png", "rb")
@@ -269,7 +281,7 @@ def save_active_platforms(apnames, ap):
while len(b) == 1:
str += hex(ord(b))
b = pngf.read(1)
- if (len(b) == 1):
+ if len(b) == 1:
str += ","
str += "};\n"
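# NOTE: Illustrative sketch, not part of the patch — save_active_platforms()
# above embeds each platform logo PNG as a byte array in generated C code.
# Equivalent minimal example on an in-memory buffer (array name hypothetical):
data = b"\x89PNG"  # stand-in for pngf.read()
body = ",".join(hex(b) for b in data)
print("{" + body + "};")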
@@ -289,30 +301,46 @@ def no_verbose(sys, env):
# Colors are disabled in non-TTY environments such as pipes. This means
# that if output is redirected to a file, it will not contain color codes
if sys.stdout.isatty():
- colors['cyan'] = '\033[96m'
- colors['purple'] = '\033[95m'
- colors['blue'] = '\033[94m'
- colors['green'] = '\033[92m'
- colors['yellow'] = '\033[93m'
- colors['red'] = '\033[91m'
- colors['end'] = '\033[0m'
+ colors["cyan"] = "\033[96m"
+ colors["purple"] = "\033[95m"
+ colors["blue"] = "\033[94m"
+ colors["green"] = "\033[92m"
+ colors["yellow"] = "\033[93m"
+ colors["red"] = "\033[91m"
+ colors["end"] = "\033[0m"
else:
- colors['cyan'] = ''
- colors['purple'] = ''
- colors['blue'] = ''
- colors['green'] = ''
- colors['yellow'] = ''
- colors['red'] = ''
- colors['end'] = ''
+ colors["cyan"] = ""
+ colors["purple"] = ""
+ colors["blue"] = ""
+ colors["green"] = ""
+ colors["yellow"] = ""
+ colors["red"] = ""
+ colors["end"] = ""
- compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end'])
- java_compile_source_message = '%sCompiling %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end'])
- compile_shared_source_message = '%sCompiling shared %s==> %s$SOURCE%s' % (colors['blue'], colors['purple'], colors['yellow'], colors['end'])
- link_program_message = '%sLinking Program %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end'])
- link_library_message = '%sLinking Static Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end'])
- ranlib_library_message = '%sRanlib Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end'])
- link_shared_library_message = '%sLinking Shared Library %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end'])
- java_library_message = '%sCreating Java Archive %s==> %s$TARGET%s' % (colors['red'], colors['purple'], colors['yellow'], colors['end'])
+ compile_source_message = "{}Compiling {}==> {}$SOURCE{}".format(
+ colors["blue"], colors["purple"], colors["yellow"], colors["end"]
+ )
+ java_compile_source_message = "{}Compiling {}==> {}$SOURCE{}".format(
+ colors["blue"], colors["purple"], colors["yellow"], colors["end"]
+ )
+ compile_shared_source_message = "{}Compiling shared {}==> {}$SOURCE{}".format(
+ colors["blue"], colors["purple"], colors["yellow"], colors["end"]
+ )
+ link_program_message = "{}Linking Program {}==> {}$TARGET{}".format(
+ colors["red"], colors["purple"], colors["yellow"], colors["end"]
+ )
+ link_library_message = "{}Linking Static Library {}==> {}$TARGET{}".format(
+ colors["red"], colors["purple"], colors["yellow"], colors["end"]
+ )
+ ranlib_library_message = "{}Ranlib Library {}==> {}$TARGET{}".format(
+ colors["red"], colors["purple"], colors["yellow"], colors["end"]
+ )
+ link_shared_library_message = "{}Linking Shared Library {}==> {}$TARGET{}".format(
+ colors["red"], colors["purple"], colors["yellow"], colors["end"]
+ )
+ java_library_message = "{}Creating Java Archive {}==> {}$TARGET{}".format(
+ colors["red"], colors["purple"], colors["yellow"], colors["end"]
+ )
env.Append(CXXCOMSTR=[compile_source_message])
env.Append(CCCOMSTR=[compile_source_message])
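# NOTE: Illustrative sketch, not part of the patch — the *COMSTR settings
# above replace SCons' raw command echo with short colored status lines.
# Standalone example of the same ANSI escape codes (TTY only):
import sys

blue, yellow, end = "\033[94m", "\033[93m", "\033[0m"
if sys.stdout.isatty():
    print("{}Compiling ==> {}main.cpp{}".format(blue, yellow, end))
else:
    print("Compiling ==> main.cpp")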
@@ -353,70 +381,79 @@ def detect_visual_c_compiler_version(tools_env):
vc_chosen_compiler_str = ""
# Start with Pre VS 2017 checks which uses VCINSTALLDIR:
- if 'VCINSTALLDIR' in tools_env:
+ if "VCINSTALLDIR" in tools_env:
# print("Checking VCINSTALLDIR")
# find() works with -1 so big ifs below are needed... the simplest solution, in fact
# First test if amd64 and amd64_x86 compilers are present in the path
vc_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64;")
- if(vc_amd64_compiler_detection_index > -1):
+ if vc_amd64_compiler_detection_index > -1:
vc_chosen_compiler_index = vc_amd64_compiler_detection_index
vc_chosen_compiler_str = "amd64"
vc_amd64_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\amd64_x86;")
- if(vc_amd64_x86_compiler_detection_index > -1
- and (vc_chosen_compiler_index == -1
- or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index)):
+ if vc_amd64_x86_compiler_detection_index > -1 and (
+ vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index
+ ):
vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index
vc_chosen_compiler_str = "amd64_x86"
# Now check the 32 bit compilers
vc_x86_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN;")
- if(vc_x86_compiler_detection_index > -1
- and (vc_chosen_compiler_index == -1
- or vc_chosen_compiler_index > vc_x86_compiler_detection_index)):
+ if vc_x86_compiler_detection_index > -1 and (
+ vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_compiler_detection_index
+ ):
vc_chosen_compiler_index = vc_x86_compiler_detection_index
vc_chosen_compiler_str = "x86"
- vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env['VCINSTALLDIR'] + "BIN\\x86_amd64;")
- if(vc_x86_amd64_compiler_detection_index > -1
- and (vc_chosen_compiler_index == -1
- or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index)):
+ vc_x86_amd64_compiler_detection_index = tools_env["PATH"].find(tools_env["VCINSTALLDIR"] + "BIN\\x86_amd64;")
+ if vc_x86_amd64_compiler_detection_index > -1 and (
+ vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index
+ ):
vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index
vc_chosen_compiler_str = "x86_amd64"
# and for VS 2017 and newer we check VCTOOLSINSTALLDIR:
- if 'VCTOOLSINSTALLDIR' in tools_env:
+ if "VCTOOLSINSTALLDIR" in tools_env:
# Newer versions have a different path available
- vc_amd64_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX64\\X64;")
- if(vc_amd64_compiler_detection_index > -1):
+ vc_amd64_compiler_detection_index = (
+ tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX64\\X64;")
+ )
+ if vc_amd64_compiler_detection_index > -1:
vc_chosen_compiler_index = vc_amd64_compiler_detection_index
vc_chosen_compiler_str = "amd64"
- vc_amd64_x86_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX64\\X86;")
- if(vc_amd64_x86_compiler_detection_index > -1
- and (vc_chosen_compiler_index == -1
- or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index)):
+ vc_amd64_x86_compiler_detection_index = (
+ tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX64\\X86;")
+ )
+ if vc_amd64_x86_compiler_detection_index > -1 and (
+ vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_amd64_x86_compiler_detection_index
+ ):
vc_chosen_compiler_index = vc_amd64_x86_compiler_detection_index
vc_chosen_compiler_str = "amd64_x86"
- vc_x86_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX86\\X86;")
- if(vc_x86_compiler_detection_index > -1
- and (vc_chosen_compiler_index == -1
- or vc_chosen_compiler_index > vc_x86_compiler_detection_index)):
+ vc_x86_compiler_detection_index = (
+ tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX86\\X86;")
+ )
+ if vc_x86_compiler_detection_index > -1 and (
+ vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_compiler_detection_index
+ ):
vc_chosen_compiler_index = vc_x86_compiler_detection_index
vc_chosen_compiler_str = "x86"
- vc_x86_amd64_compiler_detection_index = tools_env["PATH"].upper().find(tools_env['VCTOOLSINSTALLDIR'].upper() + "BIN\\HOSTX86\\X64;")
- if(vc_x86_amd64_compiler_detection_index > -1
- and (vc_chosen_compiler_index == -1
- or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index)):
+ vc_x86_amd64_compiler_detection_index = (
+ tools_env["PATH"].upper().find(tools_env["VCTOOLSINSTALLDIR"].upper() + "BIN\\HOSTX86\\X64;")
+ )
+ if vc_x86_amd64_compiler_detection_index > -1 and (
+ vc_chosen_compiler_index == -1 or vc_chosen_compiler_index > vc_x86_amd64_compiler_detection_index
+ ):
vc_chosen_compiler_index = vc_x86_amd64_compiler_detection_index
vc_chosen_compiler_str = "x86_amd64"
return vc_chosen_compiler_str
+
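# NOTE: Illustrative sketch, not part of the patch — the detection above keeps
# whichever MSVC toolset directory appears earliest in PATH. Minimal
# standalone version of that "earliest match wins" rule with hypothetical
# paths:
path = r"C:\VC\Tools\bin\Hostx64\x64;C:\VC\Tools\bin\Hostx86\x86;"
candidates = {"amd64": r"bin\Hostx64\x64;", "x86": r"bin\Hostx86\x86;"}
chosen, chosen_index = "", -1
for name, marker in candidates.items():
    index = path.upper().find(marker.upper())
    if index > -1 and (chosen_index == -1 or index < chosen_index):
        chosen, chosen_index = name, index
print(chosen)  # "amd64" — its bin directory comes first in PATH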
def find_visual_c_batch_file(env):
from SCons.Tool.MSCommon.vc import get_default_version, get_host_target, find_batch_file
@@ -424,6 +461,7 @@ def find_visual_c_batch_file(env):
(host_platform, target_platform, _) = get_host_target(env)
return find_batch_file(env, version, host_platform, target_platform)[0]
+
def generate_cpp_hint_file(filename):
if os.path.isfile(filename):
# Don't overwrite an existing hint file since the user may have customized it.
@@ -435,15 +473,19 @@ def generate_cpp_hint_file(filename):
except IOError:
print("Could not write cpp.hint file.")
+
def generate_vs_project(env, num_jobs):
batch_file = find_visual_c_batch_file(env)
if batch_file:
+
def build_commandline(commands):
- common_build_prefix = ['cmd /V /C set "plat=$(PlatformTarget)"',
- '(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))',
- 'set "tools=yes"',
- '(if "$(Configuration)"=="release" (set "tools=no"))',
- 'call "' + batch_file + '" !plat!']
+ common_build_prefix = [
+ 'cmd /V /C set "plat=$(PlatformTarget)"',
+ '(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))',
+ 'set "tools=yes"',
+ '(if "$(Configuration)"=="release" (set "tools=no"))',
+ 'call "' + batch_file + '" !plat!',
+ ]
result = " ^& ".join(common_build_prefix + [commands])
return result
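# NOTE: Illustrative sketch, not part of the patch — build_commandline() above
# chains the vcvars setup and the scons call with "^&" so MSBuild can run them
# as a single NMAKE command line. Rough standalone illustration (batch file
# path and job count are hypothetical):
batch_file = r"C:\VS\VC\Auxiliary\Build\vcvarsall.bat"
prefix = [
    'cmd /V /C set "plat=$(PlatformTarget)"',
    'call "' + batch_file + '" !plat!',
]
print(" ^& ".join(prefix + ["scons platform=windows -j8"]))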
@@ -459,87 +501,104 @@ def generate_vs_project(env, num_jobs):
# to double quote off the directory. However, the path ends
# in a backslash, so we need to remove this, lest it escape the
# last double quote off, confusing MSBuild
- env['MSVSBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs))
- env['MSVSREBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! vsproj=yes -j' + str(num_jobs))
- env['MSVSCLEANCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" --clean platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs))
+ env["MSVSBUILDCOM"] = build_commandline(
+ "scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" platform=windows progress=no target=$(Configuration) tools=!tools! -j"
+ + str(num_jobs)
+ )
+ env["MSVSREBUILDCOM"] = build_commandline(
+ "scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" platform=windows progress=no target=$(Configuration) tools=!tools! vsproj=yes -j"
+ + str(num_jobs)
+ )
+ env["MSVSCLEANCOM"] = build_commandline(
+ "scons --directory=\"$(ProjectDir.TrimEnd('\\'))\" --clean platform=windows progress=no target=$(Configuration) tools=!tools! -j"
+ + str(num_jobs)
+ )
# This version information (Win32, x64, Debug, Release, Release_Debug seems to be
# required for Visual Studio to understand that it needs to generate an NMAKE
# project. Do not modify without knowing what you are doing.
- debug_variants = ['debug|Win32'] + ['debug|x64']
- release_variants = ['release|Win32'] + ['release|x64']
- release_debug_variants = ['release_debug|Win32'] + ['release_debug|x64']
+ debug_variants = ["debug|Win32"] + ["debug|x64"]
+ release_variants = ["release|Win32"] + ["release|x64"]
+ release_debug_variants = ["release_debug|Win32"] + ["release_debug|x64"]
variants = debug_variants + release_variants + release_debug_variants
- debug_targets = ['bin\\godot.windows.tools.32.exe'] + ['bin\\godot.windows.tools.64.exe']
- release_targets = ['bin\\godot.windows.opt.32.exe'] + ['bin\\godot.windows.opt.64.exe']
- release_debug_targets = ['bin\\godot.windows.opt.tools.32.exe'] + ['bin\\godot.windows.opt.tools.64.exe']
+ debug_targets = ["bin\\godot.windows.tools.32.exe"] + ["bin\\godot.windows.tools.64.exe"]
+ release_targets = ["bin\\godot.windows.opt.32.exe"] + ["bin\\godot.windows.opt.64.exe"]
+ release_debug_targets = ["bin\\godot.windows.opt.tools.32.exe"] + ["bin\\godot.windows.opt.tools.64.exe"]
targets = debug_targets + release_targets + release_debug_targets
- if not env.get('MSVS'):
- env['MSVS']['PROJECTSUFFIX'] = '.vcxproj'
- env['MSVS']['SOLUTIONSUFFIX'] = '.sln'
+ if not env.get("MSVS"):
+ env["MSVS"]["PROJECTSUFFIX"] = ".vcxproj"
+ env["MSVS"]["SOLUTIONSUFFIX"] = ".sln"
env.MSVSProject(
- target=['#godot' + env['MSVSPROJECTSUFFIX']],
+ target=["#godot" + env["MSVSPROJECTSUFFIX"]],
incs=env.vs_incs,
srcs=env.vs_srcs,
runfile=targets,
buildtarget=targets,
auto_build_solution=1,
- variant=variants)
+ variant=variants,
+ )
else:
- print("Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project.")
+ print("Could not locate Visual Studio batch file to set up the build environment. Not generating VS project.")
+
def precious_program(env, program, sources, **args):
program = env.ProgramOriginal(program, sources, **args)
env.Precious(program)
return program
+
def add_shared_library(env, name, sources, **args):
library = env.SharedLibrary(name, sources, **args)
env.NoCache(library)
return library
+
def add_library(env, name, sources, **args):
library = env.Library(name, sources, **args)
env.NoCache(library)
return library
+
def add_program(env, name, sources, **args):
program = env.Program(name, sources, **args)
env.NoCache(program)
return program
+
def CommandNoCache(env, target, sources, command, **args):
result = env.Command(target, sources, command, **args)
env.NoCache(result)
return result
+
def detect_darwin_sdk_path(platform, env):
- sdk_name = ''
- if platform == 'osx':
- sdk_name = 'macosx'
- var_name = 'MACOS_SDK_PATH'
- elif platform == 'iphone':
- sdk_name = 'iphoneos'
- var_name = 'IPHONESDK'
- elif platform == 'iphonesimulator':
- sdk_name = 'iphonesimulator'
- var_name = 'IPHONESDK'
+ sdk_name = ""
+ if platform == "osx":
+ sdk_name = "macosx"
+ var_name = "MACOS_SDK_PATH"
+ elif platform == "iphone":
+ sdk_name = "iphoneos"
+ var_name = "IPHONESDK"
+ elif platform == "iphonesimulator":
+ sdk_name = "iphonesimulator"
+ var_name = "IPHONESDK"
else:
raise Exception("Invalid platform argument passed to detect_darwin_sdk_path")
if not env[var_name]:
try:
- sdk_path = subprocess.check_output(['xcrun', '--sdk', sdk_name, '--show-sdk-path']).strip().decode("utf-8")
+ sdk_path = subprocess.check_output(["xcrun", "--sdk", sdk_name, "--show-sdk-path"]).strip().decode("utf-8")
if sdk_path:
env[var_name] = sdk_path
except (subprocess.CalledProcessError, OSError):
print("Failed to find SDK path while running xcrun --sdk {} --show-sdk-path.".format(sdk_name))
raise
+
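# NOTE: Illustrative sketch, not part of the patch — detect_darwin_sdk_path()
# above asks Xcode's xcrun for the SDK root. Equivalent standalone call
# (macOS with the Xcode command line tools installed):
import subprocess

sdk_path = (
    subprocess.check_output(["xcrun", "--sdk", "macosx", "--show-sdk-path"])
    .strip()
    .decode("utf-8")
)
print(sdk_path)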
def is_vanilla_clang(env):
if not using_clang(env):
return False
- version = subprocess.check_output([env['CXX'], '--version']).strip().decode("utf-8")
+ version = subprocess.check_output([env["CXX"], "--version"]).strip().decode("utf-8")
return not version.startswith("Apple")
@@ -552,20 +611,22 @@ def get_compiler_version(env):
# Not using -dumpversion as some GCC distros only return major, and
# Clang used to return hardcoded 4.2.1: # https://reviews.llvm.org/D56803
try:
- version = subprocess.check_output([env.subst(env['CXX']), '--version']).strip().decode("utf-8")
+ version = subprocess.check_output([env.subst(env["CXX"]), "--version"]).strip().decode("utf-8")
except (subprocess.CalledProcessError, OSError):
print("Couldn't parse CXX environment variable to infer compiler version.")
return None
else: # TODO: Implement for MSVC
return None
- match = re.search('[0-9]+\.[0-9.]+', version)
+    match = re.search(r"[0-9]+\.[0-9.]+", version)
if match is not None:
- return list(map(int, match.group().split('.')))
+ return list(map(int, match.group().split(".")))
else:
return None
+
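# NOTE: Illustrative sketch, not part of the patch — the regex above extracts
# the first dotted version token from `$CXX --version` output. Example on a
# hypothetical g++ banner:
import re

banner = "g++ (Ubuntu 9.3.0-17ubuntu1~20.04) 9.3.0"
match = re.search(r"[0-9]+\.[0-9.]+", banner)
if match is not None:
    print(list(map(int, match.group().split("."))))  # [9, 3, 0]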
def using_gcc(env):
- return 'gcc' in os.path.basename(env["CC"])
+ return "gcc" in os.path.basename(env["CC"])
+
def using_clang(env):
- return 'clang' in os.path.basename(env["CC"])
+ return "clang" in os.path.basename(env["CC"])
diff --git a/misc/scripts/fix_headers.py b/misc/scripts/fix_headers.py
index f0038a8351f..7af97eec4bb 100755
--- a/misc/scripts/fix_headers.py
+++ b/misc/scripts/fix_headers.py
@@ -37,24 +37,24 @@ files = open("files", "r")
fname = files.readline()
-while (fname != ""):
+while fname != "":
# Handle replacing $filename with actual filename and keep alignment
fsingle = fname.strip()
- if (fsingle.find("/") != -1):
- fsingle = fsingle[fsingle.rfind("/") + 1:]
+ if fsingle.find("/") != -1:
+ fsingle = fsingle[fsingle.rfind("/") + 1 :]
rep_fl = "$filename"
rep_fi = fsingle
len_fl = len(rep_fl)
len_fi = len(rep_fi)
# Pad with spaces to keep alignment
- if (len_fi < len_fl):
+ if len_fi < len_fl:
for x in range(len_fl - len_fi):
rep_fi += " "
- elif (len_fl < len_fi):
+ elif len_fl < len_fi:
for x in range(len_fi - len_fl):
rep_fl += " "
- if (header.find(rep_fl) != -1):
+ if header.find(rep_fl) != -1:
text = header.replace(rep_fl, rep_fi)
else:
text = header.replace("$filename", fsingle)
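# NOTE: Illustrative sketch, not part of the patch — the padding above keeps
# the boxed license header aligned by growing whichever of the placeholder or
# the real filename is shorter. Standalone example with a hypothetical name:
rep_fl, rep_fi = "$filename", "foo.cpp"
if len(rep_fi) < len(rep_fl):
    rep_fi += " " * (len(rep_fl) - len(rep_fi))
elif len(rep_fl) < len(rep_fi):
    rep_fl += " " * (len(rep_fi) - len(rep_fl))
print(repr(rep_fl), repr(rep_fi))  # both strings are now the same width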
@@ -71,21 +71,21 @@ while (fname != ""):
line = fileread.readline()
header_done = False
- while (line.strip() == ""): # Skip empty lines at the top
+ while line.strip() == "": # Skip empty lines at the top
line = fileread.readline()
- if (line.find("/**********") == -1): # Godot header starts this way
+ if line.find("/**********") == -1: # Godot header starts this way
# Maybe starting with a non-Godot comment, abort header magic
header_done = True
- while (not header_done): # Handle header now
- if (line.find("/*") != 0): # No more starting with a comment
+ while not header_done: # Handle header now
+ if line.find("/*") != 0: # No more starting with a comment
header_done = True
- if (line.strip() != ""):
+ if line.strip() != "":
text += line
line = fileread.readline()
- while (line != ""): # Dump everything until EOF
+ while line != "": # Dump everything until EOF
text += line
line = fileread.readline()
diff --git a/modules/SCsub b/modules/SCsub
index 5b39b183343..1671b398e57 100644
--- a/modules/SCsub
+++ b/modules/SCsub
@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
import modules_builders
env_modules = env.Clone()
-Export('env_modules')
+Export("env_modules")
# Header with MODULE_*_ENABLED defines.
env.CommandNoCache("modules_enabled.gen.h", Value(env.module_list), modules_builders.generate_modules_enabled)
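# NOTE: Illustrative sketch, not part of the patch — modules_enabled.gen.h
# produced above exposes one MODULE_<NAME>_ENABLED define per enabled module
# (the exact generator lives in modules_builders.py). Rough idea, with
# hypothetical module names:
module_list = ["bullet", "csg"]
print("\n".join("#define MODULE_" + m.upper() + "_ENABLED" for m in module_list))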
diff --git a/modules/arkit/SCsub b/modules/arkit/SCsub
index c05595ea2a2..61c0a8248c1 100644
--- a/modules/arkit/SCsub
+++ b/modules/arkit/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_arkit = env_modules.Clone()
@@ -9,4 +9,4 @@ env_arkit = env_modules.Clone()
modules_sources = []
env_arkit.add_source_files(modules_sources, "*.cpp")
env_arkit.add_source_files(modules_sources, "*.mm")
-mod_lib = env_modules.add_library('#bin/libgodot_arkit_module' + env['LIBSUFFIX'], modules_sources)
+mod_lib = env_modules.add_library("#bin/libgodot_arkit_module" + env["LIBSUFFIX"], modules_sources)
diff --git a/modules/arkit/config.py b/modules/arkit/config.py
index 96e41826c54..e68603fc93b 100644
--- a/modules/arkit/config.py
+++ b/modules/arkit/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return platform == 'iphone'
+ return platform == "iphone"
+
def configure(env):
pass
diff --git a/modules/assimp/SCsub b/modules/assimp/SCsub
index cef2396dfba..f1d0c742b4f 100644
--- a/modules/assimp/SCsub
+++ b/modules/assimp/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_assimp = env_modules.Clone()
@@ -10,85 +10,85 @@ env_assimp = env_modules.Clone()
if True: # env['builtin_assimp']:
thirdparty_dir = "#thirdparty/assimp"
- env_assimp.Prepend(CPPPATH=['#thirdparty/assimp'])
- env_assimp.Prepend(CPPPATH=['#thirdparty/assimp/code'])
- env_assimp.Prepend(CPPPATH=['#thirdparty/assimp/include'])
+ env_assimp.Prepend(CPPPATH=["#thirdparty/assimp"])
+ env_assimp.Prepend(CPPPATH=["#thirdparty/assimp/code"])
+ env_assimp.Prepend(CPPPATH=["#thirdparty/assimp/include"])
- #env_assimp.Append(CPPDEFINES=['ASSIMP_DOUBLE_PRECISION']) # TODO default to what godot is compiled with for future double support
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_SINGLETHREADED'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_BOOST_WORKAROUND'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OWN_ZLIB'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_EXPORT'])
+ # env_assimp.Append(CPPDEFINES=['ASSIMP_DOUBLE_PRECISION']) # TODO default to what godot is compiled with for future double support
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_SINGLETHREADED"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_BOOST_WORKAROUND"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OWN_ZLIB"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_EXPORT"])
# Importers we don't need
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3D_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3DS_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_3MF_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_AC_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_AMF_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_ASE_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_ASSBIN_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_B3D_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_BLEND_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_BVH_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_C4D_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_COB_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_COLLADA_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_CSM_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_DXF_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_GLTF2_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_GLTF_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_HMP_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IFC_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IRR_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_IRRMESH_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_LWO_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_LWS_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_M3D_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD2_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD3_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD5_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MD5_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MDC_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MDL_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MMD_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_MS3D_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_NDO_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_NFF_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OBJ_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OFF_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OGRE_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_OPENGEX_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_PLY_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_Q3BSP_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_Q3D_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_RAW_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_SIB_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_SMD_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_STEP_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_STL_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_TERRAGEN_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_X3D_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_XGL_IMPORTER'])
- env_assimp.Append(CPPDEFINES=['ASSIMP_BUILD_NO_X_IMPORTER'])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3D_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3DS_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_3MF_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_AC_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_AMF_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_ASE_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_ASSBIN_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_B3D_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_BLEND_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_BVH_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_C4D_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_COB_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_COLLADA_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_CSM_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_DXF_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_GLTF2_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_GLTF_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_HMP_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IFC_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IRR_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_IRRMESH_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_LWO_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_LWS_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_M3D_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD2_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD3_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD5_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MD5_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MDC_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MDL_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MMD_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_MS3D_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_NDO_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_NFF_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OBJ_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OFF_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OGRE_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_OPENGEX_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_PLY_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_Q3BSP_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_Q3D_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_RAW_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_SIB_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_SMD_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_STEP_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_STL_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_TERRAGEN_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_X3D_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_XGL_IMPORTER"])
+ env_assimp.Append(CPPDEFINES=["ASSIMP_BUILD_NO_X_IMPORTER"])
+
+ if env["platform"] == "windows":
+ env_assimp.Append(CPPDEFINES=["PLATFORM_WINDOWS"])
+ env_assimp.Append(CPPDEFINES=[("PLATFORM", "WINDOWS")])
+ elif env["platform"] == "linuxbsd":
+ env_assimp.Append(CPPDEFINES=["PLATFORM_LINUX"])
+ env_assimp.Append(CPPDEFINES=[("PLATFORM", "LINUX")])
+ elif env["platform"] == "osx":
+ env_assimp.Append(CPPDEFINES=["PLATFORM_DARWIN"])
+ env_assimp.Append(CPPDEFINES=[("PLATFORM", "DARWIN")])
- if(env['platform'] == 'windows'):
- env_assimp.Append(CPPDEFINES=['PLATFORM_WINDOWS'])
- env_assimp.Append(CPPDEFINES=[('PLATFORM', 'WINDOWS')])
- elif(env['platform'] == 'linuxbsd'):
- env_assimp.Append(CPPDEFINES=['PLATFORM_LINUX'])
- env_assimp.Append(CPPDEFINES=[('PLATFORM', 'LINUX')])
- elif(env['platform'] == 'osx'):
- env_assimp.Append(CPPDEFINES=['PLATFORM_DARWIN'])
- env_assimp.Append(CPPDEFINES=[('PLATFORM', 'DARWIN')])
-
env_thirdparty = env_assimp.Clone()
env_thirdparty.disable_warnings()
- env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/CApi/*.cpp'))
- env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/Common/*.cpp'))
- env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/PostProcessing/*.cpp'))
- env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/Material/*.cpp'))
- env_thirdparty.add_source_files(env.modules_sources, Glob('#thirdparty/assimp/code/FBX/*.cpp'))
+ env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/CApi/*.cpp"))
+ env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/Common/*.cpp"))
+ env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/PostProcessing/*.cpp"))
+ env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/Material/*.cpp"))
+ env_thirdparty.add_source_files(env.modules_sources, Glob("#thirdparty/assimp/code/FBX/*.cpp"))
# Godot's own source files
env_assimp.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/assimp/config.py b/modules/assimp/config.py
index 098f1eafa9e..53b8f2f2e36 100644
--- a/modules/assimp/config.py
+++ b/modules/assimp/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return env['tools']
+ return env["tools"]
+
def configure(env):
pass
diff --git a/modules/basis_universal/SCsub b/modules/basis_universal/SCsub
index 63324e920bd..dc7b176d246 100644
--- a/modules/basis_universal/SCsub
+++ b/modules/basis_universal/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_basisu = env_modules.Clone()
@@ -9,36 +9,38 @@ env_basisu = env_modules.Clone()
# Not unbundled so far since not widespread as shared library
thirdparty_dir = "#thirdparty/basis_universal/"
tool_sources = [
- "basisu_astc_decomp.cpp",
- "basisu_backend.cpp",
- "basisu_basis_file.cpp",
- "basisu_comp.cpp",
- "basisu_enc.cpp",
- "basisu_etc.cpp",
- "basisu_frontend.cpp",
- "basisu_global_selector_palette_helpers.cpp",
- "basisu_gpu_texture.cpp",
- "basisu_pvrtc1_4.cpp",
- "basisu_resample_filters.cpp",
- "basisu_resampler.cpp",
- "basisu_ssim.cpp",
- "lodepng.cpp",
+ "basisu_astc_decomp.cpp",
+ "basisu_backend.cpp",
+ "basisu_basis_file.cpp",
+ "basisu_comp.cpp",
+ "basisu_enc.cpp",
+ "basisu_etc.cpp",
+ "basisu_frontend.cpp",
+ "basisu_global_selector_palette_helpers.cpp",
+ "basisu_gpu_texture.cpp",
+ "basisu_pvrtc1_4.cpp",
+ "basisu_resample_filters.cpp",
+ "basisu_resampler.cpp",
+ "basisu_ssim.cpp",
+ "lodepng.cpp",
]
tool_sources = [thirdparty_dir + file for file in tool_sources]
transcoder_sources = [thirdparty_dir + "transcoder/basisu_transcoder.cpp"]
# Treat Basis headers as system headers to avoid raising warnings. Not supported on MSVC.
if not env.msvc:
- env_basisu.Append(CPPFLAGS=['-isystem', Dir(thirdparty_dir).path, '-isystem', Dir(thirdparty_dir + "transcoder").path])
+ env_basisu.Append(
+ CPPFLAGS=["-isystem", Dir(thirdparty_dir).path, "-isystem", Dir(thirdparty_dir + "transcoder").path]
+ )
else:
env_basisu.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "transcoder"])
-if env['target'] == "debug":
- env_basisu.Append(CPPFLAGS=["-DBASISU_DEVEL_MESSAGES=1", "-DBASISD_ENABLE_DEBUG_FLAGS=1"])
+if env["target"] == "debug":
+ env_basisu.Append(CPPFLAGS=["-DBASISU_DEVEL_MESSAGES=1", "-DBASISD_ENABLE_DEBUG_FLAGS=1"])
env_thirdparty = env_basisu.Clone()
env_thirdparty.disable_warnings()
-if env['tools']:
+if env["tools"]:
env_thirdparty.add_source_files(env.modules_sources, tool_sources)
env_thirdparty.add_source_files(env.modules_sources, transcoder_sources)
diff --git a/modules/basis_universal/config.py b/modules/basis_universal/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/basis_universal/config.py
+++ b/modules/basis_universal/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/bmp/SCsub b/modules/bmp/SCsub
index e7da7cf108e..4f3405ff286 100644
--- a/modules/bmp/SCsub
+++ b/modules/bmp/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_bmp = env_modules.Clone()
diff --git a/modules/bmp/config.py b/modules/bmp/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/bmp/config.py
+++ b/modules/bmp/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/bullet/SCsub b/modules/bullet/SCsub
index 02d0a31a699..692c7498867 100644
--- a/modules/bullet/SCsub
+++ b/modules/bullet/SCsub
@@ -1,208 +1,203 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_bullet = env_modules.Clone()
# Thirdparty source files
-if env['builtin_bullet']:
+if env["builtin_bullet"]:
# Build only version 2 for now (as of 2.89)
# Sync file list with relevant upstream CMakeLists.txt for each folder.
thirdparty_dir = "#thirdparty/bullet/"
bullet2_src = [
# BulletCollision
- "BulletCollision/BroadphaseCollision/btAxisSweep3.cpp"
- , "BulletCollision/BroadphaseCollision/btBroadphaseProxy.cpp"
- , "BulletCollision/BroadphaseCollision/btCollisionAlgorithm.cpp"
- , "BulletCollision/BroadphaseCollision/btDbvt.cpp"
- , "BulletCollision/BroadphaseCollision/btDbvtBroadphase.cpp"
- , "BulletCollision/BroadphaseCollision/btDispatcher.cpp"
- , "BulletCollision/BroadphaseCollision/btOverlappingPairCache.cpp"
- , "BulletCollision/BroadphaseCollision/btQuantizedBvh.cpp"
- , "BulletCollision/BroadphaseCollision/btSimpleBroadphase.cpp"
- , "BulletCollision/CollisionDispatch/btActivatingCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btBoxBoxCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btBoxBoxDetector.cpp"
- , "BulletCollision/CollisionDispatch/btCollisionDispatcher.cpp"
- , "BulletCollision/CollisionDispatch/btCollisionDispatcherMt.cpp"
- , "BulletCollision/CollisionDispatch/btCollisionObject.cpp"
- , "BulletCollision/CollisionDispatch/btCollisionWorld.cpp"
- , "BulletCollision/CollisionDispatch/btCollisionWorldImporter.cpp"
- , "BulletCollision/CollisionDispatch/btCompoundCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btCompoundCompoundCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btConvexConcaveCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btConvexConvexAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btConvexPlaneCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btDefaultCollisionConfiguration.cpp"
- , "BulletCollision/CollisionDispatch/btEmptyCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btGhostObject.cpp"
- , "BulletCollision/CollisionDispatch/btHashedSimplePairCache.cpp"
- , "BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp"
- , "BulletCollision/CollisionDispatch/btManifoldResult.cpp"
- , "BulletCollision/CollisionDispatch/btSimulationIslandManager.cpp"
- , "BulletCollision/CollisionDispatch/btSphereBoxCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btSphereSphereCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btSphereTriangleCollisionAlgorithm.cpp"
- , "BulletCollision/CollisionDispatch/btUnionFind.cpp"
- , "BulletCollision/CollisionDispatch/SphereTriangleDetector.cpp"
- , "BulletCollision/CollisionShapes/btBoxShape.cpp"
- , "BulletCollision/CollisionShapes/btBox2dShape.cpp"
- , "BulletCollision/CollisionShapes/btBvhTriangleMeshShape.cpp"
- , "BulletCollision/CollisionShapes/btCapsuleShape.cpp"
- , "BulletCollision/CollisionShapes/btCollisionShape.cpp"
- , "BulletCollision/CollisionShapes/btCompoundShape.cpp"
- , "BulletCollision/CollisionShapes/btConcaveShape.cpp"
- , "BulletCollision/CollisionShapes/btConeShape.cpp"
- , "BulletCollision/CollisionShapes/btConvexHullShape.cpp"
- , "BulletCollision/CollisionShapes/btConvexInternalShape.cpp"
- , "BulletCollision/CollisionShapes/btConvexPointCloudShape.cpp"
- , "BulletCollision/CollisionShapes/btConvexPolyhedron.cpp"
- , "BulletCollision/CollisionShapes/btConvexShape.cpp"
- , "BulletCollision/CollisionShapes/btConvex2dShape.cpp"
- , "BulletCollision/CollisionShapes/btConvexTriangleMeshShape.cpp"
- , "BulletCollision/CollisionShapes/btCylinderShape.cpp"
- , "BulletCollision/CollisionShapes/btEmptyShape.cpp"
- , "BulletCollision/CollisionShapes/btHeightfieldTerrainShape.cpp"
- , "BulletCollision/CollisionShapes/btMiniSDF.cpp"
- , "BulletCollision/CollisionShapes/btMinkowskiSumShape.cpp"
- , "BulletCollision/CollisionShapes/btMultimaterialTriangleMeshShape.cpp"
- , "BulletCollision/CollisionShapes/btMultiSphereShape.cpp"
- , "BulletCollision/CollisionShapes/btOptimizedBvh.cpp"
- , "BulletCollision/CollisionShapes/btPolyhedralConvexShape.cpp"
- , "BulletCollision/CollisionShapes/btScaledBvhTriangleMeshShape.cpp"
- , "BulletCollision/CollisionShapes/btSdfCollisionShape.cpp"
- , "BulletCollision/CollisionShapes/btShapeHull.cpp"
- , "BulletCollision/CollisionShapes/btSphereShape.cpp"
- , "BulletCollision/CollisionShapes/btStaticPlaneShape.cpp"
- , "BulletCollision/CollisionShapes/btStridingMeshInterface.cpp"
- , "BulletCollision/CollisionShapes/btTetrahedronShape.cpp"
- , "BulletCollision/CollisionShapes/btTriangleBuffer.cpp"
- , "BulletCollision/CollisionShapes/btTriangleCallback.cpp"
- , "BulletCollision/CollisionShapes/btTriangleIndexVertexArray.cpp"
- , "BulletCollision/CollisionShapes/btTriangleIndexVertexMaterialArray.cpp"
- , "BulletCollision/CollisionShapes/btTriangleMesh.cpp"
- , "BulletCollision/CollisionShapes/btTriangleMeshShape.cpp"
- , "BulletCollision/CollisionShapes/btUniformScalingShape.cpp"
- , "BulletCollision/Gimpact/btContactProcessing.cpp"
- , "BulletCollision/Gimpact/btGenericPoolAllocator.cpp"
- , "BulletCollision/Gimpact/btGImpactBvh.cpp"
- , "BulletCollision/Gimpact/btGImpactCollisionAlgorithm.cpp"
- , "BulletCollision/Gimpact/btGImpactQuantizedBvh.cpp"
- , "BulletCollision/Gimpact/btGImpactShape.cpp"
- , "BulletCollision/Gimpact/btTriangleShapeEx.cpp"
- , "BulletCollision/Gimpact/gim_box_set.cpp"
- , "BulletCollision/Gimpact/gim_contact.cpp"
- , "BulletCollision/Gimpact/gim_memory.cpp"
- , "BulletCollision/Gimpact/gim_tri_collision.cpp"
- , "BulletCollision/NarrowPhaseCollision/btContinuousConvexCollision.cpp"
- , "BulletCollision/NarrowPhaseCollision/btConvexCast.cpp"
- , "BulletCollision/NarrowPhaseCollision/btGjkConvexCast.cpp"
- , "BulletCollision/NarrowPhaseCollision/btGjkEpa2.cpp"
- , "BulletCollision/NarrowPhaseCollision/btGjkEpaPenetrationDepthSolver.cpp"
- , "BulletCollision/NarrowPhaseCollision/btGjkPairDetector.cpp"
- , "BulletCollision/NarrowPhaseCollision/btMinkowskiPenetrationDepthSolver.cpp"
- , "BulletCollision/NarrowPhaseCollision/btPersistentManifold.cpp"
- , "BulletCollision/NarrowPhaseCollision/btRaycastCallback.cpp"
- , "BulletCollision/NarrowPhaseCollision/btSubSimplexConvexCast.cpp"
- , "BulletCollision/NarrowPhaseCollision/btVoronoiSimplexSolver.cpp"
- , "BulletCollision/NarrowPhaseCollision/btPolyhedralContactClipping.cpp"
-
+ "BulletCollision/BroadphaseCollision/btAxisSweep3.cpp",
+ "BulletCollision/BroadphaseCollision/btBroadphaseProxy.cpp",
+ "BulletCollision/BroadphaseCollision/btCollisionAlgorithm.cpp",
+ "BulletCollision/BroadphaseCollision/btDbvt.cpp",
+ "BulletCollision/BroadphaseCollision/btDbvtBroadphase.cpp",
+ "BulletCollision/BroadphaseCollision/btDispatcher.cpp",
+ "BulletCollision/BroadphaseCollision/btOverlappingPairCache.cpp",
+ "BulletCollision/BroadphaseCollision/btQuantizedBvh.cpp",
+ "BulletCollision/BroadphaseCollision/btSimpleBroadphase.cpp",
+ "BulletCollision/CollisionDispatch/btActivatingCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btBoxBoxCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btBoxBoxDetector.cpp",
+ "BulletCollision/CollisionDispatch/btCollisionDispatcher.cpp",
+ "BulletCollision/CollisionDispatch/btCollisionDispatcherMt.cpp",
+ "BulletCollision/CollisionDispatch/btCollisionObject.cpp",
+ "BulletCollision/CollisionDispatch/btCollisionWorld.cpp",
+ "BulletCollision/CollisionDispatch/btCollisionWorldImporter.cpp",
+ "BulletCollision/CollisionDispatch/btCompoundCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btCompoundCompoundCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btConvexConcaveCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btConvexConvexAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btConvexPlaneCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btDefaultCollisionConfiguration.cpp",
+ "BulletCollision/CollisionDispatch/btEmptyCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btGhostObject.cpp",
+ "BulletCollision/CollisionDispatch/btHashedSimplePairCache.cpp",
+ "BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp",
+ "BulletCollision/CollisionDispatch/btManifoldResult.cpp",
+ "BulletCollision/CollisionDispatch/btSimulationIslandManager.cpp",
+ "BulletCollision/CollisionDispatch/btSphereBoxCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btSphereSphereCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btSphereTriangleCollisionAlgorithm.cpp",
+ "BulletCollision/CollisionDispatch/btUnionFind.cpp",
+ "BulletCollision/CollisionDispatch/SphereTriangleDetector.cpp",
+ "BulletCollision/CollisionShapes/btBoxShape.cpp",
+ "BulletCollision/CollisionShapes/btBox2dShape.cpp",
+ "BulletCollision/CollisionShapes/btBvhTriangleMeshShape.cpp",
+ "BulletCollision/CollisionShapes/btCapsuleShape.cpp",
+ "BulletCollision/CollisionShapes/btCollisionShape.cpp",
+ "BulletCollision/CollisionShapes/btCompoundShape.cpp",
+ "BulletCollision/CollisionShapes/btConcaveShape.cpp",
+ "BulletCollision/CollisionShapes/btConeShape.cpp",
+ "BulletCollision/CollisionShapes/btConvexHullShape.cpp",
+ "BulletCollision/CollisionShapes/btConvexInternalShape.cpp",
+ "BulletCollision/CollisionShapes/btConvexPointCloudShape.cpp",
+ "BulletCollision/CollisionShapes/btConvexPolyhedron.cpp",
+ "BulletCollision/CollisionShapes/btConvexShape.cpp",
+ "BulletCollision/CollisionShapes/btConvex2dShape.cpp",
+ "BulletCollision/CollisionShapes/btConvexTriangleMeshShape.cpp",
+ "BulletCollision/CollisionShapes/btCylinderShape.cpp",
+ "BulletCollision/CollisionShapes/btEmptyShape.cpp",
+ "BulletCollision/CollisionShapes/btHeightfieldTerrainShape.cpp",
+ "BulletCollision/CollisionShapes/btMiniSDF.cpp",
+ "BulletCollision/CollisionShapes/btMinkowskiSumShape.cpp",
+ "BulletCollision/CollisionShapes/btMultimaterialTriangleMeshShape.cpp",
+ "BulletCollision/CollisionShapes/btMultiSphereShape.cpp",
+ "BulletCollision/CollisionShapes/btOptimizedBvh.cpp",
+ "BulletCollision/CollisionShapes/btPolyhedralConvexShape.cpp",
+ "BulletCollision/CollisionShapes/btScaledBvhTriangleMeshShape.cpp",
+ "BulletCollision/CollisionShapes/btSdfCollisionShape.cpp",
+ "BulletCollision/CollisionShapes/btShapeHull.cpp",
+ "BulletCollision/CollisionShapes/btSphereShape.cpp",
+ "BulletCollision/CollisionShapes/btStaticPlaneShape.cpp",
+ "BulletCollision/CollisionShapes/btStridingMeshInterface.cpp",
+ "BulletCollision/CollisionShapes/btTetrahedronShape.cpp",
+ "BulletCollision/CollisionShapes/btTriangleBuffer.cpp",
+ "BulletCollision/CollisionShapes/btTriangleCallback.cpp",
+ "BulletCollision/CollisionShapes/btTriangleIndexVertexArray.cpp",
+ "BulletCollision/CollisionShapes/btTriangleIndexVertexMaterialArray.cpp",
+ "BulletCollision/CollisionShapes/btTriangleMesh.cpp",
+ "BulletCollision/CollisionShapes/btTriangleMeshShape.cpp",
+ "BulletCollision/CollisionShapes/btUniformScalingShape.cpp",
+ "BulletCollision/Gimpact/btContactProcessing.cpp",
+ "BulletCollision/Gimpact/btGenericPoolAllocator.cpp",
+ "BulletCollision/Gimpact/btGImpactBvh.cpp",
+ "BulletCollision/Gimpact/btGImpactCollisionAlgorithm.cpp",
+ "BulletCollision/Gimpact/btGImpactQuantizedBvh.cpp",
+ "BulletCollision/Gimpact/btGImpactShape.cpp",
+ "BulletCollision/Gimpact/btTriangleShapeEx.cpp",
+ "BulletCollision/Gimpact/gim_box_set.cpp",
+ "BulletCollision/Gimpact/gim_contact.cpp",
+ "BulletCollision/Gimpact/gim_memory.cpp",
+ "BulletCollision/Gimpact/gim_tri_collision.cpp",
+ "BulletCollision/NarrowPhaseCollision/btContinuousConvexCollision.cpp",
+ "BulletCollision/NarrowPhaseCollision/btConvexCast.cpp",
+ "BulletCollision/NarrowPhaseCollision/btGjkConvexCast.cpp",
+ "BulletCollision/NarrowPhaseCollision/btGjkEpa2.cpp",
+ "BulletCollision/NarrowPhaseCollision/btGjkEpaPenetrationDepthSolver.cpp",
+ "BulletCollision/NarrowPhaseCollision/btGjkPairDetector.cpp",
+ "BulletCollision/NarrowPhaseCollision/btMinkowskiPenetrationDepthSolver.cpp",
+ "BulletCollision/NarrowPhaseCollision/btPersistentManifold.cpp",
+ "BulletCollision/NarrowPhaseCollision/btRaycastCallback.cpp",
+ "BulletCollision/NarrowPhaseCollision/btSubSimplexConvexCast.cpp",
+ "BulletCollision/NarrowPhaseCollision/btVoronoiSimplexSolver.cpp",
+ "BulletCollision/NarrowPhaseCollision/btPolyhedralContactClipping.cpp",
# BulletDynamics
- , "BulletDynamics/Character/btKinematicCharacterController.cpp"
- , "BulletDynamics/ConstraintSolver/btConeTwistConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btContactConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btFixedConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btGearConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btGeneric6DofConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btGeneric6DofSpringConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btGeneric6DofSpring2Constraint.cpp"
- , "BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp"
- , "BulletDynamics/ConstraintSolver/btHingeConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btPoint2PointConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolver.cpp"
- , "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolverMt.cpp"
- , "BulletDynamics/ConstraintSolver/btBatchedConstraints.cpp"
- , "BulletDynamics/ConstraintSolver/btNNCGConstraintSolver.cpp"
- , "BulletDynamics/ConstraintSolver/btSliderConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btSolve2LinearConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btTypedConstraint.cpp"
- , "BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp"
- , "BulletDynamics/Dynamics/btDiscreteDynamicsWorld.cpp"
- , "BulletDynamics/Dynamics/btDiscreteDynamicsWorldMt.cpp"
- , "BulletDynamics/Dynamics/btSimulationIslandManagerMt.cpp"
- , "BulletDynamics/Dynamics/btRigidBody.cpp"
- , "BulletDynamics/Dynamics/btSimpleDynamicsWorld.cpp"
- #, "BulletDynamics/Dynamics/Bullet-C-API.cpp"
- , "BulletDynamics/Vehicle/btRaycastVehicle.cpp"
- , "BulletDynamics/Vehicle/btWheelInfo.cpp"
- , "BulletDynamics/Featherstone/btMultiBody.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyConstraint.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyConstraintSolver.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyDynamicsWorld.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyFixedConstraint.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyGearConstraint.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyJointLimitConstraint.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyJointMotor.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyMLCPConstraintSolver.cpp"
- , "BulletDynamics/Featherstone/btMultiBodyPoint2Point.cpp"
- , "BulletDynamics/Featherstone/btMultiBodySliderConstraint.cpp"
- , "BulletDynamics/Featherstone/btMultiBodySphericalJointMotor.cpp"
- , "BulletDynamics/MLCPSolvers/btDantzigLCP.cpp"
- , "BulletDynamics/MLCPSolvers/btMLCPSolver.cpp"
- , "BulletDynamics/MLCPSolvers/btLemkeAlgorithm.cpp"
-
+ "BulletDynamics/Character/btKinematicCharacterController.cpp",
+ "BulletDynamics/ConstraintSolver/btConeTwistConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btContactConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btFixedConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btGearConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btGeneric6DofConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btGeneric6DofSpringConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btGeneric6DofSpring2Constraint.cpp",
+ "BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp",
+ "BulletDynamics/ConstraintSolver/btHingeConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btPoint2PointConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolver.cpp",
+ "BulletDynamics/ConstraintSolver/btSequentialImpulseConstraintSolverMt.cpp",
+ "BulletDynamics/ConstraintSolver/btBatchedConstraints.cpp",
+ "BulletDynamics/ConstraintSolver/btNNCGConstraintSolver.cpp",
+ "BulletDynamics/ConstraintSolver/btSliderConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btSolve2LinearConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btTypedConstraint.cpp",
+ "BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp",
+ "BulletDynamics/Dynamics/btDiscreteDynamicsWorld.cpp",
+ "BulletDynamics/Dynamics/btDiscreteDynamicsWorldMt.cpp",
+ "BulletDynamics/Dynamics/btSimulationIslandManagerMt.cpp",
+ "BulletDynamics/Dynamics/btRigidBody.cpp",
+ "BulletDynamics/Dynamics/btSimpleDynamicsWorld.cpp",
+ # "BulletDynamics/Dynamics/Bullet-C-API.cpp",
+ "BulletDynamics/Vehicle/btRaycastVehicle.cpp",
+ "BulletDynamics/Vehicle/btWheelInfo.cpp",
+ "BulletDynamics/Featherstone/btMultiBody.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyConstraint.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyConstraintSolver.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyDynamicsWorld.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyFixedConstraint.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyGearConstraint.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyJointLimitConstraint.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyJointMotor.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyMLCPConstraintSolver.cpp",
+ "BulletDynamics/Featherstone/btMultiBodyPoint2Point.cpp",
+ "BulletDynamics/Featherstone/btMultiBodySliderConstraint.cpp",
+ "BulletDynamics/Featherstone/btMultiBodySphericalJointMotor.cpp",
+ "BulletDynamics/MLCPSolvers/btDantzigLCP.cpp",
+ "BulletDynamics/MLCPSolvers/btMLCPSolver.cpp",
+ "BulletDynamics/MLCPSolvers/btLemkeAlgorithm.cpp",
# BulletInverseDynamics
- , "BulletInverseDynamics/IDMath.cpp"
- , "BulletInverseDynamics/MultiBodyTree.cpp"
- , "BulletInverseDynamics/details/MultiBodyTreeInitCache.cpp"
- , "BulletInverseDynamics/details/MultiBodyTreeImpl.cpp"
-
+ "BulletInverseDynamics/IDMath.cpp",
+ "BulletInverseDynamics/MultiBodyTree.cpp",
+ "BulletInverseDynamics/details/MultiBodyTreeInitCache.cpp",
+ "BulletInverseDynamics/details/MultiBodyTreeImpl.cpp",
# BulletSoftBody
- , "BulletSoftBody/btSoftBody.cpp"
- , "BulletSoftBody/btSoftBodyConcaveCollisionAlgorithm.cpp"
- , "BulletSoftBody/btSoftBodyHelpers.cpp"
- , "BulletSoftBody/btSoftBodyRigidBodyCollisionConfiguration.cpp"
- , "BulletSoftBody/btSoftRigidCollisionAlgorithm.cpp"
- , "BulletSoftBody/btSoftRigidDynamicsWorld.cpp"
- , "BulletSoftBody/btSoftMultiBodyDynamicsWorld.cpp"
- , "BulletSoftBody/btSoftSoftCollisionAlgorithm.cpp"
- , "BulletSoftBody/btDefaultSoftBodySolver.cpp"
- , "BulletSoftBody/btDeformableBackwardEulerObjective.cpp"
- , "BulletSoftBody/btDeformableBodySolver.cpp"
- , "BulletSoftBody/btDeformableMultiBodyConstraintSolver.cpp"
- , "BulletSoftBody/btDeformableContactProjection.cpp"
- , "BulletSoftBody/btDeformableMultiBodyDynamicsWorld.cpp"
- , "BulletSoftBody/btDeformableContactConstraint.cpp"
-
+ "BulletSoftBody/btSoftBody.cpp",
+ "BulletSoftBody/btSoftBodyConcaveCollisionAlgorithm.cpp",
+ "BulletSoftBody/btSoftBodyHelpers.cpp",
+ "BulletSoftBody/btSoftBodyRigidBodyCollisionConfiguration.cpp",
+ "BulletSoftBody/btSoftRigidCollisionAlgorithm.cpp",
+ "BulletSoftBody/btSoftRigidDynamicsWorld.cpp",
+ "BulletSoftBody/btSoftMultiBodyDynamicsWorld.cpp",
+ "BulletSoftBody/btSoftSoftCollisionAlgorithm.cpp",
+ "BulletSoftBody/btDefaultSoftBodySolver.cpp",
+ "BulletSoftBody/btDeformableBackwardEulerObjective.cpp",
+ "BulletSoftBody/btDeformableBodySolver.cpp",
+ "BulletSoftBody/btDeformableMultiBodyConstraintSolver.cpp",
+ "BulletSoftBody/btDeformableContactProjection.cpp",
+ "BulletSoftBody/btDeformableMultiBodyDynamicsWorld.cpp",
+ "BulletSoftBody/btDeformableContactConstraint.cpp",
# clew
- , "clew/clew.c"
-
+ "clew/clew.c",
# LinearMath
- , "LinearMath/btAlignedAllocator.cpp"
- , "LinearMath/btConvexHull.cpp"
- , "LinearMath/btConvexHullComputer.cpp"
- , "LinearMath/btGeometryUtil.cpp"
- , "LinearMath/btPolarDecomposition.cpp"
- , "LinearMath/btQuickprof.cpp"
- , "LinearMath/btSerializer.cpp"
- , "LinearMath/btSerializer64.cpp"
- , "LinearMath/btThreads.cpp"
- , "LinearMath/btVector3.cpp"
- , "LinearMath/TaskScheduler/btTaskScheduler.cpp"
- , "LinearMath/TaskScheduler/btThreadSupportPosix.cpp"
- , "LinearMath/TaskScheduler/btThreadSupportWin32.cpp"
+ "LinearMath/btAlignedAllocator.cpp",
+ "LinearMath/btConvexHull.cpp",
+ "LinearMath/btConvexHullComputer.cpp",
+ "LinearMath/btGeometryUtil.cpp",
+ "LinearMath/btPolarDecomposition.cpp",
+ "LinearMath/btQuickprof.cpp",
+ "LinearMath/btSerializer.cpp",
+ "LinearMath/btSerializer64.cpp",
+ "LinearMath/btThreads.cpp",
+ "LinearMath/btVector3.cpp",
+ "LinearMath/TaskScheduler/btTaskScheduler.cpp",
+ "LinearMath/TaskScheduler/btThreadSupportPosix.cpp",
+ "LinearMath/TaskScheduler/btThreadSupportWin32.cpp",
]
thirdparty_sources = [thirdparty_dir + file for file in bullet2_src]
# Treat Bullet headers as system headers to avoid raising warnings. Not supported on MSVC.
if not env.msvc:
- env_bullet.Append(CPPFLAGS=['-isystem', Dir(thirdparty_dir).path])
+ env_bullet.Append(CPPFLAGS=["-isystem", Dir(thirdparty_dir).path])
else:
env_bullet.Prepend(CPPPATH=[thirdparty_dir])
# if env['target'] == "debug" or env['target'] == "release_debug":
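
The `-isystem` switch used above for the bundled Bullet headers (the glslang SCsub relies on the same trick) makes GCC/Clang treat those directories as system include paths, so the engine's own warning flags stay quiet inside third-party code; MSVC has no direct equivalent, hence the plain CPPPATH fallback. A minimal sketch of the pattern, assuming a hypothetical bundled library under thirdparty/foo/:

Import("env")
Import("env_modules")

env_foo = env_modules.Clone()
thirdparty_dir = "#thirdparty/foo/"

if not env.msvc:
    # GCC/Clang: bundled headers become "system" headers, silencing -W warnings in them.
    env_foo.Append(CPPFLAGS=["-isystem", Dir(thirdparty_dir).path])
else:
    # MSVC: no -isystem equivalent here, fall back to an ordinary include path.
    env_foo.Prepend(CPPPATH=[thirdparty_dir])
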
diff --git a/modules/bullet/config.py b/modules/bullet/config.py
index 92dbcf5cb0d..f6fd0be8ba5 100644
--- a/modules/bullet/config.py
+++ b/modules/bullet/config.py
@@ -1,14 +1,17 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"BulletPhysicsDirectBodyState",
"BulletPhysicsServer",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/camera/SCsub b/modules/camera/SCsub
index 23f031f06e2..63c4e9fbab6 100644
--- a/modules/camera/SCsub
+++ b/modules/camera/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_camera = env_modules.Clone()
@@ -10,7 +10,7 @@ if env["platform"] == "iphone":
modules_sources = []
env_camera.add_source_files(modules_sources, "register_types.cpp")
env_camera.add_source_files(modules_sources, "camera_ios.mm")
- mod_lib = env_modules.add_library('#bin/libgodot_camera_module' + env['LIBSUFFIX'], modules_sources)
+ mod_lib = env_modules.add_library("#bin/libgodot_camera_module" + env["LIBSUFFIX"], modules_sources)
elif env["platform"] == "windows":
env_camera.add_source_files(env.modules_sources, "register_types.cpp")
@@ -19,4 +19,3 @@ elif env["platform"] == "windows":
elif env["platform"] == "osx":
env_camera.add_source_files(env.modules_sources, "register_types.cpp")
env_camera.add_source_files(env.modules_sources, "camera_osx.mm")
-
diff --git a/modules/camera/config.py b/modules/camera/config.py
index d308c04195b..87d75427419 100644
--- a/modules/camera/config.py
+++ b/modules/camera/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return platform == 'iphone' or platform == 'osx' or platform == 'windows'
+ return platform == "iphone" or platform == "osx" or platform == "windows"
+
def configure(env):
pass
diff --git a/modules/csg/SCsub b/modules/csg/SCsub
index 57c504efd88..641a42c187e 100644
--- a/modules/csg/SCsub
+++ b/modules/csg/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_csg = env_modules.Clone()
diff --git a/modules/csg/config.py b/modules/csg/config.py
index 38ccc66d915..0f267872d72 100644
--- a/modules/csg/config.py
+++ b/modules/csg/config.py
@@ -1,9 +1,11 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"CSGBox",
@@ -17,5 +19,6 @@ def get_doc_classes():
"CSGTorus",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/cvtt/SCsub b/modules/cvtt/SCsub
index 746b23ca288..5438f7ebacc 100644
--- a/modules/cvtt/SCsub
+++ b/modules/cvtt/SCsub
@@ -1,14 +1,14 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_cvtt = env_modules.Clone()
# Thirdparty source files
thirdparty_dir = "#thirdparty/cvtt/"
thirdparty_sources = [
- "ConvectionKernels.cpp"
+ "ConvectionKernels.cpp",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
diff --git a/modules/cvtt/config.py b/modules/cvtt/config.py
index 098f1eafa9e..53b8f2f2e36 100644
--- a/modules/cvtt/config.py
+++ b/modules/cvtt/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return env['tools']
+ return env["tools"]
+
def configure(env):
pass
diff --git a/modules/dds/SCsub b/modules/dds/SCsub
index 3d92ff02d6d..06980bd670f 100644
--- a/modules/dds/SCsub
+++ b/modules/dds/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_dds = env_modules.Clone()
diff --git a/modules/dds/config.py b/modules/dds/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/dds/config.py
+++ b/modules/dds/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/enet/SCsub b/modules/enet/SCsub
index 485c33b1a88..c8f4b3885e4 100644
--- a/modules/enet/SCsub
+++ b/modules/enet/SCsub
@@ -1,13 +1,13 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_enet = env_modules.Clone()
# Thirdparty source files
-if env['builtin_enet']:
+if env["builtin_enet"]:
thirdparty_dir = "#thirdparty/enet/"
thirdparty_sources = [
"godot.cpp",
diff --git a/modules/enet/config.py b/modules/enet/config.py
index 3e30bbe778f..5fd343c75da 100644
--- a/modules/enet/config.py
+++ b/modules/enet/config.py
@@ -1,13 +1,16 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"NetworkedMultiplayerENet",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/etc/SCsub b/modules/etc/SCsub
index 1742d3534fb..383bbf83c3e 100644
--- a/modules/etc/SCsub
+++ b/modules/etc/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_etc = env_modules.Clone()
@@ -9,21 +9,21 @@ env_etc = env_modules.Clone()
# Not unbundled so far since not widespread as shared library
thirdparty_dir = "#thirdparty/etc2comp/"
thirdparty_sources = [
- "EtcBlock4x4.cpp",
- "EtcBlock4x4Encoding.cpp",
- "EtcBlock4x4Encoding_ETC1.cpp",
- "EtcBlock4x4Encoding_R11.cpp",
- "EtcBlock4x4Encoding_RG11.cpp",
- "EtcBlock4x4Encoding_RGB8A1.cpp",
- "EtcBlock4x4Encoding_RGB8.cpp",
- "EtcBlock4x4Encoding_RGBA8.cpp",
- "Etc.cpp",
- "EtcDifferentialTrys.cpp",
- "EtcFilter.cpp",
- "EtcImage.cpp",
- "EtcIndividualTrys.cpp",
- "EtcMath.cpp",
- "EtcSortedBlockList.cpp",
+ "EtcBlock4x4.cpp",
+ "EtcBlock4x4Encoding.cpp",
+ "EtcBlock4x4Encoding_ETC1.cpp",
+ "EtcBlock4x4Encoding_R11.cpp",
+ "EtcBlock4x4Encoding_RG11.cpp",
+ "EtcBlock4x4Encoding_RGB8A1.cpp",
+ "EtcBlock4x4Encoding_RGB8.cpp",
+ "EtcBlock4x4Encoding_RGBA8.cpp",
+ "Etc.cpp",
+ "EtcDifferentialTrys.cpp",
+ "EtcFilter.cpp",
+ "EtcImage.cpp",
+ "EtcIndividualTrys.cpp",
+ "EtcMath.cpp",
+ "EtcSortedBlockList.cpp",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
diff --git a/modules/etc/config.py b/modules/etc/config.py
index 098f1eafa9e..53b8f2f2e36 100644
--- a/modules/etc/config.py
+++ b/modules/etc/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return env['tools']
+ return env["tools"]
+
def configure(env):
pass
diff --git a/modules/freetype/SCsub b/modules/freetype/SCsub
index 9e1853c4cda..bfc1658bb48 100644
--- a/modules/freetype/SCsub
+++ b/modules/freetype/SCsub
@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_freetype = env_modules.Clone()
# Thirdparty source files
-if env['builtin_freetype']:
+if env["builtin_freetype"]:
thirdparty_dir = "#thirdparty/freetype/"
thirdparty_sources = [
"src/autofit/autofit.c",
@@ -53,31 +53,31 @@ if env['builtin_freetype']:
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
- if env['platform'] == 'uwp':
+ if env["platform"] == "uwp":
# Include header for UWP to fix build issues
- env_freetype.Append(CCFLAGS=['/FI', '"modules/freetype/uwpdef.h"'])
+ env_freetype.Append(CCFLAGS=["/FI", '"modules/freetype/uwpdef.h"'])
# Globally too, as freetype is used in scene (see bottom)
- env.Append(CCFLAGS=['/FI', '"modules/freetype/uwpdef.h"'])
+ env.Append(CCFLAGS=["/FI", '"modules/freetype/uwpdef.h"'])
env_freetype.Prepend(CPPPATH=[thirdparty_dir + "/include"])
# Also needed in main env for scene/
env.Prepend(CPPPATH=[thirdparty_dir + "/include"])
- env_freetype.Append(CPPDEFINES=['FT2_BUILD_LIBRARY', 'FT_CONFIG_OPTION_USE_PNG'])
- if (env['target'] == 'debug'):
- env_freetype.Append(CPPDEFINES=['ZLIB_DEBUG'])
+ env_freetype.Append(CPPDEFINES=["FT2_BUILD_LIBRARY", "FT_CONFIG_OPTION_USE_PNG"])
+ if env["target"] == "debug":
+ env_freetype.Append(CPPDEFINES=["ZLIB_DEBUG"])
# Also requires libpng headers
- if env['builtin_libpng']:
+ if env["builtin_libpng"]:
env_freetype.Prepend(CPPPATH=["#thirdparty/libpng"])
- sfnt = thirdparty_dir + 'src/sfnt/sfnt.c'
+ sfnt = thirdparty_dir + "src/sfnt/sfnt.c"
# Must be done after all CPPDEFINES are being set so we can copy them.
- if env['platform'] == 'javascript':
+ if env["platform"] == "javascript":
# Forcibly undefine this macro so SIMD is not used in this file,
# since currently unsupported in WASM
tmp_env = env_freetype.Clone()
- tmp_env.Append(CPPFLAGS=['-U__OPTIMIZE__'])
+ tmp_env.Append(CPPFLAGS=["-U__OPTIMIZE__"])
sfnt = tmp_env.Object(sfnt)
thirdparty_sources += [sfnt]
@@ -91,7 +91,7 @@ if env['builtin_freetype']:
# and then plain strings for system library. We insert between the two.
inserted = False
for idx, linklib in enumerate(env["LIBS"]):
- if isinstance(linklib, (str, bytes)): # first system lib such as "X11", otherwise SCons lib object
+ if isinstance(linklib, (str, bytes)): # first system lib such as "X11", otherwise SCons lib object
env["LIBS"].insert(idx, lib)
inserted = True
break
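
The sfnt.c handling above shows the usual SCons way to compile a single file with different flags: clone the environment, adjust it, and turn that file into an explicit object node appended to the source list. A stripped-down sketch with hypothetical module and file names:

Import("env_modules")

env_lib = env_modules.Clone()
sources = ["a.c", "b.c"]

# Build one troublesome file with an extra flag; the flag only affects this object.
tmp_env = env_lib.Clone()
tmp_env.Append(CPPFLAGS=["-U__OPTIMIZE__"])
sources += [tmp_env.Object("simd_heavy.c")]

lib = env_lib.Library("#bin/libfoo", sources)
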
diff --git a/modules/freetype/config.py b/modules/freetype/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/freetype/config.py
+++ b/modules/freetype/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/gdnative/SCsub b/modules/gdnative/SCsub
index a18c75fa27c..a788175b07d 100644
--- a/modules/gdnative/SCsub
+++ b/modules/gdnative/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_gdnative = env_modules.Clone()
env_gdnative.add_source_files(env.modules_sources, "gdnative.cpp")
@@ -12,9 +12,9 @@ env_gdnative.add_source_files(env.modules_sources, "nativescript/*.cpp")
env_gdnative.add_source_files(env.modules_sources, "gdnative_library_singleton_editor.cpp")
env_gdnative.add_source_files(env.modules_sources, "gdnative_library_editor_plugin.cpp")
-env_gdnative.Prepend(CPPPATH=['#modules/gdnative/include/'])
+env_gdnative.Prepend(CPPPATH=["#modules/gdnative/include/"])
-Export('env_gdnative')
+Export("env_gdnative")
SConscript("net/SCsub")
SConscript("arvr/SCsub")
@@ -25,8 +25,11 @@ SConscript("videodecoder/SCsub")
from platform_methods import run_in_subprocess
import gdnative_builders
-_, gensource = env_gdnative.CommandNoCache(['include/gdnative_api_struct.gen.h', 'gdnative_api_struct.gen.cpp'],
- 'gdnative_api.json', run_in_subprocess(gdnative_builders.build_gdnative_api_struct))
+_, gensource = env_gdnative.CommandNoCache(
+ ["include/gdnative_api_struct.gen.h", "gdnative_api_struct.gen.cpp"],
+ "gdnative_api.json",
+ run_in_subprocess(gdnative_builders.build_gdnative_api_struct),
+)
env_gdnative.add_source_files(env.modules_sources, [gensource])
env.use_ptrcall = True
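
Export()/SConscript() above is how one configured environment is handed down to nested build scripts, which pick it up again with Import(). The mechanism in isolation, with made-up directory names:

Import("env_modules")

env_parent = env_modules.Clone()
env_parent.Prepend(CPPPATH=["#modules/parent/include/"])

Export("env_parent")          # visible to the SConscripts called below
SConscript("child_a/SCsub")   # each child starts with Import("env_parent")
SConscript("child_b/SCsub")
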
diff --git a/modules/gdnative/arvr/SCsub b/modules/gdnative/arvr/SCsub
index 20eaa995925..0b2db3b5045 100644
--- a/modules/gdnative/arvr/SCsub
+++ b/modules/gdnative/arvr/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
-env_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_gdnative.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/gdnative/arvr/config.py b/modules/gdnative/arvr/config.py
index 53bc8270272..d22f9454ed2 100644
--- a/modules/gdnative/arvr/config.py
+++ b/modules/gdnative/arvr/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return True
+ return True
+
def configure(env):
- pass
+ pass
diff --git a/modules/gdnative/config.py b/modules/gdnative/config.py
index b9e5afcdf31..37e25a46d4b 100644
--- a/modules/gdnative/config.py
+++ b/modules/gdnative/config.py
@@ -1,9 +1,11 @@
def can_build(env, platform):
return True
+
def configure(env):
env.use_ptrcall = True
+
def get_doc_classes():
return [
"@NativeScript",
@@ -20,5 +22,6 @@ def get_doc_classes():
"WebRTCDataChannelGDNative",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/gdnative/gdnative_builders.py b/modules/gdnative/gdnative_builders.py
index 0d95a65b7e9..d0094fb8691 100644
--- a/modules/gdnative/gdnative_builders.py
+++ b/modules/gdnative/gdnative_builders.py
@@ -8,209 +8,249 @@ from platform_methods import subprocess_main
def _spaced(e):
- return e if e[-1] == '*' else e + ' '
+ return e if e[-1] == "*" else e + " "
def _build_gdnative_api_struct_header(api):
out = [
- '/* THIS FILE IS GENERATED DO NOT EDIT */',
- '#ifndef GODOT_GDNATIVE_API_STRUCT_H',
- '#define GODOT_GDNATIVE_API_STRUCT_H',
- '',
- '#include <gdnative/gdnative.h>',
- '#include <android/godot_android.h>',
- '#include <arvr/godot_arvr.h>',
- '#include <nativescript/godot_nativescript.h>',
- '#include <net/godot_net.h>',
- '#include <pluginscript/godot_pluginscript.h>',
- '#include <videodecoder/godot_videodecoder.h>',
- '',
- '#ifdef __cplusplus',
+ "/* THIS FILE IS GENERATED DO NOT EDIT */",
+ "#ifndef GODOT_GDNATIVE_API_STRUCT_H",
+ "#define GODOT_GDNATIVE_API_STRUCT_H",
+ "",
+ "#include ",
+ "#include ",
+ "#include ",
+ "#include ",
+ "#include ",
+ "#include ",
+ "#include ",
+ "",
+ "#ifdef __cplusplus",
'extern "C" {',
- '#endif',
- '',
- 'enum GDNATIVE_API_TYPES {',
- '\tGDNATIVE_' + api['core']['type'] + ','
+ "#endif",
+ "",
+ "enum GDNATIVE_API_TYPES {",
+ "\tGDNATIVE_" + api["core"]["type"] + ",",
]
- for ext in api['extensions']:
- out += ['\tGDNATIVE_EXT_' + ext['type'] + ',']
+ for ext in api["extensions"]:
+ out += ["\tGDNATIVE_EXT_" + ext["type"] + ","]
- out += ['};', '']
+ out += ["};", ""]
def generate_extension_struct(name, ext, include_version=True):
ret_val = []
- if ext['next']:
- ret_val += generate_extension_struct(name, ext['next'])
+ if ext["next"]:
+ ret_val += generate_extension_struct(name, ext["next"])
ret_val += [
- 'typedef struct godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct {',
- '\tunsigned int type;',
- '\tgodot_gdnative_api_version version;',
- '\tconst godot_gdnative_api_struct *next;'
+ "typedef struct godot_gdnative_ext_"
+ + name
+ + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+ + "_api_struct {",
+ "\tunsigned int type;",
+ "\tgodot_gdnative_api_version version;",
+ "\tconst godot_gdnative_api_struct *next;",
]
- for funcdef in ext['api']:
- args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
- ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
+ for funcdef in ext["api"]:
+ args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]])
+ ret_val.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args))
- ret_val += ['} godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct;', '']
+ ret_val += [
+ "} godot_gdnative_ext_"
+ + name
+ + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+ + "_api_struct;",
+ "",
+ ]
return ret_val
-
def generate_core_extension_struct(core):
ret_val = []
- if core['next']:
- ret_val += generate_core_extension_struct(core['next'])
+ if core["next"]:
+ ret_val += generate_core_extension_struct(core["next"])
ret_val += [
- 'typedef struct godot_gdnative_core_' + ('{0}_{1}'.format(core['version']['major'], core['version']['minor'])) + '_api_struct {',
- '\tunsigned int type;',
- '\tgodot_gdnative_api_version version;',
- '\tconst godot_gdnative_api_struct *next;',
+ "typedef struct godot_gdnative_core_"
+ + ("{0}_{1}".format(core["version"]["major"], core["version"]["minor"]))
+ + "_api_struct {",
+ "\tunsigned int type;",
+ "\tgodot_gdnative_api_version version;",
+ "\tconst godot_gdnative_api_struct *next;",
]
- for funcdef in core['api']:
- args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
- ret_val.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
+ for funcdef in core["api"]:
+ args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]])
+ ret_val.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args))
- ret_val += ['} godot_gdnative_core_' + '{0}_{1}'.format(core['version']['major'], core['version']['minor']) + '_api_struct;', '']
+ ret_val += [
+ "} godot_gdnative_core_"
+ + "{0}_{1}".format(core["version"]["major"], core["version"]["minor"])
+ + "_api_struct;",
+ "",
+ ]
return ret_val
-
- for ext in api['extensions']:
- name = ext['name']
+ for ext in api["extensions"]:
+ name = ext["name"]
out += generate_extension_struct(name, ext, False)
- if api['core']['next']:
- out += generate_core_extension_struct(api['core']['next'])
+ if api["core"]["next"]:
+ out += generate_core_extension_struct(api["core"]["next"])
out += [
- 'typedef struct godot_gdnative_core_api_struct {',
- '\tunsigned int type;',
- '\tgodot_gdnative_api_version version;',
- '\tconst godot_gdnative_api_struct *next;',
- '\tunsigned int num_extensions;',
- '\tconst godot_gdnative_api_struct **extensions;',
+ "typedef struct godot_gdnative_core_api_struct {",
+ "\tunsigned int type;",
+ "\tgodot_gdnative_api_version version;",
+ "\tconst godot_gdnative_api_struct *next;",
+ "\tunsigned int num_extensions;",
+ "\tconst godot_gdnative_api_struct **extensions;",
]
- for funcdef in api['core']['api']:
- args = ', '.join(['%s%s' % (_spaced(t), n) for t, n in funcdef['arguments']])
- out.append('\t%s(*%s)(%s);' % (_spaced(funcdef['return_type']), funcdef['name'], args))
+ for funcdef in api["core"]["api"]:
+ args = ", ".join(["%s%s" % (_spaced(t), n) for t, n in funcdef["arguments"]])
+ out.append("\t%s(*%s)(%s);" % (_spaced(funcdef["return_type"]), funcdef["name"], args))
out += [
- '} godot_gdnative_core_api_struct;',
- '',
- '#ifdef __cplusplus',
- '}',
- '#endif',
- '',
- '#endif // GODOT_GDNATIVE_API_STRUCT_H',
- ''
+ "} godot_gdnative_core_api_struct;",
+ "",
+ "#ifdef __cplusplus",
+ "}",
+ "#endif",
+ "",
+ "#endif // GODOT_GDNATIVE_API_STRUCT_H",
+ "",
]
- return '\n'.join(out)
+ return "\n".join(out)
def _build_gdnative_api_struct_source(api):
- out = [
- '/* THIS FILE IS GENERATED DO NOT EDIT */',
- '',
- '#include <gdnative_api_struct.gen.h>',
- ''
- ]
+ out = ["/* THIS FILE IS GENERATED DO NOT EDIT */", "", "#include ", ""]
def get_extension_struct_name(name, ext, include_version=True):
- return 'godot_gdnative_ext_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_api_struct'
+ return (
+ "godot_gdnative_ext_"
+ + name
+ + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+ + "_api_struct"
+ )
def get_extension_struct_instance_name(name, ext, include_version=True):
- return 'api_extension_' + name + ('' if not include_version else ('_{0}_{1}'.format(ext['version']['major'], ext['version']['minor']))) + '_struct'
+ return (
+ "api_extension_"
+ + name
+ + ("" if not include_version else ("_{0}_{1}".format(ext["version"]["major"], ext["version"]["minor"])))
+ + "_struct"
+ )
def get_extension_struct_definition(name, ext, include_version=True):
ret_val = []
- if ext['next']:
- ret_val += get_extension_struct_definition(name, ext['next'])
+ if ext["next"]:
+ ret_val += get_extension_struct_definition(name, ext["next"])
ret_val += [
- 'extern const ' + get_extension_struct_name(name, ext, include_version) + ' ' + get_extension_struct_instance_name(name, ext, include_version) + ' = {',
- '\tGDNATIVE_EXT_' + ext['type'] + ',',
- '\t{' + str(ext['version']['major']) + ', ' + str(ext['version']['minor']) + '},',
- '\t' + ('NULL' if not ext['next'] else ('(const godot_gdnative_api_struct *)&' + get_extension_struct_instance_name(name, ext['next']))) + ','
+ "extern const "
+ + get_extension_struct_name(name, ext, include_version)
+ + " "
+ + get_extension_struct_instance_name(name, ext, include_version)
+ + " = {",
+ "\tGDNATIVE_EXT_" + ext["type"] + ",",
+ "\t{" + str(ext["version"]["major"]) + ", " + str(ext["version"]["minor"]) + "},",
+ "\t"
+ + (
+ "NULL"
+ if not ext["next"]
+ else ("(const godot_gdnative_api_struct *)&" + get_extension_struct_instance_name(name, ext["next"]))
+ )
+ + ",",
]
- for funcdef in ext['api']:
- ret_val.append('\t%s,' % funcdef['name'])
+ for funcdef in ext["api"]:
+ ret_val.append("\t%s," % funcdef["name"])
- ret_val += ['};\n']
+ ret_val += ["};\n"]
return ret_val
-
def get_core_struct_definition(core):
ret_val = []
- if core['next']:
- ret_val += get_core_struct_definition(core['next'])
+ if core["next"]:
+ ret_val += get_core_struct_definition(core["next"])
ret_val += [
- 'extern const godot_gdnative_core_' + ('{0}_{1}_api_struct api_{0}_{1}'.format(core['version']['major'], core['version']['minor'])) + ' = {',
- '\tGDNATIVE_' + core['type'] + ',',
- '\t{' + str(core['version']['major']) + ', ' + str(core['version']['minor']) + '},',
- '\t' + ('NULL' if not core['next'] else ('(const godot_gdnative_api_struct *)& api_{0}_{1}'.format(core['next']['version']['major'], core['next']['version']['minor']))) + ','
+ "extern const godot_gdnative_core_"
+ + ("{0}_{1}_api_struct api_{0}_{1}".format(core["version"]["major"], core["version"]["minor"]))
+ + " = {",
+ "\tGDNATIVE_" + core["type"] + ",",
+ "\t{" + str(core["version"]["major"]) + ", " + str(core["version"]["minor"]) + "},",
+ "\t"
+ + (
+ "NULL"
+ if not core["next"]
+ else (
+ "(const godot_gdnative_api_struct *)& api_{0}_{1}".format(
+ core["next"]["version"]["major"], core["next"]["version"]["minor"]
+ )
+ )
+ )
+ + ",",
]
- for funcdef in core['api']:
- ret_val.append('\t%s,' % funcdef['name'])
+ for funcdef in core["api"]:
+ ret_val.append("\t%s," % funcdef["name"])
- ret_val += ['};\n']
+ ret_val += ["};\n"]
return ret_val
- for ext in api['extensions']:
- name = ext['name']
+ for ext in api["extensions"]:
+ name = ext["name"]
out += get_extension_struct_definition(name, ext, False)
- out += ['', 'const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {']
+ out += ["", "const godot_gdnative_api_struct *gdnative_extensions_pointers[] = {"]
- for ext in api['extensions']:
- name = ext['name']
- out += ['\t(godot_gdnative_api_struct *)&api_extension_' + name + '_struct,']
+ for ext in api["extensions"]:
+ name = ext["name"]
+ out += ["\t(godot_gdnative_api_struct *)&api_extension_" + name + "_struct,"]
- out += ['};\n']
+ out += ["};\n"]
- if api['core']['next']:
- out += get_core_struct_definition(api['core']['next'])
+ if api["core"]["next"]:
+ out += get_core_struct_definition(api["core"]["next"])
out += [
- 'extern const godot_gdnative_core_api_struct api_struct = {',
- '\tGDNATIVE_' + api['core']['type'] + ',',
- '\t{' + str(api['core']['version']['major']) + ', ' + str(api['core']['version']['minor']) + '},',
- '\t(const godot_gdnative_api_struct *)&api_1_1,',
- '\t' + str(len(api['extensions'])) + ',',
- '\tgdnative_extensions_pointers,',
+ "extern const godot_gdnative_core_api_struct api_struct = {",
+ "\tGDNATIVE_" + api["core"]["type"] + ",",
+ "\t{" + str(api["core"]["version"]["major"]) + ", " + str(api["core"]["version"]["minor"]) + "},",
+ "\t(const godot_gdnative_api_struct *)&api_1_1,",
+ "\t" + str(len(api["extensions"])) + ",",
+ "\tgdnative_extensions_pointers,",
]
- for funcdef in api['core']['api']:
- out.append('\t%s,' % funcdef['name'])
- out.append('};\n')
+ for funcdef in api["core"]["api"]:
+ out.append("\t%s," % funcdef["name"])
+ out.append("};\n")
- return '\n'.join(out)
+ return "\n".join(out)
def build_gdnative_api_struct(target, source, env):
- with open(source[0], 'r') as fd:
+ with open(source[0], "r") as fd:
api = json.load(fd)
header, source = target
- with open(header, 'w') as fd:
+ with open(header, "w") as fd:
fd.write(_build_gdnative_api_struct_header(api))
- with open(source, 'w') as fd:
+ with open(source, "w") as fd:
fd.write(_build_gdnative_api_struct_source(api))
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
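
The generators in this file depend on only a small part of gdnative_api.json: a "core" block and a list of "extensions", each carrying "type", "version", an optional "next" link to the previous version, and an "api" list whose entries provide "name", "return_type" and "arguments" pairs. A minimal, hypothetical document with that shape (real entries differ) makes the traversal above easier to follow:

minimal_api = {
    "core": {
        "type": "CORE",
        "version": {"major": 1, "minor": 1},
        "next": None,  # earlier core versions are chained through "next"
        "api": [
            {
                "name": "godot_object_destroy",
                "return_type": "void",
                "arguments": [["godot_object *", "p_o"]],
            }
        ],
    },
    "extensions": [
        {
            "name": "nativescript",
            "type": "NATIVESCRIPT",
            "version": {"major": 1, "minor": 0},
            "next": None,
            "api": [],
        }
    ],
}
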
diff --git a/modules/gdnative/nativescript/SCsub b/modules/gdnative/nativescript/SCsub
index c28a1c86597..4212e87a875 100644
--- a/modules/gdnative/nativescript/SCsub
+++ b/modules/gdnative/nativescript/SCsub
@@ -1,9 +1,9 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
-env_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_gdnative.add_source_files(env.modules_sources, "*.cpp")
if "platform" in env and env["platform"] in ["linuxbsd", "iphone"]:
env.Append(LINKFLAGS=["-rdynamic"])
diff --git a/modules/gdnative/net/SCsub b/modules/gdnative/net/SCsub
index 18ab9986b0d..b76500c003d 100644
--- a/modules/gdnative/net/SCsub
+++ b/modules/gdnative/net/SCsub
@@ -1,13 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
env_net = env_gdnative.Clone()
has_webrtc = env_net["module_webrtc_enabled"]
if has_webrtc:
- env_net.Append(CPPDEFINES=['WEBRTC_GDNATIVE_ENABLED'])
-
-env_net.add_source_files(env.modules_sources, '*.cpp')
+ env_net.Append(CPPDEFINES=["WEBRTC_GDNATIVE_ENABLED"])
+env_net.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/gdnative/pluginscript/SCsub b/modules/gdnative/pluginscript/SCsub
index 20eaa995925..0b2db3b5045 100644
--- a/modules/gdnative/pluginscript/SCsub
+++ b/modules/gdnative/pluginscript/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
-Import('env_gdnative')
+Import("env")
+Import("env_gdnative")
-env_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_gdnative.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/gdnative/videodecoder/SCsub b/modules/gdnative/videodecoder/SCsub
index 04cc8ed6041..5948b9a3dd4 100644
--- a/modules/gdnative/videodecoder/SCsub
+++ b/modules/gdnative/videodecoder/SCsub
@@ -1,9 +1,9 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_vsdecoder_gdnative = env_modules.Clone()
-env_vsdecoder_gdnative.Prepend(CPPPATH=['#modules/gdnative/include/'])
-env_vsdecoder_gdnative.add_source_files(env.modules_sources, '*.cpp')
+env_vsdecoder_gdnative.Prepend(CPPPATH=["#modules/gdnative/include/"])
+env_vsdecoder_gdnative.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/gdnavigation/SCsub b/modules/gdnavigation/SCsub
index 9d462f92a78..877d601c6ab 100644
--- a/modules/gdnavigation/SCsub
+++ b/modules/gdnavigation/SCsub
@@ -1,25 +1,25 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_navigation = env_modules.Clone()
# Recast Thirdparty source files
-if env['builtin_recast']:
+if env["builtin_recast"]:
thirdparty_dir = "#thirdparty/recastnavigation/Recast/"
thirdparty_sources = [
- "Source/Recast.cpp",
- "Source/RecastAlloc.cpp",
- "Source/RecastArea.cpp",
- "Source/RecastAssert.cpp",
- "Source/RecastContour.cpp",
- "Source/RecastFilter.cpp",
- "Source/RecastLayers.cpp",
- "Source/RecastMesh.cpp",
- "Source/RecastMeshDetail.cpp",
- "Source/RecastRasterization.cpp",
- "Source/RecastRegion.cpp",
+ "Source/Recast.cpp",
+ "Source/RecastAlloc.cpp",
+ "Source/RecastArea.cpp",
+ "Source/RecastAssert.cpp",
+ "Source/RecastContour.cpp",
+ "Source/RecastFilter.cpp",
+ "Source/RecastLayers.cpp",
+ "Source/RecastMesh.cpp",
+ "Source/RecastMeshDetail.cpp",
+ "Source/RecastRasterization.cpp",
+ "Source/RecastRegion.cpp",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
@@ -31,11 +31,11 @@ if env['builtin_recast']:
# RVO Thirdparty source files
-if env['builtin_rvo2']:
+if env["builtin_rvo2"]:
thirdparty_dir = "#thirdparty/rvo2"
thirdparty_sources = [
- "/src/Agent.cpp",
- "/src/KdTree.cpp",
+ "/src/Agent.cpp",
+ "/src/KdTree.cpp",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
diff --git a/modules/gdnavigation/config.py b/modules/gdnavigation/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/gdnavigation/config.py
+++ b/modules/gdnavigation/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/gdscript/SCsub b/modules/gdscript/SCsub
index 74e653ce432..e58a1d8edcf 100644
--- a/modules/gdscript/SCsub
+++ b/modules/gdscript/SCsub
@@ -1,19 +1,19 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_gdscript = env_modules.Clone()
env_gdscript.add_source_files(env.modules_sources, "*.cpp")
-if env['tools']:
+if env["tools"]:
env_gdscript.add_source_files(env.modules_sources, "./editor/*.cpp")
# Those two modules are required for the language server protocol
- if env['module_jsonrpc_enabled'] and env['module_websocket_enabled']:
+ if env["module_jsonrpc_enabled"] and env["module_websocket_enabled"]:
env_gdscript.add_source_files(env.modules_sources, "./language_server/*.cpp")
else:
# Using a define in the disabled case, to avoid having an extra define
# in regular builds where all modules are enabled.
- env_gdscript.Append(CPPDEFINES=['GDSCRIPT_NO_LSP'])
+ env_gdscript.Append(CPPDEFINES=["GDSCRIPT_NO_LSP"])
diff --git a/modules/gdscript/config.py b/modules/gdscript/config.py
index a525eedaaa0..185a10bcb29 100644
--- a/modules/gdscript/config.py
+++ b/modules/gdscript/config.py
@@ -1,9 +1,11 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"@GDScript",
@@ -12,5 +14,6 @@ def get_doc_classes():
"GDScriptNativeClass",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/glslang/SCsub b/modules/glslang/SCsub
index ae102238f25..c1d23a138b1 100644
--- a/modules/glslang/SCsub
+++ b/modules/glslang/SCsub
@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_glslang = env_modules.Clone()
# Thirdparty source files
-if env['builtin_glslang']:
+if env["builtin_glslang"]:
thirdparty_dir = "#thirdparty/glslang/"
thirdparty_sources = [
"glslang/MachineIndependent/RemoveTree.cpp",
@@ -48,10 +48,10 @@ if env['builtin_glslang']:
"SPIRV/doc.cpp",
"SPIRV/SPVRemapper.cpp",
"SPIRV/SpvPostProcess.cpp",
- "SPIRV/Logger.cpp"
+ "SPIRV/Logger.cpp",
]
- if (env["platform"]=="windows"):
+ if env["platform"] == "windows":
thirdparty_sources.append("glslang/OSDependent/Windows/ossource.cpp")
else:
thirdparty_sources.append("glslang/OSDependent/Unix/ossource.cpp")
@@ -60,7 +60,7 @@ if env['builtin_glslang']:
# Treat glslang headers as system headers to avoid raising warnings. Not supported on MSVC.
if not env.msvc:
- env_glslang.Append(CPPFLAGS=['-isystem', Dir(thirdparty_dir).path])
+ env_glslang.Append(CPPFLAGS=["-isystem", Dir(thirdparty_dir).path])
else:
env_glslang.Prepend(CPPPATH=[thirdparty_dir])
diff --git a/modules/glslang/config.py b/modules/glslang/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/glslang/config.py
+++ b/modules/glslang/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/gridmap/SCsub b/modules/gridmap/SCsub
index 62b8a0ff93c..970ce534f00 100644
--- a/modules/gridmap/SCsub
+++ b/modules/gridmap/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_gridmap = env_modules.Clone()
diff --git a/modules/gridmap/config.py b/modules/gridmap/config.py
index 5022116c9b1..a6319fe1ea0 100644
--- a/modules/gridmap/config.py
+++ b/modules/gridmap/config.py
@@ -1,13 +1,16 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"GridMap",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/hdr/SCsub b/modules/hdr/SCsub
index c960e8126b1..a709397c9a0 100644
--- a/modules/hdr/SCsub
+++ b/modules/hdr/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_hdr = env_modules.Clone()
diff --git a/modules/hdr/config.py b/modules/hdr/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/hdr/config.py
+++ b/modules/hdr/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/jpg/SCsub b/modules/jpg/SCsub
index 96e8e704dde..8ee8e6dd6ee 100644
--- a/modules/jpg/SCsub
+++ b/modules/jpg/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_jpg = env_modules.Clone()
diff --git a/modules/jpg/config.py b/modules/jpg/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/jpg/config.py
+++ b/modules/jpg/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/jsonrpc/SCsub b/modules/jsonrpc/SCsub
index 13c9ffb253e..fe5312670a2 100644
--- a/modules/jsonrpc/SCsub
+++ b/modules/jsonrpc/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_jsonrpc = env_modules.Clone()
env_jsonrpc.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/jsonrpc/config.py b/modules/jsonrpc/config.py
index 53bc8270272..d22f9454ed2 100644
--- a/modules/jsonrpc/config.py
+++ b/modules/jsonrpc/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return True
+ return True
+
def configure(env):
- pass
+ pass
diff --git a/modules/mbedtls/SCsub b/modules/mbedtls/SCsub
index 0c6c703e16e..5f5d25a3ee2 100755
--- a/modules/mbedtls/SCsub
+++ b/modules/mbedtls/SCsub
@@ -1,11 +1,11 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_mbed_tls = env_modules.Clone()
-if env['builtin_mbedtls']:
+if env["builtin_mbedtls"]:
# Thirdparty source files
thirdparty_sources = [
"aes.c",
@@ -86,7 +86,7 @@ if env['builtin_mbedtls']:
"x509_csr.c",
"x509write_crt.c",
"x509write_csr.c",
- "xtea.c"
+ "xtea.c",
]
thirdparty_dir = "#thirdparty/mbedtls/library/"
diff --git a/modules/mbedtls/config.py b/modules/mbedtls/config.py
index 1c8cd12a2dc..d22f9454ed2 100755
--- a/modules/mbedtls/config.py
+++ b/modules/mbedtls/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/mobile_vr/SCsub b/modules/mobile_vr/SCsub
index 4bd184f0258..e6c43228b4c 100644
--- a/modules/mobile_vr/SCsub
+++ b/modules/mobile_vr/SCsub
@@ -1,8 +1,8 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_mobile_vr = env_modules.Clone()
-env_mobile_vr.add_source_files(env.modules_sources, '*.cpp')
+env_mobile_vr.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/mobile_vr/config.py b/modules/mobile_vr/config.py
index e85fa631dd3..ee401c1a2a0 100644
--- a/modules/mobile_vr/config.py
+++ b/modules/mobile_vr/config.py
@@ -1,13 +1,16 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"MobileVRInterface",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/modules_builders.py b/modules/modules_builders.py
index 0e9cba2b0b0..e7be6380d1d 100644
--- a/modules/modules_builders.py
+++ b/modules/modules_builders.py
@@ -7,10 +7,10 @@ from platform_methods import subprocess_main
def generate_modules_enabled(target, source, env):
- with open(target[0].path, 'w') as f:
+ with open(target[0].path, "w") as f:
for module in env.module_list:
- f.write('#define %s\n' % ("MODULE_" + module.upper() + "_ENABLED"))
+ f.write("#define %s\n" % ("MODULE_" + module.upper() + "_ENABLED"))
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
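
generate_modules_enabled simply emits one #define per enabled module. With a hypothetical module list, the loop reduces to:

module_list = ["gdscript", "mono"]
for module in module_list:
    print("#define %s" % ("MODULE_" + module.upper() + "_ENABLED"))
# prints:
#   #define MODULE_GDSCRIPT_ENABLED
#   #define MODULE_MONO_ENABLED
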
diff --git a/modules/mono/SCsub b/modules/mono/SCsub
index 5f03fafdcfe..af77913b911 100644
--- a/modules/mono/SCsub
+++ b/modules/mono/SCsub
@@ -2,46 +2,50 @@
import build_scripts.mono_configure as mono_configure
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_mono = env_modules.Clone()
-if env_mono['tools']:
+if env_mono["tools"]:
# NOTE: It is safe to generate this file here, since this is still executed serially
import build_scripts.gen_cs_glue_version as gen_cs_glue_version
- gen_cs_glue_version.generate_header('glue/GodotSharp', 'glue/cs_glue_version.gen.h')
+
+ gen_cs_glue_version.generate_header("glue/GodotSharp", "glue/cs_glue_version.gen.h")
# Glue sources
-if env_mono['mono_glue']:
- env_mono.Append(CPPDEFINES=['MONO_GLUE_ENABLED'])
+if env_mono["mono_glue"]:
+ env_mono.Append(CPPDEFINES=["MONO_GLUE_ENABLED"])
import os.path
- if not os.path.isfile('glue/mono_glue.gen.cpp'):
+
+ if not os.path.isfile("glue/mono_glue.gen.cpp"):
raise RuntimeError("Mono glue sources not found. Did you forget to run '--generate-mono-glue'?")
-if env_mono['tools'] or env_mono['target'] != 'release':
- env_mono.Append(CPPDEFINES=['GD_MONO_HOT_RELOAD'])
+if env_mono["tools"] or env_mono["target"] != "release":
+ env_mono.Append(CPPDEFINES=["GD_MONO_HOT_RELOAD"])
# Configure Mono
mono_configure.configure(env, env_mono)
-if env_mono['tools'] and env_mono['mono_glue']:
+if env_mono["tools"] and env_mono["mono_glue"]:
# Build Godot API solution
import build_scripts.api_solution_build as api_solution_build
+
api_sln_cmd = api_solution_build.build(env_mono)
# Build GodotTools
import build_scripts.godot_tools_build as godot_tools_build
+
godot_tools_build.build(env_mono, api_sln_cmd)
# Add sources
-env_mono.add_source_files(env.modules_sources, '*.cpp')
-env_mono.add_source_files(env.modules_sources, 'glue/*.cpp')
-env_mono.add_source_files(env.modules_sources, 'mono_gd/*.cpp')
-env_mono.add_source_files(env.modules_sources, 'utils/*.cpp')
+env_mono.add_source_files(env.modules_sources, "*.cpp")
+env_mono.add_source_files(env.modules_sources, "glue/*.cpp")
+env_mono.add_source_files(env.modules_sources, "mono_gd/*.cpp")
+env_mono.add_source_files(env.modules_sources, "utils/*.cpp")
-if env['tools']:
- env_mono.add_source_files(env.modules_sources, 'editor/*.cpp')
+if env["tools"]:
+ env_mono.add_source_files(env.modules_sources, "editor/*.cpp")
diff --git a/modules/mono/build_scripts/api_solution_build.py b/modules/mono/build_scripts/api_solution_build.py
index 639197c285e..9abac22df60 100644
--- a/modules/mono/build_scripts/api_solution_build.py
+++ b/modules/mono/build_scripts/api_solution_build.py
@@ -8,21 +8,22 @@ from SCons.Script import Dir
def build_api_solution(source, target, env):
# source and target elements are of type SCons.Node.FS.File, hence why we convert them to str
- module_dir = env['module_dir']
+ module_dir = env["module_dir"]
- solution_path = os.path.join(module_dir, 'glue/GodotSharp/GodotSharp.sln')
+ solution_path = os.path.join(module_dir, "glue/GodotSharp/GodotSharp.sln")
- build_config = env['solution_build_config']
+ build_config = env["solution_build_config"]
- extra_msbuild_args = ['/p:NoWarn=1591'] # Ignore missing documentation warnings
+ extra_msbuild_args = ["/p:NoWarn=1591"] # Ignore missing documentation warnings
from .solution_builder import build_solution
+
build_solution(env, solution_path, build_config, extra_msbuild_args=extra_msbuild_args)
# Copy targets
- core_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, 'GodotSharp', 'bin', build_config))
- editor_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, 'GodotSharpEditor', 'bin', build_config))
+ core_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, "GodotSharp", "bin", build_config))
+ editor_src_dir = os.path.abspath(os.path.join(solution_path, os.pardir, "GodotSharpEditor", "bin", build_config))
dst_dir = os.path.abspath(os.path.join(str(target[0]), os.pardir))
@@ -32,6 +33,7 @@ def build_api_solution(source, target, env):
def copy_target(target_path):
from shutil import copy
+
filename = os.path.basename(target_path)
src_path = os.path.join(core_src_dir, filename)
@@ -45,23 +47,28 @@ def build_api_solution(source, target, env):
def build(env_mono):
- assert env_mono['tools']
+ assert env_mono["tools"]
target_filenames = [
- 'GodotSharp.dll', 'GodotSharp.pdb', 'GodotSharp.xml',
- 'GodotSharpEditor.dll', 'GodotSharpEditor.pdb', 'GodotSharpEditor.xml'
+ "GodotSharp.dll",
+ "GodotSharp.pdb",
+ "GodotSharp.xml",
+ "GodotSharpEditor.dll",
+ "GodotSharpEditor.pdb",
+ "GodotSharpEditor.xml",
]
depend_cmd = []
- for build_config in ['Debug', 'Release']:
- output_dir = Dir('#bin').abspath
- editor_api_dir = os.path.join(output_dir, 'GodotSharp', 'Api', build_config)
+ for build_config in ["Debug", "Release"]:
+ output_dir = Dir("#bin").abspath
+ editor_api_dir = os.path.join(output_dir, "GodotSharp", "Api", build_config)
targets = [os.path.join(editor_api_dir, filename) for filename in target_filenames]
- cmd = env_mono.CommandNoCache(targets, depend_cmd, build_api_solution,
- module_dir=os.getcwd(), solution_build_config=build_config)
+ cmd = env_mono.CommandNoCache(
+ targets, depend_cmd, build_api_solution, module_dir=os.getcwd(), solution_build_config=build_config
+ )
env_mono.AlwaysBuild(cmd)
# Make the Release build of the API solution depend on the Debug build.
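
CommandNoCache is a Godot helper around SCons' Command; the extra keyword arguments (module_dir, solution_build_config) are attached to the builder's environment and read back inside build_api_solution through env["..."]. The underlying SCons mechanism, sketched with stock Command and made-up names:

env = Environment()

def run_packaging(target, source, env):
    # Keyword arguments passed to Command() surface as construction variables here.
    print("packaging into " + env["staging_dir"])

env.Command("dist/app.zip", "app/main.py", run_packaging, staging_dir="dist")
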
diff --git a/modules/mono/build_scripts/gen_cs_glue_version.py b/modules/mono/build_scripts/gen_cs_glue_version.py
index 5d1056c2fc7..98bbb4d9be3 100644
--- a/modules/mono/build_scripts/gen_cs_glue_version.py
+++ b/modules/mono/build_scripts/gen_cs_glue_version.py
@@ -1,20 +1,20 @@
-
def generate_header(solution_dir, version_header_dst):
import os
+
latest_mtime = 0
for root, dirs, files in os.walk(solution_dir, topdown=True):
- dirs[:] = [d for d in dirs if d not in ['Generated']] # Ignored generated files
- files = [f for f in files if f.endswith('.cs')]
+ dirs[:] = [d for d in dirs if d not in ["Generated"]] # Ignored generated files
+ files = [f for f in files if f.endswith(".cs")]
for file in files:
filepath = os.path.join(root, file)
mtime = os.path.getmtime(filepath)
latest_mtime = mtime if mtime > latest_mtime else latest_mtime
- glue_version = int(latest_mtime) # The latest modified time will do for now
+ glue_version = int(latest_mtime) # The latest modified time will do for now
- with open(version_header_dst, 'w') as version_header:
- version_header.write('/* THIS FILE IS GENERATED DO NOT EDIT */\n')
- version_header.write('#ifndef CS_GLUE_VERSION_H\n')
- version_header.write('#define CS_GLUE_VERSION_H\n\n')
- version_header.write('#define CS_GLUE_VERSION UINT32_C(' + str(glue_version) + ')\n')
- version_header.write('\n#endif // CS_GLUE_VERSION_H\n')
+ with open(version_header_dst, "w") as version_header:
+ version_header.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+ version_header.write("#ifndef CS_GLUE_VERSION_H\n")
+ version_header.write("#define CS_GLUE_VERSION_H\n\n")
+ version_header.write("#define CS_GLUE_VERSION UINT32_C(" + str(glue_version) + ")\n")
+ version_header.write("\n#endif // CS_GLUE_VERSION_H\n")
diff --git a/modules/mono/build_scripts/godot_tools_build.py b/modules/mono/build_scripts/godot_tools_build.py
index 99341c631ef..cffacf25772 100644
--- a/modules/mono/build_scripts/godot_tools_build.py
+++ b/modules/mono/build_scripts/godot_tools_build.py
@@ -8,30 +8,31 @@ from SCons.Script import Dir
def build_godot_tools(source, target, env):
# source and target elements are of type SCons.Node.FS.File, hence why we convert them to str
- module_dir = env['module_dir']
+ module_dir = env["module_dir"]
- solution_path = os.path.join(module_dir, 'editor/GodotTools/GodotTools.sln')
- build_config = 'Debug' if env['target'] == 'debug' else 'Release'
+ solution_path = os.path.join(module_dir, "editor/GodotTools/GodotTools.sln")
+ build_config = "Debug" if env["target"] == "debug" else "Release"
# Custom build target to make sure output is always copied to the data dir.
- extra_build_args = ['/Target:Build;GodotTools:BuildAlwaysCopyToDataDir']
+ extra_build_args = ["/Target:Build;GodotTools:BuildAlwaysCopyToDataDir"]
+
+ from .solution_builder import build_solution, nuget_restore
- from . solution_builder import build_solution, nuget_restore
nuget_restore(env, solution_path)
build_solution(env, solution_path, build_config, extra_build_args)
# No need to copy targets. The GodotTools csproj takes care of copying them.
def build(env_mono, api_sln_cmd):
- assert env_mono['tools']
+ assert env_mono["tools"]
- output_dir = Dir('#bin').abspath
- editor_tools_dir = os.path.join(output_dir, 'GodotSharp', 'Tools')
+ output_dir = Dir("#bin").abspath
+ editor_tools_dir = os.path.join(output_dir, "GodotSharp", "Tools")
- target_filenames = ['GodotTools.dll']
+ target_filenames = ["GodotTools.dll"]
- if env_mono['target'] == 'debug':
- target_filenames += ['GodotTools.pdb']
+ if env_mono["target"] == "debug":
+ target_filenames += ["GodotTools.pdb"]
targets = [os.path.join(editor_tools_dir, filename) for filename in target_filenames]
diff --git a/modules/mono/build_scripts/make_android_mono_config.py b/modules/mono/build_scripts/make_android_mono_config.py
index 4f5a4968910..d276d7d8865 100644
--- a/modules/mono/build_scripts/make_android_mono_config.py
+++ b/modules/mono/build_scripts/make_android_mono_config.py
@@ -1,23 +1,24 @@
-
def generate_compressed_config(config_src, output_dir):
import os.path
# Source file
- with open(os.path.join(output_dir, 'android_mono_config.gen.cpp'), 'w') as cpp:
- with open(config_src, 'rb') as f:
+ with open(os.path.join(output_dir, "android_mono_config.gen.cpp"), "w") as cpp:
+ with open(config_src, "rb") as f:
buf = f.read()
decompr_size = len(buf)
import zlib
+
buf = zlib.compress(buf)
compr_size = len(buf)
- bytes_seq_str = ''
+ bytes_seq_str = ""
for i, buf_idx in enumerate(range(compr_size)):
if i > 0:
- bytes_seq_str += ', '
+ bytes_seq_str += ", "
bytes_seq_str += str(buf[buf_idx])
- cpp.write('''/* THIS FILE IS GENERATED DO NOT EDIT */
+ cpp.write(
+ """/* THIS FILE IS GENERATED DO NOT EDIT */
#include "android_mono_config.h"
#ifdef ANDROID_ENABLED
@@ -48,4 +49,6 @@ String get_godot_android_mono_config() {
}
#endif // ANDROID_ENABLED
-''' % (compr_size, decompr_size, bytes_seq_str))
+"""
+ % (compr_size, decompr_size, bytes_seq_str)
+ )
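
The core of generate_compressed_config is zlib-compressing the Android Mono config and rendering the result as a comma-separated decimal byte sequence for the generated .cpp. The transformation in isolation, with placeholder input bytes:

import zlib

raw = b"<configuration>...</configuration>"  # stand-in for the real config_src contents
compressed = zlib.compress(raw)

# Same rendering as above: decimal byte values joined with ", ".
bytes_seq_str = ", ".join(str(b) for b in compressed)
print(len(compressed), len(raw))  # compr_size, decompr_size
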
diff --git a/modules/mono/build_scripts/mono_configure.py b/modules/mono/build_scripts/mono_configure.py
index 4e101583ef5..c8d97c56ba2 100644
--- a/modules/mono/build_scripts/mono_configure.py
+++ b/modules/mono/build_scripts/mono_configure.py
@@ -5,31 +5,33 @@ import subprocess
from SCons.Script import Dir, Environment
-if os.name == 'nt':
+if os.name == "nt":
from . import mono_reg_utils as monoreg
android_arch_dirs = {
- 'armv7': 'armeabi-v7a',
- 'arm64v8': 'arm64-v8a',
- 'x86': 'x86',
- 'x86_64': 'x86_64'
+ "armv7": "armeabi-v7a",
+ "arm64v8": "arm64-v8a",
+ "x86": "x86",
+ "x86_64": "x86_64",
}
def get_android_out_dir(env):
- return os.path.join(Dir('#platform/android/java/lib/libs').abspath,
- 'release' if env['target'] == 'release' else 'debug',
- android_arch_dirs[env['android_arch']])
+ return os.path.join(
+ Dir("#platform/android/java/lib/libs").abspath,
+ "release" if env["target"] == "release" else "debug",
+ android_arch_dirs[env["android_arch"]],
+ )
-def find_file_in_dir(directory, files, prefix='', extension=''):
- if not extension.startswith('.'):
- extension = '.' + extension
+def find_file_in_dir(directory, files, prefix="", extension=""):
+ if not extension.startswith("."):
+ extension = "." + extension
for curfile in files:
if os.path.isfile(os.path.join(directory, prefix + curfile + extension)):
return curfile
- return ''
+ return ""
def copy_file(src_dir, dst_dir, name):
@@ -45,138 +47,144 @@ def copy_file(src_dir, dst_dir, name):
def is_desktop(platform):
- return platform in ['windows', 'osx', 'linuxbsd', 'server', 'uwp', 'haiku']
+ return platform in ["windows", "osx", "linuxbsd", "server", "uwp", "haiku"]
def is_unix_like(platform):
- return platform in ['osx', 'linuxbsd', 'server', 'android', 'haiku']
+ return platform in ["osx", "linuxbsd", "server", "android", "haiku"]
def module_supports_tools_on(platform):
- return platform not in ['android', 'javascript']
+ return platform not in ["android", "javascript"]
def find_wasm_src_dir(mono_root):
hint_dirs = [
- os.path.join(mono_root, 'src'),
- os.path.join(mono_root, '../src'),
+ os.path.join(mono_root, "src"),
+ os.path.join(mono_root, "../src"),
]
for hint_dir in hint_dirs:
- if os.path.isfile(os.path.join(hint_dir, 'driver.c')):
+ if os.path.isfile(os.path.join(hint_dir, "driver.c")):
return hint_dir
- return ''
+ return ""
def configure(env, env_mono):
- bits = env['bits']
- is_android = env['platform'] == 'android'
- is_javascript = env['platform'] == 'javascript'
+ bits = env["bits"]
+ is_android = env["platform"] == "android"
+ is_javascript = env["platform"] == "javascript"
- tools_enabled = env['tools']
- mono_static = env['mono_static']
- copy_mono_root = env['copy_mono_root']
+ tools_enabled = env["tools"]
+ mono_static = env["mono_static"]
+ copy_mono_root = env["copy_mono_root"]
- mono_prefix = env['mono_prefix']
+ mono_prefix = env["mono_prefix"]
- mono_lib_names = ['mono-2.0-sgen', 'monosgen-2.0']
+ mono_lib_names = ["mono-2.0-sgen", "monosgen-2.0"]
- is_travis = os.environ.get('TRAVIS') == 'true'
+ is_travis = os.environ.get("TRAVIS") == "true"
if is_travis:
# Travis CI may have a Mono version lower than 5.12
- env_mono.Append(CPPDEFINES=['NO_PENDING_EXCEPTIONS'])
+ env_mono.Append(CPPDEFINES=["NO_PENDING_EXCEPTIONS"])
- if is_android and not env['android_arch'] in android_arch_dirs:
- raise RuntimeError('This module does not support the specified \'android_arch\': ' + env['android_arch'])
+ if is_android and not env["android_arch"] in android_arch_dirs:
+ raise RuntimeError("This module does not support the specified 'android_arch': " + env["android_arch"])
- if tools_enabled and not module_supports_tools_on(env['platform']):
+ if tools_enabled and not module_supports_tools_on(env["platform"]):
# TODO:
# Android: We have to add the data directory to the apk, concretely the Api and Tools folders.
- raise RuntimeError('This module does not currently support building for this platform with tools enabled')
+ raise RuntimeError("This module does not currently support building for this platform with tools enabled")
if is_android and mono_static:
# Android: When static linking and doing something that requires libmono-native, we get a dlopen error as libmono-native seems to depend on libmonosgen-2.0
- raise RuntimeError('Statically linking Mono is not currently supported on this platform')
+ raise RuntimeError("Statically linking Mono is not currently supported on this platform")
if is_javascript:
mono_static = True
- if not mono_prefix and (os.getenv('MONO32_PREFIX') or os.getenv('MONO64_PREFIX')):
- print("WARNING: The environment variables 'MONO32_PREFIX' and 'MONO64_PREFIX' are deprecated; use the 'mono_prefix' SCons parameter instead")
+ if not mono_prefix and (os.getenv("MONO32_PREFIX") or os.getenv("MONO64_PREFIX")):
+ print(
+ "WARNING: The environment variables 'MONO32_PREFIX' and 'MONO64_PREFIX' are deprecated; use the 'mono_prefix' SCons parameter instead"
+ )
- if env['platform'] == 'windows':
+ if env["platform"] == "windows":
mono_root = mono_prefix
- if not mono_root and os.name == 'nt':
+ if not mono_root and os.name == "nt":
mono_root = monoreg.find_mono_root_dir(bits)
if not mono_root:
- raise RuntimeError("Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter")
+ raise RuntimeError(
+ "Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter"
+ )
- print('Found Mono root directory: ' + mono_root)
+ print("Found Mono root directory: " + mono_root)
- mono_lib_path = os.path.join(mono_root, 'lib')
+ mono_lib_path = os.path.join(mono_root, "lib")
env.Append(LIBPATH=mono_lib_path)
- env_mono.Prepend(CPPPATH=os.path.join(mono_root, 'include', 'mono-2.0'))
+ env_mono.Prepend(CPPPATH=os.path.join(mono_root, "include", "mono-2.0"))
- lib_suffix = Environment()['LIBSUFFIX']
+ lib_suffix = Environment()["LIBSUFFIX"]
if mono_static:
if env.msvc:
- mono_static_lib_name = 'libmono-static-sgen'
+ mono_static_lib_name = "libmono-static-sgen"
else:
- mono_static_lib_name = 'libmonosgen-2.0'
+ mono_static_lib_name = "libmonosgen-2.0"
if not os.path.isfile(os.path.join(mono_lib_path, mono_static_lib_name + lib_suffix)):
- raise RuntimeError('Could not find static mono library in: ' + mono_lib_path)
+ raise RuntimeError("Could not find static mono library in: " + mono_lib_path)
if env.msvc:
env.Append(LINKFLAGS=mono_static_lib_name + lib_suffix)
- env.Append(LINKFLAGS='Mincore' + lib_suffix)
- env.Append(LINKFLAGS='msvcrt' + lib_suffix)
- env.Append(LINKFLAGS='LIBCMT' + lib_suffix)
- env.Append(LINKFLAGS='Psapi' + lib_suffix)
+ env.Append(LINKFLAGS="Mincore" + lib_suffix)
+ env.Append(LINKFLAGS="msvcrt" + lib_suffix)
+ env.Append(LINKFLAGS="LIBCMT" + lib_suffix)
+ env.Append(LINKFLAGS="Psapi" + lib_suffix)
else:
env.Append(LINKFLAGS=os.path.join(mono_lib_path, mono_static_lib_name + lib_suffix))
- env.Append(LIBS=['psapi'])
- env.Append(LIBS=['version'])
+ env.Append(LIBS=["psapi"])
+ env.Append(LIBS=["version"])
else:
mono_lib_name = find_file_in_dir(mono_lib_path, mono_lib_names, extension=lib_suffix)
if not mono_lib_name:
- raise RuntimeError('Could not find mono library in: ' + mono_lib_path)
+ raise RuntimeError("Could not find mono library in: " + mono_lib_path)
if env.msvc:
env.Append(LINKFLAGS=mono_lib_name + lib_suffix)
else:
env.Append(LIBS=[mono_lib_name])
- mono_bin_path = os.path.join(mono_root, 'bin')
+ mono_bin_path = os.path.join(mono_root, "bin")
- mono_dll_name = find_file_in_dir(mono_bin_path, mono_lib_names, extension='.dll')
+ mono_dll_name = find_file_in_dir(mono_bin_path, mono_lib_names, extension=".dll")
if not mono_dll_name:
- raise RuntimeError('Could not find mono shared library in: ' + mono_bin_path)
+ raise RuntimeError("Could not find mono shared library in: " + mono_bin_path)
- copy_file(mono_bin_path, '#bin', mono_dll_name + '.dll')
+ copy_file(mono_bin_path, "#bin", mono_dll_name + ".dll")
else:
- is_apple = env['platform'] in ['osx', 'iphone']
+ is_apple = env["platform"] in ["osx", "iphone"]
- sharedlib_ext = '.dylib' if is_apple else '.so'
+ sharedlib_ext = ".dylib" if is_apple else ".so"
mono_root = mono_prefix
- mono_lib_path = ''
- mono_so_name = ''
+ mono_lib_path = ""
+ mono_so_name = ""
if not mono_root and (is_android or is_javascript):
- raise RuntimeError("Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter")
+ raise RuntimeError(
+ "Mono installation directory not found; specify one manually with the 'mono_prefix' SCons parameter"
+ )
if not mono_root and is_apple:
# Try with some known directories under OSX
- hint_dirs = ['/Library/Frameworks/Mono.framework/Versions/Current', '/usr/local/var/homebrew/linked/mono']
+ hint_dirs = ["/Library/Frameworks/Mono.framework/Versions/Current", "/usr/local/var/homebrew/linked/mono"]
for hint_dir in hint_dirs:
if os.path.isdir(hint_dir):
mono_root = hint_dir
@@ -187,126 +195,139 @@ def configure(env, env_mono):
if not mono_root and mono_static:
mono_root = pkgconfig_try_find_mono_root(mono_lib_names, sharedlib_ext)
if not mono_root:
- raise RuntimeError("Building with mono_static=yes, but failed to find the mono prefix with pkg-config; " + \
- "specify one manually with the 'mono_prefix' SCons parameter")
+ raise RuntimeError(
+ "Building with mono_static=yes, but failed to find the mono prefix with pkg-config; "
+ + "specify one manually with the 'mono_prefix' SCons parameter"
+ )
if mono_root:
- print('Found Mono root directory: ' + mono_root)
+ print("Found Mono root directory: " + mono_root)
- mono_lib_path = os.path.join(mono_root, 'lib')
+ mono_lib_path = os.path.join(mono_root, "lib")
env.Append(LIBPATH=[mono_lib_path])
- env_mono.Prepend(CPPPATH=os.path.join(mono_root, 'include', 'mono-2.0'))
+ env_mono.Prepend(CPPPATH=os.path.join(mono_root, "include", "mono-2.0"))
- mono_lib = find_file_in_dir(mono_lib_path, mono_lib_names, prefix='lib', extension='.a')
+ mono_lib = find_file_in_dir(mono_lib_path, mono_lib_names, prefix="lib", extension=".a")
if not mono_lib:
- raise RuntimeError('Could not find mono library in: ' + mono_lib_path)
+ raise RuntimeError("Could not find mono library in: " + mono_lib_path)
- env_mono.Append(CPPDEFINES=['_REENTRANT'])
+ env_mono.Append(CPPDEFINES=["_REENTRANT"])
if mono_static:
- env.Append(LINKFLAGS=['-rdynamic'])
+ env.Append(LINKFLAGS=["-rdynamic"])
- mono_lib_file = os.path.join(mono_lib_path, 'lib' + mono_lib + '.a')
+ mono_lib_file = os.path.join(mono_lib_path, "lib" + mono_lib + ".a")
if is_apple:
- env.Append(LINKFLAGS=['-Wl,-force_load,' + mono_lib_file])
+ env.Append(LINKFLAGS=["-Wl,-force_load," + mono_lib_file])
else:
- assert is_desktop(env['platform']) or is_android or is_javascript
- env.Append(LINKFLAGS=['-Wl,-whole-archive', mono_lib_file, '-Wl,-no-whole-archive'])
+ assert is_desktop(env["platform"]) or is_android or is_javascript
+ env.Append(LINKFLAGS=["-Wl,-whole-archive", mono_lib_file, "-Wl,-no-whole-archive"])
if is_javascript:
- env.Append(LIBS=['mono-icall-table', 'mono-native', 'mono-ilgen', 'mono-ee-interp'])
+ env.Append(LIBS=["mono-icall-table", "mono-native", "mono-ilgen", "mono-ee-interp"])
- wasm_src_dir = os.path.join(mono_root, 'src')
+ wasm_src_dir = os.path.join(mono_root, "src")
if not os.path.isdir(wasm_src_dir):
- raise RuntimeError('Could not find mono wasm src directory')
+ raise RuntimeError("Could not find mono wasm src directory")
# Ideally this should be defined only for 'driver.c', but I can't fight scons for another 2 hours
- env_mono.Append(CPPDEFINES=['CORE_BINDINGS'])
+ env_mono.Append(CPPDEFINES=["CORE_BINDINGS"])
- env_mono.add_source_files(env.modules_sources, [
- os.path.join(wasm_src_dir, 'driver.c'),
- os.path.join(wasm_src_dir, 'zlib-helper.c'),
- os.path.join(wasm_src_dir, 'corebindings.c')
- ])
+ env_mono.add_source_files(
+ env.modules_sources,
+ [
+ os.path.join(wasm_src_dir, "driver.c"),
+ os.path.join(wasm_src_dir, "zlib-helper.c"),
+ os.path.join(wasm_src_dir, "corebindings.c"),
+ ],
+ )
- env.Append(LINKFLAGS=[
- '--js-library', os.path.join(wasm_src_dir, 'library_mono.js'),
- '--js-library', os.path.join(wasm_src_dir, 'binding_support.js'),
- '--js-library', os.path.join(wasm_src_dir, 'dotnet_support.js')
- ])
+ env.Append(
+ LINKFLAGS=[
+ "--js-library",
+ os.path.join(wasm_src_dir, "library_mono.js"),
+ "--js-library",
+ os.path.join(wasm_src_dir, "binding_support.js"),
+ "--js-library",
+ os.path.join(wasm_src_dir, "dotnet_support.js"),
+ ]
+ )
else:
env.Append(LIBS=[mono_lib])
if is_apple:
- env.Append(LIBS=['iconv', 'pthread'])
+ env.Append(LIBS=["iconv", "pthread"])
elif is_android:
- pass # Nothing
+ pass # Nothing
elif is_javascript:
- env.Append(LIBS=['m', 'rt', 'dl', 'pthread'])
+ env.Append(LIBS=["m", "rt", "dl", "pthread"])
else:
- env.Append(LIBS=['m', 'rt', 'dl', 'pthread'])
+ env.Append(LIBS=["m", "rt", "dl", "pthread"])
if not mono_static:
- mono_so_name = find_file_in_dir(mono_lib_path, mono_lib_names, prefix='lib', extension=sharedlib_ext)
+ mono_so_name = find_file_in_dir(mono_lib_path, mono_lib_names, prefix="lib", extension=sharedlib_ext)
if not mono_so_name:
- raise RuntimeError('Could not find mono shared library in: ' + mono_lib_path)
+ raise RuntimeError("Could not find mono shared library in: " + mono_lib_path)
- copy_file(mono_lib_path, '#bin', 'lib' + mono_so_name + sharedlib_ext)
+ copy_file(mono_lib_path, "#bin", "lib" + mono_so_name + sharedlib_ext)
else:
assert not mono_static
# TODO: Add option to force using pkg-config
- print('Mono root directory not found. Using pkg-config instead')
+ print("Mono root directory not found. Using pkg-config instead")
- env.ParseConfig('pkg-config monosgen-2 --libs')
- env_mono.ParseConfig('pkg-config monosgen-2 --cflags')
+ env.ParseConfig("pkg-config monosgen-2 --libs")
+ env_mono.ParseConfig("pkg-config monosgen-2 --cflags")
tmpenv = Environment()
- tmpenv.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH'))
- tmpenv.ParseConfig('pkg-config monosgen-2 --libs-only-L')
+ tmpenv.AppendENVPath("PKG_CONFIG_PATH", os.getenv("PKG_CONFIG_PATH"))
+ tmpenv.ParseConfig("pkg-config monosgen-2 --libs-only-L")
- for hint_dir in tmpenv['LIBPATH']:
- name_found = find_file_in_dir(hint_dir, mono_lib_names, prefix='lib', extension=sharedlib_ext)
+ for hint_dir in tmpenv["LIBPATH"]:
+ name_found = find_file_in_dir(hint_dir, mono_lib_names, prefix="lib", extension=sharedlib_ext)
if name_found:
mono_lib_path = hint_dir
mono_so_name = name_found
break
if not mono_so_name:
- raise RuntimeError('Could not find mono shared library in: ' + str(tmpenv['LIBPATH']))
+ raise RuntimeError("Could not find mono shared library in: " + str(tmpenv["LIBPATH"]))
if not mono_static:
- libs_output_dir = get_android_out_dir(env) if is_android else '#bin'
- copy_file(mono_lib_path, libs_output_dir, 'lib' + mono_so_name + sharedlib_ext)
+ libs_output_dir = get_android_out_dir(env) if is_android else "#bin"
+ copy_file(mono_lib_path, libs_output_dir, "lib" + mono_so_name + sharedlib_ext)
if not tools_enabled:
- if is_desktop(env['platform']):
+ if is_desktop(env["platform"]):
if not mono_root:
- mono_root = subprocess.check_output(['pkg-config', 'mono-2', '--variable=prefix']).decode('utf8').strip()
+ mono_root = (
+ subprocess.check_output(["pkg-config", "mono-2", "--variable=prefix"]).decode("utf8").strip()
+ )
make_template_dir(env, mono_root)
elif is_android:
# Compress Android Mono Config
from . import make_android_mono_config
+
module_dir = os.getcwd()
- config_file_path = os.path.join(module_dir, 'build_scripts', 'mono_android_config.xml')
- make_android_mono_config.generate_compressed_config(config_file_path, 'mono_gd/')
+ config_file_path = os.path.join(module_dir, "build_scripts", "mono_android_config.xml")
+ make_android_mono_config.generate_compressed_config(config_file_path, "mono_gd/")
# Copy the required shared libraries
copy_mono_shared_libs(env, mono_root, None)
elif is_javascript:
- pass # No data directory for this platform
+ pass # No data directory for this platform
if copy_mono_root:
if not mono_root:
- mono_root = subprocess.check_output(['pkg-config', 'mono-2', '--variable=prefix']).decode('utf8').strip()
+ mono_root = subprocess.check_output(["pkg-config", "mono-2", "--variable=prefix"]).decode("utf8").strip()
if tools_enabled:
- copy_mono_root_files(env, mono_root)
+ copy_mono_root_files(env, mono_root)
else:
print("Ignoring option: 'copy_mono_root'; only available for builds with 'tools' enabled.")
@@ -314,26 +335,26 @@ def configure(env, env_mono):
def make_template_dir(env, mono_root):
from shutil import rmtree
- platform = env['platform']
- target = env['target']
+ platform = env["platform"]
+ target = env["target"]
- template_dir_name = ''
+ template_dir_name = ""
assert is_desktop(platform)
- template_dir_name = 'data.mono.%s.%s.%s' % (platform, env['bits'], target)
+ template_dir_name = "data.mono.%s.%s.%s" % (platform, env["bits"], target)
- output_dir = Dir('#bin').abspath
+ output_dir = Dir("#bin").abspath
template_dir = os.path.join(output_dir, template_dir_name)
- template_mono_root_dir = os.path.join(template_dir, 'Mono')
+ template_mono_root_dir = os.path.join(template_dir, "Mono")
if os.path.isdir(template_mono_root_dir):
- rmtree(template_mono_root_dir) # Clean first
+ rmtree(template_mono_root_dir) # Clean first
# Copy etc/mono/
- template_mono_config_dir = os.path.join(template_mono_root_dir, 'etc', 'mono')
+ template_mono_config_dir = os.path.join(template_mono_root_dir, "etc", "mono")
copy_mono_etc_dir(mono_root, template_mono_config_dir, platform)
# Copy the required shared libraries
@@ -347,18 +368,18 @@ def copy_mono_root_files(env, mono_root):
from shutil import rmtree
if not mono_root:
- raise RuntimeError('Mono installation directory not found')
+ raise RuntimeError("Mono installation directory not found")
- output_dir = Dir('#bin').abspath
- editor_mono_root_dir = os.path.join(output_dir, 'GodotSharp', 'Mono')
+ output_dir = Dir("#bin").abspath
+ editor_mono_root_dir = os.path.join(output_dir, "GodotSharp", "Mono")
if os.path.isdir(editor_mono_root_dir):
- rmtree(editor_mono_root_dir) # Clean first
+ rmtree(editor_mono_root_dir) # Clean first
# Copy etc/mono/
- editor_mono_config_dir = os.path.join(editor_mono_root_dir, 'etc', 'mono')
- copy_mono_etc_dir(mono_root, editor_mono_config_dir, env['platform'])
+ editor_mono_config_dir = os.path.join(editor_mono_root_dir, "etc", "mono")
+ copy_mono_etc_dir(mono_root, editor_mono_config_dir, env["platform"])
# Copy the required shared libraries
@@ -366,20 +387,20 @@ def copy_mono_root_files(env, mono_root):
# Copy framework assemblies
- mono_framework_dir = os.path.join(mono_root, 'lib', 'mono', '4.5')
- mono_framework_facades_dir = os.path.join(mono_framework_dir, 'Facades')
+ mono_framework_dir = os.path.join(mono_root, "lib", "mono", "4.5")
+ mono_framework_facades_dir = os.path.join(mono_framework_dir, "Facades")
- editor_mono_framework_dir = os.path.join(editor_mono_root_dir, 'lib', 'mono', '4.5')
- editor_mono_framework_facades_dir = os.path.join(editor_mono_framework_dir, 'Facades')
+ editor_mono_framework_dir = os.path.join(editor_mono_root_dir, "lib", "mono", "4.5")
+ editor_mono_framework_facades_dir = os.path.join(editor_mono_framework_dir, "Facades")
if not os.path.isdir(editor_mono_framework_dir):
os.makedirs(editor_mono_framework_dir)
if not os.path.isdir(editor_mono_framework_facades_dir):
os.makedirs(editor_mono_framework_facades_dir)
- for assembly in glob(os.path.join(mono_framework_dir, '*.dll')):
+ for assembly in glob(os.path.join(mono_framework_dir, "*.dll")):
copy(assembly, editor_mono_framework_dir)
- for assembly in glob(os.path.join(mono_framework_facades_dir, '*.dll')):
+ for assembly in glob(os.path.join(mono_framework_facades_dir, "*.dll")):
copy(assembly, editor_mono_framework_facades_dir)
@@ -391,28 +412,28 @@ def copy_mono_etc_dir(mono_root, target_mono_config_dir, platform):
if not os.path.isdir(target_mono_config_dir):
os.makedirs(target_mono_config_dir)
- mono_etc_dir = os.path.join(mono_root, 'etc', 'mono')
+ mono_etc_dir = os.path.join(mono_root, "etc", "mono")
if not os.path.isdir(mono_etc_dir):
- mono_etc_dir = ''
+ mono_etc_dir = ""
etc_hint_dirs = []
- if platform != 'windows':
- etc_hint_dirs += ['/etc/mono', '/usr/local/etc/mono']
- if 'MONO_CFG_DIR' in os.environ:
- etc_hint_dirs += [os.path.join(os.environ['MONO_CFG_DIR'], 'mono')]
+ if platform != "windows":
+ etc_hint_dirs += ["/etc/mono", "/usr/local/etc/mono"]
+ if "MONO_CFG_DIR" in os.environ:
+ etc_hint_dirs += [os.path.join(os.environ["MONO_CFG_DIR"], "mono")]
for etc_hint_dir in etc_hint_dirs:
if os.path.isdir(etc_hint_dir):
mono_etc_dir = etc_hint_dir
break
if not mono_etc_dir:
- raise RuntimeError('Mono installation etc directory not found')
+ raise RuntimeError("Mono installation etc directory not found")
- copy_tree(os.path.join(mono_etc_dir, '2.0'), os.path.join(target_mono_config_dir, '2.0'))
- copy_tree(os.path.join(mono_etc_dir, '4.0'), os.path.join(target_mono_config_dir, '4.0'))
- copy_tree(os.path.join(mono_etc_dir, '4.5'), os.path.join(target_mono_config_dir, '4.5'))
- if os.path.isdir(os.path.join(mono_etc_dir, 'mconfig')):
- copy_tree(os.path.join(mono_etc_dir, 'mconfig'), os.path.join(target_mono_config_dir, 'mconfig'))
+ copy_tree(os.path.join(mono_etc_dir, "2.0"), os.path.join(target_mono_config_dir, "2.0"))
+ copy_tree(os.path.join(mono_etc_dir, "4.0"), os.path.join(target_mono_config_dir, "4.0"))
+ copy_tree(os.path.join(mono_etc_dir, "4.5"), os.path.join(target_mono_config_dir, "4.5"))
+ if os.path.isdir(os.path.join(mono_etc_dir, "mconfig")):
+ copy_tree(os.path.join(mono_etc_dir, "mconfig"), os.path.join(target_mono_config_dir, "mconfig"))
- for file in glob(os.path.join(mono_etc_dir, '*')):
+ for file in glob(os.path.join(mono_etc_dir, "*")):
if os.path.isfile(file):
copy(file, target_mono_config_dir)
@@ -424,48 +445,66 @@ def copy_mono_shared_libs(env, mono_root, target_mono_root_dir):
if os.path.isfile(src):
copy(src, dst)
- platform = env['platform']
+ platform = env["platform"]
- if platform == 'windows':
- src_mono_bin_dir = os.path.join(mono_root, 'bin')
- target_mono_bin_dir = os.path.join(target_mono_root_dir, 'bin')
+ if platform == "windows":
+ src_mono_bin_dir = os.path.join(mono_root, "bin")
+ target_mono_bin_dir = os.path.join(target_mono_root_dir, "bin")
if not os.path.isdir(target_mono_bin_dir):
os.makedirs(target_mono_bin_dir)
- mono_posix_helper_name = find_file_in_dir(src_mono_bin_dir, ['MonoPosixHelper', 'libMonoPosixHelper'], extension='.dll')
- copy(os.path.join(src_mono_bin_dir, mono_posix_helper_name + '.dll'), os.path.join(target_mono_bin_dir, 'MonoPosixHelper.dll'))
+ mono_posix_helper_name = find_file_in_dir(
+ src_mono_bin_dir, ["MonoPosixHelper", "libMonoPosixHelper"], extension=".dll"
+ )
+ copy(
+ os.path.join(src_mono_bin_dir, mono_posix_helper_name + ".dll"),
+ os.path.join(target_mono_bin_dir, "MonoPosixHelper.dll"),
+ )
# For newer versions
- btls_dll_path = os.path.join(src_mono_bin_dir, 'libmono-btls-shared.dll')
+ btls_dll_path = os.path.join(src_mono_bin_dir, "libmono-btls-shared.dll")
if os.path.isfile(btls_dll_path):
copy(btls_dll_path, target_mono_bin_dir)
else:
- target_mono_lib_dir = get_android_out_dir(env) if platform == 'android' else os.path.join(target_mono_root_dir, 'lib')
+ target_mono_lib_dir = (
+ get_android_out_dir(env) if platform == "android" else os.path.join(target_mono_root_dir, "lib")
+ )
if not os.path.isdir(target_mono_lib_dir):
os.makedirs(target_mono_lib_dir)
lib_file_names = []
- if platform == 'osx':
- lib_file_names = [lib_name + '.dylib' for lib_name in [
- 'libmono-btls-shared', 'libmono-native-compat', 'libMonoPosixHelper'
- ]]
+ if platform == "osx":
+ lib_file_names = [
+ lib_name + ".dylib"
+ for lib_name in ["libmono-btls-shared", "libmono-native-compat", "libMonoPosixHelper"]
+ ]
elif is_unix_like(platform):
- lib_file_names = [lib_name + '.so' for lib_name in [
- 'libmono-btls-shared', 'libmono-ee-interp', 'libmono-native', 'libMonoPosixHelper',
- 'libmono-profiler-aot', 'libmono-profiler-coverage', 'libmono-profiler-log', 'libMonoSupportW'
- ]]
+ lib_file_names = [
+ lib_name + ".so"
+ for lib_name in [
+ "libmono-btls-shared",
+ "libmono-ee-interp",
+ "libmono-native",
+ "libMonoPosixHelper",
+ "libmono-profiler-aot",
+ "libmono-profiler-coverage",
+ "libmono-profiler-log",
+ "libMonoSupportW",
+ ]
+ ]
for lib_file_name in lib_file_names:
- copy_if_exists(os.path.join(mono_root, 'lib', lib_file_name), target_mono_lib_dir)
+ copy_if_exists(os.path.join(mono_root, "lib", lib_file_name), target_mono_lib_dir)
+
def pkgconfig_try_find_mono_root(mono_lib_names, sharedlib_ext):
tmpenv = Environment()
- tmpenv.AppendENVPath('PKG_CONFIG_PATH', os.getenv('PKG_CONFIG_PATH'))
- tmpenv.ParseConfig('pkg-config monosgen-2 --libs-only-L')
- for hint_dir in tmpenv['LIBPATH']:
- name_found = find_file_in_dir(hint_dir, mono_lib_names, prefix='lib', extension=sharedlib_ext)
- if name_found and os.path.isdir(os.path.join(hint_dir, '..', 'include', 'mono-2.0')):
- return os.path.join(hint_dir, '..')
- return ''
+ tmpenv.AppendENVPath("PKG_CONFIG_PATH", os.getenv("PKG_CONFIG_PATH"))
+ tmpenv.ParseConfig("pkg-config monosgen-2 --libs-only-L")
+ for hint_dir in tmpenv["LIBPATH"]:
+ name_found = find_file_in_dir(hint_dir, mono_lib_names, prefix="lib", extension=sharedlib_ext)
+ if name_found and os.path.isdir(os.path.join(hint_dir, "..", "include", "mono-2.0")):
+ return os.path.join(hint_dir, "..")
+ return ""
diff --git a/modules/mono/build_scripts/mono_reg_utils.py b/modules/mono/build_scripts/mono_reg_utils.py
index 3bae11b1675..3090a4759aa 100644
--- a/modules/mono/build_scripts/mono_reg_utils.py
+++ b/modules/mono/build_scripts/mono_reg_utils.py
@@ -1,7 +1,7 @@
import os
import platform
-if os.name == 'nt':
+if os.name == "nt":
import sys
import winreg
@@ -10,7 +10,7 @@ def _reg_open_key(key, subkey):
try:
return winreg.OpenKey(key, subkey)
except (WindowsError, OSError):
- if platform.architecture()[0] == '32bit':
+ if platform.architecture()[0] == "32bit":
bitness_sam = winreg.KEY_WOW64_64KEY
else:
bitness_sam = winreg.KEY_WOW64_32KEY
@@ -20,12 +20,12 @@ def _reg_open_key(key, subkey):
def _reg_open_key_bits(key, subkey, bits):
sam = winreg.KEY_READ
- if platform.architecture()[0] == '32bit':
- if bits == '64':
+ if platform.architecture()[0] == "32bit":
+ if bits == "64":
# Force 32bit process to search in 64bit registry
sam |= winreg.KEY_WOW64_64KEY
else:
- if bits == '32':
+ if bits == "32":
# Force 64bit process to search in 32bit registry
sam |= winreg.KEY_WOW64_32KEY
@@ -35,7 +35,7 @@ def _reg_open_key_bits(key, subkey, bits):
def _find_mono_in_reg(subkey, bits):
try:
with _reg_open_key_bits(winreg.HKEY_LOCAL_MACHINE, subkey, bits) as hKey:
- value = winreg.QueryValueEx(hKey, 'SdkInstallRoot')[0]
+ value = winreg.QueryValueEx(hKey, "SdkInstallRoot")[0]
return value
except (WindowsError, OSError):
return None
@@ -44,70 +44,70 @@ def _find_mono_in_reg(subkey, bits):
def _find_mono_in_reg_old(subkey, bits):
try:
with _reg_open_key_bits(winreg.HKEY_LOCAL_MACHINE, subkey, bits) as hKey:
- default_clr = winreg.QueryValueEx(hKey, 'DefaultCLR')[0]
+ default_clr = winreg.QueryValueEx(hKey, "DefaultCLR")[0]
if default_clr:
- return _find_mono_in_reg(subkey + '\\' + default_clr, bits)
+ return _find_mono_in_reg(subkey + "\\" + default_clr, bits)
return None
except (WindowsError, EnvironmentError):
return None
def find_mono_root_dir(bits):
- root_dir = _find_mono_in_reg(r'SOFTWARE\Mono', bits)
+ root_dir = _find_mono_in_reg(r"SOFTWARE\Mono", bits)
if root_dir is not None:
return str(root_dir)
- root_dir = _find_mono_in_reg_old(r'SOFTWARE\Novell\Mono', bits)
+ root_dir = _find_mono_in_reg_old(r"SOFTWARE\Novell\Mono", bits)
if root_dir is not None:
return str(root_dir)
- return ''
+ return ""
def find_msbuild_tools_path_reg():
import subprocess
- vswhere = os.getenv('PROGRAMFILES(X86)')
+ vswhere = os.getenv("PROGRAMFILES(X86)")
if not vswhere:
- vswhere = os.getenv('PROGRAMFILES')
- vswhere += r'\Microsoft Visual Studio\Installer\vswhere.exe'
+ vswhere = os.getenv("PROGRAMFILES")
+ vswhere += r"\Microsoft Visual Studio\Installer\vswhere.exe"
- vswhere_args = ['-latest', '-products', '*', '-requires', 'Microsoft.Component.MSBuild']
+ vswhere_args = ["-latest", "-products", "*", "-requires", "Microsoft.Component.MSBuild"]
try:
lines = subprocess.check_output([vswhere] + vswhere_args).splitlines()
for line in lines:
- parts = line.decode("utf-8").split(':', 1)
+ parts = line.decode("utf-8").split(":", 1)
- if len(parts) < 2 or parts[0] != 'installationPath':
+ if len(parts) < 2 or parts[0] != "installationPath":
continue
val = parts[1].strip()
if not val:
- raise ValueError('Value of `installationPath` entry is empty')
+ raise ValueError("Value of `installationPath` entry is empty")
# Since VS2019, the directory is simply named "Current"
- msbuild_dir = os.path.join(val, 'MSBuild\\Current\\Bin')
+ msbuild_dir = os.path.join(val, "MSBuild\\Current\\Bin")
if os.path.isdir(msbuild_dir):
return msbuild_dir
# Directory name "15.0" is used in VS 2017
- return os.path.join(val, 'MSBuild\\15.0\\Bin')
+ return os.path.join(val, "MSBuild\\15.0\\Bin")
- raise ValueError('Cannot find `installationPath` entry')
+ raise ValueError("Cannot find `installationPath` entry")
except ValueError as e:
- print('Error reading output from vswhere: ' + e.message)
+ print("Error reading output from vswhere: " + e.message)
except WindowsError:
- pass # Fine, vswhere not found
+ pass # Fine, vswhere not found
except (subprocess.CalledProcessError, OSError):
pass
# Try to find 14.0 in the Registry
try:
- subkey = r'SOFTWARE\Microsoft\MSBuild\ToolsVersions\14.0'
+ subkey = r"SOFTWARE\Microsoft\MSBuild\ToolsVersions\14.0"
with _reg_open_key(winreg.HKEY_LOCAL_MACHINE, subkey) as hKey:
- value = winreg.QueryValueEx(hKey, 'MSBuildToolsPath')[0]
+ value = winreg.QueryValueEx(hKey, "MSBuildToolsPath")[0]
return value
except (WindowsError, OSError):
- return ''
+ return ""
diff --git a/modules/mono/build_scripts/solution_builder.py b/modules/mono/build_scripts/solution_builder.py
index d1529a64d22..db6b4ff7aae 100644
--- a/modules/mono/build_scripts/solution_builder.py
+++ b/modules/mono/build_scripts/solution_builder.py
@@ -1,4 +1,3 @@
-
import os
@@ -8,35 +7,38 @@ verbose = False
def find_nuget_unix():
import os
- if 'NUGET_PATH' in os.environ:
- hint_path = os.environ['NUGET_PATH']
+ if "NUGET_PATH" in os.environ:
+ hint_path = os.environ["NUGET_PATH"]
if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK):
return hint_path
- hint_path = os.path.join(hint_path, 'nuget')
+ hint_path = os.path.join(hint_path, "nuget")
if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK):
return hint_path
import os.path
import sys
- hint_dirs = ['/opt/novell/mono/bin']
- if sys.platform == 'darwin':
- hint_dirs = ['/Library/Frameworks/Mono.framework/Versions/Current/bin', '/usr/local/var/homebrew/linked/mono/bin'] + hint_dirs
+ hint_dirs = ["/opt/novell/mono/bin"]
+ if sys.platform == "darwin":
+ hint_dirs = [
+ "/Library/Frameworks/Mono.framework/Versions/Current/bin",
+ "/usr/local/var/homebrew/linked/mono/bin",
+ ] + hint_dirs
for hint_dir in hint_dirs:
- hint_path = os.path.join(hint_dir, 'nuget')
+ hint_path = os.path.join(hint_dir, "nuget")
if os.path.isfile(hint_path):
return hint_path
- elif os.path.isfile(hint_path + '.exe'):
- return hint_path + '.exe'
+ elif os.path.isfile(hint_path + ".exe"):
+ return hint_path + ".exe"
- for hint_dir in os.environ['PATH'].split(os.pathsep):
+ for hint_dir in os.environ["PATH"].split(os.pathsep):
hint_dir = hint_dir.strip('"')
- hint_path = os.path.join(hint_dir, 'nuget')
+ hint_path = os.path.join(hint_dir, "nuget")
if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK):
return hint_path
- if os.path.isfile(hint_path + '.exe') and os.access(hint_path + '.exe', os.X_OK):
- return hint_path + '.exe'
+ if os.path.isfile(hint_path + ".exe") and os.access(hint_path + ".exe", os.X_OK):
+ return hint_path + ".exe"
return None
@@ -44,30 +46,30 @@ def find_nuget_unix():
def find_nuget_windows(env):
import os
- if 'NUGET_PATH' in os.environ:
- hint_path = os.environ['NUGET_PATH']
+ if "NUGET_PATH" in os.environ:
+ hint_path = os.environ["NUGET_PATH"]
if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK):
return hint_path
- hint_path = os.path.join(hint_path, 'nuget.exe')
+ hint_path = os.path.join(hint_path, "nuget.exe")
if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK):
return hint_path
- from . mono_reg_utils import find_mono_root_dir
+ from .mono_reg_utils import find_mono_root_dir
- mono_root = env['mono_prefix'] or find_mono_root_dir(env['bits'])
+ mono_root = env["mono_prefix"] or find_mono_root_dir(env["bits"])
if mono_root:
- mono_bin_dir = os.path.join(mono_root, 'bin')
- nuget_mono = os.path.join(mono_bin_dir, 'nuget.bat')
+ mono_bin_dir = os.path.join(mono_root, "bin")
+ nuget_mono = os.path.join(mono_bin_dir, "nuget.bat")
if os.path.isfile(nuget_mono):
return nuget_mono
# Standalone NuGet
- for hint_dir in os.environ['PATH'].split(os.pathsep):
+ for hint_dir in os.environ["PATH"].split(os.pathsep):
hint_dir = hint_dir.strip('"')
- hint_path = os.path.join(hint_dir, 'nuget.exe')
+ hint_path = os.path.join(hint_dir, "nuget.exe")
if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK):
return hint_path
@@ -78,52 +80,55 @@ def find_msbuild_unix(filename):
import os.path
import sys
- hint_dirs = ['/opt/novell/mono/bin']
- if sys.platform == 'darwin':
- hint_dirs = ['/Library/Frameworks/Mono.framework/Versions/Current/bin', '/usr/local/var/homebrew/linked/mono/bin'] + hint_dirs
+ hint_dirs = ["/opt/novell/mono/bin"]
+ if sys.platform == "darwin":
+ hint_dirs = [
+ "/Library/Frameworks/Mono.framework/Versions/Current/bin",
+ "/usr/local/var/homebrew/linked/mono/bin",
+ ] + hint_dirs
for hint_dir in hint_dirs:
hint_path = os.path.join(hint_dir, filename)
if os.path.isfile(hint_path):
return hint_path
- elif os.path.isfile(hint_path + '.exe'):
- return hint_path + '.exe'
+ elif os.path.isfile(hint_path + ".exe"):
+ return hint_path + ".exe"
- for hint_dir in os.environ['PATH'].split(os.pathsep):
+ for hint_dir in os.environ["PATH"].split(os.pathsep):
hint_dir = hint_dir.strip('"')
hint_path = os.path.join(hint_dir, filename)
if os.path.isfile(hint_path) and os.access(hint_path, os.X_OK):
return hint_path
- if os.path.isfile(hint_path + '.exe') and os.access(hint_path + '.exe', os.X_OK):
- return hint_path + '.exe'
+ if os.path.isfile(hint_path + ".exe") and os.access(hint_path + ".exe", os.X_OK):
+ return hint_path + ".exe"
return None
def find_msbuild_windows(env):
- from . mono_reg_utils import find_mono_root_dir, find_msbuild_tools_path_reg
+ from .mono_reg_utils import find_mono_root_dir, find_msbuild_tools_path_reg
- mono_root = env['mono_prefix'] or find_mono_root_dir(env['bits'])
+ mono_root = env["mono_prefix"] or find_mono_root_dir(env["bits"])
if not mono_root:
- raise RuntimeError('Cannot find mono root directory')
+ raise RuntimeError("Cannot find mono root directory")
- mono_bin_dir = os.path.join(mono_root, 'bin')
- msbuild_mono = os.path.join(mono_bin_dir, 'msbuild.bat')
+ mono_bin_dir = os.path.join(mono_root, "bin")
+ msbuild_mono = os.path.join(mono_bin_dir, "msbuild.bat")
msbuild_tools_path = find_msbuild_tools_path_reg()
if msbuild_tools_path:
- return (os.path.join(msbuild_tools_path, 'MSBuild.exe'), {})
+ return (os.path.join(msbuild_tools_path, "MSBuild.exe"), {})
if os.path.isfile(msbuild_mono):
# The (Csc/Vbc/Fsc)ToolExe environment variables are required when
# building with Mono's MSBuild. They must point to the batch files
# in Mono's bin directory to make sure they are executed with Mono.
mono_msbuild_env = {
- 'CscToolExe': os.path.join(mono_bin_dir, 'csc.bat'),
- 'VbcToolExe': os.path.join(mono_bin_dir, 'vbc.bat'),
- 'FscToolExe': os.path.join(mono_bin_dir, 'fsharpc.bat')
+ "CscToolExe": os.path.join(mono_bin_dir, "csc.bat"),
+ "VbcToolExe": os.path.join(mono_bin_dir, "vbc.bat"),
+ "FscToolExe": os.path.join(mono_bin_dir, "fsharpc.bat"),
}
return (msbuild_mono, mono_msbuild_env)
@@ -132,7 +137,7 @@ def find_msbuild_windows(env):
def run_command(command, args, env_override=None, name=None):
def cmd_args_to_str(cmd_args):
- return ' '.join([arg if not ' ' in arg else '"%s"' % arg for arg in cmd_args])
+ return " ".join([arg if not " " in arg else '"%s"' % arg for arg in cmd_args])
args = [command] + args
@@ -143,6 +148,7 @@ def run_command(command, args, env_override=None, name=None):
print("Running '%s': %s" % (name, cmd_args_to_str(args)))
import subprocess
+
try:
if env_override is None:
subprocess.check_call(args)
@@ -154,61 +160,61 @@ def run_command(command, args, env_override=None, name=None):
def nuget_restore(env, *args):
global verbose
- verbose = env['verbose']
+ verbose = env["verbose"]
# Find NuGet
- nuget_path = find_nuget_windows(env) if os.name == 'nt' else find_nuget_unix()
+ nuget_path = find_nuget_windows(env) if os.name == "nt" else find_nuget_unix()
if nuget_path is None:
- raise RuntimeError('Cannot find NuGet executable')
+ raise RuntimeError("Cannot find NuGet executable")
- print('NuGet path: ' + nuget_path)
+ print("NuGet path: " + nuget_path)
# Do NuGet restore
- run_command(nuget_path, ['restore'] + list(args), name='nuget restore')
+ run_command(nuget_path, ["restore"] + list(args), name="nuget restore")
def build_solution(env, solution_path, build_config, extra_msbuild_args=[]):
global verbose
- verbose = env['verbose']
+ verbose = env["verbose"]
msbuild_env = os.environ.copy()
# Needed when running from Developer Command Prompt for VS
- if 'PLATFORM' in msbuild_env:
- del msbuild_env['PLATFORM']
+ if "PLATFORM" in msbuild_env:
+ del msbuild_env["PLATFORM"]
# Find MSBuild
- if os.name == 'nt':
+ if os.name == "nt":
msbuild_info = find_msbuild_windows(env)
if msbuild_info is None:
- raise RuntimeError('Cannot find MSBuild executable')
+ raise RuntimeError("Cannot find MSBuild executable")
msbuild_path = msbuild_info[0]
msbuild_env.update(msbuild_info[1])
else:
- msbuild_path = find_msbuild_unix('msbuild')
+ msbuild_path = find_msbuild_unix("msbuild")
if msbuild_path is None:
- xbuild_fallback = env['xbuild_fallback']
+ xbuild_fallback = env["xbuild_fallback"]
- if xbuild_fallback and os.name == 'nt':
- print('Option \'xbuild_fallback\' not supported on Windows')
+ if xbuild_fallback and os.name == "nt":
+ print("Option 'xbuild_fallback' not supported on Windows")
xbuild_fallback = False
if xbuild_fallback:
- print('Cannot find MSBuild executable, trying with xbuild')
- print('Warning: xbuild is deprecated')
+ print("Cannot find MSBuild executable, trying with xbuild")
+ print("Warning: xbuild is deprecated")
- msbuild_path = find_msbuild_unix('xbuild')
+ msbuild_path = find_msbuild_unix("xbuild")
if msbuild_path is None:
- raise RuntimeError('Cannot find xbuild executable')
+ raise RuntimeError("Cannot find xbuild executable")
else:
- raise RuntimeError('Cannot find MSBuild executable')
+ raise RuntimeError("Cannot find MSBuild executable")
- print('MSBuild path: ' + msbuild_path)
+ print("MSBuild path: " + msbuild_path)
# Build solution
- msbuild_args = [solution_path, '/p:Configuration=' + build_config]
+ msbuild_args = [solution_path, "/p:Configuration=" + build_config]
msbuild_args += extra_msbuild_args
- run_command(msbuild_path, msbuild_args, env_override=msbuild_env, name='msbuild')
+ run_command(msbuild_path, msbuild_args, env_override=msbuild_env, name="msbuild")
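One detail worth spelling out in build_solution() above: the env_override dictionary handed to run_command() is passed as the full child-process environment, which is why the function starts from os.environ.copy() and only layers the Mono MSBuild tool overrides (CscToolExe and friends, per the comment in find_msbuild_windows) on top. A hedged usage sketch of the same mechanism with plain subprocess; the solution name and the csc.bat path are purely illustrative:

    import os
    import subprocess

    msbuild_env = os.environ.copy()
    # Illustrative override: make MSBuild invoke the C# compiler through Mono's wrapper.
    msbuild_env["CscToolExe"] = r"C:\Program Files\Mono\bin\csc.bat"
    subprocess.check_call(
        ["msbuild", "MySolution.sln", "/p:Configuration=Release"],
        env=msbuild_env,
    )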
diff --git a/modules/mono/config.py b/modules/mono/config.py
index 3ee16183669..2ea8a5247d5 100644
--- a/modules/mono/config.py
+++ b/modules/mono/config.py
@@ -3,41 +3,52 @@ def can_build(env, platform):
def configure(env):
- if env['platform'] not in ['windows', 'osx', 'linuxbsd', 'server', 'android', 'haiku', 'javascript']:
- raise RuntimeError('This module does not currently support building for this platform')
+ if env["platform"] not in ["windows", "osx", "linuxbsd", "server", "android", "haiku", "javascript"]:
+ raise RuntimeError("This module does not currently support building for this platform")
env.use_ptrcall = True
- env.add_module_version_string('mono')
+ env.add_module_version_string("mono")
from SCons.Script import BoolVariable, PathVariable, Variables, Help
envvars = Variables()
- envvars.Add(PathVariable('mono_prefix', 'Path to the mono installation directory for the target platform and architecture', '', PathVariable.PathAccept))
- envvars.Add(BoolVariable('mono_static', 'Statically link mono', False))
- envvars.Add(BoolVariable('mono_glue', 'Build with the mono glue sources', True))
- envvars.Add(BoolVariable('copy_mono_root', 'Make a copy of the mono installation directory to bundle with the editor', False))
- envvars.Add(BoolVariable('xbuild_fallback', 'If MSBuild is not found, fallback to xbuild', False))
+ envvars.Add(
+ PathVariable(
+ "mono_prefix",
+ "Path to the mono installation directory for the target platform and architecture",
+ "",
+ PathVariable.PathAccept,
+ )
+ )
+ envvars.Add(BoolVariable("mono_static", "Statically link mono", False))
+ envvars.Add(BoolVariable("mono_glue", "Build with the mono glue sources", True))
+ envvars.Add(
+ BoolVariable(
+ "copy_mono_root", "Make a copy of the mono installation directory to bundle with the editor", False
+ )
+ )
+ envvars.Add(BoolVariable("xbuild_fallback", "If MSBuild is not found, fallback to xbuild", False))
envvars.Update(env)
Help(envvars.GenerateHelpText(env))
- if env['platform'] == 'javascript':
+ if env["platform"] == "javascript":
# Mono wasm already has zlib builtin, so we need this workaround to avoid symbol collisions
- print('Compiling with Mono wasm disables \'builtin_zlib\'')
- env['builtin_zlib'] = False
+ print("Compiling with Mono wasm disables 'builtin_zlib'")
+ env["builtin_zlib"] = False
thirdparty_zlib_dir = "#thirdparty/zlib/"
env.Prepend(CPPPATH=[thirdparty_zlib_dir])
def get_doc_classes():
return [
- '@C#',
- 'CSharpScript',
- 'GodotSharp',
+ "@C#",
+ "CSharpScript",
+ "GodotSharp",
]
def get_doc_path():
- return 'doc_classes'
+ return "doc_classes"
def is_enabled():
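The option block above is the standard SCons Variables pattern: each BoolVariable/PathVariable added to envvars becomes a plain entry on the environment once Update(env) has run, which is what lets later code read env["mono_static"] or env["mono_prefix"] directly. A condensed sketch of that mechanism, meant to run inside configure(env) during the SCons build rather than standalone:

    from SCons.Script import BoolVariable, Variables, Help

    def configure(env):
        envvars = Variables()
        envvars.Add(BoolVariable("mono_static", "Statically link mono", False))
        envvars.Update(env)  # env["mono_static"] is now a real bool
        Help(envvars.GenerateHelpText(env))
        if env["mono_static"]:
            env.Append(LINKFLAGS=["-rdynamic"])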
diff --git a/modules/ogg/SCsub b/modules/ogg/SCsub
index 44c7963cd34..e768fb4ae84 100644
--- a/modules/ogg/SCsub
+++ b/modules/ogg/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
# Only kept to build the thirdparty library used by the theora and webm
# modules.
@@ -9,7 +9,7 @@ Import('env_modules')
env_ogg = env_modules.Clone()
# Thirdparty source files
-if env['builtin_libogg']:
+if env["builtin_libogg"]:
thirdparty_dir = "#thirdparty/libogg/"
thirdparty_sources = [
"bitwise.c",
diff --git a/modules/ogg/config.py b/modules/ogg/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/ogg/config.py
+++ b/modules/ogg/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/opensimplex/SCsub b/modules/opensimplex/SCsub
index 311d33b0476..52d8b145ef3 100644
--- a/modules/opensimplex/SCsub
+++ b/modules/opensimplex/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_opensimplex = env_modules.Clone()
diff --git a/modules/opensimplex/config.py b/modules/opensimplex/config.py
index c1010ad4338..90b85dbd70d 100644
--- a/modules/opensimplex/config.py
+++ b/modules/opensimplex/config.py
@@ -1,14 +1,17 @@
def can_build(env, platform):
- return True
+ return True
+
def configure(env):
- pass
+ pass
+
def get_doc_classes():
return [
"NoiseTexture",
- "OpenSimplexNoise"
+ "OpenSimplexNoise",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/opus/SCsub b/modules/opus/SCsub
index fec2911d6dc..e51590d8083 100644
--- a/modules/opus/SCsub
+++ b/modules/opus/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
# Only kept to build the thirdparty library used by the webm module.
# AudioStreamOpus was dropped in 3.0 due to incompatibility with the new audio
@@ -10,11 +10,10 @@ Import('env_modules')
env_opus = env_modules.Clone()
# Thirdparty source files
-if env['builtin_opus']:
+if env["builtin_opus"]:
thirdparty_dir = "#thirdparty/opus/"
thirdparty_sources = [
-
# Sync with opus_sources.mk
"opus.c",
"opus_decoder.c",
@@ -23,17 +22,14 @@ if env['builtin_opus']:
"opus_multistream_encoder.c",
"opus_multistream_decoder.c",
"repacketizer.c",
-
"analysis.c",
"mlp.c",
"mlp_data.c",
-
# Sync with libopusfile Makefile.am
"info.c",
"internal.c",
"opusfile.c",
"stream.c",
-
# Sync with celt_sources.mk
"celt/bands.c",
"celt/celt.c",
@@ -53,12 +49,11 @@ if env['builtin_opus']:
"celt/quant_bands.c",
"celt/rate.c",
"celt/vq.c",
- #"celt/arm/arm_celt_map.c",
- #"celt/arm/armcpu.c",
- #"celt/arm/celt_ne10_fft.c",
- #"celt/arm/celt_ne10_mdct.c",
- #"celt/arm/celt_neon_intr.c",
-
+ # "celt/arm/arm_celt_map.c",
+ # "celt/arm/armcpu.c",
+ # "celt/arm/celt_ne10_fft.c",
+ # "celt/arm/celt_ne10_mdct.c",
+ # "celt/arm/celt_neon_intr.c",
# Sync with silk_sources.mk
"silk/CNG.c",
"silk/code_signs.c",
@@ -207,7 +202,7 @@ if env['builtin_opus']:
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources + opus_sources_silk]
# also requires libogg
- if env['builtin_libogg']:
+ if env["builtin_libogg"]:
env_opus.Prepend(CPPPATH=["#thirdparty/libogg"])
env_opus.Append(CPPDEFINES=["HAVE_CONFIG_H"])
@@ -223,14 +218,14 @@ if env['builtin_opus']:
env_opus.Prepend(CPPPATH=[thirdparty_dir + "/" + dir for dir in thirdparty_include_paths])
if env["platform"] == "android":
- if ("android_arch" in env and env["android_arch"] == "armv7"):
+ if "android_arch" in env and env["android_arch"] == "armv7":
env_opus.Append(CPPDEFINES=["OPUS_ARM_OPT"])
- elif ("android_arch" in env and env["android_arch"] == "arm64v8"):
+ elif "android_arch" in env and env["android_arch"] == "arm64v8":
env_opus.Append(CPPDEFINES=["OPUS_ARM64_OPT"])
elif env["platform"] == "iphone":
- if ("arch" in env and env["arch"] == "arm"):
+ if "arch" in env and env["arch"] == "arm":
env_opus.Append(CPPDEFINES=["OPUS_ARM_OPT"])
- elif ("arch" in env and env["arch"] == "arm64"):
+ elif "arch" in env and env["arch"] == "arm64":
env_opus.Append(CPPDEFINES=["OPUS_ARM64_OPT"])
env_thirdparty = env_opus.Clone()
diff --git a/modules/opus/config.py b/modules/opus/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/opus/config.py
+++ b/modules/opus/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/pvr/SCsub b/modules/pvr/SCsub
index 18da38fbbd1..e0baf851f18 100644
--- a/modules/pvr/SCsub
+++ b/modules/pvr/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_pvr = env_modules.Clone()
diff --git a/modules/pvr/config.py b/modules/pvr/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/pvr/config.py
+++ b/modules/pvr/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/regex/SCsub b/modules/regex/SCsub
index 6238cd3d9fc..753650adcb3 100644
--- a/modules/regex/SCsub
+++ b/modules/regex/SCsub
@@ -1,16 +1,16 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_regex = env_modules.Clone()
-if env['builtin_pcre2']:
- thirdparty_dir = '#thirdparty/pcre2/src/'
- thirdparty_flags = ['PCRE2_STATIC', 'HAVE_CONFIG_H']
+if env["builtin_pcre2"]:
+ thirdparty_dir = "#thirdparty/pcre2/src/"
+ thirdparty_flags = ["PCRE2_STATIC", "HAVE_CONFIG_H"]
- if env['builtin_pcre2_with_jit']:
- thirdparty_flags.append('SUPPORT_JIT')
+ if env["builtin_pcre2_with_jit"]:
+ thirdparty_flags.append("SUPPORT_JIT")
thirdparty_sources = [
"pcre2_auto_possess.c",
@@ -24,7 +24,7 @@ if env['builtin_pcre2']:
"pcre2_extuni.c",
"pcre2_find_bracket.c",
"pcre2_jit_compile.c",
- #"pcre2_jit_match.c", "pcre2_jit_misc.c", # these files are included in pcre2_jit_compile.c.
+ # "pcre2_jit_match.c", "pcre2_jit_misc.c", # these files are included in pcre2_jit_compile.c.
"pcre2_maketables.c",
"pcre2_match.c",
"pcre2_match_data.c",
diff --git a/modules/regex/config.py b/modules/regex/config.py
index 42cfe3b43c1..df9f44cb952 100644
--- a/modules/regex/config.py
+++ b/modules/regex/config.py
@@ -1,14 +1,17 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"RegEx",
"RegExMatch",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/squish/SCsub b/modules/squish/SCsub
index 15320bcd0cf..b31032403f2 100644
--- a/modules/squish/SCsub
+++ b/modules/squish/SCsub
@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_squish = env_modules.Clone()
# Thirdparty source files
-if env['builtin_squish']:
+if env["builtin_squish"]:
thirdparty_dir = "#thirdparty/squish/"
thirdparty_sources = [
"alpha.cpp",
diff --git a/modules/squish/config.py b/modules/squish/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/squish/config.py
+++ b/modules/squish/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/stb_vorbis/SCsub b/modules/stb_vorbis/SCsub
index d14939a3b1a..266c87c8024 100644
--- a/modules/stb_vorbis/SCsub
+++ b/modules/stb_vorbis/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_stb_vorbis = env_modules.Clone()
diff --git a/modules/stb_vorbis/config.py b/modules/stb_vorbis/config.py
index 200b8dfd507..1eb0a8cf331 100644
--- a/modules/stb_vorbis/config.py
+++ b/modules/stb_vorbis/config.py
@@ -1,13 +1,16 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"AudioStreamOGGVorbis",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/svg/SCsub b/modules/svg/SCsub
index 7961d1f33e7..0bfba34fe5b 100644
--- a/modules/svg/SCsub
+++ b/modules/svg/SCsub
@@ -1,14 +1,14 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_svg = env_modules.Clone()
# Thirdparty source files
thirdparty_dir = "#thirdparty/nanosvg/"
thirdparty_sources = [
- "nanosvg.cc"
+ "nanosvg.cc",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
diff --git a/modules/svg/config.py b/modules/svg/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/svg/config.py
+++ b/modules/svg/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/tga/SCsub b/modules/tga/SCsub
index 7e405f405cb..067caa6ea02 100644
--- a/modules/tga/SCsub
+++ b/modules/tga/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_tga = env_modules.Clone()
diff --git a/modules/tga/config.py b/modules/tga/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/tga/config.py
+++ b/modules/tga/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/theora/SCsub b/modules/theora/SCsub
index ff65d2f8ece..a01e65b4b07 100644
--- a/modules/theora/SCsub
+++ b/modules/theora/SCsub
@@ -1,71 +1,71 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_theora = env_modules.Clone()
# Thirdparty source files
-if env['builtin_libtheora']:
+if env["builtin_libtheora"]:
thirdparty_dir = "#thirdparty/libtheora/"
thirdparty_sources = [
- #"analyze.c",
- #"apiwrapper.c",
+ # "analyze.c",
+ # "apiwrapper.c",
"bitpack.c",
"cpu.c",
- #"decapiwrapper.c",
+ # "decapiwrapper.c",
"decinfo.c",
"decode.c",
"dequant.c",
- #"encapiwrapper.c",
- #"encfrag.c",
- #"encinfo.c",
- #"encode.c",
- #"encoder_disabled.c",
- #"enquant.c",
- #"fdct.c",
+ # "encapiwrapper.c",
+ # "encfrag.c",
+ # "encinfo.c",
+ # "encode.c",
+ # "encoder_disabled.c",
+ # "enquant.c",
+ # "fdct.c",
"fragment.c",
"huffdec.c",
- #"huffenc.c",
+ # "huffenc.c",
"idct.c",
"info.c",
"internal.c",
- #"mathops.c",
- #"mcenc.c",
+ # "mathops.c",
+ # "mcenc.c",
"quant.c",
- #"rate.c",
+ # "rate.c",
"state.c",
- #"tokenize.c",
+ # "tokenize.c",
]
thirdparty_sources_x86 = [
- #"x86/mmxencfrag.c",
- #"x86/mmxfdct.c",
+ # "x86/mmxencfrag.c",
+ # "x86/mmxfdct.c",
"x86/mmxfrag.c",
"x86/mmxidct.c",
"x86/mmxstate.c",
- #"x86/sse2fdct.c",
- #"x86/x86enc.c",
+ # "x86/sse2fdct.c",
+ # "x86/x86enc.c",
"x86/x86state.c",
]
thirdparty_sources_x86_vc = [
- #"x86_vc/mmxencfrag.c",
- #"x86_vc/mmxfdct.c",
+ # "x86_vc/mmxencfrag.c",
+ # "x86_vc/mmxfdct.c",
"x86_vc/mmxfrag.c",
"x86_vc/mmxidct.c",
"x86_vc/mmxstate.c",
- #"x86_vc/x86enc.c",
+ # "x86_vc/x86enc.c",
"x86_vc/x86state.c",
]
- if (env["x86_libtheora_opt_gcc"]):
+ if env["x86_libtheora_opt_gcc"]:
thirdparty_sources += thirdparty_sources_x86
- if (env["x86_libtheora_opt_vc"]):
+ if env["x86_libtheora_opt_vc"]:
thirdparty_sources += thirdparty_sources_x86_vc
- if (env["x86_libtheora_opt_gcc"] or env["x86_libtheora_opt_vc"]):
+ if env["x86_libtheora_opt_gcc"] or env["x86_libtheora_opt_vc"]:
env_theora.Append(CPPDEFINES=["OC_X86_ASM"])
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
@@ -73,9 +73,9 @@ if env['builtin_libtheora']:
env_theora.Prepend(CPPPATH=[thirdparty_dir])
# also requires libogg and libvorbis
- if env['builtin_libogg']:
+ if env["builtin_libogg"]:
env_theora.Prepend(CPPPATH=["#thirdparty/libogg"])
- if env['builtin_libvorbis']:
+ if env["builtin_libvorbis"]:
env_theora.Prepend(CPPPATH=["#thirdparty/libvorbis"])
env_thirdparty = env_theora.Clone()
diff --git a/modules/theora/config.py b/modules/theora/config.py
index c7713d76079..413acce2df8 100644
--- a/modules/theora/config.py
+++ b/modules/theora/config.py
@@ -1,13 +1,16 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"VideoStreamTheora",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/tinyexr/SCsub b/modules/tinyexr/SCsub
index 97f9797b58d..e7fd44fabd0 100644
--- a/modules/tinyexr/SCsub
+++ b/modules/tinyexr/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_tinyexr = env_modules.Clone()
diff --git a/modules/tinyexr/config.py b/modules/tinyexr/config.py
index 098f1eafa9e..53b8f2f2e36 100644
--- a/modules/tinyexr/config.py
+++ b/modules/tinyexr/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return env['tools']
+ return env["tools"]
+
def configure(env):
pass
diff --git a/modules/upnp/SCsub b/modules/upnp/SCsub
index 3f56a69594c..2e129e15ca8 100644
--- a/modules/upnp/SCsub
+++ b/modules/upnp/SCsub
@@ -1,13 +1,13 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_upnp = env_modules.Clone()
# Thirdparty source files
-if env['builtin_miniupnpc']:
+if env["builtin_miniupnpc"]:
thirdparty_dir = "#thirdparty/miniupnpc/"
thirdparty_sources = [
"miniupnpc.c",
diff --git a/modules/upnp/config.py b/modules/upnp/config.py
index 8724ff1a511..3e15b940a18 100644
--- a/modules/upnp/config.py
+++ b/modules/upnp/config.py
@@ -1,14 +1,17 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"UPNP",
- "UPNPDevice"
+ "UPNPDevice",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/vhacd/SCsub b/modules/vhacd/SCsub
index 685976dc33c..ecd432b275e 100644
--- a/modules/vhacd/SCsub
+++ b/modules/vhacd/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_vhacd = env_modules.Clone()
@@ -19,7 +19,7 @@ thirdparty_sources = [
"src/btAlignedAllocator.cpp",
"src/vhacdRaycastMesh.cpp",
"src/VHACD.cpp",
- "src/btConvexHullComputer.cpp"
+ "src/btConvexHullComputer.cpp",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
diff --git a/modules/vhacd/config.py b/modules/vhacd/config.py
index 9ced70d2fba..d22f9454ed2 100644
--- a/modules/vhacd/config.py
+++ b/modules/vhacd/config.py
@@ -1,6 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
-
diff --git a/modules/visual_script/SCsub b/modules/visual_script/SCsub
index 3c3d2caa578..16faea08d77 100644
--- a/modules/visual_script/SCsub
+++ b/modules/visual_script/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_vs = env_modules.Clone()
diff --git a/modules/visual_script/config.py b/modules/visual_script/config.py
index 087a13a2004..bd459ca3449 100644
--- a/modules/visual_script/config.py
+++ b/modules/visual_script/config.py
@@ -1,9 +1,11 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"@VisualScript",
@@ -56,5 +58,6 @@ def get_doc_classes():
"VisualScriptYield",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/vorbis/SCsub b/modules/vorbis/SCsub
index bde4359595f..05d46757d3f 100644
--- a/modules/vorbis/SCsub
+++ b/modules/vorbis/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
# Only kept to build the thirdparty library used by the theora and webm
# modules. We now use stb_vorbis for AudioStreamOGGVorbis.
@@ -11,11 +11,11 @@ env_vorbis = env_modules.Clone()
stub = True
# Thirdparty source files
-if env['builtin_libvorbis']:
+if env["builtin_libvorbis"]:
thirdparty_dir = "#thirdparty/libvorbis/"
thirdparty_sources = [
- #"analysis.c",
- #"barkmel.c",
+ # "analysis.c",
+ # "barkmel.c",
"bitrate.c",
"block.c",
"codebook.c",
@@ -29,14 +29,14 @@ if env['builtin_libvorbis']:
"mapping0.c",
"mdct.c",
"psy.c",
- #"psytune.c",
+ # "psytune.c",
"registry.c",
"res0.c",
"sharedbook.c",
"smallft.c",
"synthesis.c",
- #"tone.c",
- #"vorbisenc.c",
+ # "tone.c",
+ # "vorbisenc.c",
"vorbisfile.c",
"window.c",
]
@@ -46,7 +46,7 @@ if env['builtin_libvorbis']:
env_vorbis.Prepend(CPPPATH=[thirdparty_dir])
# also requires libogg
- if env['builtin_libogg']:
+ if env["builtin_libogg"]:
env_vorbis.Prepend(CPPPATH=["#thirdparty/libogg"])
env_thirdparty = env_vorbis.Clone()
diff --git a/modules/vorbis/config.py b/modules/vorbis/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/vorbis/config.py
+++ b/modules/vorbis/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/webm/SCsub b/modules/webm/SCsub
index 32e67276568..247b4ead37c 100644
--- a/modules/webm/SCsub
+++ b/modules/webm/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_webm = env_modules.Clone()
@@ -18,14 +18,14 @@ thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
env_webm.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "libwebm/"])
# also requires libogg, libvorbis and libopus
-if env['builtin_libogg']:
+if env["builtin_libogg"]:
env_webm.Prepend(CPPPATH=["#thirdparty/libogg"])
-if env['builtin_libvorbis']:
+if env["builtin_libvorbis"]:
env_webm.Prepend(CPPPATH=["#thirdparty/libvorbis"])
-if env['builtin_opus']:
+if env["builtin_opus"]:
env_webm.Prepend(CPPPATH=["#thirdparty/opus"])
-if env['builtin_libvpx']:
+if env["builtin_libvpx"]:
env_webm.Prepend(CPPPATH=["#thirdparty/libvpx"])
SConscript("libvpx/SCsub")
diff --git a/modules/webm/config.py b/modules/webm/config.py
index ba4dcce2f59..93b49d177ad 100644
--- a/modules/webm/config.py
+++ b/modules/webm/config.py
@@ -1,13 +1,16 @@
def can_build(env, platform):
- return platform not in ['iphone']
+ return platform not in ["iphone"]
+
def configure(env):
pass
+
def get_doc_classes():
return [
"VideoStreamWebm",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/webm/libvpx/SCsub b/modules/webm/libvpx/SCsub
index 6aed8a16f55..dd6866ad0e5 100644
--- a/modules/webm/libvpx/SCsub
+++ b/modules/webm/libvpx/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
# Thirdparty sources
@@ -9,9 +9,7 @@ libvpx_dir = "#thirdparty/libvpx/"
libvpx_sources = [
"vp8/vp8_dx_iface.c",
-
"vp8/common/generic/systemdependent.c",
-
"vp8/common/alloccommon.c",
"vp8/common/blockd.c",
"vp8/common/copy_c.c",
@@ -37,16 +35,12 @@ libvpx_sources = [
"vp8/common/swapyv12buffer.c",
"vp8/common/treecoder.c",
"vp8/common/vp8_loopfilter.c",
-
"vp8/decoder/dboolhuff.c",
"vp8/decoder/decodeframe.c",
"vp8/decoder/decodemv.c",
"vp8/decoder/detokenize.c",
"vp8/decoder/onyxd_if.c",
-
-
"vp9/vp9_dx_iface.c",
-
"vp9/common/vp9_alloccommon.c",
"vp9/common/vp9_blockd.c",
"vp9/common/vp9_common_data.c",
@@ -69,21 +63,16 @@ libvpx_sources = [
"vp9/common/vp9_seg_common.c",
"vp9/common/vp9_thread_common.c",
"vp9/common/vp9_tile_common.c",
-
"vp9/decoder/vp9_decodeframe.c",
"vp9/decoder/vp9_decodemv.c",
"vp9/decoder/vp9_decoder.c",
"vp9/decoder/vp9_detokenize.c",
"vp9/decoder/vp9_dsubexp.c",
"vp9/decoder/vp9_dthread.c",
-
-
"vpx/src/vpx_codec.c",
"vpx/src/vpx_decoder.c",
"vpx/src/vpx_image.c",
"vpx/src/vpx_psnr.c",
-
-
"vpx_dsp/bitreader.c",
"vpx_dsp/bitreader_buffer.c",
"vpx_dsp/intrapred.c",
@@ -92,18 +81,11 @@ libvpx_sources = [
"vpx_dsp/prob.c",
"vpx_dsp/vpx_convolve.c",
"vpx_dsp/vpx_dsp_rtcd.c",
-
-
"vpx_mem/vpx_mem.c",
-
-
"vpx_scale/vpx_scale_rtcd.c",
-
"vpx_scale/generic/yv12config.c",
"vpx_scale/generic/yv12extend.c",
-
-
- "vpx_util/vpx_thread.c"
+ "vpx_util/vpx_thread.c",
]
libvpx_sources_mt = [
@@ -114,29 +96,23 @@ libvpx_sources_intrin_x86 = [
"vp8/common/x86/filter_x86.c",
"vp8/common/x86/loopfilter_x86.c",
"vp8/common/x86/vp8_asm_stubs.c",
-
-
- "vpx_dsp/x86/vpx_asm_stubs.c"
+ "vpx_dsp/x86/vpx_asm_stubs.c",
]
libvpx_sources_intrin_x86_mmx = [
"vp8/common/x86/idct_blk_mmx.c",
]
libvpx_sources_intrin_x86_sse2 = [
"vp8/common/x86/idct_blk_sse2.c",
-
-
"vp9/common/x86/vp9_idct_intrin_sse2.c",
-
-
"vpx_dsp/x86/inv_txfm_sse2.c",
"vpx_dsp/x86/loopfilter_sse2.c",
]
libvpx_sources_intrin_x86_ssse3 = [
- "vpx_dsp/x86/vpx_subpixel_8t_intrin_ssse3.c"
+ "vpx_dsp/x86/vpx_subpixel_8t_intrin_ssse3.c",
]
libvpx_sources_intrin_x86_avx2 = [
"vpx_dsp/x86/loopfilter_avx2.c",
- "vpx_dsp/x86/vpx_subpixel_8t_intrin_avx2.c"
+ "vpx_dsp/x86/vpx_subpixel_8t_intrin_avx2.c",
]
libvpx_sources_x86asm = [
"vp8/common/x86/copy_sse2.asm",
@@ -153,8 +129,6 @@ libvpx_sources_x86asm = [
"vp8/common/x86/subpixel_sse2.asm",
"vp8/common/x86/subpixel_ssse3.asm",
"vp8/common/x86/vp8_loopfilter_mmx.asm",
-
-
"vpx_dsp/x86/intrapred_sse2.asm",
"vpx_dsp/x86/intrapred_ssse3.asm",
"vpx_dsp/x86/inv_wht_sse2.asm",
@@ -163,21 +137,15 @@ libvpx_sources_x86asm = [
"vpx_dsp/x86/vpx_subpixel_8t_ssse3.asm",
"vpx_dsp/x86/vpx_subpixel_bilinear_sse2.asm",
"vpx_dsp/x86/vpx_subpixel_bilinear_ssse3.asm",
-
-
- "vpx_ports/emms.asm"
+ "vpx_ports/emms.asm",
]
libvpx_sources_x86_64asm = [
"vp8/common/x86/loopfilter_block_sse2_x86_64.asm",
-
-
- "vpx_dsp/x86/inv_txfm_ssse3_x86_64.asm"
+ "vpx_dsp/x86/inv_txfm_ssse3_x86_64.asm",
]
libvpx_sources_arm = [
"vpx_ports/arm_cpudetect.c",
-
-
"vp8/common/arm/loopfilter_arm.c",
]
libvpx_sources_arm_neon = [
@@ -196,12 +164,8 @@ libvpx_sources_arm_neon = [
"vp8/common/arm/neon/shortidct4x4llm_neon.c",
"vp8/common/arm/neon/sixtappredict_neon.c",
"vp8/common/arm/neon/vp8_loopfilter_neon.c",
-
-
"vp9/common/arm/neon/vp9_iht4x4_add_neon.c",
"vp9/common/arm/neon/vp9_iht8x8_add_neon.c",
-
-
"vpx_dsp/arm/idct16x16_1_add_neon.c",
"vpx_dsp/arm/idct16x16_add_neon.c",
"vpx_dsp/arm/idct16x16_neon.c",
@@ -220,22 +184,22 @@ libvpx_sources_arm_neon = [
"vpx_dsp/arm/vpx_convolve8_neon.c",
"vpx_dsp/arm/vpx_convolve_avg_neon.c",
"vpx_dsp/arm/vpx_convolve_copy_neon.c",
- "vpx_dsp/arm/vpx_convolve_neon.c"
+ "vpx_dsp/arm/vpx_convolve_neon.c",
]
libvpx_sources_arm_neon_gas = [
"vpx_dsp/arm/gas/intrapred_neon_asm.s",
"vpx_dsp/arm/gas/loopfilter_mb_neon.s",
- "vpx_dsp/arm/gas/save_reg_neon.s"
+ "vpx_dsp/arm/gas/save_reg_neon.s",
]
libvpx_sources_arm_neon_armasm_ms = [
"vpx_dsp/arm/armasm_ms/intrapred_neon_asm.asm",
"vpx_dsp/arm/armasm_ms/loopfilter_mb_neon.asm",
- "vpx_dsp/arm/armasm_ms/save_reg_neon.asm"
+ "vpx_dsp/arm/armasm_ms/save_reg_neon.asm",
]
libvpx_sources_arm_neon_gas_apple = [
"vpx_dsp/arm/gas_apple/intrapred_neon_asm.s",
"vpx_dsp/arm/gas_apple/loopfilter_mb_neon.s",
- "vpx_dsp/arm/gas_apple/save_reg_neon.s"
+ "vpx_dsp/arm/gas_apple/save_reg_neon.s",
]
libvpx_sources = [libvpx_dir + file for file in libvpx_sources]
@@ -258,25 +222,43 @@ env_libvpx = env_modules.Clone()
env_libvpx.disable_warnings()
env_libvpx.Prepend(CPPPATH=[libvpx_dir])
-webm_multithread = env["platform"] != 'javascript'
+webm_multithread = env["platform"] != "javascript"
cpu_bits = env["bits"]
webm_cpu_x86 = False
webm_cpu_arm = False
-if env["platform"] == 'uwp':
- if 'arm' in env["PROGSUFFIX"]:
+if env["platform"] == "uwp":
+ if "arm" in env["PROGSUFFIX"]:
webm_cpu_arm = True
else:
webm_cpu_x86 = True
else:
import platform
- is_x11_or_server_arm = ((env["platform"] == 'linuxbsd' or env["platform"] == 'server') and (platform.machine().startswith('arm') or platform.machine().startswith('aarch')))
- is_ios_x86 = (env["platform"] == 'iphone' and ("arch" in env and env["arch"].startswith('x86')))
- is_android_x86 = (env["platform"] == 'android' and env["android_arch"].startswith('x86'))
+
+ is_x11_or_server_arm = (env["platform"] == "linuxbsd" or env["platform"] == "server") and (
+ platform.machine().startswith("arm") or platform.machine().startswith("aarch")
+ )
+ is_ios_x86 = env["platform"] == "iphone" and ("arch" in env and env["arch"].startswith("x86"))
+ is_android_x86 = env["platform"] == "android" and env["android_arch"].startswith("x86")
if is_android_x86:
- cpu_bits = '32' if env["android_arch"] == 'x86' else '64'
- webm_cpu_x86 = not is_x11_or_server_arm and (cpu_bits == '32' or cpu_bits == '64') and (env["platform"] == 'windows' or env["platform"] == 'linuxbsd' or env["platform"] == 'osx' or env["platform"] == 'haiku' or is_android_x86 or is_ios_x86)
- webm_cpu_arm = is_x11_or_server_arm or (not is_ios_x86 and env["platform"] == 'iphone') or (not is_android_x86 and env["platform"] == 'android')
+ cpu_bits = "32" if env["android_arch"] == "x86" else "64"
+ webm_cpu_x86 = (
+ not is_x11_or_server_arm
+ and (cpu_bits == "32" or cpu_bits == "64")
+ and (
+ env["platform"] == "windows"
+ or env["platform"] == "linuxbsd"
+ or env["platform"] == "osx"
+ or env["platform"] == "haiku"
+ or is_android_x86
+ or is_ios_x86
+ )
+ )
+ webm_cpu_arm = (
+ is_x11_or_server_arm
+ or (not is_ios_x86 and env["platform"] == "iphone")
+ or (not is_android_x86 and env["platform"] == "android")
+ )
if webm_cpu_x86:
import subprocess
@@ -306,38 +288,43 @@ if webm_cpu_x86:
webm_simd_optimizations = False
if webm_cpu_x86:
- if env["platform"] == 'windows' or env["platform"] == 'uwp':
- env_libvpx["ASFORMAT"] = 'win'
- elif env["platform"] == 'osx' or env["platform"] == "iphone":
- env_libvpx["ASFORMAT"] = 'macho'
+ if env["platform"] == "windows" or env["platform"] == "uwp":
+ env_libvpx["ASFORMAT"] = "win"
+ elif env["platform"] == "osx" or env["platform"] == "iphone":
+ env_libvpx["ASFORMAT"] = "macho"
else:
- env_libvpx["ASFORMAT"] = 'elf'
+ env_libvpx["ASFORMAT"] = "elf"
env_libvpx["ASFORMAT"] += cpu_bits
- env_libvpx["AS"] = 'yasm'
- env_libvpx["ASFLAGS"] = '-I' + libvpx_dir[1:] + ' -f $ASFORMAT -D $ASCPU'
- env_libvpx["ASCOM"] = '$AS $ASFLAGS -o $TARGET $SOURCES'
+ env_libvpx["AS"] = "yasm"
+ env_libvpx["ASFLAGS"] = "-I" + libvpx_dir[1:] + " -f $ASFORMAT -D $ASCPU"
+ env_libvpx["ASCOM"] = "$AS $ASFLAGS -o $TARGET $SOURCES"
- if cpu_bits == '32':
- env_libvpx["ASCPU"] = 'X86_32'
- elif cpu_bits == '64':
- env_libvpx["ASCPU"] = 'X86_64'
+ if cpu_bits == "32":
+ env_libvpx["ASCPU"] = "X86_32"
+ elif cpu_bits == "64":
+ env_libvpx["ASCPU"] = "X86_64"
- env_libvpx.Append(CPPDEFINES=['WEBM_X86ASM'])
+ env_libvpx.Append(CPPDEFINES=["WEBM_X86ASM"])
webm_simd_optimizations = True
if webm_cpu_arm:
- if env["platform"] == 'iphone':
- env_libvpx["ASFLAGS"] = '-arch armv7'
- elif env["platform"] == 'android' and env["android_arch"] == 'armv7' or env["platform"] == 'linuxbsd' or env["platform"] == 'server':
- env_libvpx["ASFLAGS"] = '-mfpu=neon'
- elif env["platform"] == 'uwp':
- env_libvpx["AS"] = 'armasm'
- env_libvpx["ASFLAGS"] = ''
- env_libvpx["ASCOM"] = '$AS $ASFLAGS -o $TARGET $SOURCES'
+ if env["platform"] == "iphone":
+ env_libvpx["ASFLAGS"] = "-arch armv7"
+ elif (
+ env["platform"] == "android"
+ and env["android_arch"] == "armv7"
+ or env["platform"] == "linuxbsd"
+ or env["platform"] == "server"
+ ):
+ env_libvpx["ASFLAGS"] = "-mfpu=neon"
+ elif env["platform"] == "uwp":
+ env_libvpx["AS"] = "armasm"
+ env_libvpx["ASFLAGS"] = ""
+ env_libvpx["ASCOM"] = "$AS $ASFLAGS -o $TARGET $SOURCES"
- env_libvpx.Append(CPPDEFINES=['WEBM_ARMASM'])
+ env_libvpx.Append(CPPDEFINES=["WEBM_ARMASM"])
webm_simd_optimizations = True
@@ -350,45 +337,49 @@ if webm_multithread:
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_mt)
if webm_cpu_x86:
- is_clang_or_gcc = ('gcc' in os.path.basename(env["CC"])) or ('clang' in os.path.basename(env["CC"])) or ("osxcross" in env)
+ is_clang_or_gcc = (
+ ("gcc" in os.path.basename(env["CC"])) or ("clang" in os.path.basename(env["CC"])) or ("osxcross" in env)
+ )
env_libvpx_mmx = env_libvpx.Clone()
- if cpu_bits == '32' and is_clang_or_gcc:
- env_libvpx_mmx.Append(CCFLAGS=['-mmmx'])
+ if cpu_bits == "32" and is_clang_or_gcc:
+ env_libvpx_mmx.Append(CCFLAGS=["-mmmx"])
env_libvpx_mmx.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_mmx)
env_libvpx_sse2 = env_libvpx.Clone()
- if cpu_bits == '32' and is_clang_or_gcc:
- env_libvpx_sse2.Append(CCFLAGS=['-msse2'])
+ if cpu_bits == "32" and is_clang_or_gcc:
+ env_libvpx_sse2.Append(CCFLAGS=["-msse2"])
env_libvpx_sse2.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_sse2)
env_libvpx_ssse3 = env_libvpx.Clone()
if is_clang_or_gcc:
- env_libvpx_ssse3.Append(CCFLAGS=['-mssse3'])
+ env_libvpx_ssse3.Append(CCFLAGS=["-mssse3"])
env_libvpx_ssse3.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_ssse3)
env_libvpx_avx2 = env_libvpx.Clone()
if is_clang_or_gcc:
- env_libvpx_avx2.Append(CCFLAGS=['-mavx2'])
+ env_libvpx_avx2.Append(CCFLAGS=["-mavx2"])
env_libvpx_avx2.add_source_files(env.modules_sources, libvpx_sources_intrin_x86_avx2)
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_intrin_x86)
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_x86asm)
- if cpu_bits == '64':
+ if cpu_bits == "64":
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_x86_64asm)
elif webm_cpu_arm:
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_arm)
- if env["platform"] == 'android':
+ if env["platform"] == "android":
env_libvpx.Prepend(CPPPATH=[libvpx_dir + "third_party/android"])
env_libvpx.add_source_files(env.modules_sources, [libvpx_dir + "third_party/android/cpu-features.c"])
env_libvpx_neon = env_libvpx.Clone()
env_libvpx_neon.add_source_files(env.modules_sources, libvpx_sources_arm_neon)
- if env["platform"] == 'uwp':
+ if env["platform"] == "uwp":
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_arm_neon_armasm_ms)
- elif env["platform"] == 'iphone':
+ elif env["platform"] == "iphone":
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_arm_neon_gas_apple)
- elif (is_x11_or_server_arm and cpu_bits == '32') or (env["platform"] == 'android' and not env["android_arch"] == 'arm64v8'):
+ elif (is_x11_or_server_arm and cpu_bits == "32") or (
+ env["platform"] == "android" and not env["android_arch"] == "arm64v8"
+ ):
env_libvpx.add_source_files(env.modules_sources, libvpx_sources_arm_neon_gas)
diff --git a/modules/webp/SCsub b/modules/webp/SCsub
index 666628bb44f..58f2bb35e61 100644
--- a/modules/webp/SCsub
+++ b/modules/webp/SCsub
@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_webp = env_modules.Clone()
# Thirdparty source files
-if env['builtin_libwebp']:
+if env["builtin_libwebp"]:
thirdparty_dir = "#thirdparty/libwebp/"
thirdparty_sources = [
"dec/alpha_dec.c",
diff --git a/modules/webp/config.py b/modules/webp/config.py
index 1c8cd12a2dc..d22f9454ed2 100644
--- a/modules/webp/config.py
+++ b/modules/webp/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
diff --git a/modules/webrtc/SCsub b/modules/webrtc/SCsub
index 868553b8792..20b4c8f8d2c 100644
--- a/modules/webrtc/SCsub
+++ b/modules/webrtc/SCsub
@@ -1,15 +1,15 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
# Thirdparty source files
env_webrtc = env_modules.Clone()
use_gdnative = env_webrtc["module_gdnative_enabled"]
-if use_gdnative: # GDNative is retained in Javascript for export compatibility
- env_webrtc.Append(CPPDEFINES=['WEBRTC_GDNATIVE_ENABLED'])
+if use_gdnative: # GDNative is retained in Javascript for export compatibility
+ env_webrtc.Append(CPPDEFINES=["WEBRTC_GDNATIVE_ENABLED"])
env_webrtc.Prepend(CPPPATH=["#modules/gdnative/include/"])
env_webrtc.add_source_files(env.modules_sources, "*.cpp")
diff --git a/modules/webrtc/config.py b/modules/webrtc/config.py
index 48b4c33c5dc..0a075ccef13 100644
--- a/modules/webrtc/config.py
+++ b/modules/webrtc/config.py
@@ -1,15 +1,18 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"WebRTCPeerConnection",
"WebRTCDataChannel",
- "WebRTCMultiplayer"
+ "WebRTCMultiplayer",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/websocket/SCsub b/modules/websocket/SCsub
index 033169411f0..af600558555 100644
--- a/modules/websocket/SCsub
+++ b/modules/websocket/SCsub
@@ -1,13 +1,13 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
# Thirdparty source files
env_ws = env_modules.Clone()
-if env['builtin_wslay'] and not env["platform"] == "javascript": # already builtin for javascript
+if env["builtin_wslay"] and not env["platform"] == "javascript": # already builtin for javascript
wslay_dir = "#thirdparty/wslay/"
wslay_sources = [
"wslay_net.c",
diff --git a/modules/websocket/config.py b/modules/websocket/config.py
index f59ef432b4e..9e27a1a0fed 100644
--- a/modules/websocket/config.py
+++ b/modules/websocket/config.py
@@ -1,16 +1,19 @@
def can_build(env, platform):
return True
+
def configure(env):
pass
+
def get_doc_classes():
return [
"WebSocketClient",
"WebSocketMultiplayerPeer",
"WebSocketPeer",
- "WebSocketServer"
+ "WebSocketServer",
]
+
def get_doc_path():
return "doc_classes"
diff --git a/modules/xatlas_unwrap/SCsub b/modules/xatlas_unwrap/SCsub
index b242fd46732..c659349d05f 100644
--- a/modules/xatlas_unwrap/SCsub
+++ b/modules/xatlas_unwrap/SCsub
@@ -1,12 +1,12 @@
#!/usr/bin/env python
-Import('env')
-Import('env_modules')
+Import("env")
+Import("env_modules")
env_xatlas_unwrap = env_modules.Clone()
# Thirdparty source files
-if env['builtin_xatlas']:
+if env["builtin_xatlas"]:
thirdparty_dir = "#thirdparty/xatlas/"
thirdparty_sources = [
"xatlas.cpp",
diff --git a/modules/xatlas_unwrap/config.py b/modules/xatlas_unwrap/config.py
index bd092bdc161..2e73c516262 100644
--- a/modules/xatlas_unwrap/config.py
+++ b/modules/xatlas_unwrap/config.py
@@ -1,5 +1,6 @@
def can_build(env, platform):
- return (env['tools'] and platform not in ["android", "ios"])
+ return env["tools"] and platform not in ["android", "ios"]
+
def configure(env):
pass
diff --git a/platform/SCsub b/platform/SCsub
index 40cacce6744..5194a195187 100644
--- a/platform/SCsub
+++ b/platform/SCsub
@@ -1,30 +1,30 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.platform_sources = []
# Register platform-exclusive APIs
reg_apis_inc = '#include "register_platform_apis.h"\n'
-reg_apis = 'void register_platform_apis() {\n'
-unreg_apis = 'void unregister_platform_apis() {\n'
+reg_apis = "void register_platform_apis() {\n"
+unreg_apis = "void unregister_platform_apis() {\n"
for platform in env.platform_apis:
platform_dir = env.Dir(platform)
- env.add_source_files(env.platform_sources, platform + '/api/api.cpp')
- reg_apis += '\tregister_' + platform + '_api();\n'
- unreg_apis += '\tunregister_' + platform + '_api();\n'
+ env.add_source_files(env.platform_sources, platform + "/api/api.cpp")
+ reg_apis += "\tregister_" + platform + "_api();\n"
+ unreg_apis += "\tunregister_" + platform + "_api();\n"
reg_apis_inc += '#include "' + platform + '/api/api.h"\n'
-reg_apis_inc += '\n'
-reg_apis += '}\n\n'
-unreg_apis += '}\n'
+reg_apis_inc += "\n"
+reg_apis += "}\n\n"
+unreg_apis += "}\n"
# NOTE: It is safe to generate this file here, since this is still executed serially
-with open('register_platform_apis.gen.cpp', 'w', encoding="utf-8") as f:
+with open("register_platform_apis.gen.cpp", "w", encoding="utf-8") as f:
f.write(reg_apis_inc)
f.write(reg_apis)
f.write(unreg_apis)
-env.add_source_files(env.platform_sources, 'register_platform_apis.gen.cpp')
+env.add_source_files(env.platform_sources, "register_platform_apis.gen.cpp")
-lib = env.add_library('platform', env.platform_sources)
+lib = env.add_library("platform", env.platform_sources)
env.Prepend(LIBS=[lib])
diff --git a/platform/android/SCsub b/platform/android/SCsub
index 5ca5ce548c5..f39eb8b8890 100644
--- a/platform/android/SCsub
+++ b/platform/android/SCsub
@@ -1,24 +1,24 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
android_files = [
- 'os_android.cpp',
- 'file_access_android.cpp',
- 'audio_driver_opensl.cpp',
- 'file_access_jandroid.cpp',
- 'dir_access_jandroid.cpp',
- 'thread_jandroid.cpp',
- 'net_socket_android.cpp',
- 'audio_driver_jandroid.cpp',
- 'java_godot_lib_jni.cpp',
- 'java_class_wrapper.cpp',
- 'java_godot_wrapper.cpp',
- 'java_godot_io_wrapper.cpp',
- 'jni_utils.cpp',
- 'android_keys_utils.cpp',
- 'vulkan/vk_renderer_jni.cpp',
- 'plugin/godot_plugin_jni.cpp'
+ "os_android.cpp",
+ "file_access_android.cpp",
+ "audio_driver_opensl.cpp",
+ "file_access_jandroid.cpp",
+ "dir_access_jandroid.cpp",
+ "thread_jandroid.cpp",
+ "net_socket_android.cpp",
+ "audio_driver_jandroid.cpp",
+ "java_godot_lib_jni.cpp",
+ "java_class_wrapper.cpp",
+ "java_godot_wrapper.cpp",
+ "java_godot_io_wrapper.cpp",
+ "jni_utils.cpp",
+ "android_keys_utils.cpp",
+ "vulkan/vk_renderer_jni.cpp",
+ "plugin/godot_plugin_jni.cpp",
]
env_android = env.Clone()
@@ -29,30 +29,34 @@ for x in android_files:
env_thirdparty = env_android.Clone()
env_thirdparty.disable_warnings()
-android_objects.append(env_thirdparty.SharedObject('#thirdparty/misc/ifaddrs-android.cc'))
+android_objects.append(env_thirdparty.SharedObject("#thirdparty/misc/ifaddrs-android.cc"))
lib = env_android.add_shared_library("#bin/libgodot", [android_objects], SHLIBSUFFIX=env["SHLIBSUFFIX"])
-lib_arch_dir = ''
-if env['android_arch'] == 'armv7':
- lib_arch_dir = 'armeabi-v7a'
-elif env['android_arch'] == 'arm64v8':
- lib_arch_dir = 'arm64-v8a'
-elif env['android_arch'] == 'x86':
- lib_arch_dir = 'x86'
-elif env['android_arch'] == 'x86_64':
- lib_arch_dir = 'x86_64'
+lib_arch_dir = ""
+if env["android_arch"] == "armv7":
+ lib_arch_dir = "armeabi-v7a"
+elif env["android_arch"] == "arm64v8":
+ lib_arch_dir = "arm64-v8a"
+elif env["android_arch"] == "x86":
+ lib_arch_dir = "x86"
+elif env["android_arch"] == "x86_64":
+ lib_arch_dir = "x86_64"
else:
- print('WARN: Architecture not suitable for embedding into APK; keeping .so at \\bin')
+ print("WARN: Architecture not suitable for embedding into APK; keeping .so at \\bin")
-if lib_arch_dir != '':
- if env['target'] == 'release':
- lib_type_dir = 'release'
+if lib_arch_dir != "":
+ if env["target"] == "release":
+ lib_type_dir = "release"
else: # release_debug, debug
- lib_type_dir = 'debug'
+ lib_type_dir = "debug"
- out_dir = '#platform/android/java/lib/libs/' + lib_type_dir + '/' + lib_arch_dir
- env_android.Command(out_dir + '/libgodot_android.so', '#bin/libgodot' + env['SHLIBSUFFIX'], Move("$TARGET", "$SOURCE"))
+ out_dir = "#platform/android/java/lib/libs/" + lib_type_dir + "/" + lib_arch_dir
+ env_android.Command(
+ out_dir + "/libgodot_android.so", "#bin/libgodot" + env["SHLIBSUFFIX"], Move("$TARGET", "$SOURCE")
+ )
- stl_lib_path = str(env['ANDROID_NDK_ROOT']) + '/sources/cxx-stl/llvm-libc++/libs/' + lib_arch_dir + '/libc++_shared.so'
- env_android.Command(out_dir + '/libc++_shared.so', stl_lib_path, Copy("$TARGET", "$SOURCE"))
+ stl_lib_path = (
+ str(env["ANDROID_NDK_ROOT"]) + "/sources/cxx-stl/llvm-libc++/libs/" + lib_arch_dir + "/libc++_shared.so"
+ )
+ env_android.Command(out_dir + "/libc++_shared.so", stl_lib_path, Copy("$TARGET", "$SOURCE"))
diff --git a/platform/android/detect.py b/platform/android/detect.py
index ff3ca0706c9..ed0643e3b3e 100644
--- a/platform/android/detect.py
+++ b/platform/android/detect.py
@@ -13,7 +13,7 @@ def get_name():
def can_build():
- return ("ANDROID_NDK_ROOT" in os.environ)
+ return "ANDROID_NDK_ROOT" in os.environ
def get_platform(platform):
@@ -24,33 +24,33 @@ def get_opts():
from SCons.Variables import BoolVariable, EnumVariable
return [
- ('ANDROID_NDK_ROOT', 'Path to the Android NDK', os.environ.get("ANDROID_NDK_ROOT", 0)),
- ('ndk_platform', 'Target platform (android-, e.g. "android-18")', "android-18"),
- EnumVariable('android_arch', 'Target architecture', "armv7", ('armv7', 'arm64v8', 'x86', 'x86_64')),
- BoolVariable('android_neon', 'Enable NEON support (armv7 only)', True),
+ ("ANDROID_NDK_ROOT", "Path to the Android NDK", os.environ.get("ANDROID_NDK_ROOT", 0)),
+ ("ndk_platform", 'Target platform (android-, e.g. "android-18")', "android-18"),
+ EnumVariable("android_arch", "Target architecture", "armv7", ("armv7", "arm64v8", "x86", "x86_64")),
+ BoolVariable("android_neon", "Enable NEON support (armv7 only)", True),
]
def get_flags():
return [
- ('tools', False),
+ ("tools", False),
]
def create(env):
- tools = env['TOOLS']
+ tools = env["TOOLS"]
if "mingw" in tools:
- tools.remove('mingw')
+ tools.remove("mingw")
if "applelink" in tools:
tools.remove("applelink")
- env.Tool('gcc')
+ env.Tool("gcc")
return env.Clone(tools=tools)
def configure(env):
# Workaround for MinGW. See:
# http://www.scons.org/wiki/LongCmdLinesOnWin32
- if (os.name == "nt"):
+ if os.name == "nt":
import subprocess
@@ -58,8 +58,15 @@ def configure(env):
# print("SPAWNED : " + cmdline)
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env)
+ proc = subprocess.Popen(
+ cmdline,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ startupinfo=startupinfo,
+ shell=False,
+ env=env,
+ )
data, err = proc.communicate()
rv = proc.wait()
if rv:
@@ -70,7 +77,7 @@ def configure(env):
def mySpawn(sh, escape, cmd, args, env):
- newargs = ' '.join(args[1:])
+ newargs = " ".join(args[1:])
cmdline = cmd + " " + newargs
rv = 0
@@ -85,50 +92,54 @@ def configure(env):
return rv
- env['SPAWN'] = mySpawn
+ env["SPAWN"] = mySpawn
# Architecture
- if env['android_arch'] not in ['armv7', 'arm64v8', 'x86', 'x86_64']:
- env['android_arch'] = 'armv7'
+ if env["android_arch"] not in ["armv7", "arm64v8", "x86", "x86_64"]:
+ env["android_arch"] = "armv7"
neon_text = ""
- if env["android_arch"] == "armv7" and env['android_neon']:
+ if env["android_arch"] == "armv7" and env["android_neon"]:
neon_text = " (with NEON)"
- print("Building for Android (" + env['android_arch'] + ")" + neon_text)
+ print("Building for Android (" + env["android_arch"] + ")" + neon_text)
can_vectorize = True
- if env['android_arch'] == 'x86':
- env['ARCH'] = 'arch-x86'
+ if env["android_arch"] == "x86":
+ env["ARCH"] = "arch-x86"
env.extra_suffix = ".x86" + env.extra_suffix
target_subpath = "x86-4.9"
abi_subpath = "i686-linux-android"
arch_subpath = "x86"
env["x86_libtheora_opt_gcc"] = True
- if env['android_arch'] == 'x86_64':
+ if env["android_arch"] == "x86_64":
if get_platform(env["ndk_platform"]) < 21:
- print("WARNING: android_arch=x86_64 is not supported by ndk_platform lower than android-21; setting ndk_platform=android-21")
+ print(
+ "WARNING: android_arch=x86_64 is not supported by ndk_platform lower than android-21; setting ndk_platform=android-21"
+ )
env["ndk_platform"] = "android-21"
- env['ARCH'] = 'arch-x86_64'
+ env["ARCH"] = "arch-x86_64"
env.extra_suffix = ".x86_64" + env.extra_suffix
target_subpath = "x86_64-4.9"
abi_subpath = "x86_64-linux-android"
arch_subpath = "x86_64"
env["x86_libtheora_opt_gcc"] = True
elif env["android_arch"] == "armv7":
- env['ARCH'] = 'arch-arm'
+ env["ARCH"] = "arch-arm"
target_subpath = "arm-linux-androideabi-4.9"
abi_subpath = "arm-linux-androideabi"
arch_subpath = "armeabi-v7a"
- if env['android_neon']:
+ if env["android_neon"]:
env.extra_suffix = ".armv7.neon" + env.extra_suffix
else:
env.extra_suffix = ".armv7" + env.extra_suffix
elif env["android_arch"] == "arm64v8":
if get_platform(env["ndk_platform"]) < 21:
- print("WARNING: android_arch=arm64v8 is not supported by ndk_platform lower than android-21; setting ndk_platform=android-21")
+ print(
+ "WARNING: android_arch=arm64v8 is not supported by ndk_platform lower than android-21; setting ndk_platform=android-21"
+ )
env["ndk_platform"] = "android-21"
- env['ARCH'] = 'arch-arm64'
+ env["ARCH"] = "arch-arm64"
target_subpath = "aarch64-linux-android-4.9"
abi_subpath = "aarch64-linux-android"
arch_subpath = "arm64-v8a"
@@ -136,40 +147,40 @@ def configure(env):
# Build type
- if (env["target"].startswith("release")):
- if (env["optimize"] == "speed"): # optimize for speed (default)
- env.Append(LINKFLAGS=['-O2'])
- env.Append(CCFLAGS=['-O2', '-fomit-frame-pointer'])
- env.Append(CPPDEFINES=['NDEBUG'])
+ if env["target"].startswith("release"):
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Append(LINKFLAGS=["-O2"])
+ env.Append(CCFLAGS=["-O2", "-fomit-frame-pointer"])
+ env.Append(CPPDEFINES=["NDEBUG"])
else: # optimize for size
- env.Append(CCFLAGS=['-Os'])
- env.Append(CPPDEFINES=['NDEBUG'])
- env.Append(LINKFLAGS=['-Os'])
+ env.Append(CCFLAGS=["-Os"])
+ env.Append(CPPDEFINES=["NDEBUG"])
+ env.Append(LINKFLAGS=["-Os"])
- if (can_vectorize):
- env.Append(CCFLAGS=['-ftree-vectorize'])
- if (env["target"] == "release_debug"):
- env.Append(CPPDEFINES=['DEBUG_ENABLED'])
- elif (env["target"] == "debug"):
- env.Append(LINKFLAGS=['-O0'])
- env.Append(CCFLAGS=['-O0', '-g', '-fno-limit-debug-info'])
- env.Append(CPPDEFINES=['_DEBUG', 'DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED'])
- env.Append(CPPFLAGS=['-UNDEBUG'])
+ if can_vectorize:
+ env.Append(CCFLAGS=["-ftree-vectorize"])
+ if env["target"] == "release_debug":
+ env.Append(CPPDEFINES=["DEBUG_ENABLED"])
+ elif env["target"] == "debug":
+ env.Append(LINKFLAGS=["-O0"])
+ env.Append(CCFLAGS=["-O0", "-g", "-fno-limit-debug-info"])
+ env.Append(CPPDEFINES=["_DEBUG", "DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"])
+ env.Append(CPPFLAGS=["-UNDEBUG"])
# Compiler configuration
- env['SHLIBSUFFIX'] = '.so'
+ env["SHLIBSUFFIX"] = ".so"
- if env['PLATFORM'] == 'win32':
- env.Tool('gcc')
+ if env["PLATFORM"] == "win32":
+ env.Tool("gcc")
env.use_windows_spawn_fix()
- if (sys.platform.startswith("linux")):
+ if sys.platform.startswith("linux"):
host_subpath = "linux-x86_64"
- elif (sys.platform.startswith("darwin")):
+ elif sys.platform.startswith("darwin"):
host_subpath = "darwin-x86_64"
- elif (sys.platform.startswith('win')):
- if (platform.machine().endswith('64')):
+ elif sys.platform.startswith("win"):
+ if platform.machine().endswith("64"):
host_subpath = "windows-x86_64"
else:
host_subpath = "windows"
@@ -179,22 +190,22 @@ def configure(env):
tools_path = gcc_toolchain_path + "/" + abi_subpath + "/bin"
# For Clang to find NDK tools in preference of those system-wide
- env.PrependENVPath('PATH', tools_path)
+ env.PrependENVPath("PATH", tools_path)
ccache_path = os.environ.get("CCACHE")
if ccache_path is None:
- env['CC'] = compiler_path + '/clang'
- env['CXX'] = compiler_path + '/clang++'
+ env["CC"] = compiler_path + "/clang"
+ env["CXX"] = compiler_path + "/clang++"
else:
# there aren't any ccache wrappers available for Android,
# to enable caching we need to prepend the path to the ccache binary
- env['CC'] = ccache_path + ' ' + compiler_path + '/clang'
- env['CXX'] = ccache_path + ' ' + compiler_path + '/clang++'
- env['AR'] = tools_path + "/ar"
- env['RANLIB'] = tools_path + "/ranlib"
- env['AS'] = tools_path + "/as"
+ env["CC"] = ccache_path + " " + compiler_path + "/clang"
+ env["CXX"] = ccache_path + " " + compiler_path + "/clang++"
+ env["AR"] = tools_path + "/ar"
+ env["RANLIB"] = tools_path + "/ranlib"
+ env["AS"] = tools_path + "/as"
- common_opts = ['-fno-integrated-as', '-gcc-toolchain', gcc_toolchain_path]
+ common_opts = ["-fno-integrated-as", "-gcc-toolchain", gcc_toolchain_path]
# Compile flags
@@ -202,14 +213,14 @@ def configure(env):
env.Append(CPPFLAGS=["-isystem", env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++abi/include"])
# Disable exceptions and rtti on non-tools (template) builds
- if env['tools']:
- env.Append(CXXFLAGS=['-frtti'])
+ if env["tools"]:
+ env.Append(CXXFLAGS=["-frtti"])
else:
- env.Append(CXXFLAGS=['-fno-rtti', '-fno-exceptions'])
+ env.Append(CXXFLAGS=["-fno-rtti", "-fno-exceptions"])
# Don't use dynamic_cast, necessary with no-rtti.
- env.Append(CPPDEFINES=['NO_SAFE_CAST'])
+ env.Append(CPPDEFINES=["NO_SAFE_CAST"])
- lib_sysroot = env["ANDROID_NDK_ROOT"] + "/platforms/" + env['ndk_platform'] + "/" + env['ARCH']
+ lib_sysroot = env["ANDROID_NDK_ROOT"] + "/platforms/" + env["ndk_platform"] + "/" + env["ARCH"]
# Using NDK unified headers (NDK r15+)
sysroot = env["ANDROID_NDK_ROOT"] + "/sysroot"
@@ -217,35 +228,37 @@ def configure(env):
env.Append(CPPFLAGS=["-isystem", sysroot + "/usr/include/" + abi_subpath])
env.Append(CPPFLAGS=["-isystem", env["ANDROID_NDK_ROOT"] + "/sources/android/support/include"])
# For unified headers this define has to be set manually
- env.Append(CPPDEFINES=[('__ANDROID_API__', str(get_platform(env['ndk_platform'])))])
+ env.Append(CPPDEFINES=[("__ANDROID_API__", str(get_platform(env["ndk_platform"])))])
- env.Append(CCFLAGS='-fpic -ffunction-sections -funwind-tables -fstack-protector-strong -fvisibility=hidden -fno-strict-aliasing'.split())
- env.Append(CPPDEFINES=['NO_STATVFS', 'GLES_ENABLED'])
+ env.Append(
+ CCFLAGS="-fpic -ffunction-sections -funwind-tables -fstack-protector-strong -fvisibility=hidden -fno-strict-aliasing".split()
+ )
+ env.Append(CPPDEFINES=["NO_STATVFS", "GLES_ENABLED"])
- env['neon_enabled'] = False
- if env['android_arch'] == 'x86':
- target_opts = ['-target', 'i686-none-linux-android']
+ env["neon_enabled"] = False
+ if env["android_arch"] == "x86":
+ target_opts = ["-target", "i686-none-linux-android"]
# The NDK adds this if targeting API < 21, so we can drop it when Godot targets it at least
- env.Append(CCFLAGS=['-mstackrealign'])
+ env.Append(CCFLAGS=["-mstackrealign"])
- elif env['android_arch'] == 'x86_64':
- target_opts = ['-target', 'x86_64-none-linux-android']
+ elif env["android_arch"] == "x86_64":
+ target_opts = ["-target", "x86_64-none-linux-android"]
elif env["android_arch"] == "armv7":
- target_opts = ['-target', 'armv7-none-linux-androideabi']
- env.Append(CCFLAGS='-march=armv7-a -mfloat-abi=softfp'.split())
- env.Append(CPPDEFINES=['__ARM_ARCH_7__', '__ARM_ARCH_7A__'])
- if env['android_neon']:
- env['neon_enabled'] = True
- env.Append(CCFLAGS=['-mfpu=neon'])
- env.Append(CPPDEFINES=['__ARM_NEON__'])
+ target_opts = ["-target", "armv7-none-linux-androideabi"]
+ env.Append(CCFLAGS="-march=armv7-a -mfloat-abi=softfp".split())
+ env.Append(CPPDEFINES=["__ARM_ARCH_7__", "__ARM_ARCH_7A__"])
+ if env["android_neon"]:
+ env["neon_enabled"] = True
+ env.Append(CCFLAGS=["-mfpu=neon"])
+ env.Append(CPPDEFINES=["__ARM_NEON__"])
else:
- env.Append(CCFLAGS=['-mfpu=vfpv3-d16'])
+ env.Append(CCFLAGS=["-mfpu=vfpv3-d16"])
elif env["android_arch"] == "arm64v8":
- target_opts = ['-target', 'aarch64-none-linux-android']
- env.Append(CCFLAGS=['-mfix-cortex-a53-835769'])
- env.Append(CPPDEFINES=['__ARM_ARCH_8A__'])
+ target_opts = ["-target", "aarch64-none-linux-android"]
+ env.Append(CCFLAGS=["-mfix-cortex-a53-835769"])
+ env.Append(CPPDEFINES=["__ARM_ARCH_8A__"])
env.Append(CCFLAGS=target_opts)
env.Append(CCFLAGS=common_opts)
@@ -254,29 +267,55 @@ def configure(env):
ndk_version = get_ndk_version(env["ANDROID_NDK_ROOT"])
if ndk_version != None and LooseVersion(ndk_version) >= LooseVersion("17.1.4828580"):
- env.Append(LINKFLAGS=['-Wl,--exclude-libs,libgcc.a', '-Wl,--exclude-libs,libatomic.a', '-nostdlib++'])
+ env.Append(LINKFLAGS=["-Wl,--exclude-libs,libgcc.a", "-Wl,--exclude-libs,libatomic.a", "-nostdlib++"])
else:
- env.Append(LINKFLAGS=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libandroid_support.a"])
- env.Append(LINKFLAGS=['-shared', '--sysroot=' + lib_sysroot, '-Wl,--warn-shared-textrel'])
+ env.Append(
+ LINKFLAGS=[
+ env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libandroid_support.a"
+ ]
+ )
+ env.Append(LINKFLAGS=["-shared", "--sysroot=" + lib_sysroot, "-Wl,--warn-shared-textrel"])
env.Append(LIBPATH=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/"])
- env.Append(LINKFLAGS=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libc++_shared.so"])
+ env.Append(
+ LINKFLAGS=[env["ANDROID_NDK_ROOT"] + "/sources/cxx-stl/llvm-libc++/libs/" + arch_subpath + "/libc++_shared.so"]
+ )
if env["android_arch"] == "armv7":
- env.Append(LINKFLAGS='-Wl,--fix-cortex-a8'.split())
- env.Append(LINKFLAGS='-Wl,--no-undefined -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now'.split())
- env.Append(LINKFLAGS='-Wl,-soname,libgodot_android.so -Wl,--gc-sections'.split())
+ env.Append(LINKFLAGS="-Wl,--fix-cortex-a8".split())
+ env.Append(LINKFLAGS="-Wl,--no-undefined -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now".split())
+ env.Append(LINKFLAGS="-Wl,-soname,libgodot_android.so -Wl,--gc-sections".split())
env.Append(LINKFLAGS=target_opts)
env.Append(LINKFLAGS=common_opts)
- env.Append(LIBPATH=[env["ANDROID_NDK_ROOT"] + '/toolchains/' + target_subpath + '/prebuilt/' +
- host_subpath + '/lib/gcc/' + abi_subpath + '/4.9.x'])
- env.Append(LIBPATH=[env["ANDROID_NDK_ROOT"] +
- '/toolchains/' + target_subpath + '/prebuilt/' + host_subpath + '/' + abi_subpath + '/lib'])
+ env.Append(
+ LIBPATH=[
+ env["ANDROID_NDK_ROOT"]
+ + "/toolchains/"
+ + target_subpath
+ + "/prebuilt/"
+ + host_subpath
+ + "/lib/gcc/"
+ + abi_subpath
+ + "/4.9.x"
+ ]
+ )
+ env.Append(
+ LIBPATH=[
+ env["ANDROID_NDK_ROOT"]
+ + "/toolchains/"
+ + target_subpath
+ + "/prebuilt/"
+ + host_subpath
+ + "/"
+ + abi_subpath
+ + "/lib"
+ ]
+ )
- env.Prepend(CPPPATH=['#platform/android'])
- env.Append(CPPDEFINES=['ANDROID_ENABLED', 'UNIX_ENABLED', 'NO_FCNTL'])
- env.Append(LIBS=['OpenSLES', 'EGL', 'GLESv3', 'GLESv2', 'android', 'log', 'z', 'dl'])
+ env.Prepend(CPPPATH=["#platform/android"])
+ env.Append(CPPDEFINES=["ANDROID_ENABLED", "UNIX_ENABLED", "NO_FCNTL"])
+ env.Append(LIBS=["OpenSLES", "EGL", "GLESv3", "GLESv2", "android", "log", "z", "dl"])
# Return NDK version string in source.properties (adapted from the Chromium project).
diff --git a/platform/haiku/SCsub b/platform/haiku/SCsub
index 592f56bbbfb..dbff6c5ae97 100644
--- a/platform/haiku/SCsub
+++ b/platform/haiku/SCsub
@@ -1,28 +1,25 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
common_haiku = [
- 'os_haiku.cpp',
- 'context_gl_haiku.cpp',
- 'haiku_application.cpp',
- 'haiku_direct_window.cpp',
- 'haiku_gl_view.cpp',
- 'key_mapping_haiku.cpp',
- 'audio_driver_media_kit.cpp'
+ "os_haiku.cpp",
+ "context_gl_haiku.cpp",
+ "haiku_application.cpp",
+ "haiku_direct_window.cpp",
+ "haiku_gl_view.cpp",
+ "key_mapping_haiku.cpp",
+ "audio_driver_media_kit.cpp",
]
-target = env.add_program(
- '#bin/godot',
- ['godot_haiku.cpp'] + common_haiku
-)
+target = env.add_program("#bin/godot", ["godot_haiku.cpp"] + common_haiku)
-command = env.Command('#bin/godot.rsrc', '#platform/haiku/godot.rdef',
- ['rc -o $TARGET $SOURCE'])
+command = env.Command("#bin/godot.rsrc", "#platform/haiku/godot.rdef", ["rc -o $TARGET $SOURCE"])
def addResourcesAction(target=None, source=None, env=None):
- return env.Execute('xres -o ' + File(target)[0].path + ' bin/godot.rsrc')
+ return env.Execute("xres -o " + File(target)[0].path + " bin/godot.rsrc")
+
env.AddPostAction(target, addResourcesAction)
env.Depends(target, command)
diff --git a/platform/haiku/detect.py b/platform/haiku/detect.py
index dd722948161..0b84df8f9bd 100644
--- a/platform/haiku/detect.py
+++ b/platform/haiku/detect.py
@@ -12,7 +12,7 @@ def get_name():
def can_build():
- if (os.name != "posix" or sys.platform == "darwin"):
+ if os.name != "posix" or sys.platform == "darwin":
return False
return True
@@ -22,41 +22,40 @@ def get_opts():
from SCons.Variables import EnumVariable
return [
- EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')),
+ EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")),
]
def get_flags():
- return [
- ]
+ return []
def configure(env):
## Build type
- if (env["target"] == "release"):
- env.Prepend(CCFLAGS=['-O3'])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ if env["target"] == "release":
+ env.Prepend(CCFLAGS=["-O3"])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "release_debug"):
- env.Prepend(CCFLAGS=['-O2', '-DDEBUG_ENABLED'])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ elif env["target"] == "release_debug":
+ env.Prepend(CCFLAGS=["-O2", "-DDEBUG_ENABLED"])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "debug"):
- env.Prepend(CCFLAGS=['-g3', '-DDEBUG_ENABLED', '-DDEBUG_MEMORY_ENABLED'])
+ elif env["target"] == "debug":
+ env.Prepend(CCFLAGS=["-g3", "-DDEBUG_ENABLED", "-DDEBUG_MEMORY_ENABLED"])
## Architecture
- is64 = sys.maxsize > 2**32
- if (env["bits"] == "default"):
+ is64 = sys.maxsize > 2 ** 32
+ if env["bits"] == "default":
env["bits"] = "64" if is64 else "32"
## Compiler configuration
@@ -66,89 +65,94 @@ def configure(env):
## Dependencies
- if not env['builtin_libwebp']:
- env.ParseConfig('pkg-config libwebp --cflags --libs')
+ if not env["builtin_libwebp"]:
+ env.ParseConfig("pkg-config libwebp --cflags --libs")
# freetype depends on libpng and zlib, so bundling one of them while keeping others
# as shared libraries leads to weird issues
- if env['builtin_freetype'] or env['builtin_libpng'] or env['builtin_zlib']:
- env['builtin_freetype'] = True
- env['builtin_libpng'] = True
- env['builtin_zlib'] = True
+ if env["builtin_freetype"] or env["builtin_libpng"] or env["builtin_zlib"]:
+ env["builtin_freetype"] = True
+ env["builtin_libpng"] = True
+ env["builtin_zlib"] = True
- if not env['builtin_freetype']:
- env.ParseConfig('pkg-config freetype2 --cflags --libs')
+ if not env["builtin_freetype"]:
+ env.ParseConfig("pkg-config freetype2 --cflags --libs")
- if not env['builtin_libpng']:
- env.ParseConfig('pkg-config libpng16 --cflags --libs')
+ if not env["builtin_libpng"]:
+ env.ParseConfig("pkg-config libpng16 --cflags --libs")
- if not env['builtin_bullet']:
+ if not env["builtin_bullet"]:
# We need at least version 2.88
import subprocess
- bullet_version = subprocess.check_output(['pkg-config', 'bullet', '--modversion']).strip()
+
+ bullet_version = subprocess.check_output(["pkg-config", "bullet", "--modversion"]).strip()
if bullet_version < "2.88":
# Abort as system bullet was requested but too old
- print("Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(bullet_version, "2.88"))
+ print(
+ "Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(
+ bullet_version, "2.88"
+ )
+ )
sys.exit(255)
- env.ParseConfig('pkg-config bullet --cflags --libs')
+ env.ParseConfig("pkg-config bullet --cflags --libs")
- if not env['builtin_enet']:
- env.ParseConfig('pkg-config libenet --cflags --libs')
+ if not env["builtin_enet"]:
+ env.ParseConfig("pkg-config libenet --cflags --libs")
- if not env['builtin_squish']:
- env.ParseConfig('pkg-config libsquish --cflags --libs')
+ if not env["builtin_squish"]:
+ env.ParseConfig("pkg-config libsquish --cflags --libs")
- if not env['builtin_zstd']:
- env.ParseConfig('pkg-config libzstd --cflags --libs')
+ if not env["builtin_zstd"]:
+ env.ParseConfig("pkg-config libzstd --cflags --libs")
# Sound and video libraries
# Keep the order as it triggers chained dependencies (ogg needed by others, etc.)
- if not env['builtin_libtheora']:
- env['builtin_libogg'] = False # Needed to link against system libtheora
- env['builtin_libvorbis'] = False # Needed to link against system libtheora
- env.ParseConfig('pkg-config theora theoradec --cflags --libs')
+ if not env["builtin_libtheora"]:
+ env["builtin_libogg"] = False # Needed to link against system libtheora
+ env["builtin_libvorbis"] = False # Needed to link against system libtheora
+ env.ParseConfig("pkg-config theora theoradec --cflags --libs")
- if not env['builtin_libvpx']:
- env.ParseConfig('pkg-config vpx --cflags --libs')
+ if not env["builtin_libvpx"]:
+ env.ParseConfig("pkg-config vpx --cflags --libs")
- if not env['builtin_libvorbis']:
- env['builtin_libogg'] = False # Needed to link against system libvorbis
- env.ParseConfig('pkg-config vorbis vorbisfile --cflags --libs')
+ if not env["builtin_libvorbis"]:
+ env["builtin_libogg"] = False # Needed to link against system libvorbis
+ env.ParseConfig("pkg-config vorbis vorbisfile --cflags --libs")
- if not env['builtin_opus']:
- env['builtin_libogg'] = False # Needed to link against system opus
- env.ParseConfig('pkg-config opus opusfile --cflags --libs')
+ if not env["builtin_opus"]:
+ env["builtin_libogg"] = False # Needed to link against system opus
+ env.ParseConfig("pkg-config opus opusfile --cflags --libs")
- if not env['builtin_libogg']:
- env.ParseConfig('pkg-config ogg --cflags --libs')
+ if not env["builtin_libogg"]:
+ env.ParseConfig("pkg-config ogg --cflags --libs")
- if env['builtin_libtheora']:
- list_of_x86 = ['x86_64', 'x86', 'i386', 'i586']
+ if env["builtin_libtheora"]:
+ list_of_x86 = ["x86_64", "x86", "i386", "i586"]
if any(platform.machine() in s for s in list_of_x86):
env["x86_libtheora_opt_gcc"] = True
- if not env['builtin_wslay']:
- env.ParseConfig('pkg-config libwslay --cflags --libs')
+ if not env["builtin_wslay"]:
+ env.ParseConfig("pkg-config libwslay --cflags --libs")
- if not env['builtin_mbedtls']:
+ if not env["builtin_mbedtls"]:
# mbedTLS does not provide a pkgconfig config yet. See https://github.com/ARMmbed/mbedtls/issues/228
- env.Append(LIBS=['mbedtls', 'mbedcrypto', 'mbedx509'])
+ env.Append(LIBS=["mbedtls", "mbedcrypto", "mbedx509"])
- if not env['builtin_miniupnpc']:
+ if not env["builtin_miniupnpc"]:
# No pkgconfig file so far, hardcode default paths.
env.Prepend(CPPPATH=["/system/develop/headers/x86/miniupnpc"])
env.Append(LIBS=["miniupnpc"])
# On Linux wchar_t should be 32-bits
# 16-bit library shouldn't be required due to compiler optimisations
- if not env['builtin_pcre2']:
- env.ParseConfig('pkg-config libpcre2-32 --cflags --libs')
+ if not env["builtin_pcre2"]:
+ env.ParseConfig("pkg-config libpcre2-32 --cflags --libs")
## Flags
- env.Prepend(CPPPATH=['#platform/haiku'])
- env.Append(CPPDEFINES=['UNIX_ENABLED', 'OPENGL_ENABLED', 'GLES_ENABLED'])
- env.Append(CPPDEFINES=['MEDIA_KIT_ENABLED'])
- env.Append(CPPDEFINES=['PTHREAD_NO_RENAME']) # TODO: enable when we have pthread_setname_np
- env.Append(LIBS=['be', 'game', 'media', 'network', 'bnetapi', 'z', 'GL'])
+ env.Prepend(CPPPATH=["#platform/haiku"])
+ env.Append(CPPDEFINES=["UNIX_ENABLED", "OPENGL_ENABLED", "GLES_ENABLED"])
+ env.Append(CPPDEFINES=["MEDIA_KIT_ENABLED"])
+ env.Append(CPPDEFINES=["PTHREAD_NO_RENAME"]) # TODO: enable when we have pthread_setname_np
+ env.Append(LIBS=["be", "game", "media", "network", "bnetapi", "z", "GL"])
diff --git a/platform/iphone/SCsub b/platform/iphone/SCsub
index 1f82f518882..a48629f7206 100644
--- a/platform/iphone/SCsub
+++ b/platform/iphone/SCsub
@@ -1,31 +1,35 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
iphone_lib = [
- 'godot_iphone.cpp',
- 'os_iphone.cpp',
- 'semaphore_iphone.cpp',
- 'gl_view.mm',
- 'main.m',
- 'app_delegate.mm',
- 'view_controller.mm',
- 'game_center.mm',
- 'in_app_store.mm',
- 'icloud.mm',
- 'ios.mm',
- 'vulkan_context_iphone.mm',
+ "godot_iphone.cpp",
+ "os_iphone.cpp",
+ "semaphore_iphone.cpp",
+ "gl_view.mm",
+ "main.m",
+ "app_delegate.mm",
+ "view_controller.mm",
+ "game_center.mm",
+ "in_app_store.mm",
+ "icloud.mm",
+ "ios.mm",
+ "vulkan_context_iphone.mm",
]
env_ios = env.Clone()
-ios_lib = env_ios.add_library('iphone', iphone_lib)
+ios_lib = env_ios.add_library("iphone", iphone_lib)
+
def combine_libs(target=None, source=None, env=None):
lib_path = target[0].srcnode().abspath
if "osxcross" in env:
- libtool = '$IPHONEPATH/usr/bin/${ios_triple}libtool'
+ libtool = "$IPHONEPATH/usr/bin/${ios_triple}libtool"
else:
libtool = "$IPHONEPATH/usr/bin/libtool"
- env.Execute(libtool + ' -static -o "' + lib_path + '" ' + ' '.join([('"' + lib.srcnode().abspath + '"') for lib in source]))
+ env.Execute(
+ libtool + ' -static -o "' + lib_path + '" ' + " ".join([('"' + lib.srcnode().abspath + '"') for lib in source])
+ )
-combine_command = env_ios.Command('#bin/libgodot' + env_ios['LIBSUFFIX'], [ios_lib] + env_ios['LIBS'], combine_libs)
+
+combine_command = env_ios.Command("#bin/libgodot" + env_ios["LIBSUFFIX"], [ios_lib] + env_ios["LIBS"], combine_libs)
diff --git a/platform/iphone/detect.py b/platform/iphone/detect.py
index e01950c1dba..3e6c2f0ecf3 100644
--- a/platform/iphone/detect.py
+++ b/platform/iphone/detect.py
@@ -2,6 +2,7 @@ import os
import sys
from methods import detect_darwin_sdk_path
+
def is_active():
return True
@@ -12,7 +13,7 @@ def get_name():
def can_build():
- if sys.platform == 'darwin' or ("OSXCROSS_IOS" in os.environ):
+ if sys.platform == "darwin" or ("OSXCROSS_IOS" in os.environ):
return True
return False
@@ -20,22 +21,31 @@ def can_build():
def get_opts():
from SCons.Variables import BoolVariable
+
return [
- ('IPHONEPATH', 'Path to iPhone toolchain', '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain'),
- ('IPHONESDK', 'Path to the iPhone SDK', ''),
- BoolVariable('use_static_mvk', 'Link MoltenVK statically as Level-0 driver (better portability) or use Vulkan ICD loader (enables validation layers)', False),
- BoolVariable('game_center', 'Support for game center', True),
- BoolVariable('store_kit', 'Support for in-app store', True),
- BoolVariable('icloud', 'Support for iCloud', True),
- BoolVariable('ios_exceptions', 'Enable exceptions', False),
- ('ios_triple', 'Triple for ios toolchain', ''),
+ (
+ "IPHONEPATH",
+ "Path to iPhone toolchain",
+ "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain",
+ ),
+ ("IPHONESDK", "Path to the iPhone SDK", ""),
+ BoolVariable(
+ "use_static_mvk",
+ "Link MoltenVK statically as Level-0 driver (better portability) or use Vulkan ICD loader (enables validation layers)",
+ False,
+ ),
+ BoolVariable("game_center", "Support for game center", True),
+ BoolVariable("store_kit", "Support for in-app store", True),
+ BoolVariable("icloud", "Support for iCloud", True),
+ BoolVariable("ios_exceptions", "Enable exceptions", False),
+ ("ios_triple", "Triple for ios toolchain", ""),
]
def get_flags():
return [
- ('tools', False),
+ ("tools", False),
]
@@ -43,32 +53,32 @@ def configure(env):
## Build type
- if (env["target"].startswith("release")):
- env.Append(CPPDEFINES=['NDEBUG', ('NS_BLOCK_ASSERTIONS', 1)])
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Append(CCFLAGS=['-O2', '-ftree-vectorize', '-fomit-frame-pointer'])
- env.Append(LINKFLAGS=['-O2'])
- else: #optimize for size
- env.Append(CCFLAGS=['-Os', '-ftree-vectorize'])
- env.Append(LINKFLAGS=['-Os'])
+ if env["target"].startswith("release"):
+ env.Append(CPPDEFINES=["NDEBUG", ("NS_BLOCK_ASSERTIONS", 1)])
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Append(CCFLAGS=["-O2", "-ftree-vectorize", "-fomit-frame-pointer"])
+ env.Append(LINKFLAGS=["-O2"])
+ else: # optimize for size
+ env.Append(CCFLAGS=["-Os", "-ftree-vectorize"])
+ env.Append(LINKFLAGS=["-Os"])
if env["target"] == "release_debug":
- env.Append(CPPDEFINES=['DEBUG_ENABLED'])
+ env.Append(CPPDEFINES=["DEBUG_ENABLED"])
- elif (env["target"] == "debug"):
- env.Append(CCFLAGS=['-gdwarf-2', '-O0'])
- env.Append(CPPDEFINES=['_DEBUG', ('DEBUG', 1), 'DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED'])
+ elif env["target"] == "debug":
+ env.Append(CCFLAGS=["-gdwarf-2", "-O0"])
+ env.Append(CPPDEFINES=["_DEBUG", ("DEBUG", 1), "DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"])
- if (env["use_lto"]):
- env.Append(CCFLAGS=['-flto'])
- env.Append(LINKFLAGS=['-flto'])
+ if env["use_lto"]:
+ env.Append(CCFLAGS=["-flto"])
+ env.Append(LINKFLAGS=["-flto"])
## Architecture
if env["arch"] == "x86": # i386
env["bits"] = "32"
elif env["arch"] == "x86_64":
env["bits"] = "64"
- elif (env["arch"] == "arm" or env["arch"] == "arm32" or env["arch"] == "armv7" or env["bits"] == "32"): # arm
+ elif env["arch"] == "arm" or env["arch"] == "arm32" or env["arch"] == "armv7" or env["bits"] == "32": # arm
env["arch"] = "arm"
env["bits"] = "32"
else: # armv64
@@ -81,108 +91,145 @@ def configure(env):
if "OSXCROSS_IOS" in os.environ:
env["osxcross"] = True
- env['ENV']['PATH'] = env['IPHONEPATH'] + "/Developer/usr/bin/:" + env['ENV']['PATH']
+ env["ENV"]["PATH"] = env["IPHONEPATH"] + "/Developer/usr/bin/:" + env["ENV"]["PATH"]
- compiler_path = '$IPHONEPATH/usr/bin/${ios_triple}'
- s_compiler_path = '$IPHONEPATH/Developer/usr/bin/'
+ compiler_path = "$IPHONEPATH/usr/bin/${ios_triple}"
+ s_compiler_path = "$IPHONEPATH/Developer/usr/bin/"
ccache_path = os.environ.get("CCACHE")
if ccache_path is None:
- env['CC'] = compiler_path + 'clang'
- env['CXX'] = compiler_path + 'clang++'
- env['S_compiler'] = s_compiler_path + 'gcc'
+ env["CC"] = compiler_path + "clang"
+ env["CXX"] = compiler_path + "clang++"
+ env["S_compiler"] = s_compiler_path + "gcc"
else:
# there aren't any ccache wrappers available for iOS,
# to enable caching we need to prepend the path to the ccache binary
- env['CC'] = ccache_path + ' ' + compiler_path + 'clang'
- env['CXX'] = ccache_path + ' ' + compiler_path + 'clang++'
- env['S_compiler'] = ccache_path + ' ' + s_compiler_path + 'gcc'
- env['AR'] = compiler_path + 'ar'
- env['RANLIB'] = compiler_path + 'ranlib'
+ env["CC"] = ccache_path + " " + compiler_path + "clang"
+ env["CXX"] = ccache_path + " " + compiler_path + "clang++"
+ env["S_compiler"] = ccache_path + " " + s_compiler_path + "gcc"
+ env["AR"] = compiler_path + "ar"
+ env["RANLIB"] = compiler_path + "ranlib"
## Compile flags
- if (env["arch"] == "x86" or env["arch"] == "x86_64"):
- detect_darwin_sdk_path('iphonesimulator', env)
- env['ENV']['MACOSX_DEPLOYMENT_TARGET'] = '10.9'
+ if env["arch"] == "x86" or env["arch"] == "x86_64":
+ detect_darwin_sdk_path("iphonesimulator", env)
+ env["ENV"]["MACOSX_DEPLOYMENT_TARGET"] = "10.9"
arch_flag = "i386" if env["arch"] == "x86" else env["arch"]
- env.Append(CCFLAGS=('-arch ' + arch_flag + ' -fobjc-abi-version=2 -fobjc-legacy-dispatch -fmessage-length=0 -fpascal-strings -fblocks -fasm-blocks -isysroot $IPHONESDK -mios-simulator-version-min=10.0').split())
- elif (env["arch"] == "arm"):
- detect_darwin_sdk_path('iphone', env)
- env.Append(CCFLAGS='-fno-objc-arc -arch armv7 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -isysroot $IPHONESDK -fvisibility=hidden -mthumb "-DIBOutlet=__attribute__((iboutlet))" "-DIBOutletCollection(ClassName)=__attribute__((iboutletcollection(ClassName)))" "-DIBAction=void)__attribute__((ibaction)" -miphoneos-version-min=10.0 -MMD -MT dependencies'.split())
- elif (env["arch"] == "arm64"):
- detect_darwin_sdk_path('iphone', env)
- env.Append(CCFLAGS='-fno-objc-arc -arch arm64 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -fvisibility=hidden -MMD -MT dependencies -miphoneos-version-min=10.0 -isysroot $IPHONESDK'.split())
- env.Append(CPPDEFINES=['NEED_LONG_INT'])
- env.Append(CPPDEFINES=['LIBYUV_DISABLE_NEON'])
+ env.Append(
+ CCFLAGS=(
+ "-arch "
+ + arch_flag
+ + " -fobjc-abi-version=2 -fobjc-legacy-dispatch -fmessage-length=0 -fpascal-strings -fblocks -fasm-blocks -isysroot $IPHONESDK -mios-simulator-version-min=10.0"
+ ).split()
+ )
+ elif env["arch"] == "arm":
+ detect_darwin_sdk_path("iphone", env)
+ env.Append(
+ CCFLAGS='-fno-objc-arc -arch armv7 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -isysroot $IPHONESDK -fvisibility=hidden -mthumb "-DIBOutlet=__attribute__((iboutlet))" "-DIBOutletCollection(ClassName)=__attribute__((iboutletcollection(ClassName)))" "-DIBAction=void)__attribute__((ibaction)" -miphoneos-version-min=10.0 -MMD -MT dependencies'.split()
+ )
+ elif env["arch"] == "arm64":
+ detect_darwin_sdk_path("iphone", env)
+ env.Append(
+ CCFLAGS="-fno-objc-arc -arch arm64 -fmessage-length=0 -fno-strict-aliasing -fdiagnostics-print-source-range-info -fdiagnostics-show-category=id -fdiagnostics-parseable-fixits -fpascal-strings -fblocks -fvisibility=hidden -MMD -MT dependencies -miphoneos-version-min=10.0 -isysroot $IPHONESDK".split()
+ )
+ env.Append(CPPDEFINES=["NEED_LONG_INT"])
+ env.Append(CPPDEFINES=["LIBYUV_DISABLE_NEON"])
# Disable exceptions on non-tools (template) builds
- if not env['tools']:
- if env['ios_exceptions']:
- env.Append(CCFLAGS=['-fexceptions'])
+ if not env["tools"]:
+ if env["ios_exceptions"]:
+ env.Append(CCFLAGS=["-fexceptions"])
else:
- env.Append(CCFLAGS=['-fno-exceptions'])
+ env.Append(CCFLAGS=["-fno-exceptions"])
## Link flags
- if (env["arch"] == "x86" or env["arch"] == "x86_64"):
+ if env["arch"] == "x86" or env["arch"] == "x86_64":
arch_flag = "i386" if env["arch"] == "x86" else env["arch"]
- env.Append(LINKFLAGS=['-arch', arch_flag, '-mios-simulator-version-min=10.0',
- '-isysroot', '$IPHONESDK',
- '-Xlinker',
- '-objc_abi_version',
- '-Xlinker', '2',
- '-F$IPHONESDK',
- ])
- elif (env["arch"] == "arm"):
- env.Append(LINKFLAGS=['-arch', 'armv7', '-Wl,-dead_strip', '-miphoneos-version-min=10.0'])
- if (env["arch"] == "arm64"):
- env.Append(LINKFLAGS=['-arch', 'arm64', '-Wl,-dead_strip', '-miphoneos-version-min=10.0'])
+ env.Append(
+ LINKFLAGS=[
+ "-arch",
+ arch_flag,
+ "-mios-simulator-version-min=10.0",
+ "-isysroot",
+ "$IPHONESDK",
+ "-Xlinker",
+ "-objc_abi_version",
+ "-Xlinker",
+ "2",
+ "-F$IPHONESDK",
+ ]
+ )
+ elif env["arch"] == "arm":
+ env.Append(LINKFLAGS=["-arch", "armv7", "-Wl,-dead_strip", "-miphoneos-version-min=10.0"])
+ if env["arch"] == "arm64":
+ env.Append(LINKFLAGS=["-arch", "arm64", "-Wl,-dead_strip", "-miphoneos-version-min=10.0"])
- env.Append(LINKFLAGS=['-isysroot', '$IPHONESDK',
- '-framework', 'AudioToolbox',
- '-framework', 'AVFoundation',
- '-framework', 'CoreAudio',
- '-framework', 'CoreGraphics',
- '-framework', 'CoreMedia',
- '-framework', 'CoreVideo',
- '-framework', 'CoreMotion',
- '-framework', 'Foundation',
- '-framework', 'GameController',
- '-framework', 'MediaPlayer',
- '-framework', 'Metal',
- '-framework', 'QuartzCore',
- '-framework', 'Security',
- '-framework', 'SystemConfiguration',
- '-framework', 'UIKit',
- '-framework', 'ARKit',
- ])
+ env.Append(
+ LINKFLAGS=[
+ "-isysroot",
+ "$IPHONESDK",
+ "-framework",
+ "AudioToolbox",
+ "-framework",
+ "AVFoundation",
+ "-framework",
+ "CoreAudio",
+ "-framework",
+ "CoreGraphics",
+ "-framework",
+ "CoreMedia",
+ "-framework",
+ "CoreVideo",
+ "-framework",
+ "CoreMotion",
+ "-framework",
+ "Foundation",
+ "-framework",
+ "GameController",
+ "-framework",
+ "MediaPlayer",
+ "-framework",
+ "Metal",
+ "-framework",
+ "QuartzCore",
+ "-framework",
+ "Security",
+ "-framework",
+ "SystemConfiguration",
+ "-framework",
+ "UIKit",
+ "-framework",
+ "ARKit",
+ ]
+ )
# Feature options
- if env['game_center']:
- env.Append(CPPDEFINES=['GAME_CENTER_ENABLED'])
- env.Append(LINKFLAGS=['-framework', 'GameKit'])
+ if env["game_center"]:
+ env.Append(CPPDEFINES=["GAME_CENTER_ENABLED"])
+ env.Append(LINKFLAGS=["-framework", "GameKit"])
- if env['store_kit']:
- env.Append(CPPDEFINES=['STOREKIT_ENABLED'])
- env.Append(LINKFLAGS=['-framework', 'StoreKit'])
+ if env["store_kit"]:
+ env.Append(CPPDEFINES=["STOREKIT_ENABLED"])
+ env.Append(LINKFLAGS=["-framework", "StoreKit"])
- if env['icloud']:
- env.Append(CPPDEFINES=['ICLOUD_ENABLED'])
+ if env["icloud"]:
+ env.Append(CPPDEFINES=["ICLOUD_ENABLED"])
- env.Prepend(CPPPATH=['$IPHONESDK/usr/include',
- '$IPHONESDK/System/Library/Frameworks/AudioUnit.framework/Headers',
- ])
+ env.Prepend(
+ CPPPATH=["$IPHONESDK/usr/include", "$IPHONESDK/System/Library/Frameworks/AudioUnit.framework/Headers",]
+ )
- env['ENV']['CODESIGN_ALLOCATE'] = '/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin/codesign_allocate'
+ env["ENV"]["CODESIGN_ALLOCATE"] = "/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin/codesign_allocate"
- env.Prepend(CPPPATH=['#platform/iphone'])
- env.Append(CPPDEFINES=['IPHONE_ENABLED', 'UNIX_ENABLED', 'COREAUDIO_ENABLED'])
+ env.Prepend(CPPPATH=["#platform/iphone"])
+ env.Append(CPPDEFINES=["IPHONE_ENABLED", "UNIX_ENABLED", "COREAUDIO_ENABLED"])
- env.Append(CPPDEFINES=['VULKAN_ENABLED'])
- env.Append(LINKFLAGS=['-framework', 'IOSurface'])
- if (env['use_static_mvk']):
- env.Append(LINKFLAGS=['-framework', 'MoltenVK'])
- env['builtin_vulkan'] = False
- elif not env['builtin_vulkan']:
- env.Append(LIBS=['vulkan'])
+ env.Append(CPPDEFINES=["VULKAN_ENABLED"])
+ env.Append(LINKFLAGS=["-framework", "IOSurface"])
+ if env["use_static_mvk"]:
+ env.Append(LINKFLAGS=["-framework", "MoltenVK"])
+ env["builtin_vulkan"] = False
+ elif not env["builtin_vulkan"]:
+ env.Append(LIBS=["vulkan"])
diff --git a/platform/javascript/SCsub b/platform/javascript/SCsub
index d3cd8f76b75..72396489378 100644
--- a/platform/javascript/SCsub
+++ b/platform/javascript/SCsub
@@ -1,67 +1,63 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
javascript_files = [
- 'audio_driver_javascript.cpp',
- 'http_client_javascript.cpp',
- 'javascript_eval.cpp',
- 'javascript_main.cpp',
- 'os_javascript.cpp',
+ "audio_driver_javascript.cpp",
+ "http_client_javascript.cpp",
+ "javascript_eval.cpp",
+ "javascript_main.cpp",
+ "os_javascript.cpp",
]
-build_targets = ['#bin/godot${PROGSUFFIX}.js', '#bin/godot${PROGSUFFIX}.wasm']
-if env['threads_enabled']:
- build_targets.append('#bin/godot${PROGSUFFIX}.worker.js')
+build_targets = ["#bin/godot${PROGSUFFIX}.js", "#bin/godot${PROGSUFFIX}.wasm"]
+if env["threads_enabled"]:
+ build_targets.append("#bin/godot${PROGSUFFIX}.worker.js")
build = env.add_program(build_targets, javascript_files)
js_libraries = [
- 'http_request.js',
+ "http_request.js",
]
for lib in js_libraries:
- env.Append(LINKFLAGS=['--js-library', env.File(lib).path])
+ env.Append(LINKFLAGS=["--js-library", env.File(lib).path])
env.Depends(build, js_libraries)
js_modules = [
- 'id_handler.js',
+ "id_handler.js",
]
for module in js_modules:
- env.Append(LINKFLAGS=['--pre-js', env.File(module).path])
+ env.Append(LINKFLAGS=["--pre-js", env.File(module).path])
env.Depends(build, js_modules)
engine = [
- 'engine/preloader.js',
- 'engine/loader.js',
- 'engine/utils.js',
- 'engine/engine.js',
+ "engine/preloader.js",
+ "engine/loader.js",
+ "engine/utils.js",
+ "engine/engine.js",
]
-externs = [
- env.File('#platform/javascript/engine/externs.js')
-]
-js_engine = env.CreateEngineFile('#bin/godot${PROGSUFFIX}.engine.js', engine, externs)
+externs = [env.File("#platform/javascript/engine/externs.js")]
+js_engine = env.CreateEngineFile("#bin/godot${PROGSUFFIX}.engine.js", engine, externs)
env.Depends(js_engine, externs)
wrap_list = [
build[0],
js_engine,
]
-js_wrapped = env.Textfile('#bin/godot', [env.File(f) for f in wrap_list], TEXTFILESUFFIX='${PROGSUFFIX}.wrapped.js')
+js_wrapped = env.Textfile("#bin/godot", [env.File(f) for f in wrap_list], TEXTFILESUFFIX="${PROGSUFFIX}.wrapped.js")
-zip_dir = env.Dir('#bin/.javascript_zip')
-out_files = [
- zip_dir.File('godot.js'),
- zip_dir.File('godot.wasm'),
- zip_dir.File('godot.html')
-]
-in_files = [
- js_wrapped,
- build[1],
- '#misc/dist/html/full-size.html'
-]
-if env['threads_enabled']:
+zip_dir = env.Dir("#bin/.javascript_zip")
+out_files = [zip_dir.File("godot.js"), zip_dir.File("godot.wasm"), zip_dir.File("godot.html")]
+in_files = [js_wrapped, build[1], "#misc/dist/html/full-size.html"]
+if env["threads_enabled"]:
in_files.append(build[2])
- out_files.append(zip_dir.File('godot.worker.js'))
+ out_files.append(zip_dir.File("godot.worker.js"))
zip_files = env.InstallAs(out_files, in_files)
-env.Zip('#bin/godot', zip_files, ZIPROOT=zip_dir, ZIPSUFFIX='${PROGSUFFIX}${ZIPSUFFIX}', ZIPCOMSTR='Archving $SOURCES as $TARGET')
+env.Zip(
+ "#bin/godot",
+ zip_files,
+ ZIPROOT=zip_dir,
+ ZIPSUFFIX="${PROGSUFFIX}${ZIPSUFFIX}",
+    ZIPCOMSTR="Archiving $SOURCES as $TARGET",
+)
diff --git a/platform/javascript/detect.py b/platform/javascript/detect.py
index fb02752aa7e..9486e10717f 100644
--- a/platform/javascript/detect.py
+++ b/platform/javascript/detect.py
@@ -2,37 +2,39 @@ import os
from emscripten_helpers import parse_config, run_closure_compiler, create_engine_file
+
def is_active():
return True
def get_name():
- return 'JavaScript'
+ return "JavaScript"
def can_build():
- return 'EM_CONFIG' in os.environ or os.path.exists(os.path.expanduser('~/.emscripten'))
+ return "EM_CONFIG" in os.environ or os.path.exists(os.path.expanduser("~/.emscripten"))
def get_opts():
from SCons.Variables import BoolVariable
+
return [
# eval() can be a security concern, so it can be disabled.
- BoolVariable('javascript_eval', 'Enable JavaScript eval interface', True),
- BoolVariable('threads_enabled', 'Enable WebAssembly Threads support (limited browser support)', False),
- BoolVariable('use_closure_compiler', 'Use closure compiler to minimize Javascript code', False),
+ BoolVariable("javascript_eval", "Enable JavaScript eval interface", True),
+ BoolVariable("threads_enabled", "Enable WebAssembly Threads support (limited browser support)", False),
+ BoolVariable("use_closure_compiler", "Use closure compiler to minimize Javascript code", False),
]
def get_flags():
return [
- ('tools', False),
- ('builtin_pcre2_with_jit', False),
+ ("tools", False),
+ ("builtin_pcre2_with_jit", False),
# Disabling the mbedtls module reduces file size.
# The module has little use due to the limited networking functionality
# in this platform. For the available networking methods, the browser
# manages TLS.
- ('module_mbedtls_enabled', False),
+ ("module_mbedtls_enabled", False),
]
@@ -40,125 +42,125 @@ def configure(env):
## Build type
- if env['target'] == 'release':
+ if env["target"] == "release":
# Use -Os to prioritize optimizing for reduced file size. This is
# particularly valuable for the web platform because it directly
# decreases download time.
# -Os reduces file size by around 5 MiB over -O3. -Oz only saves about
# 100 KiB over -Os, which does not justify the negative impact on
# run-time performance.
- env.Append(CCFLAGS=['-Os'])
- env.Append(LINKFLAGS=['-Os'])
- elif env['target'] == 'release_debug':
- env.Append(CCFLAGS=['-Os'])
- env.Append(LINKFLAGS=['-Os'])
- env.Append(CPPDEFINES=['DEBUG_ENABLED'])
+ env.Append(CCFLAGS=["-Os"])
+ env.Append(LINKFLAGS=["-Os"])
+ elif env["target"] == "release_debug":
+ env.Append(CCFLAGS=["-Os"])
+ env.Append(LINKFLAGS=["-Os"])
+ env.Append(CPPDEFINES=["DEBUG_ENABLED"])
# Retain function names for backtraces at the cost of file size.
- env.Append(LINKFLAGS=['--profiling-funcs'])
- else: # 'debug'
- env.Append(CPPDEFINES=['DEBUG_ENABLED'])
- env.Append(CCFLAGS=['-O1', '-g'])
- env.Append(LINKFLAGS=['-O1', '-g'])
- env.Append(LINKFLAGS=['-s', 'ASSERTIONS=1'])
+ env.Append(LINKFLAGS=["--profiling-funcs"])
+ else: # 'debug'
+ env.Append(CPPDEFINES=["DEBUG_ENABLED"])
+ env.Append(CCFLAGS=["-O1", "-g"])
+ env.Append(LINKFLAGS=["-O1", "-g"])
+ env.Append(LINKFLAGS=["-s", "ASSERTIONS=1"])
- if env['tools']:
- if not env['threads_enabled']:
- raise RuntimeError("Threads must be enabled to build the editor. Please add the 'threads_enabled=yes' option")
+ if env["tools"]:
+ if not env["threads_enabled"]:
+ raise RuntimeError(
+ "Threads must be enabled to build the editor. Please add the 'threads_enabled=yes' option"
+ )
# Tools need more memory. Initial stack memory in bytes. See `src/settings.js` in emscripten repository (will be renamed to INITIAL_MEMORY).
- env.Append(LINKFLAGS=['-s', 'TOTAL_MEMORY=33554432'])
+ env.Append(LINKFLAGS=["-s", "TOTAL_MEMORY=33554432"])
else:
# Disable exceptions and rtti on non-tools (template) builds
# These flags help keep the file size down.
- env.Append(CCFLAGS=['-fno-exceptions', '-fno-rtti'])
+ env.Append(CCFLAGS=["-fno-exceptions", "-fno-rtti"])
# Don't use dynamic_cast, necessary with no-rtti.
- env.Append(CPPDEFINES=['NO_SAFE_CAST'])
+ env.Append(CPPDEFINES=["NO_SAFE_CAST"])
## Copy env variables.
- env['ENV'] = os.environ
+ env["ENV"] = os.environ
# LTO
- if env['use_lto']:
- env.Append(CCFLAGS=['-s', 'WASM_OBJECT_FILES=0'])
- env.Append(LINKFLAGS=['-s', 'WASM_OBJECT_FILES=0'])
- env.Append(LINKFLAGS=['--llvm-lto', '1'])
+ if env["use_lto"]:
+ env.Append(CCFLAGS=["-s", "WASM_OBJECT_FILES=0"])
+ env.Append(LINKFLAGS=["-s", "WASM_OBJECT_FILES=0"])
+ env.Append(LINKFLAGS=["--llvm-lto", "1"])
# Closure compiler
- if env['use_closure_compiler']:
+ if env["use_closure_compiler"]:
# For emscripten support code.
- env.Append(LINKFLAGS=['--closure', '1'])
+ env.Append(LINKFLAGS=["--closure", "1"])
# Register builder for our Engine files
- jscc = env.Builder(generator=run_closure_compiler, suffix='.cc.js', src_suffix='.js')
- env.Append(BUILDERS = {'BuildJS' : jscc})
+ jscc = env.Builder(generator=run_closure_compiler, suffix=".cc.js", src_suffix=".js")
+ env.Append(BUILDERS={"BuildJS": jscc})
# Add method that joins/compiles our Engine files.
env.AddMethod(create_engine_file, "CreateEngineFile")
# Closure compiler extern and support for ecmascript specs (const, let, etc).
- env['ENV']['EMCC_CLOSURE_ARGS'] = '--language_in ECMASCRIPT6'
+ env["ENV"]["EMCC_CLOSURE_ARGS"] = "--language_in ECMASCRIPT6"
em_config = parse_config()
- env.PrependENVPath('PATH', em_config['EMCC_ROOT'])
+ env.PrependENVPath("PATH", em_config["EMCC_ROOT"])
- env['CC'] = 'emcc'
- env['CXX'] = 'em++'
- env['LINK'] = 'emcc'
+ env["CC"] = "emcc"
+ env["CXX"] = "em++"
+ env["LINK"] = "emcc"
- env['AR'] = 'emar'
- env['RANLIB'] = 'emranlib'
+ env["AR"] = "emar"
+ env["RANLIB"] = "emranlib"
# Use TempFileMunge since some AR invocations are too long for cmd.exe.
# Use POSIX-style paths, required with TempFileMunge.
- env['ARCOM_POSIX'] = env['ARCOM'].replace(
- '$TARGET', '$TARGET.posix').replace(
- '$SOURCES', '$SOURCES.posix')
- env['ARCOM'] = '${TEMPFILE(ARCOM_POSIX)}'
+ env["ARCOM_POSIX"] = env["ARCOM"].replace("$TARGET", "$TARGET.posix").replace("$SOURCES", "$SOURCES.posix")
+ env["ARCOM"] = "${TEMPFILE(ARCOM_POSIX)}"
# All intermediate files are just LLVM bitcode.
- env['OBJPREFIX'] = ''
- env['OBJSUFFIX'] = '.bc'
- env['PROGPREFIX'] = ''
+ env["OBJPREFIX"] = ""
+ env["OBJSUFFIX"] = ".bc"
+ env["PROGPREFIX"] = ""
# Program() output consists of multiple files, so specify suffixes manually at builder.
- env['PROGSUFFIX'] = ''
- env['LIBPREFIX'] = 'lib'
- env['LIBSUFFIX'] = '.bc'
- env['LIBPREFIXES'] = ['$LIBPREFIX']
- env['LIBSUFFIXES'] = ['$LIBSUFFIX']
+ env["PROGSUFFIX"] = ""
+ env["LIBPREFIX"] = "lib"
+ env["LIBSUFFIX"] = ".bc"
+ env["LIBPREFIXES"] = ["$LIBPREFIX"]
+ env["LIBSUFFIXES"] = ["$LIBSUFFIX"]
- env.Prepend(CPPPATH=['#platform/javascript'])
- env.Append(CPPDEFINES=['JAVASCRIPT_ENABLED', 'UNIX_ENABLED'])
+ env.Prepend(CPPPATH=["#platform/javascript"])
+ env.Append(CPPDEFINES=["JAVASCRIPT_ENABLED", "UNIX_ENABLED"])
- if env['javascript_eval']:
- env.Append(CPPDEFINES=['JAVASCRIPT_EVAL_ENABLED'])
+ if env["javascript_eval"]:
+ env.Append(CPPDEFINES=["JAVASCRIPT_EVAL_ENABLED"])
# Thread support (via SharedArrayBuffer).
- if env['threads_enabled']:
- env.Append(CPPDEFINES=['PTHREAD_NO_RENAME'])
- env.Append(CCFLAGS=['-s', 'USE_PTHREADS=1'])
- env.Append(LINKFLAGS=['-s', 'USE_PTHREADS=1'])
- env.Append(LINKFLAGS=['-s', 'PTHREAD_POOL_SIZE=4'])
- env.Append(LINKFLAGS=['-s', 'WASM_MEM_MAX=2048MB'])
+ if env["threads_enabled"]:
+ env.Append(CPPDEFINES=["PTHREAD_NO_RENAME"])
+ env.Append(CCFLAGS=["-s", "USE_PTHREADS=1"])
+ env.Append(LINKFLAGS=["-s", "USE_PTHREADS=1"])
+ env.Append(LINKFLAGS=["-s", "PTHREAD_POOL_SIZE=4"])
+ env.Append(LINKFLAGS=["-s", "WASM_MEM_MAX=2048MB"])
else:
- env.Append(CPPDEFINES=['NO_THREADS'])
+ env.Append(CPPDEFINES=["NO_THREADS"])
# Reduce code size by generating less support code (e.g. skip NodeJS support).
- env.Append(LINKFLAGS=['-s', 'ENVIRONMENT=web,worker'])
+ env.Append(LINKFLAGS=["-s", "ENVIRONMENT=web,worker"])
# We use IDBFS in javascript_main.cpp. Since Emscripten 1.39.1 it needs to
# be linked explicitly.
- env.Append(LIBS=['idbfs.js'])
+ env.Append(LIBS=["idbfs.js"])
- env.Append(LINKFLAGS=['-s', 'BINARYEN=1'])
- env.Append(LINKFLAGS=['-s', 'MODULARIZE=1', '-s', 'EXPORT_NAME="Godot"'])
+ env.Append(LINKFLAGS=["-s", "BINARYEN=1"])
+ env.Append(LINKFLAGS=["-s", "MODULARIZE=1", "-s", 'EXPORT_NAME="Godot"'])
# Allow increasing memory buffer size during runtime. This is efficient
# when using WebAssembly (in comparison to asm.js) and works well for
# us since we don't know requirements at compile-time.
- env.Append(LINKFLAGS=['-s', 'ALLOW_MEMORY_GROWTH=1'])
+ env.Append(LINKFLAGS=["-s", "ALLOW_MEMORY_GROWTH=1"])
# This setting just makes WebGL 2 APIs available, it does NOT disable WebGL 1.
- env.Append(LINKFLAGS=['-s', 'USE_WEBGL2=1'])
+ env.Append(LINKFLAGS=["-s", "USE_WEBGL2=1"])
- env.Append(LINKFLAGS=['-s', 'INVOKE_RUN=0'])
+ env.Append(LINKFLAGS=["-s", "INVOKE_RUN=0"])
# callMain for manual start, FS for preloading.
- env.Append(LINKFLAGS=['-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["callMain", "FS"]'])
+ env.Append(LINKFLAGS=["-s", 'EXTRA_EXPORTED_RUNTIME_METHODS=["callMain", "FS"]'])
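
Most of configure() above follows a single pattern: each Emscripten setting is handed to the linker as a "-s" flag followed by a KEY=VALUE assignment. A hypothetical helper (not part of the patch) capturing that pattern:

# Hypothetical helper, not in the patch: append one Emscripten setting in the
# "-s KEY=VALUE" form used throughout configure() above.
def add_em_setting(env, key, value):
    env.Append(LINKFLAGS=["-s", "{}={}".format(key, value)])

# Usage sketch, assuming env is the SCons Environment passed to configure():
# add_em_setting(env, "ALLOW_MEMORY_GROWTH", 1)
# add_em_setting(env, "PTHREAD_POOL_SIZE", 4)
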
diff --git a/platform/javascript/emscripten_helpers.py b/platform/javascript/emscripten_helpers.py
index bda5b40a74c..a55c9d3f48f 100644
--- a/platform/javascript/emscripten_helpers.py
+++ b/platform/javascript/emscripten_helpers.py
@@ -1,7 +1,8 @@
import os
+
def parse_config():
- em_config_file = os.getenv('EM_CONFIG') or os.path.expanduser('~/.emscripten')
+ em_config_file = os.getenv("EM_CONFIG") or os.path.expanduser("~/.emscripten")
if not os.path.exists(em_config_file):
raise RuntimeError("Emscripten configuration file '%s' does not exist" % em_config_file)
@@ -13,25 +14,25 @@ def parse_config():
exec(f.read(), em_config)
except StandardError as e:
raise RuntimeError("Emscripten configuration file '%s' is invalid:\n%s" % (em_config_file, e))
- normalized['EMCC_ROOT'] = em_config.get('EMSCRIPTEN_ROOT')
- normalized['NODE_JS'] = em_config.get('NODE_JS')
- normalized['CLOSURE_BIN'] = os.path.join(normalized['EMCC_ROOT'], 'node_modules', '.bin', 'google-closure-compiler')
+ normalized["EMCC_ROOT"] = em_config.get("EMSCRIPTEN_ROOT")
+ normalized["NODE_JS"] = em_config.get("NODE_JS")
+ normalized["CLOSURE_BIN"] = os.path.join(normalized["EMCC_ROOT"], "node_modules", ".bin", "google-closure-compiler")
return normalized
def run_closure_compiler(target, source, env, for_signature):
cfg = parse_config()
- cmd = [cfg['NODE_JS'], cfg['CLOSURE_BIN']]
- cmd.extend(['--compilation_level', 'ADVANCED_OPTIMIZATIONS'])
- for f in env['JSEXTERNS']:
- cmd.extend(['--externs', f.get_abspath()])
+ cmd = [cfg["NODE_JS"], cfg["CLOSURE_BIN"]]
+ cmd.extend(["--compilation_level", "ADVANCED_OPTIMIZATIONS"])
+ for f in env["JSEXTERNS"]:
+ cmd.extend(["--externs", f.get_abspath()])
for f in source:
- cmd.extend(['--js', f.get_abspath()])
- cmd.extend(['--js_output_file', target[0].get_abspath()])
- return ' '.join(cmd)
+ cmd.extend(["--js", f.get_abspath()])
+ cmd.extend(["--js_output_file", target[0].get_abspath()])
+ return " ".join(cmd)
def create_engine_file(env, target, source, externs):
- if env['use_closure_compiler']:
+ if env["use_closure_compiler"]:
return env.BuildJS(target, source, JSEXTERNS=externs)
return env.Textfile(target, [env.File(s) for s in source])
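
For orientation, parse_config() only resolves tool paths from EM_CONFIG or ~/.emscripten, and run_closure_compiler() only assembles the command string that SCons executes later. A rough usage sketch, illustrative only since it depends on a local Emscripten configuration:

# Illustrative only: requires a local Emscripten configuration file and must be
# run from platform/javascript so that emscripten_helpers is importable.
from emscripten_helpers import parse_config

cfg = parse_config()
print("emcc root:      ", cfg["EMCC_ROOT"])
print("node binary:    ", cfg["NODE_JS"])
print("closure binary: ", cfg["CLOSURE_BIN"])
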
diff --git a/platform/linuxbsd/SCsub b/platform/linuxbsd/SCsub
index f3f65e216ec..ae75a758301 100644
--- a/platform/linuxbsd/SCsub
+++ b/platform/linuxbsd/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
from platform_methods import run_in_subprocess
import platform_linuxbsd_builders
@@ -14,10 +14,9 @@ common_x11 = [
"display_server_x11.cpp",
"vulkan_context_x11.cpp",
"key_mapping_x11.cpp",
-
]
-prog = env.add_program('#bin/godot', ['godot_linuxbsd.cpp'] + common_x11)
+prog = env.add_program("#bin/godot", ["godot_linuxbsd.cpp"] + common_x11)
if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
env.AddPostAction(prog, run_in_subprocess(platform_linuxbsd_builders.make_debug_linuxbsd))
diff --git a/platform/linuxbsd/detect.py b/platform/linuxbsd/detect.py
index 1a395efffe8..5d8b4fba484 100644
--- a/platform/linuxbsd/detect.py
+++ b/platform/linuxbsd/detect.py
@@ -13,64 +13,65 @@ def get_name():
def can_build():
- if (os.name != "posix" or sys.platform == "darwin"):
+ if os.name != "posix" or sys.platform == "darwin":
return False
# Check the minimal dependencies
x11_error = os.system("pkg-config --version > /dev/null")
- if (x11_error):
+ if x11_error:
return False
x11_error = os.system("pkg-config x11 --modversion > /dev/null ")
- if (x11_error):
+ if x11_error:
return False
x11_error = os.system("pkg-config xcursor --modversion > /dev/null ")
- if (x11_error):
+ if x11_error:
print("xcursor not found.. x11 disabled.")
return False
x11_error = os.system("pkg-config xinerama --modversion > /dev/null ")
- if (x11_error):
+ if x11_error:
print("xinerama not found.. x11 disabled.")
return False
x11_error = os.system("pkg-config xrandr --modversion > /dev/null ")
- if (x11_error):
+ if x11_error:
print("xrandr not found.. x11 disabled.")
return False
x11_error = os.system("pkg-config xrender --modversion > /dev/null ")
- if (x11_error):
+ if x11_error:
print("xrender not found.. x11 disabled.")
return False
x11_error = os.system("pkg-config xi --modversion > /dev/null ")
- if (x11_error):
+ if x11_error:
print("xi not found.. Aborting.")
return False
return True
+
def get_opts():
from SCons.Variables import BoolVariable, EnumVariable
return [
- BoolVariable('use_llvm', 'Use the LLVM compiler', False),
- BoolVariable('use_lld', 'Use the LLD linker', False),
- BoolVariable('use_thinlto', 'Use ThinLTO', False),
- BoolVariable('use_static_cpp', 'Link libgcc and libstdc++ statically for better portability', False),
- BoolVariable('use_coverage', 'Test Godot coverage', False),
- BoolVariable('use_ubsan', 'Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)', False),
- BoolVariable('use_asan', 'Use LLVM/GCC compiler address sanitizer (ASAN))', False),
- BoolVariable('use_lsan', 'Use LLVM/GCC compiler leak sanitizer (LSAN))', False),
- BoolVariable('use_tsan', 'Use LLVM/GCC compiler thread sanitizer (TSAN))', False),
- BoolVariable('pulseaudio', 'Detect and use PulseAudio', True),
- BoolVariable('udev', 'Use udev for gamepad connection callbacks', False),
- EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')),
- BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False),
- BoolVariable('touch', 'Enable touch events', True),
- BoolVariable('execinfo', 'Use libexecinfo on systems where glibc is not available', False),
+ BoolVariable("use_llvm", "Use the LLVM compiler", False),
+ BoolVariable("use_lld", "Use the LLD linker", False),
+ BoolVariable("use_thinlto", "Use ThinLTO", False),
+ BoolVariable("use_static_cpp", "Link libgcc and libstdc++ statically for better portability", False),
+ BoolVariable("use_coverage", "Test Godot coverage", False),
+ BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False),
+ BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN)", False),
+ BoolVariable("use_lsan", "Use LLVM/GCC compiler leak sanitizer (LSAN)", False),
+ BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN)", False),
+ BoolVariable("pulseaudio", "Detect and use PulseAudio", True),
+ BoolVariable("udev", "Use udev for gamepad connection callbacks", False),
+ EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")),
+ BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False),
+ BoolVariable("touch", "Enable touch events", True),
+ BoolVariable("execinfo", "Use libexecinfo on systems where glibc is not available", False),
]
@@ -83,286 +84,294 @@ def configure(env):
## Build type
- if (env["target"] == "release"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Prepend(CCFLAGS=['-O3'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os'])
+ if env["target"] == "release":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Prepend(CCFLAGS=["-O3"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os"])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "release_debug"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Prepend(CCFLAGS=['-O2'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os'])
- env.Prepend(CPPDEFINES=['DEBUG_ENABLED'])
+ elif env["target"] == "release_debug":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Prepend(CCFLAGS=["-O2"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os"])
+ env.Prepend(CPPDEFINES=["DEBUG_ENABLED"])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "debug"):
- env.Prepend(CCFLAGS=['-g3'])
- env.Prepend(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED'])
- env.Append(LINKFLAGS=['-rdynamic'])
+ elif env["target"] == "debug":
+ env.Prepend(CCFLAGS=["-g3"])
+ env.Prepend(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"])
+ env.Append(LINKFLAGS=["-rdynamic"])
## Architecture
- is64 = sys.maxsize > 2**32
- if (env["bits"] == "default"):
+ is64 = sys.maxsize > 2 ** 32
+ if env["bits"] == "default":
env["bits"] = "64" if is64 else "32"
## Compiler configuration
- if 'CXX' in env and 'clang' in os.path.basename(env['CXX']):
+ if "CXX" in env and "clang" in os.path.basename(env["CXX"]):
# Convenience check to enforce the use_llvm overrides when CXX is clang(++)
- env['use_llvm'] = True
+ env["use_llvm"] = True
- if env['use_llvm']:
- if ('clang++' not in os.path.basename(env['CXX'])):
+ if env["use_llvm"]:
+ if "clang++" not in os.path.basename(env["CXX"]):
env["CC"] = "clang"
env["CXX"] = "clang++"
env["LINK"] = "clang++"
- env.Append(CPPDEFINES=['TYPED_METHOD_BIND'])
+ env.Append(CPPDEFINES=["TYPED_METHOD_BIND"])
env.extra_suffix = ".llvm" + env.extra_suffix
- if env['use_lld']:
- if env['use_llvm']:
- env.Append(LINKFLAGS=['-fuse-ld=lld'])
- if env['use_thinlto']:
+ if env["use_lld"]:
+ if env["use_llvm"]:
+ env.Append(LINKFLAGS=["-fuse-ld=lld"])
+ if env["use_thinlto"]:
# A convenience so you don't need to write use_lto too when using SCons
- env['use_lto'] = True
+ env["use_lto"] = True
else:
print("Using LLD with GCC is not supported yet, try compiling with 'use_llvm=yes'.")
sys.exit(255)
- if env['use_coverage']:
- env.Append(CCFLAGS=['-ftest-coverage', '-fprofile-arcs'])
- env.Append(LINKFLAGS=['-ftest-coverage', '-fprofile-arcs'])
+ if env["use_coverage"]:
+ env.Append(CCFLAGS=["-ftest-coverage", "-fprofile-arcs"])
+ env.Append(LINKFLAGS=["-ftest-coverage", "-fprofile-arcs"])
- if env['use_ubsan'] or env['use_asan'] or env['use_lsan'] or env['use_tsan']:
+ if env["use_ubsan"] or env["use_asan"] or env["use_lsan"] or env["use_tsan"]:
env.extra_suffix += "s"
- if env['use_ubsan']:
- env.Append(CCFLAGS=['-fsanitize=undefined'])
- env.Append(LINKFLAGS=['-fsanitize=undefined'])
+ if env["use_ubsan"]:
+ env.Append(CCFLAGS=["-fsanitize=undefined"])
+ env.Append(LINKFLAGS=["-fsanitize=undefined"])
- if env['use_asan']:
- env.Append(CCFLAGS=['-fsanitize=address'])
- env.Append(LINKFLAGS=['-fsanitize=address'])
+ if env["use_asan"]:
+ env.Append(CCFLAGS=["-fsanitize=address"])
+ env.Append(LINKFLAGS=["-fsanitize=address"])
- if env['use_lsan']:
- env.Append(CCFLAGS=['-fsanitize=leak'])
- env.Append(LINKFLAGS=['-fsanitize=leak'])
+ if env["use_lsan"]:
+ env.Append(CCFLAGS=["-fsanitize=leak"])
+ env.Append(LINKFLAGS=["-fsanitize=leak"])
- if env['use_tsan']:
- env.Append(CCFLAGS=['-fsanitize=thread'])
- env.Append(LINKFLAGS=['-fsanitize=thread'])
+ if env["use_tsan"]:
+ env.Append(CCFLAGS=["-fsanitize=thread"])
+ env.Append(LINKFLAGS=["-fsanitize=thread"])
- if env['use_lto']:
- if not env['use_llvm'] and env.GetOption("num_jobs") > 1:
- env.Append(CCFLAGS=['-flto'])
- env.Append(LINKFLAGS=['-flto=' + str(env.GetOption("num_jobs"))])
+ if env["use_lto"]:
+ if not env["use_llvm"] and env.GetOption("num_jobs") > 1:
+ env.Append(CCFLAGS=["-flto"])
+ env.Append(LINKFLAGS=["-flto=" + str(env.GetOption("num_jobs"))])
else:
- if env['use_lld'] and env['use_thinlto']:
- env.Append(CCFLAGS=['-flto=thin'])
- env.Append(LINKFLAGS=['-flto=thin'])
+ if env["use_lld"] and env["use_thinlto"]:
+ env.Append(CCFLAGS=["-flto=thin"])
+ env.Append(LINKFLAGS=["-flto=thin"])
else:
- env.Append(CCFLAGS=['-flto'])
- env.Append(LINKFLAGS=['-flto'])
+ env.Append(CCFLAGS=["-flto"])
+ env.Append(LINKFLAGS=["-flto"])
- if not env['use_llvm']:
- env['RANLIB'] = 'gcc-ranlib'
- env['AR'] = 'gcc-ar'
+ if not env["use_llvm"]:
+ env["RANLIB"] = "gcc-ranlib"
+ env["AR"] = "gcc-ar"
- env.Append(CCFLAGS=['-pipe'])
- env.Append(LINKFLAGS=['-pipe'])
+ env.Append(CCFLAGS=["-pipe"])
+ env.Append(LINKFLAGS=["-pipe"])
# -fpie and -no-pie is supported on GCC 6+ and Clang 4+, both below our
# minimal requirements.
- env.Append(CCFLAGS=['-fpie'])
- env.Append(LINKFLAGS=['-no-pie'])
+ env.Append(CCFLAGS=["-fpie"])
+ env.Append(LINKFLAGS=["-no-pie"])
## Dependencies
- env.ParseConfig('pkg-config x11 --cflags --libs')
- env.ParseConfig('pkg-config xcursor --cflags --libs')
- env.ParseConfig('pkg-config xinerama --cflags --libs')
- env.ParseConfig('pkg-config xrandr --cflags --libs')
- env.ParseConfig('pkg-config xrender --cflags --libs')
- env.ParseConfig('pkg-config xi --cflags --libs')
+ env.ParseConfig("pkg-config x11 --cflags --libs")
+ env.ParseConfig("pkg-config xcursor --cflags --libs")
+ env.ParseConfig("pkg-config xinerama --cflags --libs")
+ env.ParseConfig("pkg-config xrandr --cflags --libs")
+ env.ParseConfig("pkg-config xrender --cflags --libs")
+ env.ParseConfig("pkg-config xi --cflags --libs")
- if (env['touch']):
- env.Append(CPPDEFINES=['TOUCH_ENABLED'])
+ if env["touch"]:
+ env.Append(CPPDEFINES=["TOUCH_ENABLED"])
# FIXME: Check for existence of the libs before parsing their flags with pkg-config
# freetype depends on libpng and zlib, so bundling one of them while keeping others
# as shared libraries leads to weird issues
- if env['builtin_freetype'] or env['builtin_libpng'] or env['builtin_zlib']:
- env['builtin_freetype'] = True
- env['builtin_libpng'] = True
- env['builtin_zlib'] = True
+ if env["builtin_freetype"] or env["builtin_libpng"] or env["builtin_zlib"]:
+ env["builtin_freetype"] = True
+ env["builtin_libpng"] = True
+ env["builtin_zlib"] = True
- if not env['builtin_freetype']:
- env.ParseConfig('pkg-config freetype2 --cflags --libs')
+ if not env["builtin_freetype"]:
+ env.ParseConfig("pkg-config freetype2 --cflags --libs")
- if not env['builtin_libpng']:
- env.ParseConfig('pkg-config libpng16 --cflags --libs')
+ if not env["builtin_libpng"]:
+ env.ParseConfig("pkg-config libpng16 --cflags --libs")
- if not env['builtin_bullet']:
+ if not env["builtin_bullet"]:
# We need at least version 2.89
import subprocess
- bullet_version = subprocess.check_output(['pkg-config', 'bullet', '--modversion']).strip()
+
+ bullet_version = subprocess.check_output(["pkg-config", "bullet", "--modversion"]).strip()
if str(bullet_version) < "2.89":
# Abort as system bullet was requested but too old
- print("Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(bullet_version, "2.89"))
+ print(
+ "Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(
+ bullet_version, "2.89"
+ )
+ )
sys.exit(255)
- env.ParseConfig('pkg-config bullet --cflags --libs')
+ env.ParseConfig("pkg-config bullet --cflags --libs")
if False: # not env['builtin_assimp']:
# FIXME: Add min version check
- env.ParseConfig('pkg-config assimp --cflags --libs')
+ env.ParseConfig("pkg-config assimp --cflags --libs")
- if not env['builtin_enet']:
- env.ParseConfig('pkg-config libenet --cflags --libs')
+ if not env["builtin_enet"]:
+ env.ParseConfig("pkg-config libenet --cflags --libs")
- if not env['builtin_squish']:
- env.ParseConfig('pkg-config libsquish --cflags --libs')
+ if not env["builtin_squish"]:
+ env.ParseConfig("pkg-config libsquish --cflags --libs")
- if not env['builtin_zstd']:
- env.ParseConfig('pkg-config libzstd --cflags --libs')
+ if not env["builtin_zstd"]:
+ env.ParseConfig("pkg-config libzstd --cflags --libs")
# Sound and video libraries
# Keep the order as it triggers chained dependencies (ogg needed by others, etc.)
- if not env['builtin_libtheora']:
- env['builtin_libogg'] = False # Needed to link against system libtheora
- env['builtin_libvorbis'] = False # Needed to link against system libtheora
- env.ParseConfig('pkg-config theora theoradec --cflags --libs')
+ if not env["builtin_libtheora"]:
+ env["builtin_libogg"] = False # Needed to link against system libtheora
+ env["builtin_libvorbis"] = False # Needed to link against system libtheora
+ env.ParseConfig("pkg-config theora theoradec --cflags --libs")
else:
- list_of_x86 = ['x86_64', 'x86', 'i386', 'i586']
+ list_of_x86 = ["x86_64", "x86", "i386", "i586"]
if any(platform.machine() in s for s in list_of_x86):
env["x86_libtheora_opt_gcc"] = True
- if not env['builtin_libvpx']:
- env.ParseConfig('pkg-config vpx --cflags --libs')
+ if not env["builtin_libvpx"]:
+ env.ParseConfig("pkg-config vpx --cflags --libs")
- if not env['builtin_libvorbis']:
- env['builtin_libogg'] = False # Needed to link against system libvorbis
- env.ParseConfig('pkg-config vorbis vorbisfile --cflags --libs')
+ if not env["builtin_libvorbis"]:
+ env["builtin_libogg"] = False # Needed to link against system libvorbis
+ env.ParseConfig("pkg-config vorbis vorbisfile --cflags --libs")
- if not env['builtin_opus']:
- env['builtin_libogg'] = False # Needed to link against system opus
- env.ParseConfig('pkg-config opus opusfile --cflags --libs')
+ if not env["builtin_opus"]:
+ env["builtin_libogg"] = False # Needed to link against system opus
+ env.ParseConfig("pkg-config opus opusfile --cflags --libs")
- if not env['builtin_libogg']:
- env.ParseConfig('pkg-config ogg --cflags --libs')
+ if not env["builtin_libogg"]:
+ env.ParseConfig("pkg-config ogg --cflags --libs")
- if not env['builtin_libwebp']:
- env.ParseConfig('pkg-config libwebp --cflags --libs')
+ if not env["builtin_libwebp"]:
+ env.ParseConfig("pkg-config libwebp --cflags --libs")
- if not env['builtin_mbedtls']:
+ if not env["builtin_mbedtls"]:
# mbedTLS does not provide a pkgconfig config yet. See https://github.com/ARMmbed/mbedtls/issues/228
- env.Append(LIBS=['mbedtls', 'mbedcrypto', 'mbedx509'])
+ env.Append(LIBS=["mbedtls", "mbedcrypto", "mbedx509"])
- if not env['builtin_wslay']:
- env.ParseConfig('pkg-config libwslay --cflags --libs')
+ if not env["builtin_wslay"]:
+ env.ParseConfig("pkg-config libwslay --cflags --libs")
- if not env['builtin_miniupnpc']:
+ if not env["builtin_miniupnpc"]:
# No pkgconfig file so far, hardcode default paths.
env.Prepend(CPPPATH=["/usr/include/miniupnpc"])
env.Append(LIBS=["miniupnpc"])
# On Linux wchar_t should be 32-bits
# 16-bit library shouldn't be required due to compiler optimisations
- if not env['builtin_pcre2']:
- env.ParseConfig('pkg-config libpcre2-32 --cflags --libs')
+ if not env["builtin_pcre2"]:
+ env.ParseConfig("pkg-config libpcre2-32 --cflags --libs")
## Flags
- if (os.system("pkg-config --exists alsa") == 0): # 0 means found
+ if os.system("pkg-config --exists alsa") == 0: # 0 means found
print("Enabling ALSA")
env.Append(CPPDEFINES=["ALSA_ENABLED", "ALSAMIDI_ENABLED"])
- # Don't parse --cflags, we don't need to add /usr/include/alsa to include path
- env.ParseConfig('pkg-config alsa --libs')
+ # Don't parse --cflags, we don't need to add /usr/include/alsa to include path
+ env.ParseConfig("pkg-config alsa --libs")
else:
print("ALSA libraries not found, disabling driver")
- if env['pulseaudio']:
- if (os.system("pkg-config --exists libpulse") == 0): # 0 means found
+ if env["pulseaudio"]:
+ if os.system("pkg-config --exists libpulse") == 0: # 0 means found
print("Enabling PulseAudio")
env.Append(CPPDEFINES=["PULSEAUDIO_ENABLED"])
- env.ParseConfig('pkg-config --cflags --libs libpulse')
+ env.ParseConfig("pkg-config --cflags --libs libpulse")
else:
print("PulseAudio development libraries not found, disabling driver")
- if (platform.system() == "Linux"):
+ if platform.system() == "Linux":
env.Append(CPPDEFINES=["JOYDEV_ENABLED"])
- if env['udev']:
- if (os.system("pkg-config --exists libudev") == 0): # 0 means found
+ if env["udev"]:
+ if os.system("pkg-config --exists libudev") == 0: # 0 means found
print("Enabling udev support")
env.Append(CPPDEFINES=["UDEV_ENABLED"])
- env.ParseConfig('pkg-config libudev --cflags --libs')
+ env.ParseConfig("pkg-config libudev --cflags --libs")
else:
print("libudev development libraries not found, disabling udev support")
# Linkflags below this line should typically stay the last ones
- if not env['builtin_zlib']:
- env.ParseConfig('pkg-config zlib --cflags --libs')
+ if not env["builtin_zlib"]:
+ env.ParseConfig("pkg-config zlib --cflags --libs")
- env.Prepend(CPPPATH=['#platform/linuxbsd'])
- env.Append(CPPDEFINES=['X11_ENABLED', 'UNIX_ENABLED'])
+ env.Prepend(CPPPATH=["#platform/linuxbsd"])
+ env.Append(CPPDEFINES=["X11_ENABLED", "UNIX_ENABLED"])
- env.Append(CPPDEFINES=['VULKAN_ENABLED'])
- if not env['builtin_vulkan']:
- env.ParseConfig('pkg-config vulkan --cflags --libs')
- if not env['builtin_glslang']:
+ env.Append(CPPDEFINES=["VULKAN_ENABLED"])
+ if not env["builtin_vulkan"]:
+ env.ParseConfig("pkg-config vulkan --cflags --libs")
+ if not env["builtin_glslang"]:
# No pkgconfig file for glslang so far
- env.Append(LIBS=['glslang', 'SPIRV'])
+ env.Append(LIBS=["glslang", "SPIRV"])
- #env.Append(CPPDEFINES=['OPENGL_ENABLED'])
- env.Append(LIBS=['GL'])
+ # env.Append(CPPDEFINES=['OPENGL_ENABLED'])
+ env.Append(LIBS=["GL"])
- env.Append(LIBS=['pthread'])
+ env.Append(LIBS=["pthread"])
- if (platform.system() == "Linux"):
- env.Append(LIBS=['dl'])
+ if platform.system() == "Linux":
+ env.Append(LIBS=["dl"])
- if (platform.system().find("BSD") >= 0):
+ if platform.system().find("BSD") >= 0:
env["execinfo"] = True
if env["execinfo"]:
- env.Append(LIBS=['execinfo'])
+ env.Append(LIBS=["execinfo"])
- if not env['tools']:
+ if not env["tools"]:
import subprocess
import re
- linker_version_str = subprocess.check_output([env.subst(env["LINK"]), '-Wl,--version']).decode("utf-8")
- gnu_ld_version = re.search('^GNU ld [^$]*(\d+\.\d+)$', linker_version_str, re.MULTILINE)
+
+ linker_version_str = subprocess.check_output([env.subst(env["LINK"]), "-Wl,--version"]).decode("utf-8")
+ gnu_ld_version = re.search("^GNU ld [^$]*(\d+\.\d+)$", linker_version_str, re.MULTILINE)
if not gnu_ld_version:
- print("Warning: Creating template binaries enabled for PCK embedding is currently only supported with GNU ld")
+ print(
+ "Warning: Creating template binaries enabled for PCK embedding is currently only supported with GNU ld"
+ )
else:
if float(gnu_ld_version.group(1)) >= 2.30:
- env.Append(LINKFLAGS=['-T', 'platform/linuxbsd/pck_embed.ld'])
+ env.Append(LINKFLAGS=["-T", "platform/linuxbsd/pck_embed.ld"])
else:
- env.Append(LINKFLAGS=['-T', 'platform/linuxbsd/pck_embed.legacy.ld'])
+ env.Append(LINKFLAGS=["-T", "platform/linuxbsd/pck_embed.legacy.ld"])
## Cross-compilation
- if (is64 and env["bits"] == "32"):
- env.Append(CCFLAGS=['-m32'])
- env.Append(LINKFLAGS=['-m32', '-L/usr/lib/i386-linux-gnu'])
- elif (not is64 and env["bits"] == "64"):
- env.Append(CCFLAGS=['-m64'])
- env.Append(LINKFLAGS=['-m64', '-L/usr/lib/i686-linux-gnu'])
+ if is64 and env["bits"] == "32":
+ env.Append(CCFLAGS=["-m32"])
+ env.Append(LINKFLAGS=["-m32", "-L/usr/lib/i386-linux-gnu"])
+ elif not is64 and env["bits"] == "64":
+ env.Append(CCFLAGS=["-m64"])
+ env.Append(LINKFLAGS=["-m64", "-L/usr/lib/i686-linux-gnu"])
# Link those statically for portability
- if env['use_static_cpp']:
- env.Append(LINKFLAGS=['-static-libgcc', '-static-libstdc++'])
+ if env["use_static_cpp"]:
+ env.Append(LINKFLAGS=["-static-libgcc", "-static-libstdc++"])
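
The dependency section above repeats one idiom many times: probe a package with "pkg-config --exists", and only parse its flags into the environment when the probe succeeds. A hypothetical helper (not in the patch) condensing that idiom:

import os

# Hypothetical helper, not in the patch: probe a package with pkg-config and
# pull its flags into the environment only when the probe succeeds.
def use_system_lib(env, pkg, define=None):
    if os.system("pkg-config --exists " + pkg) != 0:  # 0 means found
        return False
    if define:
        env.Append(CPPDEFINES=[define])
    env.ParseConfig("pkg-config " + pkg + " --cflags --libs")
    return True

# Usage sketch inside configure(env):
# if not use_system_lib(env, "alsa", define="ALSA_ENABLED"):
#     print("ALSA libraries not found, disabling driver")
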
diff --git a/platform/linuxbsd/platform_linuxbsd_builders.py b/platform/linuxbsd/platform_linuxbsd_builders.py
index a72306a9c06..58234f37486 100644
--- a/platform/linuxbsd/platform_linuxbsd_builders.py
+++ b/platform/linuxbsd/platform_linuxbsd_builders.py
@@ -8,10 +8,10 @@ from platform_methods import subprocess_main
def make_debug_linuxbsd(target, source, env):
- os.system('objcopy --only-keep-debug {0} {0}.debugsymbols'.format(target[0]))
- os.system('strip --strip-debug --strip-unneeded {0}'.format(target[0]))
- os.system('objcopy --add-gnu-debuglink={0}.debugsymbols {0}'.format(target[0]))
+ os.system("objcopy --only-keep-debug {0} {0}.debugsymbols".format(target[0]))
+ os.system("strip --strip-debug --strip-unneeded {0}".format(target[0]))
+ os.system("objcopy --add-gnu-debuglink={0}.debugsymbols {0}".format(target[0]))
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
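
make_debug_linuxbsd above shells out to objcopy and strip; the stand-alone sketch below walks the same three steps with subprocess.run purely for illustration, and the binary path in the usage line is hypothetical.

import subprocess

def split_debug_symbols(binary):
    # Copy the debug info into a side file, strip it from the binary, then
    # record a debuglink so debuggers can locate the side file again.
    subprocess.run(["objcopy", "--only-keep-debug", binary, binary + ".debugsymbols"], check=True)
    subprocess.run(["strip", "--strip-debug", "--strip-unneeded", binary], check=True)
    subprocess.run(["objcopy", "--add-gnu-debuglink=" + binary + ".debugsymbols", binary], check=True)

# split_debug_symbols("bin/godot.linuxbsd.tools.64")  # hypothetical binary path
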
diff --git a/platform/osx/SCsub b/platform/osx/SCsub
index 4ec8aeab6d5..ad62db358b4 100644
--- a/platform/osx/SCsub
+++ b/platform/osx/SCsub
@@ -1,23 +1,22 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
from platform_methods import run_in_subprocess
import platform_osx_builders
files = [
- 'crash_handler_osx.mm',
- 'os_osx.mm',
- 'display_server_osx.mm',
- 'godot_main_osx.mm',
- 'dir_access_osx.mm',
- 'joypad_osx.cpp',
- 'vulkan_context_osx.mm',
- 'context_gl_osx.mm'
+ "crash_handler_osx.mm",
+ "os_osx.mm",
+ "display_server_osx.mm",
+ "godot_main_osx.mm",
+ "dir_access_osx.mm",
+ "joypad_osx.cpp",
+ "vulkan_context_osx.mm",
+ "context_gl_osx.mm",
]
-prog = env.add_program('#bin/godot', files)
+prog = env.add_program("#bin/godot", files)
if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes") and env["separate_debug_symbols"]:
env.AddPostAction(prog, run_in_subprocess(platform_osx_builders.make_debug_osx))
-
diff --git a/platform/osx/detect.py b/platform/osx/detect.py
index 12ca5c10dce..29aa8ece19f 100644
--- a/platform/osx/detect.py
+++ b/platform/osx/detect.py
@@ -14,7 +14,7 @@ def get_name():
def can_build():
- if (sys.platform == "darwin" or ("OSXCROSS_ROOT" in os.environ)):
+ if sys.platform == "darwin" or ("OSXCROSS_ROOT" in os.environ):
return True
return False
@@ -24,52 +24,55 @@ def get_opts():
from SCons.Variables import BoolVariable, EnumVariable
return [
- ('osxcross_sdk', 'OSXCross SDK version', 'darwin14'),
- ('MACOS_SDK_PATH', 'Path to the macOS SDK', ''),
- BoolVariable('use_static_mvk', 'Link MoltenVK statically as Level-0 driver (better portability) or use Vulkan ICD loader (enables validation layers)', False),
- EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')),
- BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False),
- BoolVariable('use_ubsan', 'Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)', False),
- BoolVariable('use_asan', 'Use LLVM/GCC compiler address sanitizer (ASAN))', False),
- BoolVariable('use_tsan', 'Use LLVM/GCC compiler thread sanitizer (TSAN))', False),
+ ("osxcross_sdk", "OSXCross SDK version", "darwin14"),
+ ("MACOS_SDK_PATH", "Path to the macOS SDK", ""),
+ BoolVariable(
+ "use_static_mvk",
+ "Link MoltenVK statically as Level-0 driver (better portability) or use Vulkan ICD loader (enables validation layers)",
+ False,
+ ),
+ EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")),
+ BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False),
+ BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False),
+ BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN)", False),
+ BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN)", False),
]
def get_flags():
- return [
- ]
+ return []
def configure(env):
- ## Build type
+ ## Build type
- if (env["target"] == "release"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Prepend(CCFLAGS=['-O3', '-fomit-frame-pointer', '-ftree-vectorize', '-msse2'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os','-ftree-vectorize', '-msse2'])
+ if env["target"] == "release":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Prepend(CCFLAGS=["-O3", "-fomit-frame-pointer", "-ftree-vectorize", "-msse2"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os", "-ftree-vectorize", "-msse2"])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "release_debug"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Prepend(CCFLAGS=['-O2'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os'])
- env.Prepend(CPPDEFINES=['DEBUG_ENABLED'])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ elif env["target"] == "release_debug":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Prepend(CCFLAGS=["-O2"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os"])
+ env.Prepend(CPPDEFINES=["DEBUG_ENABLED"])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "debug"):
- env.Prepend(CCFLAGS=['-g3'])
- env.Prepend(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED'])
+ elif env["target"] == "debug":
+ env.Prepend(CCFLAGS=["-g3"])
+ env.Prepend(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"])
## Architecture
@@ -83,86 +86,109 @@ def configure(env):
if "OSXCROSS_ROOT" in os.environ:
env["osxcross"] = True
- if not "osxcross" in env: # regular native build
- env.Append(CCFLAGS=['-arch', 'x86_64'])
- env.Append(LINKFLAGS=['-arch', 'x86_64'])
- if (env["macports_clang"] != 'no'):
+ if not "osxcross" in env: # regular native build
+ env.Append(CCFLAGS=["-arch", "x86_64"])
+ env.Append(LINKFLAGS=["-arch", "x86_64"])
+ if env["macports_clang"] != "no":
mpprefix = os.environ.get("MACPORTS_PREFIX", "/opt/local")
mpclangver = env["macports_clang"]
env["CC"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang"
env["LINK"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang++"
env["CXX"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang++"
- env['AR'] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ar"
- env['RANLIB'] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ranlib"
- env['AS'] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-as"
- env.Append(CPPDEFINES=['__MACPORTS__']) #hack to fix libvpx MM256_BROADCASTSI128_SI256 define
+ env["AR"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ar"
+ env["RANLIB"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ranlib"
+ env["AS"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-as"
+ env.Append(CPPDEFINES=["__MACPORTS__"]) # hack to fix libvpx MM256_BROADCASTSI128_SI256 define
else:
- env['CC'] = 'clang'
- env['CXX'] = 'clang++'
+ env["CC"] = "clang"
+ env["CXX"] = "clang++"
- detect_darwin_sdk_path('osx', env)
- env.Append(CCFLAGS=['-isysroot', '$MACOS_SDK_PATH'])
- env.Append(LINKFLAGS=['-isysroot', '$MACOS_SDK_PATH'])
+ detect_darwin_sdk_path("osx", env)
+ env.Append(CCFLAGS=["-isysroot", "$MACOS_SDK_PATH"])
+ env.Append(LINKFLAGS=["-isysroot", "$MACOS_SDK_PATH"])
- else: # osxcross build
+ else: # osxcross build
root = os.environ.get("OSXCROSS_ROOT", 0)
basecmd = root + "/target/bin/x86_64-apple-" + env["osxcross_sdk"] + "-"
ccache_path = os.environ.get("CCACHE")
if ccache_path is None:
- env['CC'] = basecmd + "cc"
- env['CXX'] = basecmd + "c++"
+ env["CC"] = basecmd + "cc"
+ env["CXX"] = basecmd + "c++"
else:
# there aren't any ccache wrappers available for OS X cross-compile,
# to enable caching we need to prepend the path to the ccache binary
- env['CC'] = ccache_path + ' ' + basecmd + "cc"
- env['CXX'] = ccache_path + ' ' + basecmd + "c++"
- env['AR'] = basecmd + "ar"
- env['RANLIB'] = basecmd + "ranlib"
- env['AS'] = basecmd + "as"
- env.Append(CPPDEFINES=['__MACPORTS__']) #hack to fix libvpx MM256_BROADCASTSI128_SI256 define
+ env["CC"] = ccache_path + " " + basecmd + "cc"
+ env["CXX"] = ccache_path + " " + basecmd + "c++"
+ env["AR"] = basecmd + "ar"
+ env["RANLIB"] = basecmd + "ranlib"
+ env["AS"] = basecmd + "as"
+ env.Append(CPPDEFINES=["__MACPORTS__"]) # hack to fix libvpx MM256_BROADCASTSI128_SI256 define
- if (env["CXX"] == "clang++"):
- env.Append(CPPDEFINES=['TYPED_METHOD_BIND'])
+ if env["CXX"] == "clang++":
+ env.Append(CPPDEFINES=["TYPED_METHOD_BIND"])
env["CC"] = "clang"
env["LINK"] = "clang++"
- if env['use_ubsan'] or env['use_asan'] or env['use_tsan']:
+ if env["use_ubsan"] or env["use_asan"] or env["use_tsan"]:
env.extra_suffix += "s"
- if env['use_ubsan']:
- env.Append(CCFLAGS=['-fsanitize=undefined'])
- env.Append(LINKFLAGS=['-fsanitize=undefined'])
+ if env["use_ubsan"]:
+ env.Append(CCFLAGS=["-fsanitize=undefined"])
+ env.Append(LINKFLAGS=["-fsanitize=undefined"])
- if env['use_asan']:
- env.Append(CCFLAGS=['-fsanitize=address'])
- env.Append(LINKFLAGS=['-fsanitize=address'])
+ if env["use_asan"]:
+ env.Append(CCFLAGS=["-fsanitize=address"])
+ env.Append(LINKFLAGS=["-fsanitize=address"])
- if env['use_tsan']:
- env.Append(CCFLAGS=['-fsanitize=thread'])
- env.Append(LINKFLAGS=['-fsanitize=thread'])
+ if env["use_tsan"]:
+ env.Append(CCFLAGS=["-fsanitize=thread"])
+ env.Append(LINKFLAGS=["-fsanitize=thread"])
## Dependencies
- if env['builtin_libtheora']:
+ if env["builtin_libtheora"]:
env["x86_libtheora_opt_gcc"] = True
## Flags
- env.Prepend(CPPPATH=['#platform/osx'])
- env.Append(CPPDEFINES=['OSX_ENABLED', 'UNIX_ENABLED', 'APPLE_STYLE_KEYS', 'COREAUDIO_ENABLED', 'COREMIDI_ENABLED'])
- env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-framework', 'AudioUnit', '-framework', 'CoreAudio', '-framework', 'CoreMIDI', '-framework', 'IOKit', '-framework', 'ForceFeedback', '-framework', 'CoreVideo', '-framework', 'AVFoundation', '-framework', 'CoreMedia'])
- env.Append(LIBS=['pthread', 'z'])
+ env.Prepend(CPPPATH=["#platform/osx"])
+ env.Append(CPPDEFINES=["OSX_ENABLED", "UNIX_ENABLED", "APPLE_STYLE_KEYS", "COREAUDIO_ENABLED", "COREMIDI_ENABLED"])
+ env.Append(
+ LINKFLAGS=[
+ "-framework",
+ "Cocoa",
+ "-framework",
+ "Carbon",
+ "-framework",
+ "AudioUnit",
+ "-framework",
+ "CoreAudio",
+ "-framework",
+ "CoreMIDI",
+ "-framework",
+ "IOKit",
+ "-framework",
+ "ForceFeedback",
+ "-framework",
+ "CoreVideo",
+ "-framework",
+ "AVFoundation",
+ "-framework",
+ "CoreMedia",
+ ]
+ )
+ env.Append(LIBS=["pthread", "z"])
- env.Append(CPPDEFINES=['VULKAN_ENABLED'])
- env.Append(LINKFLAGS=['-framework', 'Metal', '-framework', 'QuartzCore', '-framework', 'IOSurface'])
- if (env['use_static_mvk']):
- env.Append(LINKFLAGS=['-framework', 'MoltenVK'])
- env['builtin_vulkan'] = False
- elif not env['builtin_vulkan']:
- env.Append(LIBS=['vulkan'])
+ env.Append(CPPDEFINES=["VULKAN_ENABLED"])
+ env.Append(LINKFLAGS=["-framework", "Metal", "-framework", "QuartzCore", "-framework", "IOSurface"])
+ if env["use_static_mvk"]:
+ env.Append(LINKFLAGS=["-framework", "MoltenVK"])
+ env["builtin_vulkan"] = False
+ elif not env["builtin_vulkan"]:
+ env.Append(LIBS=["vulkan"])
- #env.Append(CPPDEFINES=['GLES_ENABLED', 'OPENGL_ENABLED'])
+ # env.Append(CPPDEFINES=['GLES_ENABLED', 'OPENGL_ENABLED'])
- env.Append(CCFLAGS=['-mmacosx-version-min=10.12'])
- env.Append(LINKFLAGS=['-mmacosx-version-min=10.12'])
+ env.Append(CCFLAGS=["-mmacosx-version-min=10.12"])
+ env.Append(LINKFLAGS=["-mmacosx-version-min=10.12"])
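
Black now spreads the long framework list above into one "-framework"/name pair per line; a hypothetical helper (not part of the patch) could build the same pairs from a plain list of names:

# Hypothetical helper, not in the patch: expand macOS framework names into the
# alternating "-framework", "<Name>" flags expected by the linker.
def framework_flags(names):
    flags = []
    for name in names:
        flags += ["-framework", name]
    return flags

# Usage sketch inside configure(env):
# env.Append(LINKFLAGS=framework_flags(["Cocoa", "Carbon", "IOKit"]))
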
diff --git a/platform/osx/platform_osx_builders.py b/platform/osx/platform_osx_builders.py
index 81997f674b2..953ed479db3 100644
--- a/platform/osx/platform_osx_builders.py
+++ b/platform/osx/platform_osx_builders.py
@@ -8,14 +8,14 @@ from platform_methods import subprocess_main
def make_debug_osx(target, source, env):
- if (env["macports_clang"] != 'no'):
+ if env["macports_clang"] != "no":
mpprefix = os.environ.get("MACPORTS_PREFIX", "/opt/local")
mpclangver = env["macports_clang"]
- os.system(mpprefix + '/libexec/llvm-' + mpclangver + '/bin/llvm-dsymutil {0} -o {0}.dSYM'.format(target[0]))
+ os.system(mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-dsymutil {0} -o {0}.dSYM".format(target[0]))
else:
- os.system('dsymutil {0} -o {0}.dSYM'.format(target[0]))
- os.system('strip -u -r {0}'.format(target[0]))
+ os.system("dsymutil {0} -o {0}.dSYM".format(target[0]))
+ os.system("strip -u -r {0}".format(target[0]))
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/platform/server/SCsub b/platform/server/SCsub
index 8364164114d..15b9af4d259 100644
--- a/platform/server/SCsub
+++ b/platform/server/SCsub
@@ -2,15 +2,15 @@
import sys
-Import('env')
+Import("env")
-common_server = [\
- "os_server.cpp",\
+common_server = [
+ "os_server.cpp",
]
if sys.platform == "darwin":
- common_server.append("#platform/osx/crash_handler_osx.mm")
+ common_server.append("#platform/osx/crash_handler_osx.mm")
else:
- common_server.append("#platform/x11/crash_handler_x11.cpp")
+ common_server.append("#platform/x11/crash_handler_x11.cpp")
-prog = env.add_program('#bin/godot_server', ['godot_server.cpp'] + common_server)
+prog = env.add_program("#bin/godot_server", ["godot_server.cpp"] + common_server)
diff --git a/platform/server/detect.py b/platform/server/detect.py
index 4ea4bddddd4..a73810cdf4d 100644
--- a/platform/server/detect.py
+++ b/platform/server/detect.py
@@ -5,6 +5,7 @@ import sys
# This file is mostly based on platform/x11/detect.py.
# If editing this file, make sure to apply relevant changes here too.
+
def is_active():
return True
@@ -14,14 +15,14 @@ def get_name():
def get_program_suffix():
- if (sys.platform == "darwin"):
+ if sys.platform == "darwin":
return "osx"
return "linuxbsd"
def can_build():
- if (os.name != "posix"):
+ if os.name != "posix":
return False
return True
@@ -29,17 +30,18 @@ def can_build():
def get_opts():
from SCons.Variables import BoolVariable, EnumVariable
+
return [
- BoolVariable('use_llvm', 'Use the LLVM compiler', False),
- BoolVariable('use_static_cpp', 'Link libgcc and libstdc++ statically for better portability', False),
- BoolVariable('use_coverage', 'Test Godot coverage', False),
- BoolVariable('use_ubsan', 'Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)', False),
- BoolVariable('use_asan', 'Use LLVM/GCC compiler address sanitizer (ASAN))', False),
- BoolVariable('use_lsan', 'Use LLVM/GCC compiler leak sanitizer (LSAN))', False),
- BoolVariable('use_tsan', 'Use LLVM/GCC compiler thread sanitizer (TSAN))', False),
- EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')),
- BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False),
- BoolVariable('execinfo', 'Use libexecinfo on systems where glibc is not available', False),
+ BoolVariable("use_llvm", "Use the LLVM compiler", False),
+ BoolVariable("use_static_cpp", "Link libgcc and libstdc++ statically for better portability", False),
+ BoolVariable("use_coverage", "Test Godot coverage", False),
+ BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False),
+ BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN)", False),
+ BoolVariable("use_lsan", "Use LLVM/GCC compiler leak sanitizer (LSAN)", False),
+ BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN)", False),
+ EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")),
+ BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False),
+ BoolVariable("execinfo", "Use libexecinfo on systems where glibc is not available", False),
]
@@ -52,89 +54,89 @@ def configure(env):
## Build type
- if (env["target"] == "release"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Prepend(CCFLAGS=['-O3'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os'])
+ if env["target"] == "release":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Prepend(CCFLAGS=["-O3"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os"])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "release_debug"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Prepend(CCFLAGS=['-O2'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os'])
- env.Prepend(CPPDEFINES=['DEBUG_ENABLED'])
+ elif env["target"] == "release_debug":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Prepend(CCFLAGS=["-O2"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os"])
+ env.Prepend(CPPDEFINES=["DEBUG_ENABLED"])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "debug"):
- env.Prepend(CCFLAGS=['-g3'])
- env.Prepend(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED'])
- env.Append(LINKFLAGS=['-rdynamic'])
+ elif env["target"] == "debug":
+ env.Prepend(CCFLAGS=["-g3"])
+ env.Prepend(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"])
+ env.Append(LINKFLAGS=["-rdynamic"])
## Architecture
- is64 = sys.maxsize > 2**32
- if (env["bits"] == "default"):
+ is64 = sys.maxsize > 2 ** 32
+ if env["bits"] == "default":
env["bits"] = "64" if is64 else "32"
## Compiler configuration
- if 'CXX' in env and 'clang' in os.path.basename(env['CXX']):
+ if "CXX" in env and "clang" in os.path.basename(env["CXX"]):
# Convenience check to enforce the use_llvm overrides when CXX is clang(++)
- env['use_llvm'] = True
+ env["use_llvm"] = True
- if env['use_llvm']:
- if ('clang++' not in os.path.basename(env['CXX'])):
+ if env["use_llvm"]:
+ if "clang++" not in os.path.basename(env["CXX"]):
env["CC"] = "clang"
env["CXX"] = "clang++"
env["LINK"] = "clang++"
- env.Append(CPPDEFINES=['TYPED_METHOD_BIND'])
+ env.Append(CPPDEFINES=["TYPED_METHOD_BIND"])
env.extra_suffix = ".llvm" + env.extra_suffix
- if env['use_coverage']:
- env.Append(CCFLAGS=['-ftest-coverage', '-fprofile-arcs'])
- env.Append(LINKFLAGS=['-ftest-coverage', '-fprofile-arcs'])
+ if env["use_coverage"]:
+ env.Append(CCFLAGS=["-ftest-coverage", "-fprofile-arcs"])
+ env.Append(LINKFLAGS=["-ftest-coverage", "-fprofile-arcs"])
- if env['use_ubsan'] or env['use_asan'] or env['use_lsan'] or env['use_tsan']:
+ if env["use_ubsan"] or env["use_asan"] or env["use_lsan"] or env["use_tsan"]:
env.extra_suffix += "s"
- if env['use_ubsan']:
- env.Append(CCFLAGS=['-fsanitize=undefined'])
- env.Append(LINKFLAGS=['-fsanitize=undefined'])
+ if env["use_ubsan"]:
+ env.Append(CCFLAGS=["-fsanitize=undefined"])
+ env.Append(LINKFLAGS=["-fsanitize=undefined"])
- if env['use_asan']:
- env.Append(CCFLAGS=['-fsanitize=address'])
- env.Append(LINKFLAGS=['-fsanitize=address'])
+ if env["use_asan"]:
+ env.Append(CCFLAGS=["-fsanitize=address"])
+ env.Append(LINKFLAGS=["-fsanitize=address"])
- if env['use_lsan']:
- env.Append(CCFLAGS=['-fsanitize=leak'])
- env.Append(LINKFLAGS=['-fsanitize=leak'])
+ if env["use_lsan"]:
+ env.Append(CCFLAGS=["-fsanitize=leak"])
+ env.Append(LINKFLAGS=["-fsanitize=leak"])
- if env['use_tsan']:
- env.Append(CCFLAGS=['-fsanitize=thread'])
- env.Append(LINKFLAGS=['-fsanitize=thread'])
+ if env["use_tsan"]:
+ env.Append(CCFLAGS=["-fsanitize=thread"])
+ env.Append(LINKFLAGS=["-fsanitize=thread"])
- if env['use_lto']:
- env.Append(CCFLAGS=['-flto'])
- if not env['use_llvm'] and env.GetOption("num_jobs") > 1:
- env.Append(LINKFLAGS=['-flto=' + str(env.GetOption("num_jobs"))])
+ if env["use_lto"]:
+ env.Append(CCFLAGS=["-flto"])
+ if not env["use_llvm"] and env.GetOption("num_jobs") > 1:
+ env.Append(LINKFLAGS=["-flto=" + str(env.GetOption("num_jobs"))])
else:
- env.Append(LINKFLAGS=['-flto'])
- if not env['use_llvm']:
- env['RANLIB'] = 'gcc-ranlib'
- env['AR'] = 'gcc-ar'
+ env.Append(LINKFLAGS=["-flto"])
+ if not env["use_llvm"]:
+ env["RANLIB"] = "gcc-ranlib"
+ env["AR"] = "gcc-ar"
- env.Append(CCFLAGS=['-pipe'])
- env.Append(LINKFLAGS=['-pipe'])
+ env.Append(CCFLAGS=["-pipe"])
+ env.Append(LINKFLAGS=["-pipe"])
## Dependencies
@@ -142,109 +144,114 @@ def configure(env):
# freetype depends on libpng and zlib, so bundling one of them while keeping others
# as shared libraries leads to weird issues
- if env['builtin_freetype'] or env['builtin_libpng'] or env['builtin_zlib']:
- env['builtin_freetype'] = True
- env['builtin_libpng'] = True
- env['builtin_zlib'] = True
+ if env["builtin_freetype"] or env["builtin_libpng"] or env["builtin_zlib"]:
+ env["builtin_freetype"] = True
+ env["builtin_libpng"] = True
+ env["builtin_zlib"] = True
- if not env['builtin_freetype']:
- env.ParseConfig('pkg-config freetype2 --cflags --libs')
+ if not env["builtin_freetype"]:
+ env.ParseConfig("pkg-config freetype2 --cflags --libs")
- if not env['builtin_libpng']:
- env.ParseConfig('pkg-config libpng16 --cflags --libs')
+ if not env["builtin_libpng"]:
+ env.ParseConfig("pkg-config libpng16 --cflags --libs")
- if not env['builtin_bullet']:
+ if not env["builtin_bullet"]:
# We need at least version 2.89
import subprocess
- bullet_version = subprocess.check_output(['pkg-config', 'bullet', '--modversion']).strip()
+
+ bullet_version = subprocess.check_output(["pkg-config", "bullet", "--modversion"]).strip()
if str(bullet_version) < "2.89":
# Abort as system bullet was requested but too old
- print("Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(bullet_version, "2.89"))
+ print(
+ "Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(
+ bullet_version, "2.89"
+ )
+ )
sys.exit(255)
- env.ParseConfig('pkg-config bullet --cflags --libs')
+ env.ParseConfig("pkg-config bullet --cflags --libs")
if False: # not env['builtin_assimp']:
# FIXME: Add min version check
- env.ParseConfig('pkg-config assimp --cflags --libs')
+ env.ParseConfig("pkg-config assimp --cflags --libs")
- if not env['builtin_enet']:
- env.ParseConfig('pkg-config libenet --cflags --libs')
+ if not env["builtin_enet"]:
+ env.ParseConfig("pkg-config libenet --cflags --libs")
- if not env['builtin_squish']:
- env.ParseConfig('pkg-config libsquish --cflags --libs')
+ if not env["builtin_squish"]:
+ env.ParseConfig("pkg-config libsquish --cflags --libs")
- if not env['builtin_zstd']:
- env.ParseConfig('pkg-config libzstd --cflags --libs')
+ if not env["builtin_zstd"]:
+ env.ParseConfig("pkg-config libzstd --cflags --libs")
# Sound and video libraries
# Keep the order as it triggers chained dependencies (ogg needed by others, etc.)
- if not env['builtin_libtheora']:
- env['builtin_libogg'] = False # Needed to link against system libtheora
- env['builtin_libvorbis'] = False # Needed to link against system libtheora
- env.ParseConfig('pkg-config theora theoradec --cflags --libs')
+ if not env["builtin_libtheora"]:
+ env["builtin_libogg"] = False # Needed to link against system libtheora
+ env["builtin_libvorbis"] = False # Needed to link against system libtheora
+ env.ParseConfig("pkg-config theora theoradec --cflags --libs")
else:
- list_of_x86 = ['x86_64', 'x86', 'i386', 'i586']
+ list_of_x86 = ["x86_64", "x86", "i386", "i586"]
if any(platform.machine() in s for s in list_of_x86):
env["x86_libtheora_opt_gcc"] = True
- if not env['builtin_libvpx']:
- env.ParseConfig('pkg-config vpx --cflags --libs')
+ if not env["builtin_libvpx"]:
+ env.ParseConfig("pkg-config vpx --cflags --libs")
- if not env['builtin_libvorbis']:
- env['builtin_libogg'] = False # Needed to link against system libvorbis
- env.ParseConfig('pkg-config vorbis vorbisfile --cflags --libs')
+ if not env["builtin_libvorbis"]:
+ env["builtin_libogg"] = False # Needed to link against system libvorbis
+ env.ParseConfig("pkg-config vorbis vorbisfile --cflags --libs")
- if not env['builtin_opus']:
- env['builtin_libogg'] = False # Needed to link against system opus
- env.ParseConfig('pkg-config opus opusfile --cflags --libs')
+ if not env["builtin_opus"]:
+ env["builtin_libogg"] = False # Needed to link against system opus
+ env.ParseConfig("pkg-config opus opusfile --cflags --libs")
- if not env['builtin_libogg']:
- env.ParseConfig('pkg-config ogg --cflags --libs')
+ if not env["builtin_libogg"]:
+ env.ParseConfig("pkg-config ogg --cflags --libs")
- if not env['builtin_libwebp']:
- env.ParseConfig('pkg-config libwebp --cflags --libs')
+ if not env["builtin_libwebp"]:
+ env.ParseConfig("pkg-config libwebp --cflags --libs")
- if not env['builtin_mbedtls']:
+ if not env["builtin_mbedtls"]:
# mbedTLS does not provide a pkgconfig config yet. See https://github.com/ARMmbed/mbedtls/issues/228
- env.Append(LIBS=['mbedtls', 'mbedcrypto', 'mbedx509'])
+ env.Append(LIBS=["mbedtls", "mbedcrypto", "mbedx509"])
- if not env['builtin_wslay']:
- env.ParseConfig('pkg-config libwslay --cflags --libs')
+ if not env["builtin_wslay"]:
+ env.ParseConfig("pkg-config libwslay --cflags --libs")
- if not env['builtin_miniupnpc']:
+ if not env["builtin_miniupnpc"]:
# No pkgconfig file so far, hardcode default paths.
env.Prepend(CPPPATH=["/usr/include/miniupnpc"])
env.Append(LIBS=["miniupnpc"])
# On Linux wchar_t should be 32-bits
# 16-bit library shouldn't be required due to compiler optimisations
- if not env['builtin_pcre2']:
- env.ParseConfig('pkg-config libpcre2-32 --cflags --libs')
+ if not env["builtin_pcre2"]:
+ env.ParseConfig("pkg-config libpcre2-32 --cflags --libs")
## Flags
# Linkflags below this line should typically stay the last ones
- if not env['builtin_zlib']:
- env.ParseConfig('pkg-config zlib --cflags --libs')
+ if not env["builtin_zlib"]:
+ env.ParseConfig("pkg-config zlib --cflags --libs")
- env.Prepend(CPPPATH=['#platform/server'])
- env.Append(CPPDEFINES=['SERVER_ENABLED', 'UNIX_ENABLED'])
+ env.Prepend(CPPPATH=["#platform/server"])
+ env.Append(CPPDEFINES=["SERVER_ENABLED", "UNIX_ENABLED"])
- if (platform.system() == "Darwin"):
- env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-lz', '-framework', 'IOKit'])
+ if platform.system() == "Darwin":
+ env.Append(LINKFLAGS=["-framework", "Cocoa", "-framework", "Carbon", "-lz", "-framework", "IOKit"])
- env.Append(LIBS=['pthread'])
+ env.Append(LIBS=["pthread"])
- if (platform.system() == "Linux"):
- env.Append(LIBS=['dl'])
+ if platform.system() == "Linux":
+ env.Append(LIBS=["dl"])
- if (platform.system().find("BSD") >= 0):
+ if platform.system().find("BSD") >= 0:
env["execinfo"] = True
if env["execinfo"]:
- env.Append(LIBS=['execinfo'])
+ env.Append(LIBS=["execinfo"])
# Link those statically for portability
- if env['use_static_cpp']:
- env.Append(LINKFLAGS=['-static-libgcc', '-static-libstdc++'])
+ if env["use_static_cpp"]:
+ env.Append(LINKFLAGS=["-static-libgcc", "-static-libstdc++"])
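
Both this file and platform/linuxbsd/detect.py compare the system Bullet version as text, which the patch keeps unchanged ("2.100" sorts before "2.89" lexicographically even though it is the newer release, and on Python 3 check_output() returns bytes). As an illustrative aside, not part of the patch, a numeric comparison could look like this:

import subprocess

# Illustrative sketch, not in the patch: compare pkg-config versions as
# integer tuples instead of text.
def pkg_version_at_least(pkg, minimum):
    def as_tuple(version):
        return tuple(int(part) for part in version.split("."))

    found = subprocess.check_output(["pkg-config", pkg, "--modversion"]).decode().strip()
    return as_tuple(found) >= as_tuple(minimum)

# Usage sketch:
# if not pkg_version_at_least("bullet", "2.89"):
#     sys.exit(255)
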
diff --git a/platform/uwp/SCsub b/platform/uwp/SCsub
index 620d8c3c3a2..4358b0eeadb 100644
--- a/platform/uwp/SCsub
+++ b/platform/uwp/SCsub
@@ -1,21 +1,21 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
files = [
- 'thread_uwp.cpp',
- '#platform/windows/key_mapping_windows.cpp',
- '#platform/windows/windows_terminal_logger.cpp',
- 'joypad_uwp.cpp',
- 'context_egl_uwp.cpp',
- 'app.cpp',
- 'os_uwp.cpp',
+ "thread_uwp.cpp",
+ "#platform/windows/key_mapping_windows.cpp",
+ "#platform/windows/windows_terminal_logger.cpp",
+ "joypad_uwp.cpp",
+ "context_egl_uwp.cpp",
+ "app.cpp",
+ "os_uwp.cpp",
]
if "build_angle" in env and env["build_angle"]:
- cmd = env.AlwaysBuild(env.ANGLE('libANGLE.lib', None))
+ cmd = env.AlwaysBuild(env.ANGLE("libANGLE.lib", None))
-prog = env.add_program('#bin/godot', files)
+prog = env.add_program("#bin/godot", files)
if "build_angle" in env and env["build_angle"]:
env.Depends(prog, [cmd])
diff --git a/platform/uwp/detect.py b/platform/uwp/detect.py
index 000bd18e7da..669bfe6814c 100644
--- a/platform/uwp/detect.py
+++ b/platform/uwp/detect.py
@@ -12,11 +12,11 @@ def get_name():
def can_build():
- if (os.name == "nt"):
+ if os.name == "nt":
# building natively on windows!
- if (os.getenv("VSINSTALLDIR")):
+ if os.getenv("VSINSTALLDIR"):
- if (os.getenv("ANGLE_SRC_PATH") is None):
+ if os.getenv("ANGLE_SRC_PATH") is None:
return False
return True
@@ -25,16 +25,16 @@ def can_build():
def get_opts():
return [
- ('msvc_version', 'MSVC version to use (ignored if the VCINSTALLDIR environment variable is set)', None),
+ ("msvc_version", "MSVC version to use (ignored if the VCINSTALLDIR environment variable is set)", None),
]
def get_flags():
return [
- ('tools', False),
- ('xaudio2', True),
- ('builtin_pcre2_with_jit', False),
+ ("tools", False),
+ ("xaudio2", True),
+ ("builtin_pcre2_with_jit", False),
]
@@ -42,45 +42,53 @@ def configure(env):
env.msvc = True
- if (env["bits"] != "default"):
+ if env["bits"] != "default":
print("Error: bits argument is disabled for MSVC")
- print("""
+ print(
+ """
Bits argument is not supported for MSVC compilation. Architecture depends on the Native/Cross Compile Tools Prompt/Developer Console
(or Visual Studio settings) that is being used to run SCons. As a consequence, bits argument is disabled. Run scons again without bits
argument (example: scons p=uwp) and SCons will attempt to detect what MSVC compiler will be executed and inform you.
- """)
+ """
+ )
sys.exit()
## Build type
- if (env["target"] == "release"):
- env.Append(CCFLAGS=['/O2', '/GL'])
- env.Append(CCFLAGS=['/MD'])
- env.Append(LINKFLAGS=['/SUBSYSTEM:WINDOWS', '/LTCG'])
+ if env["target"] == "release":
+ env.Append(CCFLAGS=["/O2", "/GL"])
+ env.Append(CCFLAGS=["/MD"])
+ env.Append(LINKFLAGS=["/SUBSYSTEM:WINDOWS", "/LTCG"])
- elif (env["target"] == "release_debug"):
- env.Append(CCFLAGS=['/O2', '/Zi'])
- env.Append(CCFLAGS=['/MD'])
- env.Append(CPPDEFINES=['DEBUG_ENABLED'])
- env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
+ elif env["target"] == "release_debug":
+ env.Append(CCFLAGS=["/O2", "/Zi"])
+ env.Append(CCFLAGS=["/MD"])
+ env.Append(CPPDEFINES=["DEBUG_ENABLED"])
+ env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"])
- elif (env["target"] == "debug"):
- env.Append(CCFLAGS=['/Zi'])
- env.Append(CCFLAGS=['/MDd'])
- env.Append(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED'])
- env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
- env.Append(LINKFLAGS=['/DEBUG'])
+ elif env["target"] == "debug":
+ env.Append(CCFLAGS=["/Zi"])
+ env.Append(CCFLAGS=["/MDd"])
+ env.Append(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"])
+ env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"])
+ env.Append(LINKFLAGS=["/DEBUG"])
## Compiler configuration
- env['ENV'] = os.environ
- vc_base_path = os.environ['VCTOOLSINSTALLDIR'] if "VCTOOLSINSTALLDIR" in os.environ else os.environ['VCINSTALLDIR']
+ env["ENV"] = os.environ
+ vc_base_path = os.environ["VCTOOLSINSTALLDIR"] if "VCTOOLSINSTALLDIR" in os.environ else os.environ["VCINSTALLDIR"]
# ANGLE
angle_root = os.getenv("ANGLE_SRC_PATH")
- env.Prepend(CPPPATH=[angle_root + '/include'])
+ env.Prepend(CPPPATH=[angle_root + "/include"])
jobs = str(env.GetOption("num_jobs"))
- angle_build_cmd = "msbuild.exe " + angle_root + "/winrt/10/src/angle.sln /nologo /v:m /m:" + jobs + " /p:Configuration=Release /p:Platform="
+ angle_build_cmd = (
+ "msbuild.exe "
+ + angle_root
+ + "/winrt/10/src/angle.sln /nologo /v:m /m:"
+ + jobs
+ + " /p:Configuration=Release /p:Platform="
+ )
if os.path.isfile(str(os.getenv("ANGLE_SRC_PATH")) + "/winrt/10/src/angle.sln"):
env["build_angle"] = True
@@ -88,49 +96,51 @@ def configure(env):
## Architecture
arch = ""
- if str(os.getenv('Platform')).lower() == "arm":
+ if str(os.getenv("Platform")).lower() == "arm":
print("Compiled program architecture will be an ARM executable. (forcing bits=32).")
arch = "arm"
env["bits"] = "32"
- env.Append(LINKFLAGS=['/MACHINE:ARM'])
- env.Append(LIBPATH=[vc_base_path + 'lib/store/arm'])
+ env.Append(LINKFLAGS=["/MACHINE:ARM"])
+ env.Append(LIBPATH=[vc_base_path + "lib/store/arm"])
angle_build_cmd += "ARM"
- env.Append(LIBPATH=[angle_root + '/winrt/10/src/Release_ARM/lib'])
+ env.Append(LIBPATH=[angle_root + "/winrt/10/src/Release_ARM/lib"])
else:
- compiler_version_str = methods.detect_visual_c_compiler_version(env['ENV'])
+ compiler_version_str = methods.detect_visual_c_compiler_version(env["ENV"])
- if(compiler_version_str == "amd64" or compiler_version_str == "x86_amd64"):
+ if compiler_version_str == "amd64" or compiler_version_str == "x86_amd64":
env["bits"] = "64"
print("Compiled program architecture will be a x64 executable (forcing bits=64).")
- elif (compiler_version_str == "x86" or compiler_version_str == "amd64_x86"):
+ elif compiler_version_str == "x86" or compiler_version_str == "amd64_x86":
env["bits"] = "32"
print("Compiled program architecture will be a x86 executable. (forcing bits=32).")
else:
- print("Failed to detect MSVC compiler architecture version... Defaulting to 32-bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. You should check your settings/compilation setup.")
+ print(
+ "Failed to detect MSVC compiler architecture version... Defaulting to 32-bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. You should check your settings/compilation setup."
+ )
env["bits"] = "32"
- if (env["bits"] == "32"):
+ if env["bits"] == "32":
arch = "x86"
angle_build_cmd += "Win32"
- env.Append(LINKFLAGS=['/MACHINE:X86'])
- env.Append(LIBPATH=[vc_base_path + 'lib/store'])
- env.Append(LIBPATH=[angle_root + '/winrt/10/src/Release_Win32/lib'])
+ env.Append(LINKFLAGS=["/MACHINE:X86"])
+ env.Append(LIBPATH=[vc_base_path + "lib/store"])
+ env.Append(LIBPATH=[angle_root + "/winrt/10/src/Release_Win32/lib"])
else:
arch = "x64"
angle_build_cmd += "x64"
- env.Append(LINKFLAGS=['/MACHINE:X64'])
- env.Append(LIBPATH=[os.environ['VCINSTALLDIR'] + 'lib/store/amd64'])
- env.Append(LIBPATH=[angle_root + '/winrt/10/src/Release_x64/lib'])
+ env.Append(LINKFLAGS=["/MACHINE:X64"])
+ env.Append(LIBPATH=[os.environ["VCINSTALLDIR"] + "lib/store/amd64"])
+ env.Append(LIBPATH=[angle_root + "/winrt/10/src/Release_x64/lib"])
env["PROGSUFFIX"] = "." + arch + env["PROGSUFFIX"]
env["OBJSUFFIX"] = "." + arch + env["OBJSUFFIX"]
@@ -138,39 +148,61 @@ def configure(env):
## Compile flags
- env.Prepend(CPPPATH=['#platform/uwp', '#drivers/windows'])
- env.Append(CPPDEFINES=['UWP_ENABLED', 'WINDOWS_ENABLED', 'TYPED_METHOD_BIND'])
- env.Append(CPPDEFINES=['GLES_ENABLED', 'GL_GLEXT_PROTOTYPES', 'EGL_EGLEXT_PROTOTYPES', 'ANGLE_ENABLED'])
- winver = "0x0602" # Windows 8 is the minimum target for UWP build
- env.Append(CPPDEFINES=[('WINVER', winver), ('_WIN32_WINNT', winver), 'WIN32'])
+ env.Prepend(CPPPATH=["#platform/uwp", "#drivers/windows"])
+ env.Append(CPPDEFINES=["UWP_ENABLED", "WINDOWS_ENABLED", "TYPED_METHOD_BIND"])
+ env.Append(CPPDEFINES=["GLES_ENABLED", "GL_GLEXT_PROTOTYPES", "EGL_EGLEXT_PROTOTYPES", "ANGLE_ENABLED"])
+ winver = "0x0602" # Windows 8 is the minimum target for UWP build
+ env.Append(CPPDEFINES=[("WINVER", winver), ("_WIN32_WINNT", winver), "WIN32"])
- env.Append(CPPDEFINES=['__WRL_NO_DEFAULT_LIB__', ('PNG_ABORT', 'abort')])
+ env.Append(CPPDEFINES=["__WRL_NO_DEFAULT_LIB__", ("PNG_ABORT", "abort")])
- env.Append(CPPFLAGS=['/AI', vc_base_path + 'lib/store/references'])
- env.Append(CPPFLAGS=['/AI', vc_base_path + 'lib/x86/store/references'])
+ env.Append(CPPFLAGS=["/AI", vc_base_path + "lib/store/references"])
+ env.Append(CPPFLAGS=["/AI", vc_base_path + "lib/x86/store/references"])
- env.Append(CCFLAGS='/FS /MP /GS /wd"4453" /wd"28204" /wd"4291" /Zc:wchar_t /Gm- /fp:precise /errorReport:prompt /WX- /Zc:forScope /Gd /EHsc /nologo'.split())
- env.Append(CPPDEFINES=['_UNICODE', 'UNICODE', ('WINAPI_FAMILY', 'WINAPI_FAMILY_APP')])
- env.Append(CXXFLAGS=['/ZW'])
- env.Append(CCFLAGS=['/AI', vc_base_path + '\\vcpackages', '/AI', os.environ['WINDOWSSDKDIR'] + '\\References\\CommonConfiguration\\Neutral'])
+ env.Append(
+ CCFLAGS='/FS /MP /GS /wd"4453" /wd"28204" /wd"4291" /Zc:wchar_t /Gm- /fp:precise /errorReport:prompt /WX- /Zc:forScope /Gd /EHsc /nologo'.split()
+ )
+ env.Append(CPPDEFINES=["_UNICODE", "UNICODE", ("WINAPI_FAMILY", "WINAPI_FAMILY_APP")])
+ env.Append(CXXFLAGS=["/ZW"])
+ env.Append(
+ CCFLAGS=[
+ "/AI",
+ vc_base_path + "\\vcpackages",
+ "/AI",
+ os.environ["WINDOWSSDKDIR"] + "\\References\\CommonConfiguration\\Neutral",
+ ]
+ )
## Link flags
- env.Append(LINKFLAGS=['/MANIFEST:NO', '/NXCOMPAT', '/DYNAMICBASE', '/WINMD', '/APPCONTAINER', '/ERRORREPORT:PROMPT', '/NOLOGO', '/TLBID:1', '/NODEFAULTLIB:"kernel32.lib"', '/NODEFAULTLIB:"ole32.lib"'])
+ env.Append(
+ LINKFLAGS=[
+ "/MANIFEST:NO",
+ "/NXCOMPAT",
+ "/DYNAMICBASE",
+ "/WINMD",
+ "/APPCONTAINER",
+ "/ERRORREPORT:PROMPT",
+ "/NOLOGO",
+ "/TLBID:1",
+ '/NODEFAULTLIB:"kernel32.lib"',
+ '/NODEFAULTLIB:"ole32.lib"',
+ ]
+ )
LIBS = [
- 'WindowsApp',
- 'mincore',
- 'ws2_32',
- 'libANGLE',
- 'libEGL',
- 'libGLESv2',
- 'bcrypt',
+ "WindowsApp",
+ "mincore",
+ "ws2_32",
+ "libANGLE",
+ "libEGL",
+ "libGLESv2",
+ "bcrypt",
]
env.Append(LINKFLAGS=[p + ".lib" for p in LIBS])
# Incremental linking fix
- env['BUILDERS']['ProgramOriginal'] = env['BUILDERS']['Program']
- env['BUILDERS']['Program'] = methods.precious_program
+ env["BUILDERS"]["ProgramOriginal"] = env["BUILDERS"]["Program"]
+ env["BUILDERS"]["Program"] = methods.precious_program
- env.Append(BUILDERS={'ANGLE': env.Builder(action=angle_build_cmd)})
+ env.Append(BUILDERS={"ANGLE": env.Builder(action=angle_build_cmd)})
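The ANGLE wiring is split across platform/uwp/detect.py and platform/uwp/SCsub above: detect.py registers a custom builder from a command string, and the SCsub forces it to run and makes the program depend on it. A toy SConstruct sketch of the same mechanism, where the solution path and program target are placeholders and the stock Program builder stands in for Godot's env.add_program:

    # Toy SConstruct sketch of the custom "ANGLE" builder used above (placeholder paths).
    import os

    env = Environment(ENV=os.environ)

    # detect.py side: register a command line as a named builder.
    angle_build_cmd = "msbuild.exe path/to/angle.sln /nologo /p:Configuration=Release /p:Platform=Win32"
    env.Append(BUILDERS={"ANGLE": env.Builder(action=angle_build_cmd)})

    # SCsub side: always run the builder and make the program depend on its output.
    cmd = env.AlwaysBuild(env.ANGLE("libANGLE.lib", None))
    prog = env.Program("#bin/toy", ["main.cpp"])  # stand-in for Godot's env.add_program
    env.Depends(prog, [cmd])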
diff --git a/platform/windows/SCsub b/platform/windows/SCsub
index 89ee4dfa7a7..daffe59f345 100644
--- a/platform/windows/SCsub
+++ b/platform/windows/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
import os
from platform_methods import run_in_subprocess
@@ -15,17 +15,17 @@ common_win = [
"joypad_windows.cpp",
"windows_terminal_logger.cpp",
"vulkan_context_win.cpp",
- "context_gl_windows.cpp"
+ "context_gl_windows.cpp",
]
-res_file = 'godot_res.rc'
+res_file = "godot_res.rc"
res_target = "godot_res" + env["OBJSUFFIX"]
res_obj = env.RES(res_target, res_file)
-prog = env.add_program('#bin/godot', common_win + res_obj, PROGSUFFIX=env["PROGSUFFIX"])
+prog = env.add_program("#bin/godot", common_win + res_obj, PROGSUFFIX=env["PROGSUFFIX"])
# Microsoft Visual Studio Project Generation
-if env['vsproj']:
+if env["vsproj"]:
env.vs_srcs = env.vs_srcs + ["platform/windows/" + res_file]
env.vs_srcs = env.vs_srcs + ["platform/windows/godot.natvis"]
for x in common_win:
diff --git a/platform/windows/detect.py b/platform/windows/detect.py
index cc859c0339f..9f79e92dcb8 100644
--- a/platform/windows/detect.py
+++ b/platform/windows/detect.py
@@ -14,10 +14,10 @@ def get_name():
def can_build():
- if (os.name == "nt"):
+ if os.name == "nt":
# Building natively on Windows
# If VCINSTALLDIR is set in the OS environ, use traditional Godot logic to set up MSVC
- if (os.getenv("VCINSTALLDIR")): # MSVC, manual setup
+ if os.getenv("VCINSTALLDIR"): # MSVC, manual setup
return True
# Otherwise, let SCons find MSVC if installed, or else Mingw.
@@ -26,18 +26,18 @@ def can_build():
# null compiler.
return True
- if (os.name == "posix"):
+ if os.name == "posix":
# Cross-compiling with MinGW-w64 (old MinGW32 is not supported)
mingw32 = "i686-w64-mingw32-"
mingw64 = "x86_64-w64-mingw32-"
- if (os.getenv("MINGW32_PREFIX")):
+ if os.getenv("MINGW32_PREFIX"):
mingw32 = os.getenv("MINGW32_PREFIX")
- if (os.getenv("MINGW64_PREFIX")):
+ if os.getenv("MINGW64_PREFIX"):
mingw64 = os.getenv("MINGW64_PREFIX")
test = "gcc --version > /dev/null 2>&1"
- if (os.system(mingw64 + test) == 0 or os.system(mingw32 + test) == 0):
+ if os.system(mingw64 + test) == 0 or os.system(mingw32 + test) == 0:
return True
return False
@@ -48,47 +48,47 @@ def get_opts():
mingw32 = ""
mingw64 = ""
- if (os.name == "posix"):
+ if os.name == "posix":
mingw32 = "i686-w64-mingw32-"
mingw64 = "x86_64-w64-mingw32-"
- if (os.getenv("MINGW32_PREFIX")):
+ if os.getenv("MINGW32_PREFIX"):
mingw32 = os.getenv("MINGW32_PREFIX")
- if (os.getenv("MINGW64_PREFIX")):
+ if os.getenv("MINGW64_PREFIX"):
mingw64 = os.getenv("MINGW64_PREFIX")
return [
- ('mingw_prefix_32', 'MinGW prefix (Win32)', mingw32),
- ('mingw_prefix_64', 'MinGW prefix (Win64)', mingw64),
+ ("mingw_prefix_32", "MinGW prefix (Win32)", mingw32),
+ ("mingw_prefix_64", "MinGW prefix (Win64)", mingw64),
# Targeted Windows version: 7 (and later), minimum supported version
# XP support dropped after EOL due to missing API for IPv6 and other issues
# Vista support dropped after EOL due to GH-10243
- ('target_win_version', 'Targeted Windows version, >= 0x0601 (Windows 7)', '0x0601'),
- EnumVariable('debug_symbols', 'Add debugging symbols to release builds', 'yes', ('yes', 'no', 'full')),
- BoolVariable('separate_debug_symbols', 'Create a separate file containing debugging symbols', False),
- ('msvc_version', 'MSVC version to use. Ignored if VCINSTALLDIR is set in shell env.', None),
- BoolVariable('use_mingw', 'Use the Mingw compiler, even if MSVC is installed. Only used on Windows.', False),
- BoolVariable('use_llvm', 'Use the LLVM compiler', False),
- BoolVariable('use_thinlto', 'Use ThinLTO', False),
+ ("target_win_version", "Targeted Windows version, >= 0x0601 (Windows 7)", "0x0601"),
+ EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")),
+ BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False),
+ ("msvc_version", "MSVC version to use. Ignored if VCINSTALLDIR is set in shell env.", None),
+ BoolVariable("use_mingw", "Use the Mingw compiler, even if MSVC is installed. Only used on Windows.", False),
+ BoolVariable("use_llvm", "Use the LLVM compiler", False),
+ BoolVariable("use_thinlto", "Use ThinLTO", False),
]
def get_flags():
- return [
- ]
+ return []
def build_res_file(target, source, env):
- if (env["bits"] == "32"):
- cmdbase = env['mingw_prefix_32']
+ if env["bits"] == "32":
+ cmdbase = env["mingw_prefix_32"]
else:
- cmdbase = env['mingw_prefix_64']
- cmdbase = cmdbase + 'windres --include-dir . '
+ cmdbase = env["mingw_prefix_64"]
+ cmdbase = cmdbase + "windres --include-dir . "
import subprocess
+
for x in range(len(source)):
- cmd = cmdbase + '-i ' + str(source[x]) + ' -o ' + str(target[x])
+ cmd = cmdbase + "-i " + str(source[x]) + " -o " + str(target[x])
try:
out = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE).communicate()
if len(out[1]):
@@ -100,12 +100,14 @@ def build_res_file(target, source, env):
def setup_msvc_manual(env):
"""Set up env to use MSVC manually, using VCINSTALLDIR"""
- if (env["bits"] != "default"):
- print("""
+ if env["bits"] != "default":
+ print(
+ """
Bits argument is not supported for MSVC compilation. Architecture depends on the Native/Cross Compile Tools Prompt/Developer Console
(or Visual Studio settings) that is being used to run SCons. As a consequence, bits argument is disabled. Run scons again without bits
argument (example: scons p=windows) and SCons will attempt to detect what MSVC compiler will be executed and inform you.
- """)
+ """
+ )
raise SCons.Errors.UserError("Bits argument should not be used when using VCINSTALLDIR")
# Force bits arg
@@ -114,18 +116,21 @@ def setup_msvc_manual(env):
env["x86_libtheora_opt_vc"] = True
# find compiler manually
- compiler_version_str = methods.detect_visual_c_compiler_version(env['ENV'])
+ compiler_version_str = methods.detect_visual_c_compiler_version(env["ENV"])
print("Found MSVC compiler: " + compiler_version_str)
# If building for 64bit architecture, disable assembly optimisations for 32 bit builds (theora as of writing)... vc compiler for 64bit can not compile _asm
- if(compiler_version_str == "amd64" or compiler_version_str == "x86_amd64"):
+ if compiler_version_str == "amd64" or compiler_version_str == "x86_amd64":
env["bits"] = "64"
env["x86_libtheora_opt_vc"] = False
print("Compiled program architecture will be a 64 bit executable (forcing bits=64).")
- elif (compiler_version_str == "x86" or compiler_version_str == "amd64_x86"):
+ elif compiler_version_str == "x86" or compiler_version_str == "amd64_x86":
print("Compiled program architecture will be a 32 bit executable. (forcing bits=32).")
else:
- print("Failed to manually detect MSVC compiler architecture version... Defaulting to 32bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. You should check your settings/compilation setup, or avoid setting VCINSTALLDIR.")
+ print(
+ "Failed to manually detect MSVC compiler architecture version... Defaulting to 32bit executable settings (forcing bits=32). Compilation attempt will continue, but SCons can not detect for what architecture this build is compiled for. You should check your settings/compilation setup, or avoid setting VCINSTALLDIR."
+ )
+
def setup_msvc_auto(env):
"""Set up MSVC using SCons's auto-detection logic"""
@@ -138,103 +143,127 @@ def setup_msvc_auto(env):
# (Ideally we'd decide on the tool config before configuring any
# environment, and just set the env up once, but this function runs
# on an existing env so this is the simplest way.)
- env['MSVC_SETUP_RUN'] = False # Need to set this to re-run the tool
- env['MSVS_VERSION'] = None
- env['MSVC_VERSION'] = None
- env['TARGET_ARCH'] = None
- if env['bits'] != 'default':
- env['TARGET_ARCH'] = {'32': 'x86', '64': 'x86_64'}[env['bits']]
- if env.has_key('msvc_version'):
- env['MSVC_VERSION'] = env['msvc_version']
- env.Tool('msvc')
- env.Tool('mssdk') # we want the MS SDK
+ env["MSVC_SETUP_RUN"] = False # Need to set this to re-run the tool
+ env["MSVS_VERSION"] = None
+ env["MSVC_VERSION"] = None
+ env["TARGET_ARCH"] = None
+ if env["bits"] != "default":
+ env["TARGET_ARCH"] = {"32": "x86", "64": "x86_64"}[env["bits"]]
+ if env.has_key("msvc_version"):
+ env["MSVC_VERSION"] = env["msvc_version"]
+ env.Tool("msvc")
+ env.Tool("mssdk") # we want the MS SDK
# Note: actual compiler version can be found in env['MSVC_VERSION'], e.g. "14.1" for VS2015
# Get actual target arch into bits (it may be "default" at this point):
- if env['TARGET_ARCH'] in ('amd64', 'x86_64'):
- env['bits'] = '64'
+ if env["TARGET_ARCH"] in ("amd64", "x86_64"):
+ env["bits"] = "64"
else:
- env['bits'] = '32'
- print("Found MSVC version %s, arch %s, bits=%s" % (env['MSVC_VERSION'], env['TARGET_ARCH'], env['bits']))
- if env['TARGET_ARCH'] in ('amd64', 'x86_64'):
+ env["bits"] = "32"
+ print("Found MSVC version %s, arch %s, bits=%s" % (env["MSVC_VERSION"], env["TARGET_ARCH"], env["bits"]))
+ if env["TARGET_ARCH"] in ("amd64", "x86_64"):
env["x86_libtheora_opt_vc"] = False
+
def setup_mingw(env):
"""Set up env for use with mingw"""
# Nothing to do here
print("Using MinGW")
pass
+
def configure_msvc(env, manual_msvc_config):
"""Configure env to work with MSVC"""
# Build type
- if (env["target"] == "release"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Append(CCFLAGS=['/O2'])
- else: # optimize for size
- env.Append(CCFLAGS=['/O1'])
- env.Append(LINKFLAGS=['/SUBSYSTEM:WINDOWS'])
- env.Append(LINKFLAGS=['/ENTRY:mainCRTStartup'])
- env.Append(LINKFLAGS=['/OPT:REF'])
+ if env["target"] == "release":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Append(CCFLAGS=["/O2"])
+ else: # optimize for size
+ env.Append(CCFLAGS=["/O1"])
+ env.Append(LINKFLAGS=["/SUBSYSTEM:WINDOWS"])
+ env.Append(LINKFLAGS=["/ENTRY:mainCRTStartup"])
+ env.Append(LINKFLAGS=["/OPT:REF"])
- elif (env["target"] == "release_debug"):
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Append(CCFLAGS=['/O2'])
- else: # optimize for size
- env.Append(CCFLAGS=['/O1'])
- env.AppendUnique(CPPDEFINES = ['DEBUG_ENABLED'])
- env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
- env.Append(LINKFLAGS=['/OPT:REF'])
+ elif env["target"] == "release_debug":
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Append(CCFLAGS=["/O2"])
+ else: # optimize for size
+ env.Append(CCFLAGS=["/O1"])
+ env.AppendUnique(CPPDEFINES=["DEBUG_ENABLED"])
+ env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"])
+ env.Append(LINKFLAGS=["/OPT:REF"])
- elif (env["target"] == "debug"):
- env.AppendUnique(CCFLAGS=['/Z7', '/Od', '/EHsc'])
- env.AppendUnique(CPPDEFINES = ['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED',
- 'D3D_DEBUG_INFO'])
- env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
- env.Append(LINKFLAGS=['/DEBUG'])
+ elif env["target"] == "debug":
+ env.AppendUnique(CCFLAGS=["/Z7", "/Od", "/EHsc"])
+ env.AppendUnique(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED", "D3D_DEBUG_INFO"])
+ env.Append(LINKFLAGS=["/SUBSYSTEM:CONSOLE"])
+ env.Append(LINKFLAGS=["/DEBUG"])
- if (env["debug_symbols"] == "full" or env["debug_symbols"] == "yes"):
- env.AppendUnique(CCFLAGS=['/Z7'])
- env.AppendUnique(LINKFLAGS=['/DEBUG'])
+ if env["debug_symbols"] == "full" or env["debug_symbols"] == "yes":
+ env.AppendUnique(CCFLAGS=["/Z7"])
+ env.AppendUnique(LINKFLAGS=["/DEBUG"])
## Compile/link flags
- env.AppendUnique(CCFLAGS=['/MT', '/Gd', '/GR', '/nologo'])
- if int(env['MSVC_VERSION'].split('.')[0]) >= 14: #vs2015 and later
- env.AppendUnique(CCFLAGS=['/utf-8'])
- env.AppendUnique(CXXFLAGS=['/TP']) # assume all sources are C++
- if manual_msvc_config: # should be automatic if SCons found it
+ env.AppendUnique(CCFLAGS=["/MT", "/Gd", "/GR", "/nologo"])
+ if int(env["MSVC_VERSION"].split(".")[0]) >= 14: # vs2015 and later
+ env.AppendUnique(CCFLAGS=["/utf-8"])
+ env.AppendUnique(CXXFLAGS=["/TP"]) # assume all sources are C++
+ if manual_msvc_config: # should be automatic if SCons found it
if os.getenv("WindowsSdkDir") is not None:
env.Prepend(CPPPATH=[os.getenv("WindowsSdkDir") + "/Include"])
else:
print("Missing environment variable: WindowsSdkDir")
- env.AppendUnique(CPPDEFINES = ['WINDOWS_ENABLED',
- 'WASAPI_ENABLED', 'WINMIDI_ENABLED',
- 'TYPED_METHOD_BIND',
- 'WIN32', 'MSVC',
- 'WINVER=%s' % env["target_win_version"],
- '_WIN32_WINNT=%s' % env["target_win_version"]])
- env.AppendUnique(CPPDEFINES=['NOMINMAX']) # disable bogus min/max WinDef.h macros
+ env.AppendUnique(
+ CPPDEFINES=[
+ "WINDOWS_ENABLED",
+ "WASAPI_ENABLED",
+ "WINMIDI_ENABLED",
+ "TYPED_METHOD_BIND",
+ "WIN32",
+ "MSVC",
+ "WINVER=%s" % env["target_win_version"],
+ "_WIN32_WINNT=%s" % env["target_win_version"],
+ ]
+ )
+ env.AppendUnique(CPPDEFINES=["NOMINMAX"]) # disable bogus min/max WinDef.h macros
if env["bits"] == "64":
- env.AppendUnique(CPPDEFINES=['_WIN64'])
+ env.AppendUnique(CPPDEFINES=["_WIN64"])
## Libs
- LIBS = ['winmm', 'dsound', 'kernel32', 'ole32', 'oleaut32',
- 'user32', 'gdi32', 'IPHLPAPI', 'Shlwapi', 'wsock32', 'Ws2_32',
- 'shell32', 'advapi32', 'dinput8', 'dxguid', 'imm32', 'bcrypt', 'Avrt',
- 'dwmapi']
+ LIBS = [
+ "winmm",
+ "dsound",
+ "kernel32",
+ "ole32",
+ "oleaut32",
+ "user32",
+ "gdi32",
+ "IPHLPAPI",
+ "Shlwapi",
+ "wsock32",
+ "Ws2_32",
+ "shell32",
+ "advapi32",
+ "dinput8",
+ "dxguid",
+ "imm32",
+ "bcrypt",
+ "Avrt",
+ "dwmapi",
+ ]
- env.AppendUnique(CPPDEFINES=['VULKAN_ENABLED'])
- if not env['builtin_vulkan']:
- LIBS += ['vulkan']
+ env.AppendUnique(CPPDEFINES=["VULKAN_ENABLED"])
+ if not env["builtin_vulkan"]:
+ LIBS += ["vulkan"]
else:
- LIBS += ['cfgmgr32']
+ LIBS += ["cfgmgr32"]
- #env.AppendUnique(CPPDEFINES = ['OPENGL_ENABLED'])
- LIBS += ['opengl32']
+ # env.AppendUnique(CPPDEFINES = ['OPENGL_ENABLED'])
+ LIBS += ["opengl32"]
env.Append(LINKFLAGS=[p + env["LIBSUFFIX"] for p in LIBS])
@@ -246,23 +275,24 @@ def configure_msvc(env, manual_msvc_config):
## LTO
- if (env["use_lto"]):
- env.AppendUnique(CCFLAGS=['/GL'])
- env.AppendUnique(ARFLAGS=['/LTCG'])
+ if env["use_lto"]:
+ env.AppendUnique(CCFLAGS=["/GL"])
+ env.AppendUnique(ARFLAGS=["/LTCG"])
if env["progress"]:
- env.AppendUnique(LINKFLAGS=['/LTCG:STATUS'])
+ env.AppendUnique(LINKFLAGS=["/LTCG:STATUS"])
else:
- env.AppendUnique(LINKFLAGS=['/LTCG'])
+ env.AppendUnique(LINKFLAGS=["/LTCG"])
if manual_msvc_config:
env.Prepend(CPPPATH=[p for p in os.getenv("INCLUDE").split(";")])
env.Append(LIBPATH=[p for p in os.getenv("LIB").split(";")])
# Incremental linking fix
- env['BUILDERS']['ProgramOriginal'] = env['BUILDERS']['Program']
- env['BUILDERS']['Program'] = methods.precious_program
+ env["BUILDERS"]["ProgramOriginal"] = env["BUILDERS"]["Program"]
+ env["BUILDERS"]["Program"] = methods.precious_program
- env.AppendUnique(LINKFLAGS=['/STACK:' + str(STACK_SIZE)])
+ env.AppendUnique(LINKFLAGS=["/STACK:" + str(STACK_SIZE)])
+
def configure_mingw(env):
# Workaround for MinGW. See:
@@ -271,125 +301,148 @@ def configure_mingw(env):
## Build type
- if (env["target"] == "release"):
- env.Append(CCFLAGS=['-msse2'])
+ if env["target"] == "release":
+ env.Append(CCFLAGS=["-msse2"])
- if (env["optimize"] == "speed"): #optimize for speed (default)
- if (env["bits"] == "64"):
- env.Append(CCFLAGS=['-O3'])
+ if env["optimize"] == "speed": # optimize for speed (default)
+ if env["bits"] == "64":
+ env.Append(CCFLAGS=["-O3"])
else:
- env.Append(CCFLAGS=['-O2'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os'])
+ env.Append(CCFLAGS=["-O2"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os"])
- env.Append(LINKFLAGS=['-Wl,--subsystem,windows'])
+ env.Append(LINKFLAGS=["-Wl,--subsystem,windows"])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
- elif (env["target"] == "release_debug"):
- env.Append(CCFLAGS=['-O2'])
- env.Append(CPPDEFINES=['DEBUG_ENABLED'])
- if (env["debug_symbols"] == "yes"):
- env.Prepend(CCFLAGS=['-g1'])
- if (env["debug_symbols"] == "full"):
- env.Prepend(CCFLAGS=['-g2'])
- if (env["optimize"] == "speed"): #optimize for speed (default)
- env.Append(CCFLAGS=['-O2'])
- else: #optimize for size
- env.Prepend(CCFLAGS=['-Os'])
+ elif env["target"] == "release_debug":
+ env.Append(CCFLAGS=["-O2"])
+ env.Append(CPPDEFINES=["DEBUG_ENABLED"])
+ if env["debug_symbols"] == "yes":
+ env.Prepend(CCFLAGS=["-g1"])
+ if env["debug_symbols"] == "full":
+ env.Prepend(CCFLAGS=["-g2"])
+ if env["optimize"] == "speed": # optimize for speed (default)
+ env.Append(CCFLAGS=["-O2"])
+ else: # optimize for size
+ env.Prepend(CCFLAGS=["-Os"])
-
- elif (env["target"] == "debug"):
- env.Append(CCFLAGS=['-g3'])
- env.Append(CPPDEFINES=['DEBUG_ENABLED', 'DEBUG_MEMORY_ENABLED'])
+ elif env["target"] == "debug":
+ env.Append(CCFLAGS=["-g3"])
+ env.Append(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"])
## Compiler configuration
if os.name != "nt":
env["PROGSUFFIX"] = env["PROGSUFFIX"] + ".exe" # for linux cross-compilation
- if (env["bits"] == "default"):
- if (os.name == "nt"):
+ if env["bits"] == "default":
+ if os.name == "nt":
env["bits"] = "64" if "PROGRAMFILES(X86)" in os.environ else "32"
- else: # default to 64-bit on Linux
+ else: # default to 64-bit on Linux
env["bits"] = "64"
mingw_prefix = ""
- if (env["bits"] == "32"):
- env.Append(LINKFLAGS=['-static'])
- env.Append(LINKFLAGS=['-static-libgcc'])
- env.Append(LINKFLAGS=['-static-libstdc++'])
+ if env["bits"] == "32":
+ env.Append(LINKFLAGS=["-static"])
+ env.Append(LINKFLAGS=["-static-libgcc"])
+ env.Append(LINKFLAGS=["-static-libstdc++"])
mingw_prefix = env["mingw_prefix_32"]
else:
- env.Append(LINKFLAGS=['-static'])
+ env.Append(LINKFLAGS=["-static"])
mingw_prefix = env["mingw_prefix_64"]
- if env['use_llvm']:
+ if env["use_llvm"]:
env["CC"] = mingw_prefix + "clang"
- env['AS'] = mingw_prefix + "as"
+ env["AS"] = mingw_prefix + "as"
env["CXX"] = mingw_prefix + "clang++"
- env['AR'] = mingw_prefix + "ar"
- env['RANLIB'] = mingw_prefix + "ranlib"
+ env["AR"] = mingw_prefix + "ar"
+ env["RANLIB"] = mingw_prefix + "ranlib"
env["LINK"] = mingw_prefix + "clang++"
else:
env["CC"] = mingw_prefix + "gcc"
- env['AS'] = mingw_prefix + "as"
- env['CXX'] = mingw_prefix + "g++"
- env['AR'] = mingw_prefix + "gcc-ar"
- env['RANLIB'] = mingw_prefix + "gcc-ranlib"
- env['LINK'] = mingw_prefix + "g++"
+ env["AS"] = mingw_prefix + "as"
+ env["CXX"] = mingw_prefix + "g++"
+ env["AR"] = mingw_prefix + "gcc-ar"
+ env["RANLIB"] = mingw_prefix + "gcc-ranlib"
+ env["LINK"] = mingw_prefix + "g++"
env["x86_libtheora_opt_gcc"] = True
- if env['use_lto']:
- if not env['use_llvm'] and env.GetOption("num_jobs") > 1:
- env.Append(CCFLAGS=['-flto'])
- env.Append(LINKFLAGS=['-flto=' + str(env.GetOption("num_jobs"))])
+ if env["use_lto"]:
+ if not env["use_llvm"] and env.GetOption("num_jobs") > 1:
+ env.Append(CCFLAGS=["-flto"])
+ env.Append(LINKFLAGS=["-flto=" + str(env.GetOption("num_jobs"))])
else:
- if env['use_thinlto']:
- env.Append(CCFLAGS=['-flto=thin'])
- env.Append(LINKFLAGS=['-flto=thin'])
+ if env["use_thinlto"]:
+ env.Append(CCFLAGS=["-flto=thin"])
+ env.Append(LINKFLAGS=["-flto=thin"])
else:
- env.Append(CCFLAGS=['-flto'])
- env.Append(LINKFLAGS=['-flto'])
+ env.Append(CCFLAGS=["-flto"])
+ env.Append(LINKFLAGS=["-flto"])
- env.Append(LINKFLAGS=['-Wl,--stack,' + str(STACK_SIZE)])
+ env.Append(LINKFLAGS=["-Wl,--stack," + str(STACK_SIZE)])
## Compile flags
- env.Append(CCFLAGS=['-mwindows'])
+ env.Append(CCFLAGS=["-mwindows"])
- env.Append(CPPDEFINES=['WINDOWS_ENABLED', 'WASAPI_ENABLED', 'WINMIDI_ENABLED'])
- env.Append(CPPDEFINES=[('WINVER', env['target_win_version']), ('_WIN32_WINNT', env['target_win_version'])])
- env.Append(LIBS=['mingw32', 'dsound', 'ole32', 'd3d9', 'winmm', 'gdi32', 'iphlpapi', 'shlwapi', 'wsock32', 'ws2_32', 'kernel32', 'oleaut32', 'dinput8', 'dxguid', 'ksuser', 'imm32', 'bcrypt', 'avrt', 'uuid', 'dwmapi'])
+ env.Append(CPPDEFINES=["WINDOWS_ENABLED", "WASAPI_ENABLED", "WINMIDI_ENABLED"])
+ env.Append(CPPDEFINES=[("WINVER", env["target_win_version"]), ("_WIN32_WINNT", env["target_win_version"])])
+ env.Append(
+ LIBS=[
+ "mingw32",
+ "dsound",
+ "ole32",
+ "d3d9",
+ "winmm",
+ "gdi32",
+ "iphlpapi",
+ "shlwapi",
+ "wsock32",
+ "ws2_32",
+ "kernel32",
+ "oleaut32",
+ "dinput8",
+ "dxguid",
+ "ksuser",
+ "imm32",
+ "bcrypt",
+ "avrt",
+ "uuid",
+ "dwmapi",
+ ]
+ )
- env.Append(CPPDEFINES=['VULKAN_ENABLED'])
- if not env['builtin_vulkan']:
- env.Append(LIBS=['vulkan'])
+ env.Append(CPPDEFINES=["VULKAN_ENABLED"])
+ if not env["builtin_vulkan"]:
+ env.Append(LIBS=["vulkan"])
else:
- env.Append(LIBS=['cfgmgr32'])
+ env.Append(LIBS=["cfgmgr32"])
## TODO !!! Reenable when OpenGLES Rendering Device is implemented !!!
- #env.Append(CPPDEFINES=['OPENGL_ENABLED'])
- env.Append(LIBS=['opengl32'])
+ # env.Append(CPPDEFINES=['OPENGL_ENABLED'])
+ env.Append(LIBS=["opengl32"])
- env.Append(CPPDEFINES=['MINGW_ENABLED', ('MINGW_HAS_SECURE_API', 1)])
+ env.Append(CPPDEFINES=["MINGW_ENABLED", ("MINGW_HAS_SECURE_API", 1)])
# resrc
- env.Append(BUILDERS={'RES': env.Builder(action=build_res_file, suffix='.o', src_suffix='.rc')})
+ env.Append(BUILDERS={"RES": env.Builder(action=build_res_file, suffix=".o", src_suffix=".rc")})
+
def configure(env):
# At this point the env has been set up with basic tools/compilers.
- env.Prepend(CPPPATH=['#platform/windows'])
+ env.Prepend(CPPPATH=["#platform/windows"])
- print("Configuring for Windows: target=%s, bits=%s" % (env['target'], env['bits']))
+ print("Configuring for Windows: target=%s, bits=%s" % (env["target"], env["bits"]))
- if (os.name == "nt"):
- env['ENV'] = os.environ # this makes build less repeatable, but simplifies some things
- env['ENV']['TMP'] = os.environ['TMP']
+ if os.name == "nt":
+ env["ENV"] = os.environ # this makes build less repeatable, but simplifies some things
+ env["ENV"]["TMP"] = os.environ["TMP"]
# First figure out which compiler, version, and target arch we're using
if os.getenv("VCINSTALLDIR") and not env["use_mingw"]:
@@ -397,7 +450,7 @@ def configure(env):
setup_msvc_manual(env)
env.msvc = True
manual_msvc_config = True
- elif env.get('MSVC_VERSION', '') and not env["use_mingw"]:
+ elif env.get("MSVC_VERSION", "") and not env["use_mingw"]:
setup_msvc_auto(env)
env.msvc = True
manual_msvc_config = False
@@ -409,5 +462,5 @@ def configure(env):
if env.msvc:
configure_msvc(env, manual_msvc_config)
- else: # MinGW
+ else: # MinGW
configure_mingw(env)
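The can_build()/get_opts() logic reformatted above boils down to probing for a working MinGW-w64 cross toolchain by prefix. A standalone sketch of that probe in plain Python, outside SCons; the function name is illustrative:

    # Standalone sketch of the MinGW prefix probe used in can_build()/get_opts() above.
    import os

    def find_mingw_prefixes():
        mingw32 = os.getenv("MINGW32_PREFIX", "i686-w64-mingw32-")
        mingw64 = os.getenv("MINGW64_PREFIX", "x86_64-w64-mingw32-")
        test = "gcc --version > /dev/null 2>&1"
        # A prefix is usable when "<prefix>gcc --version" exits with status 0.
        return [p for p in (mingw64, mingw32) if os.system(p + test) == 0]

    if __name__ == "__main__":
        print(find_mingw_prefixes())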
diff --git a/platform/windows/platform_windows_builders.py b/platform/windows/platform_windows_builders.py
index a1ad3b8b509..22e33b51b49 100644
--- a/platform/windows/platform_windows_builders.py
+++ b/platform/windows/platform_windows_builders.py
@@ -9,14 +9,14 @@ from platform_methods import subprocess_main
def make_debug_mingw(target, source, env):
mingw_prefix = ""
- if (env["bits"] == "32"):
+ if env["bits"] == "32":
mingw_prefix = env["mingw_prefix_32"]
else:
mingw_prefix = env["mingw_prefix_64"]
- os.system(mingw_prefix + 'objcopy --only-keep-debug {0} {0}.debugsymbols'.format(target[0]))
- os.system(mingw_prefix + 'strip --strip-debug --strip-unneeded {0}'.format(target[0]))
- os.system(mingw_prefix + 'objcopy --add-gnu-debuglink={0}.debugsymbols {0}'.format(target[0]))
+ os.system(mingw_prefix + "objcopy --only-keep-debug {0} {0}.debugsymbols".format(target[0]))
+ os.system(mingw_prefix + "strip --strip-debug --strip-unneeded {0}".format(target[0]))
+ os.system(mingw_prefix + "objcopy --add-gnu-debuglink={0}.debugsymbols {0}".format(target[0]))
-if __name__ == '__main__':
+if __name__ == "__main__":
subprocess_main(globals())
diff --git a/platform_methods.py b/platform_methods.py
index eed76bc8a87..805d7de82a0 100644
--- a/platform_methods.py
+++ b/platform_methods.py
@@ -11,7 +11,6 @@ JSON_SERIALIZABLE_TYPES = (bool, int, float, str)
def run_in_subprocess(builder_function):
-
@functools.wraps(builder_function)
def wrapper(target, source, env):
@@ -20,38 +19,36 @@ def run_in_subprocess(builder_function):
source = [node.srcnode().abspath for node in source]
# Short circuit on non-Windows platforms, no need to run in subprocess
- if sys.platform not in ('win32', 'cygwin'):
+ if sys.platform not in ("win32", "cygwin"):
return builder_function(target, source, env)
# Identify module
module_name = builder_function.__module__
function_name = builder_function.__name__
module_path = sys.modules[module_name].__file__
- if module_path.endswith('.pyc') or module_path.endswith('.pyo'):
+ if module_path.endswith(".pyc") or module_path.endswith(".pyo"):
module_path = module_path[:-1]
# Subprocess environment
subprocess_env = os.environ.copy()
- subprocess_env['PYTHONPATH'] = os.pathsep.join([os.getcwd()] + sys.path)
+ subprocess_env["PYTHONPATH"] = os.pathsep.join([os.getcwd()] + sys.path)
# Keep only JSON serializable environment items
- filtered_env = dict(
- (key, value)
- for key, value in env.items()
- if isinstance(value, JSON_SERIALIZABLE_TYPES)
- )
+ filtered_env = dict((key, value) for key, value in env.items() if isinstance(value, JSON_SERIALIZABLE_TYPES))
# Save parameters
args = (target, source, filtered_env)
data = dict(fn=function_name, args=args)
- json_path = os.path.join(os.environ['TMP'], uuid.uuid4().hex + '.json')
- with open(json_path, 'wt') as json_file:
+ json_path = os.path.join(os.environ["TMP"], uuid.uuid4().hex + ".json")
+ with open(json_path, "wt") as json_file:
json.dump(data, json_file, indent=2)
json_file_size = os.stat(json_path).st_size
- print('Executing builder function in subprocess: '
- 'module_path=%r, parameter_file=%r, parameter_file_size=%r, target=%r, source=%r' % (
- module_path, json_path, json_file_size, target, source))
+ print(
+ "Executing builder function in subprocess: "
+ "module_path=%r, parameter_file=%r, parameter_file_size=%r, target=%r, source=%r"
+ % (module_path, json_path, json_file_size, target, source)
+ )
try:
exit_code = subprocess.call([sys.executable, module_path, json_path], env=subprocess_env)
finally:
@@ -59,13 +56,15 @@ def run_in_subprocess(builder_function):
os.remove(json_path)
except (OSError, IOError) as e:
# Do not fail the entire build if it cannot delete a temporary file
- print('WARNING: Could not delete temporary file: path=%r; [%s] %s' %
- (json_path, e.__class__.__name__, e))
+ print(
+ "WARNING: Could not delete temporary file: path=%r; [%s] %s" % (json_path, e.__class__.__name__, e)
+ )
# Must succeed
if exit_code:
raise RuntimeError(
- 'Failed to run builder function in subprocess: module_path=%r; data=%r' % (module_path, data))
+ "Failed to run builder function in subprocess: module_path=%r; data=%r" % (module_path, data)
+ )
return wrapper
@@ -75,5 +74,5 @@ def subprocess_main(namespace):
with open(sys.argv[1]) as json_file:
data = json.load(json_file)
- fn = namespace[data['fn']]
- fn(*data['args'])
+ fn = namespace[data["fn"]]
+ fn(*data["args"])
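As a usage note for the run_in_subprocess()/subprocess_main() pair reformatted above: a builder module only needs to expose a plain function and call subprocess_main(globals()) when executed directly, as platform_windows_builders.py does. A minimal sketch, where make_stamp is an illustrative name rather than an existing builder:

    # Minimal sketch of a builder module compatible with run_in_subprocess()/subprocess_main().
    from platform_methods import subprocess_main

    def make_stamp(target, source, env):
        # When re-run in a subprocess, target/source arrive as plain path strings and
        # env as the JSON-serializable subset of the SCons environment.
        with open(target[0], "w") as f:
            f.write("%d source file(s), target=%s\n" % (len(source), env.get("target", "?")))

    if __name__ == "__main__":
        subprocess_main(globals())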
diff --git a/scene/2d/SCsub b/scene/2d/SCsub
index b01e2fd54de..fc61250247d 100644
--- a/scene/2d/SCsub
+++ b/scene/2d/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/3d/SCsub b/scene/3d/SCsub
index 45b96bd9c47..ce69e8aa192 100644
--- a/scene/3d/SCsub
+++ b/scene/3d/SCsub
@@ -1,8 +1,8 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
-if env['disable_3d']:
+if env["disable_3d"]:
env.add_source_files(env.scene_sources, "node_3d.cpp")
env.add_source_files(env.scene_sources, "skeleton_3d.cpp")
else:
diff --git a/scene/SCsub b/scene/SCsub
index 1c5b87b87aa..f9fc00f3f24 100644
--- a/scene/SCsub
+++ b/scene/SCsub
@@ -1,16 +1,16 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.scene_sources = []
# Thirdparty code
thirdparty_dir = "#thirdparty/misc/"
thirdparty_sources = [
- # C++ sources
- "easing_equations.cpp",
- # C sources
- "mikktspace.c",
+ # C++ sources
+ "easing_equations.cpp",
+ # C sources
+ "mikktspace.c",
]
thirdparty_sources = [thirdparty_dir + file for file in thirdparty_sources]
@@ -23,14 +23,14 @@ env.add_source_files(env.scene_sources, "*.cpp")
# Chain load SCsubs
-SConscript('main/SCsub')
-SConscript('gui/SCsub')
-SConscript('3d/SCsub')
-SConscript('2d/SCsub')
-SConscript('animation/SCsub')
-SConscript('audio/SCsub')
-SConscript('resources/SCsub')
-SConscript('debugger/SCsub')
+SConscript("main/SCsub")
+SConscript("gui/SCsub")
+SConscript("3d/SCsub")
+SConscript("2d/SCsub")
+SConscript("animation/SCsub")
+SConscript("audio/SCsub")
+SConscript("resources/SCsub")
+SConscript("debugger/SCsub")
# Build it all as a library
diff --git a/scene/animation/SCsub b/scene/animation/SCsub
index b01e2fd54de..fc61250247d 100644
--- a/scene/animation/SCsub
+++ b/scene/animation/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/audio/SCsub b/scene/audio/SCsub
index b01e2fd54de..fc61250247d 100644
--- a/scene/audio/SCsub
+++ b/scene/audio/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/debugger/SCsub b/scene/debugger/SCsub
index b01e2fd54de..fc61250247d 100644
--- a/scene/debugger/SCsub
+++ b/scene/debugger/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/gui/SCsub b/scene/gui/SCsub
index b01e2fd54de..fc61250247d 100644
--- a/scene/gui/SCsub
+++ b/scene/gui/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/main/SCsub b/scene/main/SCsub
index b01e2fd54de..fc61250247d 100644
--- a/scene/main/SCsub
+++ b/scene/main/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/resources/SCsub b/scene/resources/SCsub
index 5e5b6f8fd51..3a86b228351 100644
--- a/scene/resources/SCsub
+++ b/scene/resources/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/resources/default_theme/SCsub b/scene/resources/default_theme/SCsub
index b01e2fd54de..fc61250247d 100644
--- a/scene/resources/default_theme/SCsub
+++ b/scene/resources/default_theme/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.scene_sources, "*.cpp")
diff --git a/scene/resources/default_theme/make_header.py b/scene/resources/default_theme/make_header.py
index cf0ccf1c3ab..efad3b28159 100755
--- a/scene/resources/default_theme/make_header.py
+++ b/scene/resources/default_theme/make_header.py
@@ -13,7 +13,7 @@ os.chdir(os.path.dirname(os.path.realpath(__file__)))
f = open("theme_data.h", "wb")
-f.write(b"// THIS FILE HAS BEEN AUTOGENERATED, DON\'T EDIT!!\n")
+f.write(b"// THIS FILE HAS BEEN AUTOGENERATED, DON'T EDIT!!\n")
# Generate png image block
f.write(b"\n// png image block\n")
@@ -31,17 +31,17 @@ for x in pixmaps:
pngf = open(x, "rb")
b = pngf.read(1)
- while(len(b) == 1):
+ while len(b) == 1:
f.write(hex(ord(b)).encode(enc))
b = pngf.read(1)
- if (len(b) == 1):
+ if len(b) == 1:
f.write(b", ")
f.write(b"\n};\n")
pngf.close()
# Generate shaders block
-f.write(b"\n// shaders block\n");
+f.write(b"\n// shaders block\n")
shaders = glob.glob("*.gsl")
shaders.sort()
@@ -56,15 +56,15 @@ for x in shaders:
sf = open(x, "rb")
b = sf.readline()
- while(b != ""):
- if (b.endswith("\r\n")):
+ while b != "":
+ if b.endswith("\r\n"):
b = b[:-2]
- if (b.endswith("\n")):
+ if b.endswith("\n"):
b = b[:-1]
- s = ' \"' + b
+ s = ' "' + b
f.write(s.encode(enc))
b = sf.readline()
- if (b != ""):
+ if b != "":
f.write(b'"\n')
f.write(b'";\n')
diff --git a/servers/SCsub b/servers/SCsub
index 7706b99d876..7080a110da0 100644
--- a/servers/SCsub
+++ b/servers/SCsub
@@ -1,16 +1,16 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.servers_sources = []
env.add_source_files(env.servers_sources, "*.cpp")
-SConscript('arvr/SCsub')
-SConscript('camera/SCsub')
-SConscript('physics_3d/SCsub')
-SConscript('physics_2d/SCsub')
-SConscript('rendering/SCsub')
-SConscript('audio/SCsub')
+SConscript("arvr/SCsub")
+SConscript("camera/SCsub")
+SConscript("physics_3d/SCsub")
+SConscript("physics_2d/SCsub")
+SConscript("rendering/SCsub")
+SConscript("audio/SCsub")
lib = env.add_library("servers", env.servers_sources)
diff --git a/servers/arvr/SCsub b/servers/arvr/SCsub
index d730144861b..86681f9c74d 100644
--- a/servers/arvr/SCsub
+++ b/servers/arvr/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/audio/SCsub b/servers/audio/SCsub
index 3c18c180437..5021e578c36 100644
--- a/servers/audio/SCsub
+++ b/servers/audio/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/audio/effects/SCsub b/servers/audio/effects/SCsub
index d730144861b..86681f9c74d 100644
--- a/servers/audio/effects/SCsub
+++ b/servers/audio/effects/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/camera/SCsub b/servers/camera/SCsub
index ccc76e823f6..c949f3bb259 100644
--- a/servers/camera/SCsub
+++ b/servers/camera/SCsub
@@ -1,7 +1,7 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
-Export('env')
+Export("env")
diff --git a/servers/physics_2d/SCsub b/servers/physics_2d/SCsub
index d730144861b..86681f9c74d 100644
--- a/servers/physics_2d/SCsub
+++ b/servers/physics_2d/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/physics_3d/SCsub b/servers/physics_3d/SCsub
index c5cc8891123..df7b521693b 100644
--- a/servers/physics_3d/SCsub
+++ b/servers/physics_3d/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/physics_3d/joints/SCsub b/servers/physics_3d/joints/SCsub
index d730144861b..86681f9c74d 100644
--- a/servers/physics_3d/joints/SCsub
+++ b/servers/physics_3d/joints/SCsub
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/rendering/SCsub b/servers/rendering/SCsub
index fca18bfea09..5ea0d404867 100644
--- a/servers/rendering/SCsub
+++ b/servers/rendering/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/rendering/rasterizer_rd/SCsub b/servers/rendering/rasterizer_rd/SCsub
index cc17feeb05b..6a2e682c67b 100644
--- a/servers/rendering/rasterizer_rd/SCsub
+++ b/servers/rendering/rasterizer_rd/SCsub
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
diff --git a/servers/rendering/rasterizer_rd/shaders/SCsub b/servers/rendering/rasterizer_rd/shaders/SCsub
index 2dcb2a703f6..6e852e2dc5a 100644
--- a/servers/rendering/rasterizer_rd/shaders/SCsub
+++ b/servers/rendering/rasterizer_rd/shaders/SCsub
@@ -1,24 +1,24 @@
#!/usr/bin/env python
-Import('env')
+Import("env")
-if 'RD_GLSL' in env['BUILDERS']:
- env.RD_GLSL('canvas.glsl');
- env.RD_GLSL('canvas_occlusion.glsl');
- env.RD_GLSL('blur.glsl');
- env.RD_GLSL('cubemap_roughness.glsl');
- env.RD_GLSL('cubemap_downsampler.glsl');
- env.RD_GLSL('cubemap_filter.glsl');
- env.RD_GLSL('scene_high_end.glsl');
- env.RD_GLSL('sky.glsl');
- env.RD_GLSL('tonemap.glsl');
- env.RD_GLSL('copy.glsl');
- env.RD_GLSL('giprobe.glsl');
- env.RD_GLSL('giprobe_debug.glsl');
- env.RD_GLSL('giprobe_sdf.glsl');
- env.RD_GLSL('luminance_reduce.glsl');
- env.RD_GLSL('bokeh_dof.glsl');
- env.RD_GLSL('ssao.glsl');
- env.RD_GLSL('ssao_minify.glsl');
- env.RD_GLSL('ssao_blur.glsl');
- env.RD_GLSL('roughness_limiter.glsl');
+if "RD_GLSL" in env["BUILDERS"]:
+ env.RD_GLSL("canvas.glsl")
+ env.RD_GLSL("canvas_occlusion.glsl")
+ env.RD_GLSL("blur.glsl")
+ env.RD_GLSL("cubemap_roughness.glsl")
+ env.RD_GLSL("cubemap_downsampler.glsl")
+ env.RD_GLSL("cubemap_filter.glsl")
+ env.RD_GLSL("scene_high_end.glsl")
+ env.RD_GLSL("sky.glsl")
+ env.RD_GLSL("tonemap.glsl")
+ env.RD_GLSL("copy.glsl")
+ env.RD_GLSL("giprobe.glsl")
+ env.RD_GLSL("giprobe_debug.glsl")
+ env.RD_GLSL("giprobe_sdf.glsl")
+ env.RD_GLSL("luminance_reduce.glsl")
+ env.RD_GLSL("bokeh_dof.glsl")
+ env.RD_GLSL("ssao.glsl")
+ env.RD_GLSL("ssao_minify.glsl")
+ env.RD_GLSL("ssao_blur.glsl")
+ env.RD_GLSL("roughness_limiter.glsl")