2014-02-10 01:10:30 +00:00
|
|
|
import os
|
2018-03-17 22:23:55 +00:00
|
|
|
import os.path
|
|
|
|
import sys
|
|
|
|
import re
|
|
|
|
import glob
|
|
|
|
import string
|
|
|
|
import datetime
|
|
|
|
import subprocess
|
2018-07-28 16:13:48 +00:00
|
|
|
from compat import iteritems, isbasestring
|
2014-02-10 01:10:30 +00:00
|
|
|
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def add_source_files(self, sources, filetype, lib_env=None, shared=False):
    """Append SCons Object nodes for the given sources to `sources`.

    `filetype` is either a glob pattern (string), expanded relative to the
    current SCons directory, or an explicit list of paths.
    """
    if isbasestring(filetype):
        # Expand the pattern and sort for a reproducible build order.
        base_dir = self.Dir('.').abspath
        filetype = sorted(glob.glob(base_dir + "/" + filetype))

    sources.extend(self.Object(src) for src in filetype)
|
2014-02-10 01:10:30 +00:00
|
|
|
|
|
|
|
|
2018-09-28 11:29:52 +00:00
|
|
|
def disable_warnings(self):
    """Silence all compiler warnings on this environment ('self' is the env)."""
    if self.msvc:
        # We have to remove existing warning level defines before appending /w,
        # otherwise we get: "warning D9025 : overriding '/W3' with '/w'"
        level_flags = {'/Wall', '/W4', '/W3', '/W2', '/W1', '/WX'}
        self['CCFLAGS'] = [flag for flag in self['CCFLAGS'] if flag not in level_flags]
        self.Append(CCFLAGS=['/w'])
    else:
        self.Append(CCFLAGS=['-w'])
|
|
|
|
|
|
|
|
|
2017-11-02 02:12:28 +00:00
|
|
|
def add_module_version_string(self, s):
    """Append a dot-separated component to this env's module version string."""
    self.module_version_string = self.module_version_string + "." + s
|
2017-11-02 02:12:28 +00:00
|
|
|
|
2018-03-17 22:23:55 +00:00
|
|
|
|
2017-11-02 02:12:28 +00:00
|
|
|
def update_version(module_version_string=""):
|
2014-02-10 01:10:30 +00:00
|
|
|
|
2017-11-19 20:26:05 +00:00
|
|
|
build_name = "custom_build"
|
2018-03-17 22:23:55 +00:00
|
|
|
if os.getenv("BUILD_NAME") != None:
|
2017-11-19 20:26:05 +00:00
|
|
|
build_name = os.getenv("BUILD_NAME")
|
|
|
|
print("Using custom build name: " + build_name)
|
2016-04-02 18:26:12 +00:00
|
|
|
|
2016-10-30 17:44:57 +00:00
|
|
|
import version
|
2016-04-02 18:26:12 +00:00
|
|
|
|
2018-03-17 22:23:55 +00:00
|
|
|
# NOTE: It is safe to generate this file here, since this is still executed serially
|
2017-08-26 16:53:49 +00:00
|
|
|
f = open("core/version_generated.gen.h", "w")
|
2017-11-19 20:18:01 +00:00
|
|
|
f.write("#define VERSION_SHORT_NAME \"" + str(version.short_name) + "\"\n")
|
|
|
|
f.write("#define VERSION_NAME \"" + str(version.name) + "\"\n")
|
2016-10-30 17:57:40 +00:00
|
|
|
f.write("#define VERSION_MAJOR " + str(version.major) + "\n")
|
|
|
|
f.write("#define VERSION_MINOR " + str(version.minor) + "\n")
|
2018-03-17 22:23:55 +00:00
|
|
|
if hasattr(version, 'patch'):
|
2016-10-30 17:57:40 +00:00
|
|
|
f.write("#define VERSION_PATCH " + str(version.patch) + "\n")
|
2017-11-19 20:18:01 +00:00
|
|
|
f.write("#define VERSION_STATUS \"" + str(version.status) + "\"\n")
|
2017-11-19 20:26:05 +00:00
|
|
|
f.write("#define VERSION_BUILD \"" + str(build_name) + "\"\n")
|
2017-11-02 02:12:28 +00:00
|
|
|
f.write("#define VERSION_MODULE_CONFIG \"" + str(version.module_config) + module_version_string + "\"\n")
|
2018-08-29 03:55:37 +00:00
|
|
|
f.write("#define VERSION_YEAR " + str(2018) + "\n")
|
2017-07-10 08:47:38 +00:00
|
|
|
f.close()
|
|
|
|
|
2018-03-17 22:23:55 +00:00
|
|
|
# NOTE: It is safe to generate this file here, since this is still executed serially
|
2017-08-26 16:53:49 +00:00
|
|
|
fhash = open("core/version_hash.gen.h", "w")
|
2017-07-10 08:47:38 +00:00
|
|
|
githash = ""
|
|
|
|
if os.path.isfile(".git/HEAD"):
|
2017-08-26 16:53:49 +00:00
|
|
|
head = open(".git/HEAD", "r").readline().strip()
|
2017-07-10 08:47:38 +00:00
|
|
|
if head.startswith("ref: "):
|
|
|
|
head = ".git/" + head[5:]
|
|
|
|
if os.path.isfile(head):
|
2017-08-26 16:53:49 +00:00
|
|
|
githash = open(head, "r").readline().strip()
|
2017-07-10 08:47:38 +00:00
|
|
|
else:
|
|
|
|
githash = head
|
|
|
|
fhash.write("#define VERSION_HASH \"" + githash + "\"")
|
|
|
|
fhash.close()
|
2014-02-10 01:10:30 +00:00
|
|
|
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2014-02-10 01:10:30 +00:00
|
|
|
def parse_cg_file(fname, uniforms, sizes, conditionals):
    """Parse a Cg shader file, collecting uniform declarations.

    Appends each uniform's name to `uniforms`, its component count to `sizes`
    (1 for texture objects), and its name to `conditionals` when the line is
    tagged with `[branch]`.
    """
    with open(fname, "r") as fs:
        line = fs.readline()
        while line:
            if re.match(r"^\s*uniform", line):
                # BUGFIX: re.match() was called without the target string
                # (TypeError), and .groups(n) — which returns a tuple — was
                # confused with .group(n).
                res = re.match(r"uniform ([\d\w]*) ([\d\w]*)", line)
                utype = res.group(1)
                uname = res.group(2)

                uniforms.append(uname)

                if utype.find("texobj") != -1:
                    # Texture objects count as a single slot.
                    sizes.append(1)
                else:
                    t = re.match(r"float(\d)x(\d)", utype)
                    if t:
                        # Matrix type: rows * columns components.
                        sizes.append(int(t.group(1)) * int(t.group(2)))
                    else:
                        t = re.match(r"float(\d)", utype)
                        sizes.append(int(t.group(1)))

                if line.find("[branch]") != -1:
                    conditionals.append(uname)

            line = fs.readline()
|
|
|
|
|
|
|
|
|
2014-02-10 01:10:30 +00:00
|
|
|
def detect_modules():
    """Scan modules/ for buildable modules and return their names.

    A directory counts as a module when it contains a config.py. Also writes
    modules/register_module_types.gen.cpp with the (un)registration glue.
    """
    module_list = []
    includes_cpp = ""
    register_cpp = ""
    unregister_cpp = ""

    # Sorted so register_module_types does not change that often, and also
    # so plugins are registered in alphabetic order.
    for entry in sorted(glob.glob("modules/*")):
        if not os.path.isdir(entry):
            continue
        if not os.path.exists(entry + "/config.py"):
            continue
        # Strip the leading "modules/" ("modules\\" on win32).
        name = entry.replace("modules/", "").replace("modules\\", "")
        module_list.append(name)
        try:
            with open("modules/" + name + "/register_types.h"):
                includes_cpp += '#include "modules/' + name + '/register_types.h"\n'
                register_cpp += '#ifdef MODULE_' + name.upper() + '_ENABLED\n'
                register_cpp += '\tregister_' + name + '_types();\n'
                register_cpp += '#endif\n'
                unregister_cpp += '#ifdef MODULE_' + name.upper() + '_ENABLED\n'
                unregister_cpp += '\tunregister_' + name + '_types();\n'
                unregister_cpp += '#endif\n'
        except IOError:
            # Module without a register_types.h: nothing to glue in.
            pass

    modules_cpp = """
// modules.cpp - THIS FILE IS GENERATED, DO NOT EDIT!!!!!!!
#include "register_module_types.h"

""" + includes_cpp + """

void register_module_types() {
""" + register_cpp + """
}

void unregister_module_types() {
""" + unregister_cpp + """
}
"""

    # NOTE: It is safe to generate this file here, since this is still executed serially
    with open("modules/register_module_types.gen.cpp", "w") as f:
        f.write(modules_cpp)

    return module_list
|
2014-02-10 01:10:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def win32_spawn(sh, escape, cmd, args, env):
    # SCons SPAWN replacement: run `cmd` with `args` through subprocess and
    # return the child's exit code, dumping its stderr on failure.
    # NOTE(review): Windows-only — subprocess.STARTUPINFO does not exist on
    # other platforms.
    import subprocess
    # args[0] is the program name itself; the command line re-adds `cmd`.
    newargs = ' '.join(args[1:])
    cmdline = cmd + " " + newargs
    startupinfo = subprocess.STARTUPINFO()
    # subprocess requires every environment value to be a string.
    for e in env:
        if type(env[e]) != type(""):
            env[e] = str(env[e])
    proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env)
    data, err = proc.communicate()
    rv = proc.wait()
    # On failure, surface the child's stderr to help diagnose the build error.
    if rv:
        print("=====")
        print(err)
        print("=====")
    return rv
|
2014-02-10 01:10:30 +00:00
|
|
|
|
|
|
|
"""
|
|
|
|
def win32_spawn(sh, escape, cmd, args, spawnenv):
|
|
|
|
import win32file
|
|
|
|
import win32event
|
|
|
|
import win32process
|
|
|
|
import win32security
|
|
|
|
for var in spawnenv:
|
|
|
|
spawnenv[var] = spawnenv[var].encode('ascii', 'replace')
|
|
|
|
|
|
|
|
sAttrs = win32security.SECURITY_ATTRIBUTES()
|
|
|
|
StartupInfo = win32process.STARTUPINFO()
|
|
|
|
newargs = ' '.join(map(escape, args[1:]))
|
|
|
|
cmdline = cmd + " " + newargs
|
|
|
|
|
|
|
|
# check for any special operating system commands
|
|
|
|
if cmd == 'del':
|
|
|
|
for arg in args[1:]:
|
|
|
|
win32file.DeleteFile(arg)
|
|
|
|
exit_code = 0
|
|
|
|
else:
|
|
|
|
# otherwise execute the command.
|
|
|
|
hProcess, hThread, dwPid, dwTid = win32process.CreateProcess(None, cmdline, None, None, 1, 0, spawnenv, None, StartupInfo)
|
|
|
|
win32event.WaitForSingleObject(hProcess, win32event.INFINITE)
|
|
|
|
exit_code = win32process.GetExitCodeProcess(hProcess)
|
|
|
|
win32file.CloseHandle(hProcess);
|
|
|
|
win32file.CloseHandle(hThread);
|
|
|
|
return exit_code
|
|
|
|
"""
|
|
|
|
|
2017-10-07 09:40:17 +00:00
|
|
|
def android_add_flat_dir(self, dir):
    """Register an Android Gradle flat repository directory (deduplicated)."""
    if dir in self.android_flat_dirs:
        return
    self.android_flat_dirs.append(dir)
|
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_maven_repository(self, url):
    """Register a Maven repository URL for the Android Gradle build (deduplicated)."""
    if url in self.android_maven_repos:
        return
    self.android_maven_repos.append(url)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_dependency(self, depline):
    """Register a Gradle dependency line for the Android build (deduplicated)."""
    if depline in self.android_dependencies:
        return
    self.android_dependencies.append(depline)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_java_dir(self, subpath):
    """Register a Java source directory inside the current module (deduplicated)."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, subpath)
    if path not in self.android_java_dirs:
        self.android_java_dirs.append(path)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_res_dir(self, subpath):
    """Register an Android resource directory inside the current module (deduplicated)."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, subpath)
    if path not in self.android_res_dirs:
        self.android_res_dirs.append(path)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2018-07-03 21:28:24 +00:00
|
|
|
def android_add_asset_dir(self, subpath):
    """Register an Android asset directory inside the current module (deduplicated)."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, subpath)
    if path not in self.android_asset_dirs:
        self.android_asset_dirs.append(path)
|
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_aidl_dir(self, subpath):
    """Register an AIDL source directory inside the current module (deduplicated)."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, subpath)
    if path not in self.android_aidl_dirs:
        self.android_aidl_dirs.append(path)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_jni_dir(self, subpath):
    """Register a JNI source directory inside the current module (deduplicated)."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, subpath)
    if path not in self.android_jni_dirs:
        self.android_jni_dirs.append(path)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2017-03-06 10:04:21 +00:00
|
|
|
def android_add_gradle_plugin(self, plugin):
    """Register a Gradle plugin for the Android build (deduplicated)."""
    if plugin in self.android_gradle_plugins:
        return
    self.android_gradle_plugins.append(plugin)
|
2017-03-06 10:04:21 +00:00
|
|
|
|
|
|
|
def android_add_gradle_classpath(self, classpath):
    """Register a Gradle buildscript classpath entry (deduplicated)."""
    if classpath in self.android_gradle_classpath:
        return
    self.android_gradle_classpath.append(classpath)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_default_config(self, config):
    """Register a Gradle defaultConfig line for the Android build (deduplicated)."""
    if config in self.android_default_config:
        return
    self.android_default_config.append(config)
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_to_manifest(self, file):
    """Append the given module file's contents to the AndroidManifest chunk."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, file)
    with open(path, "r") as f:
        self.android_manifest_chunk += f.read()
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_to_permissions(self, file):
    """Append the given module file's contents to the Android permissions chunk."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, file)
    with open(path, "r") as f:
        self.android_permission_chunk += f.read()
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def android_add_to_attributes(self, file):
    """Append the given module file's contents to the application attributes chunk."""
    path = "%s/modules/%s/%s" % (self.Dir(".").abspath, self.current_module, file)
    with open(path, "r") as f:
        self.android_appattributes_chunk += f.read()
|
2014-02-10 01:10:30 +00:00
|
|
|
|
|
|
|
def disable_module(self):
    """Mark the module currently being configured as disabled for this build."""
    self.disabled_modules.append(self.current_module)
|
2016-01-25 03:21:04 +00:00
|
|
|
|
2016-06-14 14:27:16 +00:00
|
|
|
def use_windows_spawn_fix(self, platform=None):
    """Install a SPAWN replacement that works around Windows' command-line
    length limit when archiving many object files."""

    if os.name != "nt":
        return  # not needed, only for windows

    # On Windows, due to the limited command line length, when creating a static library
    # from a very high number of objects SCons will invoke "ar" once per object file;
    # that makes object files with same names to be overwritten so the last wins and
    # the library looses symbols defined by overwritten objects.
    # By enabling quick append instead of the default mode (replacing), libraries will
    # got built correctly regardless the invocation strategy.
    # Furthermore, since SCons will rebuild the library from scratch when an object file
    # changes, no multiple versions of the same object file will be present.
    self.Replace(ARFLAGS='q')

    def run_command(cmdline, env):
        # Run one child process without popping up a console window.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        child = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, startupinfo=startupinfo, shell=False, env=env)
        _, err = child.communicate()
        code = child.wait()
        if code:
            print("=====")
            print(err)
            print("=====")
        return code

    def spawn_fix(sh, escape, cmd, args, env):
        cmdline = cmd + " " + ' '.join(args[1:])
        rv = 0
        env = {str(key): str(value) for key, value in iteritems(env)}
        if len(cmdline) > 32000 and cmd.endswith("ar"):
            # Too long for one invocation: append the objects one at a time.
            prefix = cmd + " " + args[1] + " " + args[2] + " "
            for i in range(3, len(args)):
                rv = run_command(prefix + args[i], env)
                if rv:
                    break
        else:
            rv = run_command(cmdline, env)
        return rv

    self['SPAWN'] = spawn_fix
|
|
|
|
|
|
|
|
|
2018-01-24 22:06:35 +00:00
|
|
|
def split_lib(self, libname, src_list=None, env_lib=None):
    """Split a large static library into several smaller ones.

    Groups `libname`'s sources into chunks of at most `max_src` files per
    library, then prepends the resulting libraries to LIBS. Keeps Windows
    command lines (and ar invocations) within their length limits.
    """
    env = self

    num = 0
    cur_base = ""
    max_src = 64
    group = []      # renamed from `list`, which shadowed the builtin
    lib_list = []

    if src_list is None:
        src_list = getattr(env, libname + "_sources")

    # BUGFIX: `type(env_lib) == type(None)` replaced with the identity check.
    if env_lib is None:
        env_lib = env

    for f in src_list:
        fname = ""
        if isinstance(f, str):
            fname = env.File(f).path
        else:
            fname = env.File(f)[0].path
        fname = fname.replace("\\", "/")
        # BUGFIX: string.join() was removed in Python 3; use str.join.
        base = "/".join(fname.split("/")[:2])
        if base != cur_base and len(group) > max_src:
            if num > 0:
                lib = env_lib.add_library(libname + str(num), group)
                lib_list.append(lib)
                group = []
            num = num + 1
        cur_base = base
        group.append(f)

    lib = env_lib.add_library(libname + str(num), group)
    lib_list.append(lib)

    if len(lib_list) > 0:
        # Work around MSYS ar limits by collating into a thin archive.
        if os.name == 'posix' and sys.platform == 'msys':
            env.Replace(ARFLAGS=['rcsT'])
            lib = env_lib.add_library(libname + "_collated", lib_list)
            lib_list = [lib]

    lib_base = []
    env_lib.add_source_files(lib_base, "*.cpp")
    lib = env_lib.add_library(libname, lib_base)
    lib_list.insert(0, lib)

    env.Prepend(LIBS=lib_list)
|
2016-10-30 16:04:07 +00:00
|
|
|
|
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def save_active_platforms(apnames, ap):
    """Embed each active platform's logo (and optional run icon) PNGs as
    C byte arrays in generated `<name>.gen.h` headers next to the images."""
    for x in ap:
        names = ['logo']
        if os.path.isfile(x + "/run_icon.png"):
            names.append('run_icon')

        for name in names:
            # Read the whole PNG at once instead of one byte at a time.
            with open(x + "/" + name + ".png", "rb") as pngf:
                png_data = pngf.read()

            # Renamed from `str`, which shadowed the builtin. bytearray()
            # yields ints on both Python 2 and 3.
            buf = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
            # x[9:] strips the leading "platform/" prefix.
            buf += " static const unsigned char _" + x[9:] + "_" + name + "[]={"
            buf += ",".join(hex(b) for b in bytearray(png_data))
            buf += "};\n"

            # NOTE: It is safe to generate this file here, since this is still executed serially
            wf = x + "/" + name + ".gen.h"
            with open(wf, "w") as pngw:
                pngw.write(buf)
|
2014-02-10 01:10:30 +00:00
|
|
|
|
2015-01-12 04:54:17 +00:00
|
|
|
|
2016-10-30 17:57:40 +00:00
|
|
|
def no_verbose(sys, env):
    """Replace SCons' verbose command echo with short, colorized status lines."""
    # Colors are disabled in non-TTY environments such as pipes. This means
    # that if output is redirected to a file, it will not contain color codes
    if sys.stdout.isatty():
        colors = {
            'cyan': '\033[96m',
            'purple': '\033[95m',
            'blue': '\033[94m',
            'green': '\033[92m',
            'yellow': '\033[93m',
            'red': '\033[91m',
            'end': '\033[0m',
        }
    else:
        colors = {key: '' for key in
                  ('cyan', 'purple', 'blue', 'green', 'yellow', 'red', 'end')}

    def message(verb, token, lead):
        # "<lead>Verb <purple>==> <yellow>$TOKEN<end>"
        return '%s%s %s==> %s%s%s' % (
            colors[lead], verb, colors['purple'], colors['yellow'], token, colors['end'])

    compile_msg = message('Compiling', '$SOURCE', 'blue')
    java_compile_msg = message('Compiling', '$SOURCE', 'blue')
    compile_shared_msg = message('Compiling shared', '$SOURCE', 'blue')
    link_program_msg = message('Linking Program', '$TARGET', 'red')
    link_library_msg = message('Linking Static Library', '$TARGET', 'red')
    ranlib_msg = message('Ranlib Library', '$TARGET', 'red')
    link_shared_msg = message('Linking Shared Library', '$TARGET', 'red')
    java_archive_msg = message('Creating Java Archive', '$TARGET', 'red')

    env.Append(CXXCOMSTR=[compile_msg])
    env.Append(CCCOMSTR=[compile_msg])
    env.Append(SHCCCOMSTR=[compile_shared_msg])
    env.Append(SHCXXCOMSTR=[compile_shared_msg])
    env.Append(ARCOMSTR=[link_library_msg])
    env.Append(RANLIBCOMSTR=[ranlib_msg])
    env.Append(SHLINKCOMSTR=[link_shared_msg])
    env.Append(LINKCOMSTR=[link_program_msg])
    env.Append(JARCOMSTR=[java_archive_msg])
    env.Append(JAVACCOMSTR=[java_compile_msg])
|
2015-01-12 04:54:17 +00:00
|
|
|
|
2016-10-30 18:05:14 +00:00
|
|
|
|
2016-09-03 22:25:43 +00:00
|
|
|
def detect_visual_c_compiler_version(tools_env):
    """Detect which MSVC cl.exe flavor will be invoked by SCons.

    `tools_env` is SCons's tool environment (env['ENV'], NOT env itself). There
    is no compiler flag selecting the architecture, so we inspect PATH and pick
    the compiler directory that appears first. Returns one of:
      ""           compiler not detected
      "amd64"      native 64 bit compiler
      "amd64_x86"  64 bit cross compiler for 32 bit
      "x86"        native 32 bit compiler
      "x86_amd64"  32 bit cross compiler for 64 bit
    ARM and other architectures are not handled.
    """
    best_index = -1
    best_arch = ""

    # Pre-VS2017 layout, rooted at VCINSTALLDIR.
    if 'VCINSTALLDIR' in tools_env:
        vc_root = tools_env["VCINSTALLDIR"]
        path = tools_env["PATH"]

        idx = path.find(vc_root + "BIN\\amd64;")
        if idx > -1:
            best_index, best_arch = idx, "amd64"

        # Earlier position in PATH wins; ties keep the earlier candidate.
        for suffix, arch in (("BIN\\amd64_x86;", "amd64_x86"),
                             ("BIN;", "x86"),
                             ("BIN\\x86_amd64;", "x86_amd64")):
            idx = path.find(vc_root + suffix)
            if idx > -1 and (best_index == -1 or best_index > idx):
                best_index, best_arch = idx, arch

    # VS2017+ layout, rooted at VCTOOLSINSTALLDIR (case-insensitive match).
    if 'VCTOOLSINSTALLDIR' in tools_env:
        tool_root = tools_env['VCTOOLSINSTALLDIR'].upper()
        upper_path = tools_env["PATH"].upper()

        idx = upper_path.find(tool_root + "BIN\\HOSTX64\\X64;")
        if idx > -1:
            best_index, best_arch = idx, "amd64"

        for suffix, arch in (("BIN\\HOSTX64\\X86;", "amd64_x86"),
                             ("BIN\\HOSTX86\\X86;", "x86"),
                             ("BIN\\HOSTX86\\X64;", "x86_amd64")):
            idx = upper_path.find(tool_root + suffix)
            if idx > -1 and (best_index == -1 or best_index > idx):
                best_index, best_arch = idx, arch

    return best_arch
|
2016-09-03 22:25:43 +00:00
|
|
|
|
2017-08-28 15:17:26 +00:00
|
|
|
def find_visual_c_batch_file(env):
    """Return the vcvars batch file matching SCons' detected MSVC setup."""
    from SCons.Tool.MSCommon.vc import get_default_version, get_host_target, find_batch_file

    msvc_version = get_default_version(env)
    host_platform, target_platform, _req_target_platform = get_host_target(env)
    return find_batch_file(env, msvc_version, host_platform, target_platform)[0]
|
|
|
|
|
2017-11-18 17:09:18 +00:00
|
|
|
def generate_cpp_hint_file(filename):
    """Write a Visual Studio cpp.hint file unless one already exists."""
    # Don't overwrite an existing hint file since the user may have customized it.
    if not os.path.isfile(filename):
        try:
            with open(filename, "w") as fd:
                fd.write("#define GDCLASS(m_class, m_inherits)\n")
        except IOError:
            print("Could not write cpp.hint file.")
|
2017-08-28 15:17:26 +00:00
|
|
|
|
|
|
|
def generate_vs_project(env, num_jobs):
    """Generate a Visual Studio NMAKE project/solution that shells out to scons.

    Requires a Visual C batch file to be locatable; otherwise prints a warning
    and does nothing.
    """
    batch_file = find_visual_c_batch_file(env)
    if batch_file:

        def build_commandline(commands):
            # Prefix each VS build command with environment setup: pick the
            # right vcvars platform, propagate tools=yes/no, then run `commands`.
            common_build_prefix = ['cmd /V /C set "plat=$(PlatformTarget)"',
                                   '(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))',
                                   'set "tools=yes"',
                                   '(if "$(Configuration)"=="release" (set "tools=no"))',
                                   'call "' + batch_file + '" !plat!']

            result = " ^& ".join(common_build_prefix + [commands])
            return result

        # Register every source group so they show up in the IDE.
        env.AddToVSProject(env.core_sources)
        env.AddToVSProject(env.main_sources)
        env.AddToVSProject(env.modules_sources)
        env.AddToVSProject(env.scene_sources)
        env.AddToVSProject(env.servers_sources)
        env.AddToVSProject(env.editor_sources)

        # windows allows us to have spaces in paths, so we need
        # to double quote off the directory. However, the path ends
        # in a backslash, so we need to remove this, lest it escape the
        # last double quote off, confusing MSBuild
        env['MSVSBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs))
        env['MSVSREBUILDCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" platform=windows progress=no target=$(Configuration) tools=!tools! vsproj=yes -j' + str(num_jobs))
        env['MSVSCLEANCOM'] = build_commandline('scons --directory="$(ProjectDir.TrimEnd(\'\\\'))" --clean platform=windows progress=no target=$(Configuration) tools=!tools! -j' + str(num_jobs))

        # This version information (Win32, x64, Debug, Release, Release_Debug seems to be
        # required for Visual Studio to understand that it needs to generate an NMAKE
        # project. Do not modify without knowing what you are doing.
        debug_variants = ['debug|Win32'] + ['debug|x64']
        release_variants = ['release|Win32'] + ['release|x64']
        release_debug_variants = ['release_debug|Win32'] + ['release_debug|x64']
        variants = debug_variants + release_variants + release_debug_variants
        debug_targets = ['bin\\godot.windows.tools.32.exe'] + ['bin\\godot.windows.tools.64.exe']
        release_targets = ['bin\\godot.windows.opt.32.exe'] + ['bin\\godot.windows.opt.64.exe']
        release_debug_targets = ['bin\\godot.windows.opt.tools.32.exe'] + ['bin\\godot.windows.opt.tools.64.exe']
        targets = debug_targets + release_targets + release_debug_targets
        # NOTE(review): if 'MSVS' is truly unset, indexing env['MSVS'] below
        # would raise — this appears to rely on SCons' msvs tool pre-creating
        # the MSVS dict; confirm before changing.
        if not env.get('MSVS'):
            env['MSVS']['PROJECTSUFFIX'] = '.vcxproj'
            env['MSVS']['SOLUTIONSUFFIX'] = '.sln'
        env.MSVSProject(
            target=['#godot' + env['MSVSPROJECTSUFFIX']],
            incs=env.vs_incs,
            srcs=env.vs_srcs,
            runfile=targets,
            buildtarget=targets,
            auto_build_solution=1,
            variant=variants)
    else:
        print("Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project.")
|
2016-09-15 16:04:26 +00:00
|
|
|
|
2016-09-03 22:25:43 +00:00
|
|
|
def precious_program(env, program, sources, **args):
    """Build a program and mark it Precious so SCons never deletes it."""
    result = env.ProgramOriginal(program, sources, **args)
    env.Precious(result)
    return result
|
2017-11-28 20:27:57 +00:00
|
|
|
|
|
|
|
def add_shared_library(env, name, sources, **args):
    """Build a shared library, excluding it from the SCons cache."""
    result = env.SharedLibrary(name, sources, **args)
    env.NoCache(result)
    return result
|
2017-11-28 20:27:57 +00:00
|
|
|
|
|
|
|
def add_library(env, name, sources, **args):
    """Build a static library, excluding it from the SCons cache."""
    result = env.Library(name, sources, **args)
    env.NoCache(result)
    return result
|
2017-11-28 20:27:57 +00:00
|
|
|
|
|
|
|
def add_program(env, name, sources, **args):
    """Build a program, excluding it from the SCons cache."""
    result = env.Program(name, sources, **args)
    env.NoCache(result)
    return result
|
2018-06-21 04:33:25 +00:00
|
|
|
|
|
|
|
def CommandNoCache(env, target, sources, command, **args):
    """Run a SCons Command builder, excluding its result from the cache."""
    nodes = env.Command(target, sources, command, **args)
    env.NoCache(nodes)
    return nodes
|