[Buildsystem] Fix encoding when reading files
commit 4933fa8bf5 (parent 99ff024f78)
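Python's open() falls back to the host's preferred locale encoding when no encoding argument is given (commonly cp1252 on Windows), so build scripts reading UTF-8 sources can mis-decode or raise UnicodeDecodeError on non-ASCII bytes depending on the machine. The hunks below add an explicit encoding="utf-8" to every affected read. A minimal illustrative sketch of the difference (not part of the diff; the file path is hypothetical):

    import locale

    # The implicit default depends on the host locale, e.g. "cp1252" on many Windows setups.
    print(locale.getpreferredencoding(False))

    path = "some_utf8_file.txt"  # hypothetical file containing non-ASCII UTF-8 text

    # Implicit encoding: may mis-decode or fail on some hosts.
    with open(path, "r") as f:
        data_implicit = f.read()

    # Explicit encoding, as applied throughout this commit: decodes identically everywhere.
    with open(path, "r", encoding="utf-8") as f:
        data_explicit = f.read()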
@@ -13,7 +13,7 @@ def make_default_controller_mappings(target, source, env):
     # ensure mappings have a consistent order
     platform_mappings: dict = OrderedDict()
     for src_path in source:
-        with open(str(src_path), "r") as f:
+        with open(str(src_path), "r", encoding="utf-8") as f:
             # read mapping file and skip header
             mapping_file_lines = f.readlines()[2:]
@@ -31,7 +31,7 @@ class GLES3HeaderStruct:


 def include_file_in_gles3_header(filename: str, header_data: GLES3HeaderStruct, depth: int):
-    with open(filename, "r") as fs:
+    with open(filename, "r", encoding="utf-8") as fs:
         line = fs.readline()

         while line:
@@ -38,7 +38,7 @@ class RDHeaderStruct:


 def include_file_in_rd_header(filename: str, header_data: RDHeaderStruct, depth: int) -> RDHeaderStruct:
-    with open(filename, "r") as fs:
+    with open(filename, "r", encoding="utf-8") as fs:
         line = fs.readline()

         while line:
@@ -172,7 +172,7 @@ class RAWHeaderStruct:


 def include_file_in_raw_header(filename: str, header_data: RAWHeaderStruct, depth: int) -> None:
-    with open(filename, "r") as fs:
+    with open(filename, "r", encoding="utf-8") as fs:
         line = fs.readline()

         while line:
methods.py
@@ -179,7 +179,7 @@ def get_version_info(module_version_string="", silent=False):
     gitfolder = ".git"

     if os.path.isfile(".git"):
-        with open(".git", "r") as file:
+        with open(".git", "r", encoding="utf-8") as file:
             module_folder = file.readline().strip()
         if module_folder.startswith("gitdir: "):
             gitfolder = module_folder[8:]
@@ -196,12 +196,12 @@ def get_version_info(module_version_string="", silent=False):
     head = os.path.join(gitfolder, ref)
     packedrefs = os.path.join(gitfolder, "packed-refs")
     if os.path.isfile(head):
-        with open(head, "r") as file:
+        with open(head, "r", encoding="utf-8") as file:
             githash = file.readline().strip()
     elif os.path.isfile(packedrefs):
         # Git may pack refs into a single file. This code searches .git/packed-refs file for the current ref's hash.
         # https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-pack-refs.html
-        for line in open(packedrefs, "r").read().splitlines():
+        for line in open(packedrefs, "r", encoding="utf-8").read().splitlines():
             if line.startswith("#"):
                 continue
             (line_hash, line_ref) = line.split(" ")
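For context on the packed-refs branch above: git's packed-refs file starts with comment lines prefixed by "#" and then lists "<hash> <ref>" pairs, which is why the loop skips "#" lines and splits on a single space. A small illustrative sketch of that parse (sample data only; the all-zero hash is a placeholder):

    # Sketch of the format the loop above relies on.
    sample = (
        "# pack-refs with: peeled fully-peeled sorted\n"
        "0000000000000000000000000000000000000000 refs/heads/master\n"
    )

    for line in sample.splitlines():
        if line.startswith("#"):
            continue
        line_hash, line_ref = line.split(" ")
        print(line_ref, "->", line_hash)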
@@ -270,7 +270,7 @@ const uint64_t VERSION_TIMESTAMP = {git_timestamp};


 def parse_cg_file(fname, uniforms, sizes, conditionals):
-    with open(fname, "r") as fs:
+    with open(fname, "r", encoding="utf-8") as fs:
         line = fs.readline()

         while line:
@@ -1243,7 +1243,7 @@ def generate_vs_project(env, original_args, project_name="godot"):
     ).hexdigest()

     if os.path.exists(f"{project_name}.vcxproj.filters"):
-        with open(f"{project_name}.vcxproj.filters", "r") as file:
+        with open(f"{project_name}.vcxproj.filters", "r", encoding="utf-8") as file:
             existing_filters = file.read()
             match = re.search(r"(?ms)^<!-- CHECKSUM$.([0-9a-f]{32})", existing_filters)
             if match is not None and md5 == match.group(1):
@@ -1255,7 +1255,7 @@ def generate_vs_project(env, original_args, project_name="godot"):
     if not skip_filters:
         print(f"Regenerating {project_name}.vcxproj.filters")

-        with open("misc/msvs/vcxproj.filters.template", "r") as file:
+        with open("misc/msvs/vcxproj.filters.template", "r", encoding="utf-8") as file:
             filters_template = file.read()
         for i in range(1, 10):
             filters_template = filters_template.replace(f"%%UUID{i}%%", str(uuid.uuid4()))
@@ -1409,7 +1409,7 @@ def generate_vs_project(env, original_args, project_name="godot"):
     )
     output = f'bin\\godot{env["PROGSUFFIX"]}'

-    with open("misc/msvs/props.template", "r") as file:
+    with open("misc/msvs/props.template", "r", encoding="utf-8") as file:
         props_template = file.read()

     props_template = props_template.replace("%%VSCONF%%", vsconf)
@@ -1478,7 +1478,7 @@ def generate_vs_project(env, original_args, project_name="godot"):
     sln_uuid = str(uuid.uuid4())

     if os.path.exists(f"{project_name}.sln"):
-        for line in open(f"{project_name}.sln", "r").read().splitlines():
+        for line in open(f"{project_name}.sln", "r", encoding="utf-8").read().splitlines():
             if line.startswith('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}")'):
                 proj_uuid = re.search(
                     r"\"{(\b[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-\b[0-9a-fA-F]{12}\b)}\"$",
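The hunk above scans an existing .sln so the regenerated solution can keep its project GUID: each project is declared on a Project(...) line, and the regex captures the GUID at the end of that line. A small illustrative sketch (placeholder GUID, not a real project entry):

    import re

    sln_line = 'Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "godot", "godot.vcxproj", "{00000000-0000-0000-0000-000000000000}"'
    match = re.search(
        r"\"{(\b[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-\b[0-9a-fA-F]{12}\b)}\"$",
        sln_line,
    )
    print(match.group(1) if match else "no GUID found")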
@@ -1567,7 +1567,7 @@ def generate_vs_project(env, original_args, project_name="godot"):
     section2 = sorted(section2)

     if not get_bool(original_args, "vsproj_props_only", False):
-        with open("misc/msvs/vcxproj.template", "r") as file:
+        with open("misc/msvs/vcxproj.template", "r", encoding="utf-8") as file:
             proj_template = file.read()
         proj_template = proj_template.replace("%%UUID%%", proj_uuid)
         proj_template = proj_template.replace("%%CONFS%%", "\n ".join(configurations))
@@ -1579,7 +1579,7 @@ def generate_vs_project(env, original_args, project_name="godot"):
             f.write(proj_template)

     if not get_bool(original_args, "vsproj_props_only", False):
-        with open("misc/msvs/sln.template", "r") as file:
+        with open("misc/msvs/sln.template", "r", encoding="utf-8") as file:
             sln_template = file.read()
         sln_template = sln_template.replace("%%NAME%%", project_name)
         sln_template = sln_template.replace("%%UUID%%", proj_uuid)
@@ -9,7 +9,7 @@ if len(sys.argv) < 2:

 fname = sys.argv[1]

-with open(fname.strip(), "r") as fileread:
+with open(fname.strip(), "r", encoding="utf-8") as fileread:
     file_contents = fileread.read()

 # If find "ERROR: AddressSanitizer:", then happens invalid read or write
@@ -69,7 +69,7 @@ for f in sys.argv[1:]:
     # In a second pass, we skip all consecutive comment lines starting with "/*",
     # then we can append the rest (step 2).

-    with open(fname.strip(), "r") as fileread:
+    with open(fname.strip(), "r", encoding="utf-8") as fileread:
         line = fileread.readline()
         header_done = False
@@ -187,7 +187,7 @@ with open(os.path.join(dest_dir, "kernels/config.h"), "w", encoding="utf-8", new
 )


-with open("CMakeLists.txt", "r") as cmake_file:
+with open("CMakeLists.txt", "r", encoding="utf-8") as cmake_file:
     cmake_content = cmake_file.read()
     major_version = int(re.compile(r"EMBREE_VERSION_MAJOR\s(\d+)").findall(cmake_content)[0])
     minor_version = int(re.compile(r"EMBREE_VERSION_MINOR\s(\d+)").findall(cmake_content)[0])
@@ -101,7 +101,7 @@ TEST_CASE("[{name_pascal_case}] Example test case") {{

 if args.invasive:
     print("Trying to insert include directive in test_main.cpp...")
-    with open("test_main.cpp", "r") as file:
+    with open("test_main.cpp", "r", encoding="utf-8") as file:
         contents = file.read()
         match = re.search(r'#include "tests.*\n', contents)
@@ -17,15 +17,15 @@ def test_gles3_builder(shader_files, builder, header_struct):

     builder(shader_files["path_input"], "drivers/gles3/shader_gles3.h", "GLES3", header_data=header)

-    with open(shader_files["path_expected_parts"], "r") as f:
+    with open(shader_files["path_expected_parts"], "r", encoding="utf-8") as f:
         expected_parts = json.load(f)
     assert expected_parts == header.__dict__

-    with open(shader_files["path_output"], "r") as f:
+    with open(shader_files["path_output"], "r", encoding="utf-8") as f:
         actual_output = f.read()
     assert actual_output

-    with open(shader_files["path_expected_full"], "r") as f:
+    with open(shader_files["path_expected_full"], "r", encoding="utf-8") as f:
         expected_output = f.read()

     assert actual_output == expected_output
@@ -23,15 +23,15 @@ def test_glsl_builder(shader_files, builder, header_struct):
     header = header_struct()
     builder(shader_files["path_input"], header_data=header)

-    with open(shader_files["path_expected_parts"], "r") as f:
+    with open(shader_files["path_expected_parts"], "r", encoding="utf-8") as f:
         expected_parts = json.load(f)
     assert expected_parts == header.__dict__

-    with open(shader_files["path_output"], "r") as f:
+    with open(shader_files["path_output"], "r", encoding="utf-8") as f:
         actual_output = f.read()
     assert actual_output

-    with open(shader_files["path_expected_full"], "r") as f:
+    with open(shader_files["path_expected_full"], "r", encoding="utf-8") as f:
         expected_output = f.read()

     assert actual_output == expected_output