Add 'engine/' from commit 'a8e37fc010'

git-subtree-dir: engine
git-subtree-mainline: b74841629e
git-subtree-split: a8e37fc010
This commit is contained in:
Sara Gerretsen 2026-03-13 11:22:19 +01:00
commit c3f9669b10
14113 changed files with 7458101 additions and 0 deletions

View file

@ -0,0 +1,133 @@
#!/usr/bin/env python3
# Script used to dump char ranges for specific properties from
# the Unicode Character Database to the `char_range.cpp` file.
# NOTE: This script is deliberately not integrated into the build system;
# you should run it manually whenever you want to update the data.
from __future__ import annotations

import os
import sys
from typing import Final
from urllib.request import urlopen

if __name__ == "__main__":
    # Make the repository root importable so `methods` can be found.
    sys.path.insert(1, os.path.join(os.path.dirname(__file__), "../../"))

from methods import generate_copyright_header

# Derived Core Properties data file of the Unicode Character Database.
URL: Final[str] = "https://www.unicode.org/Public/17.0.0/ucd/DerivedCoreProperties.txt"

# Accumulators for the inclusive (start, end) code point ranges of each
# property of interest; filled by `parse_unicode_data()` below.
xid_start: list[tuple[int, int]] = []
xid_continue: list[tuple[int, int]] = []
uppercase_letter: list[tuple[int, int]] = []
lowercase_letter: list[tuple[int, int]] = []
unicode_letter: list[tuple[int, int]] = []
def merge_ranges(ranges: list[tuple[int, int]]) -> None:
    """Collapse touching ranges in-place.

    `ranges` must be sorted by start value; consecutive entries whose bounds
    touch (previous end + 1 == next start) are fused into a single range.
    """
    if len(ranges) < 2:
        return

    pending: list[tuple[int, int]] = ranges[:]
    ranges.clear()
    start, end = pending[0]
    for next_start, next_end in pending[1:]:
        if next_start == end + 1:
            # Contiguous with the open range: extend it.
            end = next_end
        else:
            # Gap found: flush the open range and start a new one.
            ranges.append((start, end))
            start, end = next_start, next_end
    # Flush the final open range.
    ranges.append((start, end))
def parse_unicode_data() -> None:
    """Download `DerivedCoreProperties.txt` and fill the module-level tables.

    Each property list receives inclusive `(start, end)` code point tuples,
    then the tables are sorted (where needed) and compacted via `merge_ranges`.
    """
    # Map the UCD property name to the table it feeds.
    property_tables: dict[str, list[tuple[int, int]]] = {
        "XID_Start": xid_start,
        "XID_Continue": xid_continue,
        "Uppercase": uppercase_letter,
        "Lowercase": lowercase_letter,
        "Alphabetic": unicode_letter,
    }

    for raw_line in urlopen(URL):
        line: str = raw_line.decode("utf-8")
        if line.startswith("#") or not line.strip():
            continue  # Skip comments and blank lines.
        fields: list[str] = line.split(";")
        char_range: str = fields[0].strip()
        # The property field may carry a trailing `#` comment; drop it.
        char_property: str = fields[1].strip().split("#")[0].strip()
        if ".." in char_range:
            first, last = char_range.split("..")
        else:
            first = last = char_range
        table = property_tables.get(char_property)
        if table is not None:
            table.append((int(first, 16), int(last, 16)))

    # Underscore technically isn't in XID_Start, but for our purposes it's included.
    xid_start.append((0x005F, 0x005F))
    xid_start.sort(key=lambda x: x[0])

    merge_ranges(xid_start)
    merge_ranges(xid_continue)
    merge_ranges(uppercase_letter)
    merge_ranges(lowercase_letter)
    merge_ranges(unicode_letter)
def make_array(array_name: str, range_list: list[tuple[int, int]]) -> str:
    """Render a list of ranges as a C `CharRange` array plus its size constant."""
    rows: str = "".join(f"\t{{ 0x{first:x}, 0x{last:x} }},\n" for first, last in range_list)
    return (
        f"\n\nconst int {array_name}_size = {len(range_list)};\n"
        f"const CharRange {array_name}[{array_name}_size] = {{\n"
        f"{rows}}};"
    )
def generate_char_range_inc() -> None:
    """Regenerate `core/string/char_range.cpp` from the Unicode data."""
    parse_unicode_data()

    source: str = generate_copyright_header("char_range.cpp")
    source += f"""
// This file was generated using the `misc/scripts/char_range_fetch.py` script.
#include "core/string/char_utils.h"
// Unicode Derived Core Properties
// Source: {URL}\
"""
    # Emit each property table in the same order as the declarations above.
    for name, table in (
        ("xid_start", xid_start),
        ("xid_continue", xid_continue),
        ("uppercase_letter", uppercase_letter),
        ("lowercase_letter", lowercase_letter),
        ("unicode_letter", unicode_letter),
    ):
        source += make_array(name, table)
    source += "\n"

    output_path: str = os.path.join(os.path.dirname(__file__), "../../core/string/char_range.cpp")
    with open(output_path, "w", newline="\n") as f:
        f.write(source)

    print("`char_range.cpp` generated successfully.")


if __name__ == "__main__":
    generate_char_range_inc()

View file

@ -0,0 +1,76 @@
#!/usr/bin/env python3
# Scans a CI execution log for sanitizer reports, crashes and leaks, and
# exits with a distinct non-zero status code for each class of failure.
import os
import sys

if len(sys.argv) < 2:
    print("ERROR: You must run program with file name as argument.")
    sys.exit(50)

fname = sys.argv[1]
with open(fname.strip(), "r", encoding="utf-8") as fileread:
    file_contents = fileread.read()

# An "ERROR: AddressSanitizer:" marker means an invalid read or write happened.
# That is a critical bug, so it needs to be fixed as fast as possible.
if "ERROR: AddressSanitizer:" in file_contents:
    print("FATAL ERROR: An incorrectly used memory was found.")
    sys.exit(51)

# The program may also have crashed, with or without a backtrace.
crash_markers = (
    "Program crashed with signal",
    "Dumping the backtrace",
    "Segmentation fault (core dumped)",
    "Aborted (core dumped)",
    "terminate called without an active exception",
)
if any(marker in file_contents for marker in crash_markers):
    print("FATAL ERROR: Godot has been crashed.")
    sys.exit(52)

# Finding memory leaks in Godot is quite difficult, because we need to take into
# account leaks also in external libraries. They are usually provided without
# debugging symbols, so the leak report from them usually has only 2/3 lines,
# so searching for the 5th element - "#4 0x" - should correctly detect the vast
# majority of memory leaks.
if "ERROR: LeakSanitizer:" in file_contents:
    if "#4 0x" in file_contents:
        print("ERROR: Memory leak was found")
        sys.exit(53)

# It may happen that Godot detects leaking nodes/resources and removes them, so
# this possibility should also be handled as a potential error, even if
# LeakSanitizer doesn't report anything.
if "ObjectDB instance was leaked at exit" in file_contents or "ObjectDB instances were leaked at exit" in file_contents:
    print("ERROR: Memory leak was found")
    sys.exit(54)

# The test project may contain assert functions checking that the project is
# executed with the right parameters etc.; these do not normally stop the
# project's execution.
if "Assertion failed" in file_contents:
    print("ERROR: Assertion failed in project, check execution log for more info")
    sys.exit(55)

if os.environ.get("GODOT_CHECK_CI_LOG_ALL_ERRORS"):
    # If any occurrence of "ERROR:" is found in the log, we consider it a failure.
    if "ERROR:" in file_contents:
        print("ERROR: 'ERROR:' found in log and GODOT_CHECK_CI_LOG_ALL_ERRORS is set.")
        sys.exit(56)

# For now Godot leaks a lot of rendering stuff so for now we just show info
# about it and this needs to be re-enabled after fixing this memory leaks.
if "were leaked" in file_contents or "were never freed" in file_contents:
    print("WARNING: Memory leak was found")

sys.exit(0)

View file

@ -0,0 +1,95 @@
#!/usr/bin/env python3
# Replaces the Godot copyright header at the top of each file given on the
# command line, keeping all non-header content that follows it.
import os
import sys

header = """\
/**************************************************************************/
/* $filename */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
"""

if len(sys.argv) < 2:
    print("Invalid usage of copyright_headers.py, it should be called with a path to one or multiple files.")
    sys.exit(1)

for f in sys.argv[1:]:
    fname = f

    # Handle replacing $filename with actual filename and keep alignment
    fsingle = os.path.basename(fname.strip())
    rep_fl = "$filename"
    rep_fi = fsingle
    len_fl = len(rep_fl)
    len_fi = len(rep_fi)
    # Pad with spaces to keep alignment
    if len_fi < len_fl:
        for x in range(len_fl - len_fi):
            rep_fi += " "
    elif len_fl < len_fi:
        for x in range(len_fi - len_fl):
            rep_fl += " "
    if header.find(rep_fl) != -1:
        text = header.replace(rep_fl, rep_fi)
    else:
        # Filename longer than the padded placeholder: plain replacement.
        text = header.replace("$filename", fsingle)
    text += "\n"

    # We now have the proper header, so we want to ignore the one in the original file
    # and potentially empty lines and badly formatted lines, while keeping comments that
    # come after the header, and then keep everything non-header unchanged.
    # To do so, we skip empty lines that may be at the top in a first pass.
    # In a second pass, we skip all consecutive comment lines starting with "/*",
    # then we can append the rest (step 2).
    with open(fname.strip(), "r", encoding="utf-8") as fileread:
        line = fileread.readline()
        header_done = False

        while line.strip() == "" and line != "":  # Skip empty lines at the top
            line = fileread.readline()

        if line.find("/**********") == -1:  # Godot header starts this way
            # Maybe starting with a non-Godot comment, abort header magic
            header_done = True

        while not header_done:  # Handle header now
            if line.find("/*") != 0:  # No more starting with a comment
                header_done = True
                if line.strip() != "":
                    text += line
            line = fileread.readline()

        while line != "":  # Dump everything until EOF
            text += line
            line = fileread.readline()

    # Write
    with open(fname.strip(), "w", encoding="utf-8", newline="\n") as filewrite:
        filewrite.write(text)

View file

@ -0,0 +1,33 @@
#!/usr/bin/env python3
# Runs `dotnet format` on the C# projects that own the files passed as arguments.
import glob
import os
import sys

if len(sys.argv) < 2:
    print("Invalid usage of dotnet_format.py, it should be called with a path to one or multiple files.")
    sys.exit(1)

# Create dummy generated files, if needed.
for dummy_path in ["modules/mono/SdkPackageVersions.props"]:
    if not os.path.exists(dummy_path):
        os.makedirs(os.path.dirname(dummy_path), exist_ok=True)
        with open(dummy_path, "w", encoding="utf-8", newline="\n") as f:
            f.write("<Project />")

# Avoid importing GeneratedIncludes.props.
os.environ["GodotSkipGenerated"] = "true"

# Match all the input files to their respective C# project.
projects = {}
for project_dir in (os.path.dirname(csproj) for csproj in glob.glob("**/*.csproj", recursive=True)):
    members = [f for f in sys.argv[1:] if os.path.commonpath([f, project_dir]) == project_dir]
    projects[project_dir] = " ".join(members)

# Run dotnet format on all projects with more than 0 modified files.
for project_dir, files in projects.items():
    if files:
        os.system(f"dotnet format {project_dir} --include {files}")

View file

@ -0,0 +1,50 @@
#!/usr/bin/env python3
# Normalizes whitespace, line endings and BOM usage for the given files,
# rewriting only those whose on-disk bytes actually change.
import sys

BOM = b"\xef\xbb\xbf"


def _normalized_bytes(path, text):
    """Return `text` re-encoded with canonical EOLs, no trailing whitespace,
    exactly one final newline, and the BOM policy appropriate for `path`."""
    # Visual Studio project/solution/batch files keep CRLF; everything else gets LF.
    eol = "\r\n" if path.endswith((".csproj", ".sln", ".bat")) or path.startswith("misc/msvs") else "\n"
    body = eol.join(line.rstrip("\n\r\t ") for line in text.splitlines(True)).rstrip(eol) + eol
    raw = body.encode(encoding="utf-8")
    # Only .csproj/.sln files keep a UTF-8 BOM.
    wants_bom = path.endswith((".csproj", ".sln"))
    if wants_bom and not raw.startswith(BOM):
        raw = BOM + raw
    elif not wants_bom and raw.startswith(BOM):
        raw = raw[len(BOM):]
    return raw


if len(sys.argv) < 2:
    print("Invalid usage of file_format.py, it should be called with a path to one or multiple files.")
    sys.exit(1)

changed = []
invalid = []

for file in sys.argv[1:]:
    try:
        with open(file, "rt", encoding="utf-8") as f:
            original = f.read()
    except UnicodeDecodeError:
        # Not valid UTF-8: flag it for a human instead of mangling it.
        invalid.append(file)
        continue
    if original == "":
        continue

    new_raw = _normalized_bytes(file, original)
    with open(file, "rb") as f:
        old_raw = f.read()
    if old_raw != new_raw:
        changed.append(file)
        with open(file, "wb") as f:
            f.write(new_raw)

for file in changed:
    print(f"FIXED: {file}")
if invalid:
    for file in invalid:
        print(f"REQUIRES MANUAL CHANGES: {file}")
    sys.exit(1)

View file

@ -0,0 +1,28 @@
#!/usr/bin/env bash
# NOTE: This script relies on bash-only features (`set -o pipefail`,
# `shopt -s globstar`, `**` globbing, `echo -e`), so the shebang must
# invoke bash — under plain POSIX sh (e.g. dash) `shopt` does not exist.
set -uo pipefail
shopt -s globstar

echo -e ".gitignore validation..."

# Get a list of files that exist in the repo but are ignored.

# The --verbose flag also includes files un-ignored via ! prefixes.
# We filter those out with a somewhat awkward `awk` directive.
# (Explanation: Split each line by : delimiters,
# see if the actual gitignore line shown in the third field starts with !,
# if it doesn't, print it.)

# ignorecase for the sake of Windows users.
output=$(git -c core.ignorecase=true check-ignore --verbose --no-index **/* | \
    awk -F ':' '{ if ($3 !~ /^!/) print $0 }')

# Then we take this result and return success if it's empty.
if [ -z "$output" ]; then
    exit 0
else
    # And print the result if it isn't.
    echo "$output"
    exit 1
fi

View file

@ -0,0 +1,87 @@
#!/usr/bin/env python3
# Converts legacy `#ifndef`/`#define`/`#endif` header guards to `#pragma once`
# in the files given on the command line, inserting a guard where none exists.
import sys

if len(sys.argv) < 2:
    print("Invalid usage of header_guards.py, it should be called with a path to one or multiple files.")
    sys.exit(1)

changed = []
invalid = []

for file in sys.argv[1:]:
    header_start = -1
    header_end = -1

    with open(file.strip(), "rt", encoding="utf-8", newline="\n") as f:
        lines = f.readlines()

    # Locate the end of the Godot copyright header; the guard is expected
    # right after it (or at line 0 if there is no header at all).
    for idx, line in enumerate(lines):
        sline = line.strip()

        if header_start < 0:
            if sline == "":  # Skip empty lines at the top.
                continue

            if sline.startswith("/**********"):  # Godot header starts this way.
                header_start = idx
            else:
                header_end = 0  # There is no Godot header.
                break
        else:
            if not sline.startswith(("*", "/*")):  # Not in the Godot header anymore.
                header_end = idx + 1  # The guard should be two lines below the Godot header.
                break

    if (HEADER_CHECK_OFFSET := header_end) < 0 or HEADER_CHECK_OFFSET >= len(lines):
        invalid.append(file)
        continue

    if lines[HEADER_CHECK_OFFSET].startswith("#pragma once"):
        continue  # Already converted.

    # Might be using legacy header guards.
    HEADER_BEGIN_OFFSET = HEADER_CHECK_OFFSET + 1
    HEADER_END_OFFSET = len(lines) - 1

    if HEADER_BEGIN_OFFSET >= HEADER_END_OFFSET:
        invalid.append(file)
        continue

    if (
        lines[HEADER_CHECK_OFFSET].startswith("#ifndef")
        and lines[HEADER_BEGIN_OFFSET].startswith("#define")
        and lines[HEADER_END_OFFSET].startswith("#endif")
    ):
        # Replace `#ifndef`/`#define` with `#pragma once` and drop the final `#endif`.
        lines[HEADER_CHECK_OFFSET] = "#pragma once"
        lines[HEADER_BEGIN_OFFSET] = "\n"
        lines.pop()
        with open(file, "wt", encoding="utf-8", newline="\n") as f:
            f.writelines(lines)
        changed.append(file)
        continue

    # Verify `#pragma once` doesn't exist at invalid location.
    misplaced = False
    for line in lines:
        if line.startswith("#pragma once"):
            misplaced = True
            break

    if misplaced:
        invalid.append(file)
        continue

    # Assume that we're simply missing a guard entirely.
    lines.insert(HEADER_CHECK_OFFSET, "#pragma once\n\n")
    with open(file, "wt", encoding="utf-8", newline="\n") as f:
        f.writelines(lines)
    changed.append(file)

if changed:
    for file in changed:
        print(f"FIXED: {file}")
if invalid:
    for file in invalid:
        print(f"REQUIRES MANUAL CHANGES: {file}")
    sys.exit(1)

View file

@ -0,0 +1,49 @@
#!/usr/bin/env python3
# Downloads and installs the static AccessKit C library into the Godot
# build dependencies folder.
if __name__ != "__main__":
    raise SystemExit(f'Utility script "{__file__}" should not be used as a module!')

import os
import shutil
import sys
import urllib.request

# Make the repository root importable.
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../"))

# Base Godot dependencies path
# If cross-compiling (no LOCALAPPDATA), we install in `bin`
local_app_data = os.getenv("LOCALAPPDATA")
if local_app_data:
    deps_folder = os.path.join(local_app_data, "Godot", "build_deps")
else:
    deps_folder = os.path.join("bin", "build_deps")

# AccessKit
ac_version = "0.21.2"

# Create dependencies folder
os.makedirs(deps_folder, exist_ok=True)

ac_filename = f"accesskit-c-{ac_version}.zip"
ac_archive = os.path.join(deps_folder, "accesskit.zip")
ac_folder = os.path.join(deps_folder, "accesskit")

# Drop any stale partial download before fetching.
if os.path.isfile(ac_archive):
    os.remove(ac_archive)

print(f"Downloading AccessKit {ac_filename} ...")
urllib.request.urlretrieve(
    f"https://github.com/godotengine/godot-accesskit-c-static/releases/download/{ac_version}/{ac_filename}",
    ac_archive,
)

if os.path.exists(ac_folder):
    print(f"Removing existing local AccessKit installation in {ac_folder} ...")
    shutil.rmtree(ac_folder)

print(f"Extracting AccessKit {ac_filename} to {ac_folder} ...")
shutil.unpack_archive(ac_archive, deps_folder)
os.remove(ac_archive)
# The archive extracts into a versioned folder; rename it to a stable path.
os.rename(os.path.join(deps_folder, f"accesskit-c-{ac_version}"), ac_folder)
print("AccessKit installed successfully.\n")

View file

@ -0,0 +1,147 @@
#!/usr/bin/env python3
# Installs the dependencies needed to build Godot with Direct3D 12 support:
# Mesa NIR static libraries, WinPixEventRuntime and the DirectX 12 Agility SDK.
if __name__ != "__main__":
    raise SystemExit(f'Utility script "{__file__}" should not be used as a module!')

import argparse
import os
import shutil
import subprocess
import sys
import urllib.request

# Make the repository root importable so `misc.utility.color` can be found.
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../"))

from misc.utility.color import Ansi, color_print

parser = argparse.ArgumentParser(description="Install D3D12 dependencies for Windows platforms.")
parser.add_argument(
    "--mingw_prefix",
    default=os.getenv("MINGW_PREFIX", ""),
    help="Explicitly specify a path containing the MinGW bin folder.",
)
args = parser.parse_args()

# Base Godot dependencies path
# If cross-compiling (no LOCALAPPDATA), we install in `bin`
deps_folder = os.getenv("LOCALAPPDATA")
if deps_folder:
    deps_folder = os.path.join(deps_folder, "Godot", "build_deps")
else:
    deps_folder = os.path.join("bin", "build_deps")

# Mesa NIR
# Sync with `drivers/d3d12/SCsub` when updating Mesa.
# Check for latest version: https://github.com/godotengine/godot-nir-static/releases/latest
mesa_version = "25.3.1-1"

# WinPixEventRuntime
# Check for latest version: https://www.nuget.org/api/v2/package/WinPixEventRuntime (check downloaded filename)
pix_version = "1.0.240308001"
pix_archive = os.path.join(deps_folder, f"WinPixEventRuntime_{pix_version}.nupkg")
pix_folder = os.path.join(deps_folder, "pix")

# DirectX 12 Agility SDK
# Check for latest version: https://www.nuget.org/api/v2/package/Microsoft.Direct3D.D3D12 (check downloaded filename)
# After updating this, remember to change the default value of the `rendering/rendering_device/d3d12/agility_sdk_version`
# project setting to match the minor version (e.g. for `1.618.5`, it should be `618`).
agility_sdk_version = "1.618.5"
agility_sdk_archive = os.path.join(deps_folder, f"Agility_SDK_{agility_sdk_version}.nupkg")
agility_sdk_folder = os.path.join(deps_folder, "agility_sdk")

# Create dependencies folder
if not os.path.exists(deps_folder):
    os.makedirs(deps_folder)

# Mesa NIR
color_print(f"{Ansi.BOLD}[1/3] Mesa NIR")
# One prebuilt archive per target architecture/toolchain combination.
for arch in [
    "arm64-llvm",
    "arm64-msvc",
    "x86_32-gcc",
    "x86_32-llvm",
    "x86_32-msvc",
    "x86_64-gcc",
    "x86_64-llvm",
    "x86_64-msvc",
]:
    mesa_filename = "godot-nir-static-" + arch + "-release.zip"
    mesa_archive = os.path.join(deps_folder, mesa_filename)
    mesa_folder = os.path.join(deps_folder, "mesa-" + arch)

    if os.path.isfile(mesa_archive):
        os.remove(mesa_archive)
    print(f"Downloading Mesa NIR {mesa_filename} ...")
    urllib.request.urlretrieve(
        f"https://github.com/godotengine/godot-nir-static/releases/download/{mesa_version}/{mesa_filename}",
        mesa_archive,
    )
    if os.path.exists(mesa_folder):
        print(f"Removing existing local Mesa NIR installation in {mesa_folder} ...")
        shutil.rmtree(mesa_folder)
    print(f"Extracting Mesa NIR {mesa_filename} to {mesa_folder} ...")
    shutil.unpack_archive(mesa_archive, mesa_folder)
    os.remove(mesa_archive)
print("Mesa NIR installed successfully.\n")

# WinPixEventRuntime

# MinGW needs DLLs converted with dlltool.
# We rely on finding gendef/dlltool to detect if we have MinGW.
# Check existence of needed tools for generating mingw library.
pathstr = os.environ.get("PATH", "")
if args.mingw_prefix:
    pathstr = os.path.join(args.mingw_prefix, "bin") + os.pathsep + pathstr
gendef = shutil.which("x86_64-w64-mingw32-gendef", path=pathstr) or shutil.which("gendef", path=pathstr) or ""
dlltool = shutil.which("x86_64-w64-mingw32-dlltool", path=pathstr) or shutil.which("dlltool", path=pathstr) or ""
has_mingw = gendef != "" and dlltool != ""

color_print(f"{Ansi.BOLD}[2/3] WinPixEventRuntime")
if os.path.isfile(pix_archive):
    os.remove(pix_archive)
print(f"Downloading WinPixEventRuntime {pix_version} ...")
urllib.request.urlretrieve(f"https://www.nuget.org/api/v2/package/WinPixEventRuntime/{pix_version}", pix_archive)
if os.path.exists(pix_folder):
    print(f"Removing existing local WinPixEventRuntime installation in {pix_folder} ...")
    shutil.rmtree(pix_folder)
print(f"Extracting WinPixEventRuntime {pix_version} to {pix_folder} ...")
shutil.unpack_archive(pix_archive, pix_folder, "zip")
os.remove(pix_archive)

if has_mingw:
    print("Adapting WinPixEventRuntime to also support MinGW alongside MSVC.")
    cwd = os.getcwd()
    os.chdir(pix_folder)
    # Generate MinGW import libraries (.a) for the x64 and ARM64 DLLs.
    subprocess.run([gendef, "./bin/x64/WinPixEventRuntime.dll"])
    subprocess.run(
        [dlltool]
        + "--machine i386:x86-64 --no-leading-underscore -d WinPixEventRuntime.def -D WinPixEventRuntime.dll -l ./bin/x64/libWinPixEventRuntime.a".split()
    )
    subprocess.run([gendef, "./bin/ARM64/WinPixEventRuntime.dll"])
    subprocess.run(
        [dlltool]
        + "--machine arm64 --no-leading-underscore -d WinPixEventRuntime.def -D WinPixEventRuntime.dll -l ./bin/ARM64/libWinPixEventRuntime.a".split()
    )
    os.chdir(cwd)
else:
    print(
        'MinGW support requires "dlltool" and "gendef" dependencies, so only MSVC support is provided for WinPixEventRuntime. Did you forget to provide a `--mingw_prefix`?'
    )
print(f"WinPixEventRuntime {pix_version} installed successfully.\n")

# DirectX 12 Agility SDK
color_print(f"{Ansi.BOLD}[3/3] DirectX 12 Agility SDK")
if os.path.isfile(agility_sdk_archive):
    os.remove(agility_sdk_archive)
print(f"Downloading DirectX 12 Agility SDK {agility_sdk_version} ...")
urllib.request.urlretrieve(
    f"https://www.nuget.org/api/v2/package/Microsoft.Direct3D.D3D12/{agility_sdk_version}", agility_sdk_archive
)
if os.path.exists(agility_sdk_folder):
    print(f"Removing existing local DirectX 12 Agility SDK installation in {agility_sdk_folder} ...")
    shutil.rmtree(agility_sdk_folder)
print(f"Extracting DirectX 12 Agility SDK {agility_sdk_version} to {agility_sdk_folder} ...")
shutil.unpack_archive(agility_sdk_archive, agility_sdk_folder, "zip")
os.remove(agility_sdk_archive)
print(f"DirectX 12 Agility SDK {agility_sdk_version} installed successfully.\n")

# Complete message
color_print(f'{Ansi.GREEN}All Direct3D 12 SDK components were installed to "{deps_folder}" successfully!')
color_print(f'{Ansi.GREEN}You can now build Godot with Direct3D 12 support enabled by running "scons d3d12=yes".')

View file

@ -0,0 +1,57 @@
#!/usr/bin/env python
# Downloads the prebuilt Swappy frame-pacing static libraries and installs
# them per-architecture into `thirdparty/swappy-frame-pacing`.
import os
import shutil
import sys
import tempfile
import urllib.request
from zipfile import ZipFile

# Make the repository root importable so `misc.utility.color` can be found.
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../"))

from misc.utility.color import Ansi, color_print

# Swappy
# Check for latest version: https://github.com/godotengine/godot-swappy/releases/latest
swappy_tag = "from-source-2025-01-31"
swappy_filename = "godot-swappy.zip"
swappy_folder = "thirdparty/swappy-frame-pacing"
swappy_archs = ["arm64-v8a", "armeabi-v7a", "x86", "x86_64"]

swappy_archive_destination = os.path.join(tempfile.gettempdir(), swappy_filename)
if os.path.isfile(swappy_archive_destination):
    os.remove(swappy_archive_destination)

print(f"Downloading Swappy {swappy_tag} ...")
urllib.request.urlretrieve(
    f"https://github.com/godotengine/godot-swappy/releases/download/{swappy_tag}/{swappy_filename}",
    swappy_archive_destination,
)

# Drop any previous per-arch installation before extracting the new one.
for arch in swappy_archs:
    folder = os.path.join(swappy_folder, arch)
    if os.path.exists(folder):
        print(f"Removing existing local Swappy installation in {folder} ...")
        shutil.rmtree(folder)

print(f"Extracting Swappy {swappy_tag} to {swappy_folder} ...")
with ZipFile(swappy_archive_destination, "r") as zip_file:
    for arch in swappy_archs:
        member = f"{arch}/libswappy_static.a"
        # Rewrite the member's target path so it extracts into the thirdparty folder.
        zip_file.getinfo(member).filename = os.path.join(swappy_folder, member)
        zip_file.extract(member)
os.remove(swappy_archive_destination)
print("Swappy installed successfully.\n")

# Complete message
color_print(f'{Ansi.GREEN}Swappy was installed to "{swappy_folder}" successfully!')
color_print(
    f'{Ansi.GREEN}You can now build Godot with Swappy support enabled by running "scons platform=android swappy=yes".'
)

View file

@ -0,0 +1,44 @@
#!/usr/bin/env bash
# Installs (or updates) the latest LunarG Vulkan SDK on macOS.
# NOTE: `set -o pipefail` and `$'...'` quoting are bashisms, so this script
# must run under bash rather than plain POSIX sh.
set -euo pipefail
IFS=$'\n\t'

new_ver_full=''

# Check currently installed and latest available Vulkan SDK versions.
# FIX: the redirections were ordered `2>&1 >/dev/null`, which sends stderr to
# the terminal instead of silencing it; `>/dev/null 2>&1` discards both.
if command -v jq >/dev/null 2>&1; then
    curl -L "https://sdk.lunarg.com/sdk/download/latest/mac/config.json" -o /tmp/vulkan-sdk.json
    new_ver_full=`jq -r '.version' /tmp/vulkan-sdk.json`
    # Pack the dotted version into a single comparable integer.
    new_ver=`echo "$new_ver_full" | awk -F. '{ printf("%d%02d%04d%02d\n", $1,$2,$3,$4); }';`
    rm -f /tmp/vulkan-sdk.json
    for f in $HOME/VulkanSDK/*; do
        if [ -d "$f" ]; then
            f=`echo "${f##*/}" | awk -F. '{ printf("%d%02d%04d%02d\n", $1,$2,$3,$4); }';`
            if [ $f -ge $new_ver ]; then
                echo 'Latest or newer Vulkan SDK is already installed. Skipping installation.'
                exit 0
            fi
        fi
    done
else
    echo 'Error: Could not find 'jq' command. Is jq installed? Try running "brew install jq" or "port install jq" and rerunning this script.'
    exit 1
fi

# Download and install the Vulkan SDK.
curl -L "https://sdk.lunarg.com/sdk/download/latest/mac/vulkan-sdk.zip" -o /tmp/vulkan-sdk.zip
unzip /tmp/vulkan-sdk.zip -d /tmp

if [ -d "/tmp/vulkansdk-macOS-$new_ver_full.app" ]; then
    /tmp/vulkansdk-macOS-$new_ver_full.app/Contents/MacOS/vulkansdk-macOS-$new_ver_full --accept-licenses --default-answer --confirm-command install
    rm -rf /tmp/vulkansdk-macOS-$new_ver_full.app
else
    echo "Couldn't install the Vulkan SDK, the unzipped contents may no longer match what this script expects."
    exit 1
fi

rm -f /tmp/vulkan-sdk.zip

echo 'Vulkan SDK installed successfully! You can now build Godot by running "scons".'

View file

@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Generate .ico, .icns and .zip set of icons for Steam

# Make icons with transparent backgrounds and all sizes
for s in 16 24 32 48 64 128 256 512 1024; do
    convert -resize ${s}x$s -antialias \
        -background transparent \
        ../../icon.svg icon$s.png
done

# 16px tga file for library
convert icon16.png icon16.tga

# zip for Linux
zip godot-icons.zip icon*.png

# ico for Windows
# Not including biggest ones or it blows up in size
icotool -c -o godot-icon.ico icon{16,24,32,48,64,128,256}.png

# icns for macOS
# Only some sizes: https://iconhandbook.co.uk/reference/chart/osx/
png2icns godot-icon.icns icon{16,32,128,256,512,1024}.png

# Clean up the intermediate PNG frames.
rm -f icon*.png

View file

@ -0,0 +1,66 @@
#!/usr/bin/env sh
if [ ! -e "version.py" ]; then
echo "This script should be ran from the root folder of the Godot repository."
exit 1
fi
while getopts "h?sv:g:" opt; do
case "$opt" in
h|\?)
echo "Usage: $0 [OPTIONS...]"
echo
echo " -s script friendly file name (godot.tar.gz)"
echo " -v godot version for file name (e.g. 4.0-stable)"
echo " -g git treeish to archive (e.g. master)"
echo
exit 1
;;
s)
script_friendly_name=1
;;
v)
godot_version=$OPTARG
;;
g)
git_treeish=$OPTARG
;;
esac
done
if [ ! -z "$git_treeish" ]; then
HEAD=$(git rev-parse $git_treeish)
else
HEAD=$(git rev-parse HEAD)
fi
if [ ! -z "$script_friendly_name" ]; then
NAME=godot
else
if [ ! -z "$godot_version" ]; then
NAME=godot-$godot_version
else
NAME=godot-$HEAD
fi
fi
CURDIR=$(pwd)
TMPDIR=$(mktemp -d -t godot-XXXXXX)
echo "Generating tarball for revision $HEAD with folder name '$NAME'."
echo
echo "The tarball will be written to the parent folder:"
echo " $(dirname $CURDIR)/$NAME.tar.gz"
git archive $HEAD --prefix=$NAME/ -o $TMPDIR/$NAME.tar
# Adding custom .git/HEAD to tarball so that we can generate GODOT_VERSION_HASH.
cd $TMPDIR
mkdir -p $NAME/.git
echo $HEAD > $NAME/.git/HEAD
tar -uf $NAME.tar $NAME
cd $CURDIR
gzip -c $TMPDIR/$NAME.tar > ../$NAME.tar.gz
rm -rf $TMPDIR

View file

@ -0,0 +1,47 @@
#!/usr/bin/env python3
# Deletes stale build-cache entries: files listed in `redundant.txt` plus any
# cache file last accessed before the given cutoff timestamp.
import argparse
import glob
import os

if __name__ != "__main__":
    raise ImportError(f"{__name__} should not be used as a module.")


def main():
    """Remove redundant and expired cache files; return the failure count."""
    parser = argparse.ArgumentParser(description="Cleanup old cache files")
    parser.add_argument("timestamp", type=int, help="Unix timestamp cutoff")
    parser.add_argument("directory", help="Path to cache directory")
    args = parser.parse_args()

    failures = 0

    # TODO: Convert to non-hardcoded path
    if os.path.exists("redundant.txt"):
        with open("redundant.txt") as redundant:
            for item in (entry.strip() for entry in redundant):
                if not os.path.isfile(item):
                    continue
                try:
                    os.remove(item)
                except OSError:
                    print(f'Failed to handle "{item}"; skipping.')
                    failures += 1

    # Cache entries live two levels deep under the cache directory.
    for file in glob.glob(os.path.join(args.directory, "*", "*")):
        try:
            if os.path.getatime(file) < args.timestamp:
                os.remove(file)
        except OSError:
            print(f'Failed to handle "{file}"; skipping.')
            failures += 1

    return failures


try:
    raise SystemExit(main())
except KeyboardInterrupt:
    import signal

    # Restore the default SIGINT handler and re-deliver the signal so the
    # process terminates with the conventional interrupt status.
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    os.kill(os.getpid(), signal.SIGINT)

View file

@ -0,0 +1,118 @@
#!/usr/bin/env python3
# Script used to dump case mappings from
# the Unicode Character Database to the `ucaps.h` file.
# NOTE: This script is deliberately not integrated into the build system;
# you should run it manually whenever you want to update the data.
from __future__ import annotations

import os
import sys
from typing import Final
from urllib.request import urlopen

if __name__ == "__main__":
    # Make the repository root importable so `methods` can be found.
    sys.path.insert(1, os.path.join(os.path.dirname(__file__), "../../"))

from methods import generate_copyright_header

# UnicodeData.txt from the Unicode Character Database.
URL: Final[str] = "https://www.unicode.org/Public/17.0.0/ucd/UnicodeData.txt"

# (code point, mapped code point) pairs, both as "0x"-prefixed hex strings;
# filled by `parse_unicode_data()` below.
lower_to_upper: list[tuple[str, str]] = []
upper_to_lower: list[tuple[str, str]] = []
def parse_unicode_data() -> None:
    """Download `UnicodeData.txt` and collect the simple case mappings."""
    for raw_line in urlopen(URL):
        fields: list[str] = raw_line.decode("utf-8").split(";")
        code_value: str = fields[0].strip()
        # Fields 12 and 13 hold the uppercase/lowercase mappings; either may be empty.
        uppercase_mapping: str = fields[12].strip()
        lowercase_mapping: str = fields[13].strip()
        if uppercase_mapping:
            lower_to_upper.append((f"0x{code_value}", f"0x{uppercase_mapping}"))
        if lowercase_mapping:
            upper_to_lower.append((f"0x{code_value}", f"0x{lowercase_mapping}"))
def make_cap_table(table_name: str, len_name: str, table: list[tuple[str, str]]) -> str:
    """Render mapping pairs as a C `static const int [len][2]` table."""
    rows: str = "".join(f"\t{{ {src}, {dst} }},\n" for src, dst in table)
    return f"static const int {table_name}[{len_name}][2] = {{\n{rows}}};\n\n"
def generate_ucaps_fetch() -> None:
    # Regenerates `core/string/ucaps.h` with the case-mapping tables and the
    # binary-search helpers that look up a character's mapped counterpart.
    parse_unicode_data()

    source: str = generate_copyright_header("ucaps.h")

    source += f"""
#pragma once
// This file was generated using the `misc/scripts/ucaps_fetch.py` script.
#define LTU_LEN {len(lower_to_upper)}
#define UTL_LEN {len(upper_to_lower)}\n\n"""

    source += make_cap_table("caps_table", "LTU_LEN", lower_to_upper)
    source += make_cap_table("reverse_caps_table", "UTL_LEN", upper_to_lower)

    # Emitted C helpers: binary search over the tables; each returns the
    # mapped code point, or `ch` itself when no mapping exists.
    source += """static int _find_upper(int ch) {
\tint low = 0;
\tint high = LTU_LEN - 1;
\tint middle;
\twhile (low <= high) {
\t\tmiddle = (low + high) / 2;
\t\tif (ch < caps_table[middle][0]) {
\t\t\thigh = middle - 1; // Search low end of array.
\t\t} else if (caps_table[middle][0] < ch) {
\t\t\tlow = middle + 1; // Search high end of array.
\t\t} else {
\t\t\treturn caps_table[middle][1];
\t\t}
\t}
\treturn ch;
}
static int _find_lower(int ch) {
\tint low = 0;
\tint high = UTL_LEN - 1;
\tint middle;
\twhile (low <= high) {
\t\tmiddle = (low + high) / 2;
\t\tif (ch < reverse_caps_table[middle][0]) {
\t\t\thigh = middle - 1; // Search low end of array.
\t\t} else if (reverse_caps_table[middle][0] < ch) {
\t\t\tlow = middle + 1; // Search high end of array.
\t\t} else {
\t\t\treturn reverse_caps_table[middle][1];
\t\t}
\t}
\treturn ch;
}
"""

    ucaps_path: str = os.path.join(os.path.dirname(__file__), "../../core/string/ucaps.h")
    with open(ucaps_path, "w", newline="\n") as f:
        f.write(source)

    print("`ucaps.h` generated successfully.")


if __name__ == "__main__":
    generate_ucaps_fetch()

View file

@ -0,0 +1,100 @@
#!/usr/bin/env python3
# Script used to dump char ranges from
# the Unicode Character Database to the `unicode_ranges.inc` file.
# NOTE: This script is deliberately not integrated into the build system;
# you should run it manually whenever you want to update the data.
from __future__ import annotations
import os
import sys
from typing import Final
from urllib.request import urlopen
if __name__ == "__main__":
sys.path.insert(1, os.path.join(os.path.dirname(__file__), "../../"))
from methods import generate_copyright_header
URL: Final[str] = "https://www.unicode.org/Public/17.0.0/ucd/Blocks.txt"
ranges: list[tuple[str, str, str]] = []
exclude_blocks: set[str] = {
"High Surrogates",
"High Private Use Surrogates",
"Low Surrogates",
"Variation Selectors",
"Specials",
"Egyptian Hieroglyph Format Controls",
"Tags",
"Variation Selectors Supplement",
}
def parse_unicode_data() -> None:
    """Download `Blocks.txt` and fill the module-level `ranges` list.

    Comment lines, blank lines, and blocks listed in `exclude_blocks` are
    skipped; every other entry is stored as ("0xSTART", "0xEND", block-name).
    """
    for raw_line in urlopen(URL):
        text = raw_line.decode("utf-8")
        # Skip comments and blank lines.
        if text.startswith("#") or not text.strip():
            continue
        fields = text.split(";")
        span = fields[0].strip()
        block_name = fields[1].strip()
        if block_name in exclude_blocks:
            continue
        first, last = span.split("..")
        ranges.append((f"0x{first}", f"0x{last}", block_name))
def make_array(array_name: str, ranges: list[tuple[str, str, str]]) -> str:
    """Render the given ranges as a C array of `UniRange` entries."""
    pieces = [f"static UniRange {array_name}[] = {{\n"]
    pieces.extend(f'\t{{ {lo}, {hi}, U"{name}" }},\n' for lo, hi, name in ranges)
    # Sentinel entry marking the end of the table.
    pieces.append("\t{ 0x10FFFF, 0x10FFFF, String() }\n};\n\n")
    return "".join(pieces)
def generate_unicode_ranges_inc() -> None:
    """Generate `editor/import/unicode_ranges.inc` from the Unicode block data.

    Calls `parse_unicode_data()` to fill the module-level `ranges` list, then
    writes a C include file with the `UniRange` struct and the range table.
    """
    parse_unicode_data()
    source: str = generate_copyright_header("unicode_ranges.inc")
    # Preamble: generation notice, include guard, and the UniRange struct definition.
    source += f"""
// This file was generated using the `misc/scripts/unicode_ranges_fetch.py` script.
#ifndef UNICODE_RANGES_INC
#define UNICODE_RANGES_INC
// Unicode Character Blocks
// Source: {URL}
struct UniRange {{
\tint32_t start;
\tint32_t end;
\tString name;
}};\n\n"""
    source += make_array("unicode_ranges", ranges)
    source += "#endif // UNICODE_RANGES_INC\n"
    unicode_ranges_path: str = os.path.join(os.path.dirname(__file__), "../../editor/import/unicode_ranges.inc")
    # Force LF newlines regardless of platform; the generated file is committed.
    with open(unicode_ranges_path, "w", newline="\n") as f:
        f.write(source)
    print("`unicode_ranges.inc` generated successfully.")
# Run only when executed directly; this script is not meant to be imported.
if __name__ == "__main__":
    generate_unicode_ranges_inc()

View file

@ -0,0 +1,148 @@
#!/usr/bin/env python3
# Guard against accidental import; this file is a CLI utility, not a library.
if __name__ != "__main__":
    raise SystemExit(f'Utility script "{__file__}" should not be used as a module!')
import argparse
import re
import subprocess
import sys
# The script must run from the repository root so `methods` is importable.
sys.path.insert(0, "./")
try:
    from methods import print_error, print_info
except ImportError:
    raise SystemExit(f"Utility script {__file__} must be run from repository root!")
def glob_to_regex(glob: str) -> re.Pattern[str]:
    """Convert a CODEOWNERS glob to a RegEx pattern.

    The returned pattern is anchored (``\\A`` ... ``\\Z``) and matches
    repository-relative paths without a leading slash. Raises `SyntaxError`
    or `ValueError` for patterns that can never match anything.
    """
    # Heavily inspired by: https://github.com/hmarr/codeowners/blob/main/match.go
    # Handle specific edgecases first.
    if "***" in glob:
        raise SyntaxError("Pattern cannot contain three consecutive asterisks")
    if glob == "/":
        raise SyntaxError('Standalone "/" will not match anything')
    if not glob:
        raise ValueError("Empty pattern")
    segments = glob.split("/")
    if not segments[0]:
        # Leading slash; relative to root.
        segments = segments[1:]
    else:
        # Check for single-segment pattern, which matches relative to any descendent path.
        # This is equivalent to a leading `**/`.
        if len(segments) == 1 or (len(segments) == 2 and not segments[1]):
            if segments[0] != "**":
                segments.insert(0, "**")
    if len(segments) > 1 and not segments[-1]:
        # A trailing slash is equivalent to `/**`.
        segments[-1] = "**"
    last_index = len(segments) - 1
    # need_slash: the next emitted segment must be preceded by a "/" separator.
    need_slash = False
    pattern = r"\A"
    for index, segment in enumerate(segments):
        if segment == "**":
            if index == 0 and index == last_index:
                pattern += r".+"  # Pattern is just `**`; match everything.
            elif index == 0:
                pattern += r"(?:.+/)?"  # Pattern starts with `**`; match any leading path segment.
                need_slash = False
            elif index == last_index:
                pattern += r"/.*"  # Pattern ends with `**`; match any trailing path segment.
            else:
                pattern += r"(?:/.+)?"  # Pattern contains `**`; match zero or more path segments.
                need_slash = True
        elif segment == "*":
            if need_slash:
                pattern += "/"
            # Regular wildcard; match any non-separator characters.
            pattern += r"[^/]+"
            need_slash = True
        else:
            # Literal segment, possibly containing `*`, `?`, and `\`-escapes.
            if need_slash:
                pattern += "/"
            escape = False
            for char in segment:
                if escape:
                    escape = False
                    pattern += re.escape(char)
                    continue
                elif char == "\\":
                    escape = True
                elif char == "*":
                    # Multi-character wildcard.
                    pattern += r"[^/]*"
                elif char == "?":
                    # Single-character wildcard.
                    pattern += r"[^/]"
                else:
                    # Regular character
                    pattern += re.escape(char)
            if index == last_index:
                pattern += r"(?:/.*)?"  # No trailing slash; match descendent paths.
            need_slash = True
    pattern += r"\Z"
    return re.compile(pattern)
# Matches a non-comment CODEOWNERS entry: a path pattern (escaped spaces allowed) followed by one or more owners.
RE_CODEOWNERS = re.compile(r"^(?P<code>[^#](?:\\ |[^\s])+) +(?P<owners>(?:[^#][^\s]+ ?)+)")
def parse_codeowners() -> list[tuple[re.Pattern[str], list[str]]]:
    """Read `.github/CODEOWNERS` and return (pattern, owners) pairs.

    Entries come back in reverse file order because later lines in the
    file take precedence over earlier ones.
    """
    with open(".github/CODEOWNERS", encoding="utf-8", newline="\n") as file:
        return [
            (glob_to_regex(match["code"]), match["owners"].split())
            for line in reversed(file.readlines())  # Lower items have higher precedence.
            if (match := RE_CODEOWNERS.match(line))
        ]
def main() -> int:
    """Validate CODEOWNERS coverage and return the number of unowned files."""
    parser = argparse.ArgumentParser(description="Utility script for validating CODEOWNERS assignment.")
    parser.add_argument("files", nargs="*", help="A list of files to validate. If excluded, checks all owned files.")
    parser.add_argument("-u", "--unowned", action="store_true", help="Only output files without an owner.")
    args = parser.parse_args()
    paths: list[str] = args.files
    if not paths:
        # No explicit list given; validate every git-tracked file.
        paths = subprocess.run(["git", "ls-files"], text=True, capture_output=True).stdout.splitlines()
    codeowners = parse_codeowners()
    unowned = 0
    for path in paths:
        for pattern, owners in codeowners:
            if pattern.match(path):
                if not args.unowned:
                    print_info(f"{path}: {owners}")
                break
        else:
            # No pattern claimed this file.
            print_error(f"{path}: <UNOWNED>")
            unowned += 1
    return unowned
try:
    raise SystemExit(main())
except KeyboardInterrupt:
    # Re-deliver SIGINT with the default handler so the shell observes the
    # conventional interrupt exit status instead of a Python traceback.
    import os
    import signal
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    os.kill(os.getpid(), signal.SIGINT)

View file

@ -0,0 +1,96 @@
#!/usr/bin/env bash
set -o pipefail

# Warn when not run from the repository root; relative paths below assume it.
if [ ! -f "version.py" ]; then
    echo "Warning: This script is intended to be run from the root of the Godot repository."
    echo "Some of the paths checks may not work as intended from a different folder."
fi

# Exactly one argument is required: the Godot executable to validate with.
if [ $# != 1 ]; then
    # BUGFIX: was "@0", which printed a literal "@0"; "$0" expands to the script name.
    echo "Usage: $0 <path-to-godot-executable>"
    exit 1
fi

# Directory holding the per-version expected-error listings.
api_validation_dir="$( dirname -- "$( dirname -- "${BASH_SOURCE[0]//\.\//}" )" )/extension_api_validation/"

# Globals mutated by get_expected_output() and the main loop below.
has_problems=0
warn_extra=0
reference_tag=""
expected_errors=""
# Emit a message either as plain text (local runs) or as a GitHub Actions
# workflow-command annotation when GITHUB_OUTPUT is set (CI runs).
# Arguments: title, body, annotation type (warning/error), file to annotate.
make_annotation()
{
    local title=$1
    local body=$2
    local type=$3
    local file=$4
    if [[ -z "$GITHUB_OUTPUT" ]]; then
        # Local run: print title and body verbatim.
        echo "$title"
        echo "$body"
    else
        # CI run: encode newlines as %0A and emit an annotation command.
        body="$(awk 1 ORS='%0A' - <<<"$body")"
        echo "::$type file=$file,title=$title ::$body"
    fi
}
# Accumulate the expected validation errors for directory $1 into the
# $expected_errors temp file, setting $reference_tag and $warn_extra as side
# effects. Directory names of the form "<from>_<to>" chain recursively into
# the "<to>*" directory; single-part names are terminal.
get_expected_output()
{
    local parts=()
    IFS='_' read -ra parts <<< "$(basename "$1")"
    if [[ "${#parts[@]}" == "2" ]]; then
        # Two-part name: collect this directory's expected errors...
        while read -r file; do
            cat "$file" >> "$expected_errors"
        done <<< "$(find "$1" -type f -name "*.txt")"
        # ...then recurse into the next hop of the version chain, if present.
        next="$(find "$api_validation_dir" -type d -name "${parts[1]}*")"
        if [[ "$next" != "" ]]; then
            get_expected_output "$next"
        fi
        # NOTE: set after the recursion, so the caller's values win over the callee's.
        reference_tag="${parts[0]}"
        warn_extra=0
    else
        # Single-part name: terminal directory; extra (obsolete) errors get warned about.
        while read -r file; do
            cat "$file" >> "$expected_errors"
        done <<< "$(find "$1" -type f -name "*.txt")"
        reference_tag="${parts[0]}"
        warn_extra=1
    fi
}
# Validate the current build against every top-level reference-version directory.
while read -r dir; do
    reference_file="$(mktemp)"
    validate="$(mktemp)"
    validation_output="$(mktemp)"
    allowed_errors="$(mktemp)"
    expected_errors="$(mktemp)"
    # Fills $expected_errors and sets $reference_tag / $warn_extra.
    get_expected_output "$dir"
    # Download the reference extension_api.json
    wget -nv --retry-on-http-error=503 --tries=5 --timeout=60 -cO "$reference_file" "https://raw.githubusercontent.com/godotengine/godot-headers/godot-$reference_tag/extension_api.json" || has_problems=1
    # Validate the current API against the reference
    "$1" --headless --validate-extension-api "$reference_file" 2>&1 | tee "$validate" | awk '!/^Validate extension JSON:/' - || true
    # Collect the expected and actual validation errors
    awk '/^Validate extension JSON:/' - < "$validate" | sort > "$validation_output"
    awk '/^Validate extension JSON:/' - < "$expected_errors" | sort > "$allowed_errors"
    # Differences between the expected and actual errors
    new_validation_error="$(comm -23 "$validation_output" "$allowed_errors")"
    obsolete_validation_error="$(comm -13 "$validation_output" "$allowed_errors")"
    if [ -n "$obsolete_validation_error" ] && [ "$warn_extra" = "1" ]; then
        #make_annotation "The following validation errors no longer occur (compared to $reference_tag):" "$obsolete_validation_error" warning "$file"
        echo "The following validation errors no longer occur (compared to $reference_tag):"
        echo "$obsolete_validation_error"
    fi
    if [ -n "$new_validation_error" ]; then
        # NOTE(review): "$file" here leaks from get_expected_output's read loop — confirm it points at the intended file.
        make_annotation "Compatibility to $reference_tag is broken in the following ways:" "$new_validation_error" error "$file"
        has_problems=1
    fi
    # Clean up this iteration's temp files.
    rm -f "$reference_file" "$validate" "$validation_output" "$allowed_errors" "$expected_errors"
done <<< "$(find "$api_validation_dir" -type d -mindepth 1 -maxdepth 1)"
exit $has_problems

View file

@ -0,0 +1,44 @@
#!/usr/bin/env python3
# Guard against accidental import; this file is a CLI utility, not a library.
if __name__ != "__main__":
    raise SystemExit(f'Utility script "{__file__}" should not be used as a module!')
import argparse
import sys
import xmlschema  # Third-party module. Automatically installed in associated pre-commit hook.
# The script must run from the repository root so `methods` is importable.
sys.path.insert(0, "./")
try:
    from methods import print_error
except ImportError:
    raise SystemExit(f"Utility script {__file__} must be run from repository root!")
def main():
    """Validate each given XML file against `doc/class.xsd`; return the failure count."""
    parser = argparse.ArgumentParser(description="Validate XML documents against `doc/class.xsd`")
    parser.add_argument("files", nargs="+", help="A list of XML files to parse")
    args = parser.parse_args()
    # Load the schema once; every file is validated against the same instance.
    schema = xmlschema.XMLSchema("doc/class.xsd")
    failures = 0
    for path in args.files:
        try:
            schema.validate(path)
        except xmlschema.validators.exceptions.XMLSchemaValidationError as err:
            print_error(f'Validation failed for "{path}"!\n\n{err}')
            failures += 1
    return failures
try:
    raise SystemExit(main())
except KeyboardInterrupt:
    # Re-deliver SIGINT with the default handler so the shell observes the
    # conventional interrupt exit status instead of a Python traceback.
    import os
    import signal
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    os.kill(os.getpid(), signal.SIGINT)