#!/usr/bin/env python3

"""
This script is shared between SDL2, SDL3, and all satellite libraries.
Don't specialize this script for doing project-specific modifications.
Rather, modify release-info.json.
"""

import argparse
import collections
import dataclasses
from collections.abc import Callable
import contextlib
import datetime
import fnmatch
import glob
import io
import json
import logging
import multiprocessing
import os
from pathlib import Path
import platform
import re
import shlex
import shutil
import subprocess
import sys
import tarfile
import tempfile
import textwrap
import typing
import zipfile


logger = logging.getLogger(__name__)
GIT_HASH_FILENAME = ".git-hash"
REVISION_TXT = "REVISION.txt"


def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
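    """Parse an ISO-8601 timestamp, tolerating the invalid timezone offsets git sometimes records.

    Illustrative example: "2021-07-04T20:01:40+32:00" fails strict parsing, so it is
    retried with the offset replaced by "+00:00".
    """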
    try:
        return datetime.datetime.fromisoformat(str_isotime)
    except ValueError:
        pass
    logger.warning("Invalid iso time: %s", str_isotime)
    if str_isotime[-6:-5] in ("+", "-"):
        # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00")
        modified_str_isotime = str_isotime[:-6] + "+00:00"
        try:
            return datetime.datetime.fromisoformat(modified_str_isotime)
        except ValueError:
            pass
    raise ValueError(f"Invalid isotime: {str_isotime}")


def arc_join(*parts: str) -> str:
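    """Join archive path components with "/", skipping empty parts.

    Illustrative example: arc_join("SDL-3.2.0", "include") == "SDL-3.2.0/include".
    """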
    assert all(p[:1] != "/" and p[-1:] != "/" for p in parts), f"None of {parts} may start or end with '/'"
    return "/".join(p for p in parts if p)


@dataclasses.dataclass(frozen=True)
class VsArchPlatformConfig:
    arch: str
    configuration: str
    platform: str

    def extra_context(self):
        return {
            "ARCH": self.arch,
            "CONFIGURATION": self.configuration,
            "PLATFORM": self.platform,
        }


@contextlib.contextmanager
def chdir(path):
    original_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(original_cwd)


class Executer:
    def __init__(self, root: Path, dry: bool=False):
        self.root = root
        self.dry = dry

    def run(self, cmd, cwd=None, env=None):
        logger.info("Executing args=%r", cmd)
        sys.stdout.flush()
        if not self.dry:
            subprocess.check_call(cmd, cwd=cwd or self.root, env=env, text=True)

    def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
        logger.info("Executing args=%r", cmd)
        sys.stdout.flush()
        if self.dry:
            return dry_out
        return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)


class SectionPrinter:
    @contextlib.contextmanager
    def group(self, title: str):
        print(f"{title}:")
        yield


class GitHubSectionPrinter(SectionPrinter):
    def __init__(self):
        super().__init__()
        self.in_group = False

    @contextlib.contextmanager
    def group(self, title: str):
        print(f"::group::{title}")
        assert not self.in_group, "Can enter a group only once"
        self.in_group = True
        yield
        self.in_group = False
        print("::endgroup::")


class VisualStudio:
    def __init__(self, executer: Executer, year: typing.Optional[str]=None):
        self.executer = executer
        self.vsdevcmd = self.find_vsdevcmd(year)
        self.msbuild = self.find_msbuild()

    @property
    def dry(self) -> bool:
        return self.executer.dry

    VS_YEAR_TO_VERSION = {
        "2022": 17,
        "2019": 16,
        "2017": 15,
        "2015": 14,
        "2013": 12,
    }

    def find_vsdevcmd(self, year: typing.Optional[str]=None) -> typing.Optional[Path]:
        vswhere_spec = ["-latest"]
        if year is not None:
            try:
                version = self.VS_YEAR_TO_VERSION[year]
            except KeyError:
                logger.error("Invalid Visual Studio year")
                return None
            vswhere_spec.extend(["-version", f"[{version},{version+1})"])
        vswhere_cmd = ["vswhere"] + vswhere_spec + ["-property", "installationPath"]
        vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip())
        logger.info("VS install_path = %s", vs_install_path)
        assert vs_install_path.is_dir(), "VS installation path does not exist"
        vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat"
        logger.info("vsdevcmd path = %s", vsdevcmd_path)
        if self.dry:
            vsdevcmd_path.parent.mkdir(parents=True, exist_ok=True)
            vsdevcmd_path.touch(exist_ok=True)
        assert vsdevcmd_path.is_file(), "vsdevcmd.bat batch file does not exist"
        return vsdevcmd_path

    def find_msbuild(self) -> typing.Optional[Path]:
        vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"]
        msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip())
        logger.info("MSBuild path = %s", msbuild_path)
        if self.dry:
            msbuild_path.parent.mkdir(parents=True, exist_ok=True)
            msbuild_path.touch(exist_ok=True)
        assert msbuild_path.is_file(), "MSBuild.exe does not exist"
        return msbuild_path

    def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]):
        assert projects, "Need at least one project to build"

        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}"
        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects])
        bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n"
        bat_path = Path(tempfile.gettempdir()) / "cmd.bat"
        with bat_path.open("w") as f:
            f.write(bat_contents)

        logger.info("Running cmd.exe script (%s): %s", bat_path, bat_contents)
        cmd = ["cmd.exe", "/D", "/E:ON", "/V:OFF", "/S", "/C", f"CALL {str(bat_path)}"]
        self.executer.run(cmd)


class Archiver:
    def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None):
        self._zip_files = []
        self._tar_files = []
        self._added_files = set()
        if zip_path:
            self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED))
        if tgz_path:
            self._tar_files.append(tarfile.open(tgz_path, "w:gz"))
        if txz_path:
            self._tar_files.append(tarfile.open(txz_path, "w:xz"))

    @property
    def added_files(self) -> set[str]:
        return self._added_files

    def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime):
        for zf in self._zip_files:
            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
            zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time)
            zip_info.external_attr = mode << 16
            zip_info.compress_type = zipfile.ZIP_DEFLATED
            zf.writestr(zip_info, data=data)
        for tf in self._tar_files:
            tar_info = tarfile.TarInfo(arcpath)
            tar_info.type = tarfile.REGTYPE
            tar_info.mode = mode
            tar_info.size = len(data)
            tar_info.mtime = int(time.timestamp())
            tf.addfile(tar_info, fileobj=io.BytesIO(data))

        self._added_files.add(arcpath)

    def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
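        """Add a symlink to the archives.

        zipfile has no portable symlink support, so the zip archives receive the already-added
        target files (files_for_zip) as regular entries at the link's path; the tar archives
        get a real SYMTYPE entry pointing at target.
        """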
        logger.debug("Adding symlink (target=%r) -> %s", target, arcpath)
        for zf in self._zip_files:
            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
            for f in files_for_zip:
                zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time)
                zip_info.external_attr = f["mode"] << 16
                zip_info.compress_type = zipfile.ZIP_DEFLATED
                zf.writestr(zip_info, data=f["data"])
        for tf in self._tar_files:
            tar_info = tarfile.TarInfo(arcpath)
            tar_info.type = tarfile.SYMTYPE
            tar_info.mode = 0o777
            tar_info.mtime = int(time.timestamp())
            tar_info.linkname = target
            tf.addfile(tar_info)

        self._added_files.update(f["arcpath"] for f in files_for_zip)

    def add_git_hash(self, arcdir: str, commit: str, time: datetime.datetime):
        arcpath = arc_join(arcdir, GIT_HASH_FILENAME)
        data = f"{commit}\n".encode()
        self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)

    def add_file_path(self, arcpath: str, path: Path):
        assert path.is_file(), f"{path} should be a file"
        logger.debug("Adding %s -> %s", path, arcpath)
        for zf in self._zip_files:
            zf.write(path, arcname=arcpath)
        for tf in self._tar_files:
            tf.add(path, arcname=arcpath)

    def add_file_directory(self, arcdirpath: str, dirpath: Path):
        assert dirpath.is_dir()
        if arcdirpath and arcdirpath[-1:] != "/":
            arcdirpath += "/"
        for f in dirpath.iterdir():
            if f.is_file():
                arcpath = f"{arcdirpath}{f.name}"
                logger.debug("Adding %s to %s", f, arcpath)
                self.add_file_path(arcpath=arcpath, path=f)

    def close(self):
        # Archiver is intentionally made invalid after this function
        del self._zip_files
        self._zip_files = None
        del self._tar_files
        self._tar_files = None

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()


class NodeInArchive:
    def __init__(self, arcpath: str, path: typing.Optional[Path]=None, data: typing.Optional[bytes]=None, mode: typing.Optional[int]=None, symtarget: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None, directory: bool=False):
        self.arcpath = arcpath
        self.path = path
        self.data = data
        self.mode = mode
        self.symtarget = symtarget
        self.time = time
        self.directory = directory

    @classmethod
    def from_fs(cls, arcpath: str, path: Path, mode: int=0o100644, time: typing.Optional[datetime.datetime]=None) -> "NodeInArchive":
        if time is None:
            time = datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
        return cls(arcpath=arcpath, path=path, mode=mode, time=time)

    @classmethod
    def from_data(cls, arcpath: str, data: bytes, time: datetime.datetime) -> "NodeInArchive":
        return cls(arcpath=arcpath, data=data, time=time, mode=0o100644)

    @classmethod
    def from_text(cls, arcpath: str, text: str, time: datetime.datetime) -> "NodeInArchive":
        return cls.from_data(arcpath=arcpath, data=text.encode(), time=time)

    @classmethod
    def from_symlink(cls, arcpath: str, symtarget: str) -> "NodeInArchive":
        return cls(arcpath=arcpath, symtarget=symtarget)

    @classmethod
    def from_directory(cls, arcpath: str) -> "NodeInArchive":
        return cls(arcpath=arcpath, directory=True)

    def __repr__(self) -> str:
        return f"<{type(self).__name__}:arcpath={self.arcpath},path='{str(self.path)}',len(data)={len(self.data) if self.data else 'n/a'},directory={self.directory},symtarget={self.symtarget}>"


def configure_file(path: Path, context: dict[str, str]) -> bytes:
    text = path.read_text()
    return configure_text(text, context=context).encode()


def configure_text(text: str, context: dict[str, str]) -> str:
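    """Substitute @<@KEY@>@ template markers and fail loudly on leftover markers.

    Illustrative example: configure_text("lib@<@PROJECT_NAME@>@.so", {"PROJECT_NAME": "SDL3"})
    returns "libSDL3.so".
    """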
    original_text = text
    for txt, repl in context.items():
        text = text.replace(f"@<@{txt}@>@", repl)
    success = all(thing not in text for thing in ("@<@", "@>@"))
    if not success:
        raise ValueError(f"Failed to configure {repr(original_text)}")
    return text


def configure_text_list(text_list: list[str], context: dict[str, str]) -> list[str]:
    return [configure_text(text=e, context=context) for e in text_list]


class ArchiveFileTree:
    def __init__(self):
        self._tree: dict[str, NodeInArchive] = {}

    def add_file(self, file: NodeInArchive):
        self._tree[file.arcpath] = file

    def __iter__(self) -> typing.Iterable[NodeInArchive]:
        yield from self._tree.values()

    def __contains__(self, value: str) -> bool:
        return value in self._tree

    def get_latest_mod_time(self) -> datetime.datetime:
        return max(item.time for item in self._tree.values() if item.time)

    def add_to_archiver(self, archive_base: str, archiver: Archiver):
        remaining_symlinks = set()
        added_files = dict()

        def calculate_symlink_target(s: NodeInArchive) -> str:
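            """Resolve a symlink node to the archive path it points at, collapsing "x/../" segments.

            Illustrative example: a symlink at "lib/libFOO.so" whose target is "../share/doc"
            resolves to "share/doc".
            """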
            dest_dir = os.path.dirname(s.arcpath)
            if dest_dir:
                dest_dir += "/"
            target = dest_dir + s.symtarget
            while True:
                new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
                target = new_target
                if not n:
                    break
            return target

        # Add files in first pass
        for arcpath, node in self._tree.items():
            assert node is not None, f"{arcpath} -> node"
            if node.data is not None:
                archiver.add_file_data(arcpath=arc_join(archive_base, arcpath), data=node.data, time=node.time, mode=node.mode)
                assert node.arcpath is not None, f"{node=}"
                added_files[node.arcpath] = node
            elif node.path is not None:
                archiver.add_file_path(arcpath=arc_join(archive_base, arcpath), path=node.path)
                assert node.arcpath is not None, f"{node=}"
                added_files[node.arcpath] = node
            elif node.symtarget is not None:
                remaining_symlinks.add(node)
            elif node.directory:
                pass
            else:
                raise ValueError(f"Invalid Archive Node: {repr(node)}")

        assert None not in added_files

        # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
        while True:
            if not remaining_symlinks:
                break
            symlinks_this_time = set()
            extra_added_files = {}
            for symlink in remaining_symlinks:
                symlink_files_for_zip = {}
                symlink_target_path = calculate_symlink_target(symlink)
                if symlink_target_path in added_files:
                    symlink_files_for_zip[symlink.arcpath] = added_files[symlink_target_path]
                else:
                    symlink_target_path_slash = symlink_target_path + "/"
                    for added_file in added_files:
                        if added_file.startswith(symlink_target_path_slash):
                            path_in_symlink = symlink.arcpath + "/" + added_file.removeprefix(symlink_target_path_slash)
                            symlink_files_for_zip[path_in_symlink] = added_files[added_file]
                if symlink_files_for_zip:
                    symlinks_this_time.add(symlink)
                    extra_added_files.update(symlink_files_for_zip)
                    files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
                    archiver.add_symlink(arcpath=f"{archive_base}/{symlink.arcpath}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
            # if not symlinks_this_time:
            #     logger.info("files added: %r", set(path for path in added_files.keys()))
            assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
            remaining_symlinks.difference_update(symlinks_this_time)
            added_files.update(extra_added_files)

    def add_directory_tree(self, arc_dir: str, path: Path, time: datetime.datetime):
        assert path.is_dir()
        for files_dir, _, filenames in os.walk(path):
            files_dir_path = Path(files_dir)
            rel_files_path = files_dir_path.relative_to(path)
            for filename in filenames:
                self.add_file(NodeInArchive.from_fs(arcpath=arc_join(arc_dir, str(rel_files_path), filename), path=files_dir_path / filename, time=time))

    def _add_files_recursively(self, arc_dir: str, paths: list[Path], time: datetime.datetime):
        logger.debug(f"_add_files_recursively({arc_dir=} {paths=})")
        for path in paths:
            arcpath = arc_join(arc_dir, path.name)
            if path.is_file():
                logger.debug("Adding %s as %s", path, arcpath)
                self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
            elif path.is_dir():
                self._add_files_recursively(arc_dir=arc_join(arc_dir, path.name), paths=list(path.iterdir()), time=time)
            else:
                raise ValueError(f"Unsupported file type to add recursively: {path}")

    def add_file_mapping(self, arc_dir: str, file_mapping: dict[str, list[str]], file_mapping_root: Path, context: dict[str, str], time: datetime.datetime):
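        """Add files according to a {destination-dir: [glob, ...]} mapping (the "files" maps of release-info.json).

        Both keys and globs may contain @<@KEY@>@ markers. A glob of the form "path:newname"
        renames the matched file, and a ".in" suffix means the file is run through
        configure_file() before being added.
        """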
        for meta_rel_destdir, meta_file_globs in file_mapping.items():
            rel_destdir = configure_text(meta_rel_destdir, context=context)
            assert "@" not in rel_destdir, f"archive destination should not contain an @ after configuration ({repr(meta_rel_destdir)}->{repr(rel_destdir)})"
            for meta_file_glob in meta_file_globs:
                file_glob = configure_text(meta_file_glob, context=context)
                assert "@" not in file_glob, f"archive glob should not contain an @ after configuration ({repr(meta_file_glob)}->{repr(file_glob)})"
                if ":" in file_glob:
                    original_path, new_filename = file_glob.rsplit(":", 1)
                    assert ":" not in original_path, f"Too many ':' in {repr(file_glob)}"
                    assert "/" not in new_filename, f"New filename cannot contain a '/' in {repr(file_glob)}"
                    path = file_mapping_root / original_path
                    arcpath = arc_join(arc_dir, rel_destdir, new_filename)
                    if path.suffix == ".in":
                        data = configure_file(path, context=context)
                        logger.debug("Adding processed %s -> %s", path, arcpath)
                        self.add_file(NodeInArchive.from_data(arcpath=arcpath, data=data, time=time))
                    else:
                        logger.debug("Adding %s -> %s", path, arcpath)
                        self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
                else:
                    relative_file_paths = glob.glob(file_glob, root_dir=file_mapping_root)
                    assert relative_file_paths, f"Glob '{file_glob}' does not match any file"
                    self._add_files_recursively(arc_dir=arc_join(arc_dir, rel_destdir), paths=[file_mapping_root / p for p in relative_file_paths], time=time)


class SourceCollector:
    # TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
    def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
        self.root = root
        self.commit = commit
        self.filter = filter
        self.executer = executer

    def get_archive_file_tree(self) -> ArchiveFileTree:
        git_archive_args = ["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"]
        logger.info("Executing args=%r", git_archive_args)
        contents_tgz = subprocess.check_output(git_archive_args, cwd=self.root, text=False)
        tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
        filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))

        file_times = self._get_file_times(paths=filenames)
        git_contents = ArchiveFileTree()
        for ti in tar_archive:
            if self.filter and not self.filter(ti.name):
                continue
            data = None
            symtarget = None
            directory = False
            file_time = None
            if ti.isfile():
                contents_file = tar_archive.extractfile(ti.name)
                data = contents_file.read()
                file_time = file_times[ti.name]
            elif ti.issym():
                symtarget = ti.linkname
                file_time = file_times[ti.name]
            elif ti.isdir():
                directory = True
            else:
                raise ValueError(f"{ti.name}: unknown type")
            node = NodeInArchive(arcpath=ti.name, data=data, mode=ti.mode, symtarget=symtarget, time=file_time, directory=directory)
            git_contents.add_file(node)
        return git_contents

    def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
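        """Map each path to the time of the most recent commit that touched it.

        Parses `git log --name-status --pretty=time=%cI` output: a "time=" line sets the
        current commit time, and the name-status lines that follow attribute it to paths.
        """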
        dry_out = textwrap.dedent("""\
            time=2024-03-14T15:40:25-07:00

            M\tCMakeLists.txt
        """)
        git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False)
        current_time = None
        set_paths = set(paths)
        path_times: dict[str, datetime.datetime] = {}
        for line in git_log_out:
            if not line:
                continue
            if line.startswith("time="):
                current_time = safe_isotime_to_datetime(line.removeprefix("time="))
                continue
            mod_type, file_paths = line.split(maxsplit=1)
            assert current_time is not None
            for file_path in file_paths.split("\t"):
                if file_path in set_paths and file_path not in path_times:
                    path_times[file_path] = current_time

        # FIXME: find out why some files are not shown in "git log"
        # assert set(path_times.keys()) == set_paths
        if set(path_times.keys()) != set_paths:
            found_times = set(path_times.keys())
            paths_without_times = set_paths.difference(found_times)
            logger.warning("No times found for these paths: %s", paths_without_times)
            max_time = max(time for time in path_times.values())
            for path in paths_without_times:
                path_times[path] = max_time

        return path_times


class Releaser:
    def __init__(self, release_info: dict, commit: str, revision: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
        self.release_info = release_info
        self.project = release_info["name"]
        self.version = self.extract_sdl_version(root=root, release_info=release_info)
        self.root = root
        self.commit = commit
        self.revision = revision
        self.dist_path = dist_path
        self.section_printer = section_printer
        self.executer = executer
        self.cmake_generator = cmake_generator
        self.cpu_count = multiprocessing.cpu_count()
        self.deps_path = deps_path
        self.overwrite = overwrite
        self.github = github
        self.fast = fast
        self.arc_time = datetime.datetime.now()

        self.artifacts: dict[str, Path] = {}

    def get_context(self, extra_context: typing.Optional[dict[str, str]]=None) -> dict[str, str]:
        ctx = {
            "PROJECT_NAME": self.project,
            "PROJECT_VERSION": self.version,
            "PROJECT_COMMIT": self.commit,
            "PROJECT_REVISION": self.revision,
            "PROJECT_ROOT": str(self.root),
        }
        if extra_context:
            ctx.update(extra_context)
        return ctx

    @property
    def dry(self) -> bool:
        return self.executer.dry

    def prepare(self):
        logger.debug("Creating dist folder")
        self.dist_path.mkdir(parents=True, exist_ok=True)

    @classmethod
    def _path_filter(cls, path: str) -> bool:
        if ".gitmodules" in path:
            return True
        if path.startswith(".git"):
            return False
        return True

    @classmethod
    def _external_repo_path_filter(cls, path: str) -> bool:
        if not cls._path_filter(path):
            return False
        if path.startswith("test/") or path.startswith("tests/"):
            return False
        return True

    def create_source_archives(self) -> None:
        source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
        print(f"Collecting sources of {self.project}...")
        archive_tree: ArchiveFileTree = source_collector.get_archive_file_tree()
        latest_mod_time = archive_tree.get_latest_mod_time()
        archive_tree.add_file(NodeInArchive.from_text(arcpath=REVISION_TXT, text=f"{self.revision}\n", time=latest_mod_time))
        archive_tree.add_file(NodeInArchive.from_text(arcpath=GIT_HASH_FILENAME, text=f"{self.commit}\n", time=latest_mod_time))
        archive_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["source"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=latest_mod_time)

        if "Makefile.am" in archive_tree:
            patched_time = latest_mod_time + datetime.timedelta(minutes=1)
            print("Makefile.am detected -> touching aclocal.m4, */Makefile.in, configure")
            for node_data in archive_tree:
                arc_name = os.path.basename(node_data.arcpath)
                arc_name_we, arc_name_ext = os.path.splitext(arc_name)
                if arc_name in ("aclocal.m4", "configure", "Makefile.in"):
                    print(f"Bumping time of {node_data.arcpath}")
                    node_data.time = patched_time

        archive_base = f"{self.project}-{self.version}"
        zip_path = self.dist_path / f"{archive_base}.zip"
        tgz_path = self.dist_path / f"{archive_base}.tar.gz"
        txz_path = self.dist_path / f"{archive_base}.tar.xz"

        logger.info("Creating zip/tgz/txz source archives ...")
        if self.dry:
            zip_path.touch()
            tgz_path.touch()
            txz_path.touch()
        else:
            with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
                print(f"Adding source files of {self.project}...")
                archive_tree.add_to_archiver(archive_base=archive_base, archiver=archiver)

                for extra_repo in self.release_info["source"].get("extra-repos", []):
                    extra_repo_root = self.root / extra_repo
                    assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
                    extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
                    extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
                    print(f"Collecting sources of {extra_repo} ...")
                    extra_repo_archive_tree = extra_repo_source_collector.get_archive_file_tree()
                    print(f"Adding source files of {extra_repo} ...")
                    extra_repo_archive_tree.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)

            for file in self.release_info["source"]["checks"]:
                assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"

        logger.info("... done")

        self.artifacts["src-zip"] = zip_path
        self.artifacts["src-tar-gz"] = tgz_path
        self.artifacts["src-tar-xz"] = txz_path

        if not self.dry:
            with tgz_path.open("r+b") as f:
                # Zero the embedded timestamp in the gzip'ed tarball
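                # (bytes 4-7 of the gzip header hold the MTIME field; zeroing it makes
                # archiving the same commit reproduce a byte-identical .tar.gz)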
                f.seek(4, 0)
                f.write(b"\x00\x00\x00\x00")

    def create_dmg(self, configuration: str="Release") -> None:
        dmg_in = self.root / self.release_info["dmg"]["path"]
        xcode_project = self.root / self.release_info["dmg"]["project"]
        assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
        assert (xcode_project / "project.pbxproj").is_file(), f"{xcode_project} must contain project.pbxproj"
        if not self.fast:
            dmg_in.unlink(missing_ok=True)
        build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
        if build_xcconfig:
            shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")

        xcode_scheme = self.release_info["dmg"].get("scheme")
        xcode_target = self.release_info["dmg"].get("target")
        assert xcode_scheme or xcode_target, "dmg needs scheme or target"
        assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set"
        if xcode_scheme:
            scheme_or_target = "-scheme"
            target_like = xcode_scheme
        else:
            scheme_or_target = "-target"
            target_like = xcode_target
        self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration])
        if self.dry:
            dmg_in.parent.mkdir(parents=True, exist_ok=True)
            dmg_in.touch()

        assert dmg_in.is_file(), f"{self.project}.dmg was not created by xcodebuild"

        dmg_out = self.dist_path / f"{self.project}-{self.version}.dmg"
        shutil.copy(dmg_in, dmg_out)
        self.artifacts["dmg"] = dmg_out

    @property
    def git_hash_data(self) -> bytes:
        return f"{self.commit}\n".encode()

    def create_mingw_archives(self) -> None:
        build_type = "Release"
        build_parent_dir = self.root / "build-mingw"
        ARCH_TO_GNU_ARCH = {
            # "arm64": "aarch64",
            "x86": "i686",
            "x64": "x86_64",
        }
        ARCH_TO_TRIPLET = {
            # "arm64": "aarch64-w64-mingw32",
            "x86": "i686-w64-mingw32",
            "x64": "x86_64-w64-mingw32",
        }

        new_env = dict(os.environ)

        cmake_prefix_paths = []
        mingw_deps_path = self.deps_path / "mingw-deps"

        if "dependencies" in self.release_info["mingw"]:
            shutil.rmtree(mingw_deps_path, ignore_errors=True)
            mingw_deps_path.mkdir()

            for triplet in ARCH_TO_TRIPLET.values():
                (mingw_deps_path / triplet).mkdir()

            def extract_filter(member: tarfile.TarInfo, path: str, /):
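                """tarfile extraction filter: strip the top-level "SDL*" directory from member names.

                Dependency tarballs are assumed to contain a single "SDL<name>-<version>/" root;
                dropping it extracts their contents directly into the current directory.
                """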
                if member.name.startswith("SDL"):
                    member.name = "/".join(Path(member.name).parts[1:])
                return member

            for dep in self.release_info.get("dependencies", {}):
                extract_path = mingw_deps_path / f"extract-{dep}"
                extract_path.mkdir()
                with chdir(extract_path):
                    tar_path = self.deps_path / glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
                    logger.info("Extracting %s to %s", tar_path, mingw_deps_path)
                    assert tar_path.suffix in (".gz", ".xz")
                    with tarfile.open(tar_path, mode=f"r:{tar_path.suffix.strip('.')}") as tarf:
                        tarf.extractall(filter=extract_filter)
                    for arch, triplet in ARCH_TO_TRIPLET.items():
                        install_cmd = self.release_info["mingw"]["dependencies"][dep]["install-command"]
                        extra_configure_data = {
                            "ARCH": ARCH_TO_GNU_ARCH[arch],
                            "TRIPLET": triplet,
                            "PREFIX": str(mingw_deps_path / triplet),
                        }
                        install_cmd = configure_text(install_cmd, context=self.get_context(extra_configure_data))
                        self.executer.run(shlex.split(install_cmd), cwd=str(extract_path))

                        dep_binpath = mingw_deps_path / triplet / "bin"
                        assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist"
                        dep_pkgconfig = mingw_deps_path / triplet / "lib/pkgconfig"
                        assert dep_pkgconfig.is_dir(), f"{dep_pkgconfig} for PKG_CONFIG_PATH should exist"

                        new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]])
                        new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig)
            cmake_prefix_paths.append(mingw_deps_path)

        new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
        new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"

        assert any(system in self.release_info["mingw"] for system in ("autotools", "cmake"))
        assert not all(system in self.release_info["mingw"] for system in ("autotools", "cmake"))

        mingw_archs = set()
        arc_root = f"{self.project}-{self.version}"
        archive_file_tree = ArchiveFileTree()

        if "autotools" in self.release_info["mingw"]:
            for arch in self.release_info["mingw"]["autotools"]["archs"]:
                triplet = ARCH_TO_TRIPLET[arch]
                new_env["CC"] = f"{triplet}-gcc"
                new_env["CXX"] = f"{triplet}-g++"
                new_env["RC"] = f"{triplet}-windres"

                assert arch not in mingw_archs
                mingw_archs.add(arch)

                build_path = build_parent_dir / f"build-{triplet}"
                install_path = build_parent_dir / f"install-{triplet}"
                shutil.rmtree(install_path, ignore_errors=True)
                build_path.mkdir(parents=True, exist_ok=True)
                context = self.get_context({
                    "ARCH": arch,
                    "DEP_PREFIX": str(mingw_deps_path / triplet),
                })
                extra_args = configure_text_list(text_list=self.release_info["mingw"]["autotools"]["args"], context=context)

                with self.section_printer.group(f"Configuring MinGW {triplet} (autotools)"):
                    assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                    self.executer.run([
                        self.root / "configure",
                        f"--prefix={install_path}",
                        f"--includedir=${{prefix}}/include",
                        f"--libdir=${{prefix}}/lib",
                        f"--bindir=${{prefix}}/bin",
                        f"--host={triplet}",
                        "--build=x86_64-none-linux-gnu",
                        "CFLAGS=-O2",
                        "CXXFLAGS=-O2",
                        "LDFLAGS=-Wl,-s",
                    ] + extra_args, cwd=build_path, env=new_env)
                with self.section_printer.group(f"Build MinGW {triplet} (autotools)"):
                    self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env)
                with self.section_printer.group(f"Install MinGW {triplet} (autotools)"):
                    self.executer.run(["make", "install"], cwd=build_path, env=new_env)
                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)

                print("Recording arch-dependent extra files for MinGW development archive ...")
                extra_context = {
                    "TRIPLET": ARCH_TO_TRIPLET[arch],
                }
                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["autotools"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)

        if "cmake" in self.release_info["mingw"]:
            assert self.release_info["mingw"]["cmake"]["shared-static"] in ("args", "both")
            for arch in self.release_info["mingw"]["cmake"]["archs"]:
                triplet = ARCH_TO_TRIPLET[arch]
                new_env["CC"] = f"{triplet}-gcc"
                new_env["CXX"] = f"{triplet}-g++"
                new_env["RC"] = f"{triplet}-windres"

                assert arch not in mingw_archs
                mingw_archs.add(arch)

                context = self.get_context({
                    "ARCH": arch,
                    "DEP_PREFIX": str(mingw_deps_path / triplet),
                })
                extra_args = configure_text_list(text_list=self.release_info["mingw"]["cmake"]["args"], context=context)

                build_path = build_parent_dir / f"build-{triplet}"
                install_path = build_parent_dir / f"install-{triplet}"
                shutil.rmtree(install_path, ignore_errors=True)
                build_path.mkdir(parents=True, exist_ok=True)
                if self.release_info["mingw"]["cmake"]["shared-static"] == "args":
                    args_for_shared_static = ([], )
                elif self.release_info["mingw"]["cmake"]["shared-static"] == "both":
                    args_for_shared_static = (["-DBUILD_SHARED_LIBS=ON"], ["-DBUILD_SHARED_LIBS=OFF"])
                for arg_for_shared_static in args_for_shared_static:
                    with self.section_printer.group(f"Configuring MinGW {triplet} (CMake)"):
                        assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                        self.executer.run([
                            "cmake",
                            "-S", str(self.root), "-B", str(build_path),
                            f"-DCMAKE_BUILD_TYPE={build_type}",
                            f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                            f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                            f"-DCMAKE_PREFIX_PATH={mingw_deps_path / triplet}",
                            f"-DCMAKE_INSTALL_PREFIX={install_path}",
                            "-DCMAKE_INSTALL_INCLUDEDIR=include",
                            "-DCMAKE_INSTALL_LIBDIR=lib",
                            "-DCMAKE_INSTALL_BINDIR=bin",
                            "-DCMAKE_INSTALL_DATAROOTDIR=share",
                            f"-DCMAKE_TOOLCHAIN_FILE={self.root}/build-scripts/cmake-toolchain-mingw64-{ARCH_TO_GNU_ARCH[arch]}.cmake",
                            f"-G{self.cmake_generator}",
                        ] + extra_args + ([] if self.fast else ["--fresh"]) + arg_for_shared_static, cwd=build_path, env=new_env)
                    with self.section_printer.group(f"Build MinGW {triplet} (CMake)"):
                        self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type], cwd=build_path, env=new_env)
                    with self.section_printer.group(f"Install MinGW {triplet} (CMake)"):
                        self.executer.run(["cmake", "--install", str(build_path)], cwd=build_path, env=new_env)
                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)

                print("Recording arch-dependent extra files for MinGW development archive ...")
                extra_context = {
                    "TRIPLET": ARCH_TO_TRIPLET[arch],
                }
                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["cmake"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)
                print("... done")

        print("Recording extra files for MinGW development archive ...")
        archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
        print("... done")

        print("Creating zip/tgz/txz development archives ...")
        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
        tgz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.gz"
        txz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.xz"

        with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
        print("... done")

        self.artifacts["mingw-devel-zip"] = zip_path
        self.artifacts["mingw-devel-tar-gz"] = tgz_path
        self.artifacts["mingw-devel-tar-xz"] = txz_path

    def _detect_android_api(self, android_home: str) -> typing.Optional[int]:
        platform_dirs = list(Path(p) for p in glob.glob(f"{android_home}/platforms/android-*"))
        re_platform = re.compile("android-([0-9]+)")
        platform_versions = []
        for platform_dir in platform_dirs:
            logger.debug("Found Android Platform SDK: %s", platform_dir)
            if m := re_platform.match(platform_dir.name):
                platform_versions.append(int(m.group(1)))
        platform_versions.sort()
        logger.info("Available platform versions: %s", platform_versions)
        platform_versions = list(filter(lambda v: v >= self._android_api_minimum, platform_versions))
        logger.info("Valid platform versions (>=%d): %s", self._android_api_minimum, platform_versions)
        if not platform_versions:
            return None
        android_api = platform_versions[0]
        logger.info("Selected API version %d", android_api)
        return android_api

    def _get_prefab_json_text(self) -> str:
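        """Return the top-level prefab.json of the .aar.

        Prefab is Android's packaging format for native libraries; this metadata is what
        lets Gradle consumers import the prebuilt libraries from the archive.
        """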
        return textwrap.dedent(f"""\
            {{
                "schema_version": 2,
                "name": "{self.project}",
                "version": "{self.version}",
                "dependencies": []
            }}
        """)

    def _get_prefab_module_json_text(self, library_name: typing.Optional[str], export_libraries: list[str]) -> str:
        for lib in export_libraries:
            assert isinstance(lib, str), f"{lib} must be a string"
        module_json_dict = {
            "export_libraries": export_libraries,
        }
        if library_name:
            module_json_dict["library_name"] = f"lib{library_name}"
        return json.dumps(module_json_dict, indent=4)

    @property
    def _android_api_minimum(self):
        return self.release_info["android"]["api-minimum"]

    @property
    def _android_api_target(self):
        return self.release_info["android"]["api-target"]

    @property
    def _android_ndk_minimum(self):
        return self.release_info["android"]["ndk-minimum"]

    def _get_prefab_abi_json_text(self, abi: str, cpp: bool, shared: bool) -> str:
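        """Return the per-ABI abi.json describing API level, NDK, STL and linkage.

        One of these is written next to each built library under
        prefab/modules/<module>/libs/android.<abi>/ (see create_android_archives).
        """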
        abi_json_dict = {
            "abi": abi,
            "api": self._android_api_minimum,
            "ndk": self._android_ndk_minimum,
            "stl": "c++_shared" if cpp else "none",
            "static": not shared,
        }
        return json.dumps(abi_json_dict, indent=4)

    def _get_android_manifest_text(self) -> str:
        return textwrap.dedent(f"""\
            <manifest
                xmlns:android="http://schemas.android.com/apk/res/android"
                package="org.libsdl.android.{self.project}" android:versionCode="1"
                android:versionName="1.0">
                <uses-sdk android:minSdkVersion="{self._android_api_minimum}"
                    android:targetSdkVersion="{self._android_api_target}" />
            </manifest>
        """)

    def create_android_archives(self, android_api: int, android_home: Path, android_ndk_home: Path) -> None:
        cmake_toolchain_file = Path(android_ndk_home) / "build/cmake/android.toolchain.cmake"
        if not cmake_toolchain_file.exists():
            logger.error("CMake toolchain file does not exist (%s)", cmake_toolchain_file)
            raise SystemExit(1)
        aar_path = self.dist_path / f"{self.project}-{self.version}.aar"
        android_abis = self.release_info["android"]["abis"]
        java_jars_added = False
        module_data_added = False
        android_deps_path = self.deps_path / "android-deps"
        shutil.rmtree(android_deps_path, ignore_errors=True)

        for dep, depinfo in self.release_info["android"].get("dependencies", {}).items():
            android_aar = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
            with self.section_printer.group(f"Extracting Android dependency {dep} ({android_aar.name})"):
                self.executer.run([sys.executable, str(android_aar), "-o", str(android_deps_path)])

        for module_name, module_info in self.release_info["android"]["modules"].items():
            assert "type" in module_info and module_info["type"] in ("interface", "library"), f"module {module_name} must have a valid type"

        archive_file_tree = ArchiveFileTree()

        for android_abi in android_abis:
            with self.section_printer.group(f"Building for Android {android_api} {android_abi}"):
                build_dir = self.root / "build-android" / f"{android_abi}-build"
                install_dir = self.root / "install-android" / f"{android_abi}-install"
                shutil.rmtree(install_dir, ignore_errors=True)
                assert not install_dir.is_dir(), f"{install_dir} should not exist prior to build"
                build_type = "Release"
                cmake_args = [
                    "cmake",
                    "-S", str(self.root),
                    "-B", str(build_dir),
                    f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                    f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                    f"-DCMAKE_TOOLCHAIN_FILE={cmake_toolchain_file}",
                    f"-DCMAKE_PREFIX_PATH={str(android_deps_path)}",
                    "-DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=BOTH",
                    f"-DANDROID_HOME={android_home}",
                    f"-DANDROID_PLATFORM={android_api}",
                    f"-DANDROID_ABI={android_abi}",
                    "-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
                    f"-DCMAKE_INSTALL_PREFIX={install_dir}",
                    "-DCMAKE_INSTALL_INCLUDEDIR=include",
                    "-DCMAKE_INSTALL_LIBDIR=lib",
                    "-DCMAKE_INSTALL_DATAROOTDIR=share",
                    f"-DCMAKE_BUILD_TYPE={build_type}",
                    f"-G{self.cmake_generator}",
                ] + self.release_info["android"]["cmake"]["args"] + ([] if self.fast else ["--fresh"])
                build_args = [
                    "cmake",
                    "--build", str(build_dir),
                    "--verbose",
                    "--config", build_type,
                ]
                install_args = [
                    "cmake",
                    "--install", str(build_dir),
                    "--config", build_type,
                ]
                self.executer.run(cmake_args)
                self.executer.run(build_args)
                self.executer.run(install_args)

                for module_name, module_info in self.release_info["android"]["modules"].items():
                    arcdir_prefab_module = f"prefab/modules/{module_name}"
                    if module_info["type"] == "library":
                        library = install_dir / module_info["library"]
                        assert library.suffix in (".so", ".a")
                        assert library.is_file(), f"CMake should have built library '{library}' for module {module_name}"
                        arcdir_prefab_libs = f"{arcdir_prefab_module}/libs/android.{android_abi}"
                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath=f"{arcdir_prefab_libs}/{library.name}", path=library, time=self.arc_time))
                        archive_file_tree.add_file(NodeInArchive.from_text(arcpath=f"{arcdir_prefab_libs}/abi.json", text=self._get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=library.suffix == ".so"), time=self.arc_time))

                    if not module_data_added:
                        library_name = None
                        if module_info["type"] == "library":
                            library_name = Path(module_info["library"]).stem.removeprefix("lib")
                        export_libraries = module_info.get("export-libraries", [])
                        archive_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_module, "module.json"), text=self._get_prefab_module_json_text(library_name=library_name, export_libraries=export_libraries), time=self.arc_time))
                        arcdir_prefab_include = f"prefab/modules/{module_name}/include"
                        if "includes" in module_info:
                            archive_file_tree.add_file_mapping(arc_dir=arcdir_prefab_include, file_mapping=module_info["includes"], file_mapping_root=install_dir, context=self.get_context(), time=self.arc_time)
                        else:
                            archive_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_include, ".keep"), text="\n", time=self.arc_time))
                module_data_added = True

                if not java_jars_added:
                    java_jars_added = True
                    if "jars" in self.release_info["android"]:
                        classes_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["classes"], context=self.get_context())
                        sources_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["sources"], context=self.get_context())
                        doc_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["doc"], context=self.get_context())
                        assert classes_jar_path.is_file(), f"CMake should have compiled the java sources and archived them into a JAR ({classes_jar_path})"
                        assert sources_jar_path.is_file(), f"CMake should have archived the java sources into a JAR ({sources_jar_path})"
                        assert doc_jar_path.is_file(), f"CMake should have archived javadoc into a JAR ({doc_jar_path})"

                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes.jar", path=classes_jar_path, time=self.arc_time))
                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-sources.jar", path=sources_jar_path, time=self.arc_time))
                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-doc.jar", path=doc_jar_path, time=self.arc_time))

        assert ("jars" in self.release_info["android"] and java_jars_added) or "jars" not in self.release_info["android"], "Must have archived java JAR archives"

        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)

        archive_file_tree.add_file(NodeInArchive.from_text(arcpath="prefab/prefab.json", text=self._get_prefab_json_text(), time=self.arc_time))
        archive_file_tree.add_file(NodeInArchive.from_text(arcpath="AndroidManifest.xml", text=self._get_android_manifest_text(), time=self.arc_time))

        with Archiver(zip_path=aar_path) as archiver:
            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
            archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time)
        self.artifacts["android-aar"] = aar_path

    def download_dependencies(self):
        shutil.rmtree(self.deps_path, ignore_errors=True)
        self.deps_path.mkdir(parents=True)

        if self.github:
            with open(os.environ["GITHUB_OUTPUT"], "a") as f:
                f.write(f"dep-path={self.deps_path.absolute()}\n")

        for dep, depinfo in self.release_info.get("dependencies", {}).items():
            startswith = depinfo["startswith"]
            dep_repo = depinfo["repo"]
            # FIXME: dropped "--exclude-pre-releases"
            dep_string_data = self.executer.check_output(["gh", "-R", dep_repo, "release", "list", "--exclude-drafts", "--json", "name,createdAt,tagName", "--jq", f'[.[]|select(.name|startswith("{startswith}"))]|max_by(.createdAt)']).strip()
            dep_data = json.loads(dep_string_data)
            dep_tag = dep_data["tagName"]
            dep_version = dep_data["name"]
            logger.info("Download dependency %s version %s (tag=%s) ", dep, dep_version, dep_tag)
            self.executer.run(["gh", "-R", dep_repo, "release", "download", dep_tag], cwd=self.deps_path)
            if self.github:
                with open(os.environ["GITHUB_OUTPUT"], "a") as f:
                    f.write(f"dep-{dep.lower()}-version={dep_version}\n")

    def verify_dependencies(self):
        for dep, depinfo in self.release_info.get("dependencies", {}).items():
            if "mingw" in self.release_info:
                mingw_matches = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
                assert len(mingw_matches) == 1, f"Exactly one archive matches mingw {dep} dependency: {mingw_matches}"
            if "dmg" in self.release_info:
                dmg_matches = glob.glob(self.release_info["dmg"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
                assert len(dmg_matches) == 1, f"Exactly one archive matches dmg {dep} dependency: {dmg_matches}"
            if "msvc" in self.release_info:
                msvc_matches = glob.glob(self.release_info["msvc"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
                assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}"
            if "android" in self.release_info:
                android_matches = glob.glob(self.release_info["android"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
                assert len(android_matches) == 1, f"Exactly one archive matches android {dep} dependency: {android_matches}"

    @staticmethod
    def _arch_to_vs_platform(arch: str, configuration: str="Release") -> VsArchPlatformConfig:
        ARCH_TO_VS_PLATFORM = {
            "x86": VsArchPlatformConfig(arch="x86", platform="Win32", configuration=configuration),
            "x64": VsArchPlatformConfig(arch="x64", platform="x64", configuration=configuration),
            "arm64": VsArchPlatformConfig(arch="arm64", platform="ARM64", configuration=configuration),
        }
        return ARCH_TO_VS_PLATFORM[arch]

    def build_msvc(self):
        with self.section_printer.group("Find Visual Studio"):
            vs = VisualStudio(executer=self.executer)
        for arch in self.release_info["msvc"].get("msbuild", {}).get("archs", []):
            self._build_msvc_msbuild(arch_platform=self._arch_to_vs_platform(arch=arch), vs=vs)
        if "cmake" in self.release_info["msvc"]:
            deps_path = self.root / "msvc-deps"
            shutil.rmtree(deps_path, ignore_errors=True)
            dep_roots = []
            for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items():
                dep_extract_path = deps_path / f"extract-{dep}"
                msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
                with zipfile.ZipFile(msvc_zip, "r") as zf:
                    zf.extractall(dep_extract_path)
                contents_msvc_zip = glob.glob(str(dep_extract_path / "*"))
                assert len(contents_msvc_zip) == 1, f"There must be exactly one root item in the root directory of {dep}"
                dep_roots.append(contents_msvc_zip[0])

            for arch in self.release_info["msvc"].get("cmake", {}).get("archs", []):
                self._build_msvc_cmake(arch_platform=self._arch_to_vs_platform(arch=arch), dep_roots=dep_roots)
        with self.section_printer.group("Create SDL VC development zip"):
            self._build_msvc_devel()

    def _build_msvc_msbuild(self, arch_platform: VsArchPlatformConfig, vs: VisualStudio):
        platform_context = self.get_context(arch_platform.extra_context())
        for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items():
            msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]

            src_globs = [configure_text(instr["src"], context=platform_context) for instr in depinfo["copy"]]
            with zipfile.ZipFile(msvc_zip, "r") as zf:
                for member in zf.namelist():
                    member_path = "/".join(Path(member).parts[1:])
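                    # (member_path is the member with the zip's top-level directory stripped,
                    # so it can be matched against the configured copy globs)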
1137 for src_i, src_glob in enumerate(src_globs):
1138 if fnmatch.fnmatch(member_path, src_glob):
1139 dst = (self.root / configure_text(depinfo["copy"][src_i]["dst"], context=platform_context)).resolve() / Path(member_path).name
1140 zip_data = zf.read(member)
1141 if dst.exists():
1142 identical = False
1143 if dst.is_file():
1144 orig_bytes = dst.read_bytes()
1145 if orig_bytes == zip_data:
1146 identical = True
1147 if not identical:
1148 logger.warning("Extracting dependency %s, will cause %s to be overwritten", dep, dst)
1149 if not self.overwrite:
1150 raise RuntimeError("Run with --overwrite to allow overwriting")
1151 logger.debug("Extracting %s -> %s", member, dst)
1152
1153 dst.parent.mkdir(exist_ok=True, parents=True)
1154 dst.write_bytes(zip_data)
1155
1156 prebuilt_paths = set(self.root / full_prebuilt_path for prebuilt_path in self.release_info["msvc"]["msbuild"].get("prebuilt", []) for full_prebuilt_path in glob.glob(configure_text(prebuilt_path, context=platform_context), root_dir=self.root))
1157 msbuild_paths = set(self.root / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["msbuild"]["files-lib"], self.release_info["msvc"]["msbuild"]["files-devel"]) for files_list in file_mapping.values() for f in files_list)
1158 assert prebuilt_paths.issubset(msbuild_paths), f"msvc.msbuild.prebuilt must be a subset of (msvc.msbuild.files-lib, msvc.msbuild.files-devel)"
1159 built_paths = msbuild_paths.difference(prebuilt_paths)
1160 logger.info("MSbuild builds these files, to be included in the package: %s", built_paths)
1161 if not self.fast:
1162 for b in built_paths:
1163 b.unlink(missing_ok=True)
1164
1165 rel_projects: list[str] = self.release_info["msvc"]["msbuild"]["projects"]
1166 projects = list(self.root / p for p in rel_projects)
1167
1168 directory_build_props_src_relpath = self.release_info["msvc"]["msbuild"].get("directory-build-props")
1169 for project in projects:
1170 dir_b_props = project.parent / "Directory.Build.props"
1171 dir_b_props.unlink(missing_ok = True)
1172 if directory_build_props_src_relpath:
1173 src = self.root / directory_build_props_src_relpath
1174 logger.debug("Copying %s -> %s", src, dir_b_props)
1175 shutil.copy(src=src, dst=dir_b_props)
1176
1177 with self.section_printer.group(f"Build {arch_platform.arch} VS binary"):
1178 vs.build(arch_platform=arch_platform, projects=projects)
1179
1180 if self.dry:
1181 for b in built_paths:
1182 b.parent.mkdir(parents=True, exist_ok=True)
1183 b.touch()
1184
1185 for b in built_paths:
1186 assert b.is_file(), f"{b} has not been created"
1187 b.parent.mkdir(parents=True, exist_ok=True)
1188 b.touch()
1189
1190 zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
1191 zip_path.unlink(missing_ok=True)
1192
1193 logger.info("Collecting files...")
1194 archive_file_tree = ArchiveFileTree()
1195 archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["msbuild"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
1196 archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
1197
1198 logger.info("Writing to %s", zip_path)
1199 with Archiver(zip_path=zip_path) as archiver:
1200 arc_root = f""
1201 archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver)
1202 archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
1203 self.artifacts[f"VC-{arch_platform.arch}"] = zip_path
1204
1205 for p in built_paths:
1206 assert p.is_file(), f"{p} should exist"
1207
1208 def _arch_platform_to_build_path(self, arch_platform: VsArchPlatformConfig) -> Path:
1209 return self.root / f"build-vs-{arch_platform.arch}"
1210
1211 def _arch_platform_to_install_path(self, arch_platform: VsArchPlatformConfig) -> Path:
1212 return self._arch_platform_to_build_path(arch_platform) / "prefix"
1213
1214 def _build_msvc_cmake(self, arch_platform: VsArchPlatformConfig, dep_roots: list[Path]):
1215 build_path = self._arch_platform_to_build_path(arch_platform)
1216 install_path = self._arch_platform_to_install_path(arch_platform)
1217 platform_context = self.get_context(extra_context=arch_platform.extra_context())
1218
1219 build_type = "Release"
1220
1221 built_paths = set(install_path / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["cmake"]["files-lib"], self.release_info["msvc"]["cmake"]["files-devel"]) for files_list in file_mapping.values() for f in files_list)
1222 logger.info("CMake builds these files, to be included in the package: %s", built_paths)
1223 if not self.fast:
1224 for b in built_paths:
1225 b.unlink(missing_ok=True)
1226
1227 shutil.rmtree(install_path, ignore_errors=True)
1228 build_path.mkdir(parents=True, exist_ok=True)
1229 with self.section_printer.group(f"Configure VC CMake project for {arch_platform.arch}"):
1230 self.executer.run([
1231 "cmake", "-S", str(self.root), "-B", str(build_path),
1232 "-A", arch_platform.platform,
1233 "-DCMAKE_INSTALL_BINDIR=bin",
1234 "-DCMAKE_INSTALL_DATAROOTDIR=share",
1235 "-DCMAKE_INSTALL_INCLUDEDIR=include",
1236 "-DCMAKE_INSTALL_LIBDIR=lib",
1237 f"-DCMAKE_BUILD_TYPE={build_type}",
1238 f"-DCMAKE_INSTALL_PREFIX={install_path}",
1239 # MSVC debug information format flags are selected by an abstraction
1240 "-DCMAKE_POLICY_DEFAULT_CMP0141=NEW",
1241 # MSVC debug information format
1242 "-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=ProgramDatabase",
1243 # Linker flags for executables
1244 "-DCMAKE_EXE_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF",
1245 # Linker flag for shared libraries
1246 "-DCMAKE_SHARED_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF",
1247 # MSVC runtime library flags are selected by an abstraction
1248 "-DCMAKE_POLICY_DEFAULT_CMP0091=NEW",
1249 # Use statically linked runtime (-MT) (ideally, should be "MultiThreaded$<$<CONFIG:Debug>:Debug>")
1250 "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
1251 f"-DCMAKE_PREFIX_PATH={';'.join(str(s) for s in dep_roots)}",
1252 ] + self.release_info["msvc"]["cmake"]["args"] + ([] if self.fast else ["--fresh"]))
1253
1254 with self.section_printer.group(f"Build VC CMake project for {arch_platform.arch}"):
1255 self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type])
1256 with self.section_printer.group(f"Install VC CMake project for {arch_platform.arch}"):
1257 self.executer.run(["cmake", "--install", str(build_path), "--config", build_type])
1258
1259 if self.dry:
1260 for b in built_paths:
1261 b.parent.mkdir(parents=True, exist_ok=True)
1262 b.touch()
1263
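        # Package the runtime binaries: files installed by CMake plus the extra
        # "files-lib" mapping rooted at the source tree.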
1264 zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
1265 zip_path.unlink(missing_ok=True)
1266
1267 logger.info("Collecting files...")
1268 archive_file_tree = ArchiveFileTree()
1269 archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["cmake"]["files-lib"], file_mapping_root=install_path, context=platform_context, time=self.arc_time)
1270 archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
1271
1272 logger.info("Creating %s", zip_path)
1273 with Archiver(zip_path=zip_path) as archiver:
1274 arc_root = f""
1275 archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver)
1276 archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
1277
1278 for p in built_paths:
1279 assert p.is_file(), f"{p} should exist"
1280
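    # Build the -devel archive: per-architecture devel files from the msbuild
    # and/or cmake builds, plus the common "files-devel" mapping, all under a
    # single <project>-<version> root directory.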
1281 def _build_msvc_devel(self) -> None:
1282 zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip"
1283 arc_root = f"{self.project}-{self.version}"
1284
        def copy_files_devel(ctx):
            archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["files-devel"], file_mapping_root=self.root, context=ctx, time=self.arc_time)

1289 logger.info("Collecting files...")
1290 archive_file_tree = ArchiveFileTree()
1291 if "msbuild" in self.release_info["msvc"]:
1292 for arch in self.release_info["msvc"]["msbuild"]["archs"]:
1293 arch_platform = self._arch_to_vs_platform(arch=arch)
1294 platform_context = self.get_context(arch_platform.extra_context())
1295 archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["msbuild"]["files-devel"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
1296 copy_files_devel(ctx=platform_context)
1297 if "cmake" in self.release_info["msvc"]:
1298 for arch in self.release_info["msvc"]["cmake"]["archs"]:
1299 arch_platform = self._arch_to_vs_platform(arch=arch)
1300 platform_context = self.get_context(arch_platform.extra_context())
1301 archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["cmake"]["files-devel"], file_mapping_root=self._arch_platform_to_install_path(arch_platform), context=platform_context, time=self.arc_time)
1302 copy_files_devel(ctx=platform_context)
1303
1304 with Archiver(zip_path=zip_path) as archiver:
1305 archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
1306 archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
1307 self.artifacts["VC-devel"] = zip_path
1308
1309 @classmethod
1310 def extract_sdl_version(cls, root: Path, release_info: dict) -> str:
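        """Extract the project version using the file and regexes configured in release-info.json."""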
1311 with open(root / release_info["version"]["file"], "r") as f:
1312 text = f.read()
1313 major = next(re.finditer(release_info["version"]["re_major"], text, flags=re.M)).group(1)
1314 minor = next(re.finditer(release_info["version"]["re_minor"], text, flags=re.M)).group(1)
1315 micro = next(re.finditer(release_info["version"]["re_micro"], text, flags=re.M)).group(1)
1316 return f"{major}.{minor}.{micro}"
1317
1318
1319def main(argv=None) -> int:
    if sys.version_info < (3, 11):
        logger.error("This script requires at least Python 3.11")
        return 1
1323
1324 parser = argparse.ArgumentParser(allow_abbrev=False, description="Create SDL release artifacts")
1325 parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of project")
1326 parser.add_argument("--release-info", metavar="JSON", dest="path_release_info", type=Path, default=Path(__file__).absolute().parent / "release-info.json", help="Path of release-info.json")
1327 parser.add_argument("--dependency-folder", metavar="FOLDER", dest="deps_path", type=Path, default="deps", help="Directory containing pre-built archives of dependencies (will be removed when downloading archives)")
1328 parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory")
1329 parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner")
1330 parser.add_argument("--commit", default="HEAD", help="Git commit/tag of which a release should be created")
1331 parser.add_argument("--actions", choices=["download", "source", "android", "mingw", "msvc", "dmg"], required=True, nargs="+", dest="actions", help="What to do?")
1332 parser.set_defaults(loglevel=logging.INFO)
1333 parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year")
1334 parser.add_argument('--android-api', type=int, dest="android_api", help="Android API version")
1335 parser.add_argument('--android-home', dest="android_home", default=os.environ.get("ANDROID_HOME"), help="Android Home folder")
1336 parser.add_argument('--android-ndk-home', dest="android_ndk_home", default=os.environ.get("ANDROID_NDK_HOME"), help="Android NDK Home folder")
1337 parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator")
1338 parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information")
1339 parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything")
1340 parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree")
1341 parser.add_argument('--overwrite', action='store_true', dest="overwrite", help="Allow potentially overwriting other projects")
1342 parser.add_argument('--fast', action='store_true', dest="fast", help="Don't do a rebuild")
1343
1344 args = parser.parse_args(argv)
1345 logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s')
1346 args.deps_path = args.deps_path.absolute()
1347 args.dist_path = args.dist_path.absolute()
1348 args.root = args.root.absolute()
1350 if args.dry:
1351 args.dist_path = args.dist_path / "dry"
1352
1353 if args.github:
1354 section_printer: SectionPrinter = GitHubSectionPrinter()
1355 else:
1356 section_printer = SectionPrinter()
1357
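    # Outside of GitHub Actions, GITHUB_OUTPUT is normally unset; fall back to a
    # temporary file so local runs with --github still work.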
1358 if args.github and "GITHUB_OUTPUT" not in os.environ:
1359 os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt"
1360
1361 executer = Executer(root=args.root, dry=args.dry)
1362
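    # A source archive contains a .git-hash file (and REVISION.txt) instead of a
    # git repository; when present, those files override the --commit argument.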
1363 root_git_hash_path = args.root / GIT_HASH_FILENAME
1364 root_is_maybe_archive = root_git_hash_path.is_file()
1365 if root_is_maybe_archive:
1366 logger.warning("%s detected: Building from archive", GIT_HASH_FILENAME)
1367 archive_commit = root_git_hash_path.read_text().strip()
1368 if args.commit != archive_commit:
1369 logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit)
1370 args.commit = archive_commit
1371 revision = (args.root / REVISION_TXT).read_text().strip()
1372 else:
1373 args.commit = executer.check_output(["git", "rev-parse", args.commit], dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").strip()
1374 revision = executer.check_output(["git", "describe", "--always", "--tags", "--long", args.commit], dry_out="preview-3.1.3-96-g9512f2144").strip()
1375 logger.info("Using commit %s", args.commit)
1376
    try:
        with args.path_release_info.open() as f:
            release_info = json.load(f)
    except FileNotFoundError:
        logger.error("Could not find %s", args.path_release_info)
        return 1
1382
1383 releaser = Releaser(
1384 release_info=release_info,
1385 commit=args.commit,
1386 revision=revision,
1387 root=args.root,
1388 dist_path=args.dist_path,
1389 executer=executer,
1390 section_printer=section_printer,
1391 cmake_generator=args.cmake_generator,
1392 deps_path=args.deps_path,
1393 overwrite=args.overwrite,
1394 github=args.github,
1395 fast=args.fast,
1396 )
1397
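    # Refuse to build from a dirty git tree unless --force is given: artifacts
    # built from uncommitted changes must not be published.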
1398 if root_is_maybe_archive:
1399 logger.warning("Building from archive. Skipping clean git tree check.")
1400 else:
1401 porcelain_status = executer.check_output(["git", "status", "--ignored", "--porcelain"], dry_out="\n").strip()
1402 if porcelain_status:
1403 print(porcelain_status)
1404 logger.warning("The tree is dirty! Do not publish any generated artifacts!")
1405 if not args.force:
1406 raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.")
1407
1408 if args.fast:
1409 logger.warning("Doing fast build! Do not publish generated artifacts!")
1410
1411 with section_printer.group("Arguments"):
1412 print(f"project = {releaser.project}")
1413 print(f"version = {releaser.version}")
1414 print(f"revision = {revision}")
1415 print(f"commit = {args.commit}")
1416 print(f"out = {args.dist_path}")
1417 print(f"actions = {args.actions}")
1418 print(f"dry = {args.dry}")
1419 print(f"force = {args.force}")
1420 print(f"overwrite = {args.overwrite}")
1421 print(f"cmake_generator = {args.cmake_generator}")
1422
1423 releaser.prepare()
1424
1425 if "download" in args.actions:
1426 releaser.download_dependencies()
1427
1428 if set(args.actions).intersection({"msvc", "mingw", "android"}):
1429 print("Verifying presence of dependencies (run 'download' action to download) ...")
1430 releaser.verify_dependencies()
1431 print("... done")
1432
1433 if "source" in args.actions:
1434 if root_is_maybe_archive:
1435 raise Exception("Cannot build source archive from source archive")
1436 with section_printer.group("Create source archives"):
1437 releaser.create_source_archives()
1438
1439 if "dmg" in args.actions:
1440 if platform.system() != "Darwin" and not args.dry:
1441 parser.error("framework artifact(s) can only be built on Darwin")
1442
1443 releaser.create_dmg()
1444
1445 if "msvc" in args.actions:
1446 if platform.system() != "Windows" and not args.dry:
1447 parser.error("msvc artifact(s) can only be built on Windows")
1448 releaser.build_msvc()
1449
1450 if "mingw" in args.actions:
1451 releaser.create_mingw_archives()
1452
1453 if "android" in args.actions:
1454 if args.android_home is None or not Path(args.android_home).is_dir():
1455 parser.error("Invalid $ANDROID_HOME or --android-home: must be a directory containing the Android SDK")
1456 if args.android_ndk_home is None or not Path(args.android_ndk_home).is_dir():
1457 parser.error("Invalid $ANDROID_NDK_HOME or --android_ndk_home: must be a directory containing the Android NDK")
1458 if args.android_api is None:
1459 with section_printer.group("Detect Android APIS"):
1460 args.android_api = releaser._detect_android_api(android_home=args.android_home)
1461 if args.android_api is None or not (Path(args.android_home) / f"platforms/android-{args.android_api}").is_dir():
1462 parser.error("Invalid --android-api, and/or could not be detected")
1463 with section_printer.group("Android arguments"):
1464 print(f"android_home = {args.android_home}")
1465 print(f"android_ndk_home = {args.android_ndk_home}")
1466 print(f"android_api = {args.android_api}")
1467 releaser.create_android_archives(
1468 android_api=args.android_api,
1469 android_home=args.android_home,
1470 android_ndk_home=args.android_ndk_home,
1471 )
1472 with section_printer.group("Summary"):
1473 print(f"artifacts = {releaser.artifacts}")
1474
1475 if args.github:
1476 with open(os.environ["GITHUB_OUTPUT"], "a") as f:
1477 f.write(f"project={releaser.project}\n")
1478 f.write(f"version={releaser.version}\n")
1479 for k, v in releaser.artifacts.items():
1480 f.write(f"{k}={v.name}\n")
1481 return 0
1482
1483
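# Example invocation (illustrative; the actual script path/name may differ):
#   python build-release.py --actions source msvc --commit HEAD --out dist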
1484if __name__ == "__main__":
1485 raise SystemExit(main())