11#!/usr/bin/env python3
2+ """Parse and compile libtcod and SDL sources for CFFI."""
23from __future__ import annotations
34
5+ import contextlib
46import glob
57import os
68import platform
79import re
810import sys
911from pathlib import Path
10- from typing import Any , Dict , Iterable , Iterator , List , Set , Tuple , Union
12+ from typing import Any , Iterable , Iterator
1113
1214from cffi import FFI
1315
# Directories scanned for C headers to feed to cffi.
HEADER_PARSE_PATHS = ("tcod/", "libtcod/src/libtcod/")
# Header file names which must never be handed to the cffi parser.
HEADER_PARSE_EXCLUDES = ("gl2_ext_.h", "renderer_gl_internal.h", "event.h")

# Interpreter pointer width (e.g. "64bit") and linkage format of this Python.
BIT_SIZE, LINKAGE = platform.architecture()

# Regular expressions to parse the headers for cffi.
RE_COMMENT = re.compile(
    r"\s*/\*.*?\*/|\s*//*?$",
    re.DOTALL | re.MULTILINE,
)
@@ -43,18 +45,18 @@ class ParsedHeader:
4345 """
4446
4547 # Class dictionary of all parsed headers.
46- all_headers : Dict [Path , ParsedHeader ] = {}
48+ all_headers : dict [Path , ParsedHeader ] = {}
4749
4850 def __init__ (self , path : Path ) -> None :
51+ """Initialize and organize a header file."""
4952 self .path = path = path .resolve (True )
5053 directory = path .parent
5154 depends = set ()
52- with open (self .path , "r" , encoding = "utf-8" ) as f :
53- header = f .read ()
55+ header = self .path .read_text (encoding = "utf-8" )
5456 header = RE_COMMENT .sub ("" , header )
5557 header = RE_CPLUSPLUS .sub ("" , header )
5658 for dependency in RE_INCLUDE .findall (header ):
57- depends .add ((directory / dependency ).resolve (True ))
59+ depends .add ((directory / str ( dependency ) ).resolve (True ))
5860 header = RE_PREPROCESSOR .sub ("" , header )
5961 header = RE_TAGS .sub ("" , header )
6062 header = RE_VAFUNC .sub ("" , header )
@@ -63,22 +65,22 @@ def __init__(self, path: Path) -> None:
6365 self .depends = frozenset (depends )
6466 self .all_headers [self .path ] = self
6567
66- def parsed_depends (self ) -> Iterator [" ParsedHeader" ]:
68+ def parsed_depends (self ) -> Iterator [ParsedHeader ]:
6769 """Return dependencies excluding ones that were not loaded."""
6870 for dep in self .depends :
69- try :
71+ with contextlib . suppress ( KeyError ) :
7072 yield self .all_headers [dep ]
71- except KeyError :
72- pass
7373
7474 def __str__ (self ) -> str :
75- return "Parsed harder at '%s'\n Depends on: %s" % (
75+ """Return useful info on this object."""
76+ return "Parsed harder at '{}'\n Depends on: {}" .format (
7677 self .path ,
77- "\n \t " .join (self .depends ),
78+ "\n \t " .join (str ( d ) for d in self .depends ),
7879 )
7980
8081 def __repr__ (self ) -> str :
81- return f"ParsedHeader({ self .path } )"
82+ """Return the representation of this object."""
83+ return f"ParsedHeader({ self .path !r} )"
8284
8385
8486def walk_includes (directory : str ) -> Iterator [ParsedHeader ]:
@@ -93,37 +95,40 @@ def walk_includes(directory: str) -> Iterator[ParsedHeader]:
9395
def resolve_dependencies(
    includes: Iterable[ParsedHeader],
) -> list[ParsedHeader]:
    """Sort headers by their correct include order.

    Headers are emitted only after every dependency that was itself parsed
    has been emitted.  Raises RuntimeError when no progress can be made,
    which indicates a cyclic dependency.
    """
    pending = set(includes)
    done: set[ParsedHeader] = set()
    ordered = []
    while pending:
        progressed = False
        # Resolve within the pass so chains can collapse quickly; `done`
        # grows during iteration exactly like the original set logic.
        for header in pending:
            if frozenset(header.parsed_depends()).issubset(done):
                done.add(header)
                ordered.append(header)
                progressed = True
        if not progressed:
            msg = (
                "Could not resolve header load order."
                "\nPossible cyclic dependency with the unresolved headers:"
                f"\n{pending}"
            )
            raise RuntimeError(msg)
        pending -= done
    return ordered
113117
114118
def parse_includes() -> list[ParsedHeader]:
    """Collect all parsed header files and return them.

    Reads HEADER_PARSE_PATHS and HEADER_PARSE_EXCLUDES.
    """
    collected = [
        header
        for directory in HEADER_PARSE_PATHS
        for header in walk_includes(directory)
    ]
    return resolve_dependencies(collected)
124128
125129
126130def walk_sources (directory : str ) -> Iterator [str ]:
131+ """Iterate over the C sources of a directory recursively."""
127132 for path , _dirs , files in os .walk (directory ):
128133 for source in files :
129134 if source .endswith (".c" ):
@@ -133,21 +138,21 @@ def walk_sources(directory: str) -> Iterator[str]:
includes = parse_includes()

module_name = "tcod._libtcod"

# Header search paths for the C compiler; SDL's own paths come last.
include_dirs: list[str] = [
    ".",
    "libtcod/src/vendor/",
    "libtcod/src/vendor/utf8proc",
    "libtcod/src/vendor/zlib/",
    *build_sdl.include_dirs,
]

# Start from the flags/libraries that the SDL build step resolved.
extra_compile_args: list[str] = list(build_sdl.extra_compile_args)
extra_link_args: list[str] = list(build_sdl.extra_link_args)
sources: list[str] = []

libraries: list[str] = list(build_sdl.libraries)
library_dirs: list[str] = list(build_sdl.library_dirs)
define_macros: list[tuple[str, Any]] = [("Py_LIMITED_API", Py_LIMITED_API)]

sources += walk_sources("tcod/")
sources += walk_sources("libtcod/src/libtcod/")
@@ -173,7 +178,7 @@ def walk_sources(directory: str) -> Iterator[str]:
# Build configuration ("DEBUG" or "RELEASE"), overridable via $TDL_BUILD.
tdl_build = os.environ.get("TDL_BUILD", "RELEASE").upper()

# Per-configuration compile/link flags for MSVC toolchains.
MSVC_CFLAGS = {"DEBUG": ["/Od"], "RELEASE": ["/GL", "/O2", "/GS-", "/wd4996"]}
MSVC_LDFLAGS: dict[str, list[str]] = {"DEBUG": [], "RELEASE": ["/LTCG"]}
177182GCC_CFLAGS = {
178183 "DEBUG" : ["-std=c99" , "-Og" , "-g" , "-fPIC" ],
179184 "RELEASE" : [
@@ -238,7 +243,7 @@ def walk_sources(directory: str) -> Iterator[str]:
238243'''
239244
240245
241- def find_sdl_attrs (prefix : str ) -> Iterator [Tuple [str , Union [ int , str , Any ] ]]:
246+ def find_sdl_attrs (prefix : str ) -> Iterator [tuple [str , int | str | Any ]]:
242247 """Return names and values from `tcod.lib`.
243248
244249 `prefix` is used to filter out which names to copy.
@@ -294,24 +299,22 @@ def parse_sdl_attrs(prefix: str, all_names: list[str] | None) -> tuple[str, str]
294299]
295300
296301
def update_module_all(filename: Path, new_all: str) -> None:
    """Update the __all__ of a file with the constants from new_all.

    Args:
        filename: Path of the Python module to rewrite in-place.
        new_all: Comma-separated, quoted names to splice between the
            constants marker comments.

    Raises:
        RuntimeError: If the marker comments delimiting the constants
            section are missing from the file.
    """
    re_constants_all = re.compile(
        r"(.*# --- From constants.py ---).*(# --- End constants.py ---.*)",
        re.DOTALL,
    )
    match = re_constants_all.match(filename.read_text(encoding="utf-8"))
    if match is None:
        # Explicit raise instead of `assert` so the check survives `python -O`.
        msg = f"Can't determine __all__ subsection in {filename}!"
        raise RuntimeError(msg)
    header, footer = match.groups()
    filename.write_text(f"{header}\n{new_all},\n{footer}", encoding="utf-8")
309312
310313
311314def generate_enums (prefix : str ) -> Iterator [str ]:
312315 """Generate attribute assignments suitable for a Python enum."""
313- for name , value in sorted (find_sdl_attrs (prefix ), key = lambda item : item [1 ]):
314- name = name .split ("_" , 1 )[ 1 ]
316+ for symbol , value in sorted (find_sdl_attrs (prefix ), key = lambda item : item [1 ]):
317+ _ , name = symbol .split ("_" , 1 )
315318 if name .isdigit ():
316319 name = f"N{ name } "
317320 if name in "IOl" : # Handle Flake8 warnings.
@@ -325,7 +328,7 @@ def write_library_constants() -> None:
325328 import tcod .color
326329 from tcod ._libtcod import ffi , lib
327330
328- with open ("tcod/constants.py" , "w" , encoding = "utf-8" ) as f :
331+ with Path ("tcod/constants.py" ). open ( "w" , encoding = "utf-8" ) as f :
329332 all_names = []
330333 f .write (CONSTANT_MODULE_HEADER )
331334 for name in dir (lib ):
@@ -363,10 +366,10 @@ def write_library_constants() -> None:
363366
364367 all_names_merged = ",\n " .join (f'"{ name } "' for name in all_names )
365368 f .write (f"\n __all__ = [\n { all_names_merged } ,\n ]\n " )
366- update_module_all ("tcod/__init__.py" , all_names_merged )
367- update_module_all ("tcod/libtcodpy.py" , all_names_merged )
369+ update_module_all (Path ( "tcod/__init__.py" ) , all_names_merged )
370+ update_module_all (Path ( "tcod/libtcodpy.py" ) , all_names_merged )
368371
369- with open ("tcod/event_constants.py" , "w" , encoding = "utf-8" ) as f :
372+ with Path ("tcod/event_constants.py" ). open ( "w" , encoding = "utf-8" ) as f :
370373 all_names = []
371374 f .write (EVENT_CONSTANT_MODULE_HEADER )
372375 f .write ("\n # --- SDL scancodes ---\n " )
@@ -376,10 +379,10 @@ def write_library_constants() -> None:
376379 f .write (f"""{ parse_sdl_attrs ("SDLK" , None )[0 ]} \n """ )
377380
378381 f .write ("\n # --- SDL keyboard modifiers ---\n " )
379- f .write ("%s \n _REVERSE_MOD_TABLE = %s \n " % parse_sdl_attrs ("KMOD" , all_names ))
382+ f .write ("{} \n _REVERSE_MOD_TABLE = {} \n " . format ( * parse_sdl_attrs ("KMOD" , all_names ) ))
380383
381384 f .write ("\n # --- SDL wheel ---\n " )
382- f .write ("%s \n _REVERSE_WHEEL_TABLE = %s \n " % parse_sdl_attrs ("SDL_MOUSEWHEEL" , all_names ))
385+ f .write ("{} \n _REVERSE_WHEEL_TABLE = {} \n " . format ( * parse_sdl_attrs ("SDL_MOUSEWHEEL" , all_names ) ))
383386 all_names_merged = ",\n " .join (f'"{ name } "' for name in all_names )
384387 f .write (f"\n __all__ = [\n { all_names_merged } ,\n ]\n " )
385388
0 commit comments