diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index af904a567cfb7e..f10aa0828333ec 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -88,7 +88,6 @@ Modules/Setup* @erlend-aasland @AA-Turner @emmatyping Tools/build/regen-configure.sh @AA-Turner # generate-build-details -Tools/build/generate-build-details.py @FFY00 Lib/test/test_build_details.py @FFY00 diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 7f6571ef954576..660e8f946ce780 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -18,7 +18,6 @@ on: - "Tools/build/compute-changes.py" - "Tools/build/consts_getter.py" - "Tools/build/deepfreeze.py" - - "Tools/build/generate-build-details.py" - "Tools/build/generate_levenshtein_examples.py" - "Tools/build/generate_sbom.py" - "Tools/build/generate_stdlib_module_names.py" diff --git a/Lib/sysconfig/_build_details.py b/Lib/sysconfig/_build_details.py new file mode 100644 index 00000000000000..29ec3334b8df48 --- /dev/null +++ b/Lib/sysconfig/_build_details.py @@ -0,0 +1,418 @@ +"""Helper module to generate and manipulate build-details.json data (see PEP 739).""" + +from __future__ import annotations + +import argparse +import dataclasses +import importlib.machinery +import json +import os +import pathlib +import sys +import sysconfig +import typing +import types + +from typing import Any, Callable, ClassVar, Literal, NoDefault, Self, Type + + +# Types + + +class _NamespaceType(typing.Protocol): + __dict__: dict[str, Any] + + +_PathType = str | os.PathLike[str] + + +# Helpers + + +class _FieldTransform[GetT, SetT]: + """Descriptor that transforms values into the desired type. + + The dataclasses API does not provide a way to accept a different types in + the constructor, and convert them to the field type. This is a very common + functionality — eg. accepting str or os.PathLike in pathlib.Path fields. 
+ Such scenarios require us to disable the automatic __init__ generation, + which is not ergonomic, requiring us to write a large amount of boilerplate. + Alternatively, we can disable init inclusion for such fields, and define + extra "init-only" fields to receive the value, allowing us to set the + transformed field value in __post_init__. However, this requires the + __init__ arguments to have a different name than the actual field. + + This descriptor provides a more ergonomic way to achieve this functionality. + It allows defining a custom setter function, which can receive different + types. It's similar to the functionality provided by "property". + + For better ergonomics when dealing with optional types, the transform + function is not called when setting the value to None. + """ + + # XXX: Repurpose typing.NoDefault as our sentinel here. + def __init__(self, fn: Callable[[SetT], GetT], default: GetT | NoDefault = NoDefault) -> None: + self._fn = fn + self._default_value = default + self._value: dict[int, GetT] = {} + + def __set_name__(self, cls: Type[object], name: str) -> None: + self.__name__ = name + + def __get__(self, obj: object | None, cls: Type[object]) -> GetT: + if obj and id(obj) in self._value: + return self._value[id(obj)] + if self._default_value is not NoDefault: + return self._default_value + assert self.__name__, 'missing __set_name__' + raise AttributeError(f'{cls.__name__!r} has no attribute {self.__name__!r}') from None + + def __set__(self, obj: object, value: SetT) -> None: + self._value[id(obj)] = self._fn(value) if value else None + + +# Common data types. 
class VersionInfo(typing.NamedTuple):
    """Structured version number, mirroring sys.version_info."""
    major: int
    minor: int
    micro: int
    releaselevel: Literal['alpha', 'beta', 'candidate', 'final']
    serial: int

    @classmethod
    def from_object(cls, obj: object) -> VersionInfo:
        """Build a VersionInfo from any object exposing the same attributes
        (eg. sys.version_info or sys.implementation.version)."""
        return cls(obj.major, obj.minor, obj.micro, obj.releaselevel, obj.serial)


@dataclasses.dataclass(kw_only=True)
class Implementation:
    """Mirror of the required sys.implementation fields (see PEP 421)."""
    name: str
    # Accepts any version_info-like object; stored as a VersionInfo.
    version: _FieldTransform[VersionInfo, object] = _FieldTransform(VersionInfo.from_object)
    hexversion: int
    cache_tag: str

    @classmethod
    def from_object(cls, obj: _NamespaceType) -> Implementation:
        """Create an Implementation instance from a sys.implementation namespace object.

        The namespace object must define __dict__ (eg. SimpleNamespace instances).

        If the namespace object defines extra fields, a new dataclass type is
        created on-the-fly. This type subclasses Implementation, and defines the
        extra fields.

        :raises TypeError: If *obj* does not define ``__dict__``.
        :raises ValueError: If *obj* lacks any of the required fields.
        """
        try:
            fields_dict = vars(obj)
        except TypeError:
            raise TypeError('object must define __dict__') from None

        obj_fields = set(fields_dict)
        # Keep the declaration order — used for deterministic error messages.
        cls_field_order = [field.name for field in dataclasses.fields(cls)]
        cls_fields = set(cls_field_order)

        if missing := (cls_fields - obj_fields):
            # Fix: dataclasses have no ``_fields`` attribute (that is the
            # NamedTuple API); order by dataclass field declaration instead.
            missing_list_repr = ', '.join(sorted(missing, key=cls_field_order.index))
            raise ValueError(f'{obj} is missing the following fields: {missing_list_repr}')

        if obj_fields == cls_fields:
            return cls(**fields_dict)

        # Extra fields present: synthesize a subclass that declares them, in
        # the order they appear on the source object.
        new_cls = dataclasses.make_dataclass(
            cls_name=f'Implementation{id(obj)}',
            fields=sorted(obj_fields - cls_fields, key=list(obj_fields).index),
            bases=(cls,),
        )
        return new_cls(**fields_dict)


# Base schema data structure definitions.
class SchemaVersion(typing.NamedTuple):
    """Schema version as (major, minor); rendered as 'major.minor' in JSON."""
    major: int
    minor: int

    def __str__(self) -> str:
        return f'{self.major}.{self.minor}'

    def __repr__(self) -> str:
        return f'SchemaVersion({self.major}.{self.minor})'


@dataclasses.dataclass(kw_only=True)
class SchemaBase:
    """Base type for versioned data structures."""

    schema_version: ClassVar[SchemaVersion]

    def __init_subclass__(cls, schema: SchemaVersion):
        # Normalize the ``schema`` class argument (eg. a plain tuple) into a
        # SchemaVersion.  Runs while the class statement executes, ie. before
        # the @dataclass decorator processes the class body.
        cls.schema_version = SchemaVersion(*schema)

    def _validate(self) -> typing.Iterator[Exception]:
        """Yield an error for every nested section with a mismatched schema."""
        # Check that all versioned objects in the dataclass fields are compatible.
        for field in dataclasses.fields(self):
            value = getattr(self, field.name)
            if not isinstance(value, SchemaBase):
                continue
            if value.schema_version != self.schema_version:
                # Fix: ``key`` was undefined here (NameError as soon as an
                # incompatible section was found) — use field.name.
                yield ValueError(
                    f'{self}.{field.name}: {value} has an incompatible schema version '
                    f'{str(value.schema_version)!r} (expected {str(self.schema_version)!r})'
                )

    def __post_init__(self) -> None:
        if validation_excs := list(self._validate()):
            raise ExceptionGroup('Compatibility validation failed', validation_excs)


# Schema definitions - V1


@dataclasses.dataclass(kw_only=True)
class LanguageV1(SchemaBase, schema=(1, 0)):
    """PEP 739 — "language" field (https://peps.python.org/pep-0739/#language)"""
    version: str
    version_info: VersionInfo | None = None


@dataclasses.dataclass(kw_only=True)
class ABIV1(SchemaBase, schema=(1, 0)):
    """PEP 739 — "abi" field (https://peps.python.org/pep-0739/#abi)"""
    flags: tuple[str, ...]
    extension_suffix: str | None = None
    stable_abi_suffix: str | None = None


@dataclasses.dataclass(kw_only=True)
class SuffixesV1(SchemaBase, schema=(1, 0)):
    """PEP 739 — "suffixes" field (https://peps.python.org/pep-0739/#suffixes)"""
    source: tuple[str, ...] | None = None
    bytecode: tuple[str, ...] | None = None
    optimized_bytecode: tuple[str, ...] | None = None
    debug_bytecode: tuple[str, ...] | None = None
    extensions: tuple[str, ...] | None = None


@dataclasses.dataclass(kw_only=True)
class LibpythonV1(SchemaBase, schema=(1, 0)):
    """PEP 739 — "libpython" field (https://peps.python.org/pep-0739/#libpython)"""
    dynamic: _FieldTransform[pathlib.Path | None, _PathType] = _FieldTransform(pathlib.Path, default=None)
    dynamic_stableabi: _FieldTransform[pathlib.Path | None, _PathType] = _FieldTransform(pathlib.Path, default=None)
    static: _FieldTransform[pathlib.Path | None, _PathType] = _FieldTransform(pathlib.Path, default=None)
    link_extensions: bool | None = None


@dataclasses.dataclass(kw_only=True)
class CAPIV1(SchemaBase, schema=(1, 0)):
    """PEP 739 — "capi" field (https://peps.python.org/pep-0739/#capi)"""
    # No descriptor default: _FieldTransform.__get__ raises AttributeError for
    # class access, which dataclasses treat as "field has no default".
    headers: _FieldTransform[pathlib.Path, _PathType] = _FieldTransform(pathlib.Path)
    pkgconfig_path: _FieldTransform[pathlib.Path | None, _PathType] = _FieldTransform(pathlib.Path, default=None)


@dataclasses.dataclass(kw_only=True)
class BuildDetailsV1(SchemaBase, schema=(1, 0)):
    """PEP 739 — root object (https://peps.python.org/pep-0739/#format)"""
    # Instance field shadowing the base ClassVar.  The default comes from
    # __init_subclass__, which sets the class attribute to SchemaVersion(1, 0)
    # before @dataclass reads it as the field default.  Only the root object
    # serializes schema_version.
    schema_version: SchemaVersion

    base_prefix: _FieldTransform[pathlib.Path, _PathType] = _FieldTransform(pathlib.Path)
    base_interpreter: _FieldTransform[pathlib.Path | None, _PathType] = _FieldTransform(pathlib.Path, default=None)
    platform: str

    language: LanguageV1
    implementation: Implementation
    abi: ABIV1 | None = None
    suffixes: SuffixesV1 | None = None
    libpython: LibpythonV1 | None = None
    c_api: CAPIV1 | None = None

    arbitrary_data: SchemaBase | None = None

    def _relocate(self, obj: SchemaBase, origin: pathlib.Path) -> SchemaBase:
        """Return a copy of *obj* with every path field made relative to *origin*.

        NOTE(review): Path.relative_to() raises ValueError when a path does not
        live under *origin* — this assumes all recorded paths are inside
        base_prefix.  TODO confirm for all build layouts.
        """
        changes = {}
        for field in dataclasses.fields(obj):
            if not field.init or field.name == 'base_prefix':
                continue
            value = getattr(obj, field.name)
            if isinstance(value, pathlib.Path):
                changes[field.name] = value.relative_to(origin)
            elif isinstance(value, SchemaBase):
                # Recurse into nested sections.
                changes[field.name] = self._relocate(value, origin)
        # Fix: annotated ``-> None`` before, but a new object is returned.
        return dataclasses.replace(obj, **changes)

    def as_relocatable(self) -> Self:
        """Returns a copy of the object where all the paths are relative to base_prefix."""
        return self._relocate(self, origin=self.base_prefix)

    def _dataclass_asdict(self, items: list[tuple[str, Any]]) -> dict[str, Any]:
        """dict_factory for dataclasses.asdict(): shapes values for JSON output."""
        data = dict(items)
        for key, value in items:
            # Filter out missing values and empty sections.  Fix: keep
            # meaningful booleans — ``link_extensions=False`` was previously
            # dropped by the bare falsy check.
            if value is None or (not value and not isinstance(value, bool)):
                data.pop(key, None)
            # Convert schema_version to string.
            elif isinstance(value, SchemaVersion):
                data[key] = str(value)
            # Convert VersionInfo namedtuple to dict.
            elif isinstance(value, VersionInfo):
                data[key] = value._asdict()
            # Convert paths to string.
            elif isinstance(value, pathlib.Path):
                data[key] = os.fspath(value)
                # Join '.' so that relative paths are formatted as './path'
                # instead of 'path'.
                if not value.is_absolute():
                    data[key] = os.path.join('.', data[key])
        return data

    def as_json(self) -> str:
        """Serialize data as JSON."""
        data = dataclasses.asdict(self, dict_factory=self._dataclass_asdict)
        return json.dumps(data, indent=2)

    @classmethod
    def from_interpreter(cls) -> BuildDetailsV1:
        """Generate the build-details.json data for the running interpreter.

        :raises NotImplementedError: On platforms other than POSIX and Windows.
        """
        # TODO: Abstract data sources, in order to be able to re-use this code
        # for other environments.

        # Fix: use ** expansion — SimpleNamespace(mapping) positionally is only
        # supported on 3.13+, while ** works everywhere and is equivalent.
        default_scheme_paths = types.SimpleNamespace(**{
            key: pathlib.Path(value)
            for key, value in sysconfig.get_paths().items()
        })

        LDLIBRARY = sysconfig.get_config_var('LDLIBRARY')
        LIBRARY = sysconfig.get_config_var('LIBRARY')
        PY3LIBRARY = sysconfig.get_config_var('PY3LIBRARY')
        LIBPYTHON = sysconfig.get_config_var('LIBPYTHON')
        # Fix: keep LIBPC as the raw config value.  pathlib.Path('') is
        # Path('.'), which is truthy, so converting eagerly made the
        # ``if LIBPC:`` check below always succeed.  The descriptor converts
        # to Path on assignment anyway.
        LIBPC = sysconfig.get_config_var('LIBPC')

        LIBDIR = pathlib.Path(sysconfig.get_config_var('LIBDIR'))
        INCLUDEPY = pathlib.Path(sysconfig.get_config_var('INCLUDEPY'))

        interpreter_name = 'python' + sysconfig.get_config_var('LDVERSION') + sysconfig.get_config_var('EXE')

        data = types.SimpleNamespace()

        data.base_prefix = sysconfig.get_config_var('installed_base')
        data.base_interpreter = default_scheme_paths.scripts / interpreter_name
        data.platform = sysconfig.get_platform()
        data.language = LanguageV1(
            version=sysconfig.get_python_version(),
            version_info=VersionInfo.from_object(sys.version_info),
        )
        data.implementation = Implementation.from_object(sys.implementation)
        data.abi = ABIV1(
            flags=tuple(sys.abiflags),
        )
        data.libpython = LibpythonV1()
        data.suffixes = SuffixesV1(
            source=importlib.machinery.SOURCE_SUFFIXES,
            bytecode=importlib.machinery.BYTECODE_SUFFIXES,
            #optimized_bytecode=importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES,
            #debug_bytecode=importlib.machinery.DEBUG_BYTECODE_SUFFIXES,
            extensions=importlib.machinery.EXTENSION_SUFFIXES,
        )
        data.c_api = CAPIV1(
            headers=INCLUDEPY,
        )

        if os.name == 'posix':
            # On POSIX, LIBRARY is always the static library, while LDLIBRARY is the
            # dynamic library if enabled, otherwise it's the static library.
            # If LIBRARY != LDLIBRARY, support for the dynamic library is enabled.
            has_dynamic_library = LDLIBRARY != LIBRARY
            has_static_library = sysconfig.get_config_var('STATIC_LIBPYTHON')
        elif os.name == 'nt':
            # Windows can only use a dynamic library or a static library.
            # If it's using a dynamic library, sys.dllhandle will be set.
            # Static builds on Windows are not really well supported, though.
            # More context: https://github.com/python/cpython/issues/110234
            has_dynamic_library = hasattr(sys, 'dllhandle')
            has_static_library = not has_dynamic_library
        else:
            # Fix: was NotADirectoryError, which has nothing to do with
            # unsupported platforms.
            raise NotImplementedError(f'Unknown platform: {os.name}')

        # On POSIX, EXT_SUFFIX is set regardless if extension modules are supported
        # or not, and on Windows older versions of CPython only set EXT_SUFFIX when
        # extension modules are supported, but newer versions of CPython set it
        # regardless.
        #
        # We only want to set abi.extension_suffix and stable_abi_suffix if
        # extension modules are supported.
        if has_dynamic_library:
            data.abi.extension_suffix = sysconfig.get_config_var('EXT_SUFFIX')

            # EXTENSION_SUFFIXES has been constant for a long time, and currently we
            # don't have a better information source to find the stable ABI suffix.
            for suffix in importlib.machinery.EXTENSION_SUFFIXES:
                if suffix.startswith('.abi'):
                    data.abi.stable_abi_suffix = suffix
                    break

            # Consistency with the script this module replaces: the dynamic
            # library paths are only recorded when a dynamic library exists.
            data.libpython.dynamic = LIBDIR / LDLIBRARY
            # FIXME: Not sure if windows has a different dll for the stable ABI, and
            # even if it does, currently we don't have a way to get its name.
            if PY3LIBRARY:
                data.libpython.dynamic_stableabi = LIBDIR / PY3LIBRARY

        # On POSIX, this is defined by the LIBPYTHON Makefile variable not being
        # empty. On Windows, don't link extensions — LIBPYTHON won't be defined.
        data.libpython.link_extensions = bool(LIBPYTHON)

        if has_static_library:
            data.libpython.static = LIBDIR / LIBRARY

        if LIBPC:
            data.c_api.pkgconfig_path = LIBPC

        return BuildDetailsV1(**vars(data))


# Default alias.
+ + +class _BuildDetailsProtocol(typing.Protocol): + def as_relocatable(self) -> Self: + """Returns a copy of the object where all the paths are relative to base_prefix.""" + return self._relocate(self, origin=self.base_prefix) + + def as_json(self) -> str: + return json.dumps(self, indent=2, object_hook=self._json_object_serializer) + + @classmethod + def from_interpreter(cls) -> SchemaBase: ... + + +BuildDetails: _BuildDetailsProtocol = BuildDetailsV1 + + +# Generation/CLI helpers. + + +def generate(path: _PathType, relative_paths=False) -> None: + """Generates build-details.json for the current interpreter.""" + data = BuildDetails.from_interpreter() + + if relative_paths: + data = data.as_relocatable() + # If relative_paths is enabled, we also make base_prefix relative. + data.base_prefix = data.base_prefix.relative_to(path) + + with open(path, 'w', encoding='utf-8') as f: + f.write(data.as_json() + '\n') + + +def main() -> None: + parser = argparse.ArgumentParser(exit_on_error=False) + parser.add_argument('location') + parser.add_argument( + '--relative-paths', + action='store_true', + help='Whether to specify paths as absolute, or as relative paths to ``base_prefix``.', + ) + + args = parser.parse_args() + + generate(args.location, args.relative_paths) + + +if __name__ == '__main__': + main() diff --git a/Lib/test/test_build_details.py b/Lib/test/test_build_details.py index 30d9c213077ab7..cecb8135d3cfb3 100644 --- a/Lib/test/test_build_details.py +++ b/Lib/test/test_build_details.py @@ -1,4 +1,3 @@ -import importlib import json import os import os.path @@ -6,29 +5,10 @@ import sysconfig import string import unittest -from pathlib import Path +import sysconfig._build_details as build_details from test.support import is_android, is_apple_mobile, is_wasm32 -BASE_PATH = Path( - __file__, # Lib/test/test_build_details.py - '..', # Lib/test - '..', # Lib - '..', # -).resolve() -MODULE_PATH = BASE_PATH / 'Tools' / 'build' / 'generate-build-details.py' - -try: - # 
Import "generate-build-details.py" as "generate_build_details" - spec = importlib.util.spec_from_file_location( - "generate_build_details", MODULE_PATH - ) - generate_build_details = importlib.util.module_from_spec(spec) - sys.modules["generate_build_details"] = generate_build_details - spec.loader.exec_module(generate_build_details) -except (FileNotFoundError, ImportError): - generate_build_details = None - class FormatTestsBase: @property @@ -174,23 +154,19 @@ def test_c_api(self): self.assertTrue(os.path.exists(os.path.join(value['pkgconfig_path'], f'python-{version}.pc'))) -@unittest.skipIf( - generate_build_details is None, - "Failed to import generate-build-details" -) +@unittest.skipIf('_PYTHON_SYSCONFIGDATA_NAME' in os.environ, 'Cross-compiling') @unittest.skipIf(os.name != 'posix', 'Feature only implemented on POSIX right now') @unittest.skipIf(is_wasm32, 'Feature not available on WebAssembly builds') class BuildDetailsRelativePathsTests(unittest.TestCase): @property def build_details_absolute_paths(self): - data = generate_build_details.generate_data(schema_version='1.0') - return json.loads(json.dumps(data)) + json_data = build_details.BuildDetails.from_interpreter().as_json() + return json.loads(json_data) @property def build_details_relative_paths(self): - data = self.build_details_absolute_paths - generate_build_details.make_paths_relative(data, config_path=None) - return data + json_data = build_details.BuildDetails.from_interpreter().as_relocatable().as_json() + return json.loads(json_data) def test_round_trip(self): data_abs_path = self.build_details_absolute_paths diff --git a/Makefile.pre.in b/Makefile.pre.in index 2ce53c6a816212..6a9d9c7af2f81f 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1017,7 +1017,7 @@ pybuilddir.txt: $(PYTHON_FOR_BUILD_DEPS) fi build-details.json: pybuilddir.txt - $(RUNSHARED) $(PYTHON_FOR_BUILD) $(srcdir)/Tools/build/generate-build-details.py `cat pybuilddir.txt`/build-details.json + $(RUNSHARED) 
$(PYTHON_FOR_BUILD) -m sysconfig._build_details `cat pybuilddir.txt`/build-details.json # Build static library $(LIBRARY): $(LIBRARY_OBJS) diff --git a/Tools/build/generate-build-details.py b/Tools/build/generate-build-details.py deleted file mode 100644 index 8272635bc627d6..00000000000000 --- a/Tools/build/generate-build-details.py +++ /dev/null @@ -1,215 +0,0 @@ -"""Generate build-details.json (see PEP 739).""" - -# Script initially imported from: -# https://github.com/FFY00/python-instrospection/blob/main/python_introspection/scripts/generate-build-details.py - -from __future__ import annotations - -import argparse -import collections -import importlib.machinery -import json -import os -import sys -import sysconfig - -TYPE_CHECKING = False -if TYPE_CHECKING: - from typing import Any - - -def version_info_to_dict(obj: sys._version_info) -> dict[str, Any]: - field_names = ('major', 'minor', 'micro', 'releaselevel', 'serial') - return {field: getattr(obj, field) for field in field_names} - - -def get_dict_key(container: dict[str, Any], key: str) -> dict[str, Any]: - for part in key.split('.'): - container = container[part] - return container - - -def generate_data(schema_version: str) -> collections.defaultdict[str, Any]: - """Generate the build-details.json data (PEP 739). - - :param schema_version: The schema version of the data we want to generate. 
- """ - - if schema_version != '1.0': - raise ValueError(f'Unsupported schema_version: {schema_version}') - - data: collections.defaultdict[str, Any] = collections.defaultdict( - lambda: collections.defaultdict(dict), - ) - - data['schema_version'] = schema_version - - data['base_prefix'] = sysconfig.get_config_var('installed_base') - #data['base_interpreter'] = sys._base_executable - data['base_interpreter'] = os.path.join( - sysconfig.get_path('scripts'), - "python" - + sysconfig.get_config_var('LDVERSION') - + sysconfig.get_config_var('EXE'), - ) - data['platform'] = sysconfig.get_platform() - - data['language']['version'] = sysconfig.get_python_version() - data['language']['version_info'] = version_info_to_dict(sys.version_info) - - data['implementation'] = vars(sys.implementation).copy() - data['implementation']['version'] = version_info_to_dict(sys.implementation.version) - # Fix cross-compilation - if '_multiarch' in data['implementation']: - data['implementation']['_multiarch'] = sysconfig.get_config_var('MULTIARCH') - - data['abi']['flags'] = list(sys.abiflags) - - data['suffixes']['source'] = importlib.machinery.SOURCE_SUFFIXES - data['suffixes']['bytecode'] = importlib.machinery.BYTECODE_SUFFIXES - #data['suffixes']['optimized_bytecode'] = importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES - #data['suffixes']['debug_bytecode'] = importlib.machinery.DEBUG_BYTECODE_SUFFIXES - data['suffixes']['extensions'] = importlib.machinery.EXTENSION_SUFFIXES - - LIBDIR = sysconfig.get_config_var('LIBDIR') - LDLIBRARY = sysconfig.get_config_var('LDLIBRARY') - LIBRARY = sysconfig.get_config_var('LIBRARY') - PY3LIBRARY = sysconfig.get_config_var('PY3LIBRARY') - LIBPYTHON = sysconfig.get_config_var('LIBPYTHON') - LIBPC = sysconfig.get_config_var('LIBPC') - INCLUDEPY = sysconfig.get_config_var('INCLUDEPY') - - if os.name == 'posix': - # On POSIX, LIBRARY is always the static library, while LDLIBRARY is the - # dynamic library if enabled, otherwise it's the static library. 
- # If LIBRARY != LDLIBRARY, support for the dynamic library is enabled. - has_dynamic_library = LDLIBRARY != LIBRARY - has_static_library = sysconfig.get_config_var('STATIC_LIBPYTHON') - elif os.name == 'nt': - # Windows can only use a dynamic library or a static library. - # If it's using a dynamic library, sys.dllhandle will be set. - # Static builds on Windows are not really well supported, though. - # More context: https://github.com/python/cpython/issues/110234 - has_dynamic_library = hasattr(sys, 'dllhandle') - has_static_library = not has_dynamic_library - else: - raise NotADirectoryError(f'Unknown platform: {os.name}') - - # On POSIX, EXT_SUFFIX is set regardless if extension modules are supported - # or not, and on Windows older versions of CPython only set EXT_SUFFIX when - # extension modules are supported, but newer versions of CPython set it - # regardless. - # - # We only want to set abi.extension_suffix and stable_abi_suffix if - # extension modules are supported. - if has_dynamic_library: - data['abi']['extension_suffix'] = sysconfig.get_config_var('EXT_SUFFIX') - - # EXTENSION_SUFFIXES has been constant for a long time, and currently we - # don't have a better information source to find the stable ABI suffix. - for suffix in importlib.machinery.EXTENSION_SUFFIXES: - if suffix.startswith('.abi'): - data['abi']['stable_abi_suffix'] = suffix - break - - data['libpython']['dynamic'] = os.path.join(LIBDIR, LDLIBRARY) - # FIXME: Not sure if windows has a different dll for the stable ABI, and - # even if it does, currently we don't have a way to get its name. - if PY3LIBRARY: - data['libpython']['dynamic_stableabi'] = os.path.join(LIBDIR, PY3LIBRARY) - - # Os POSIX, this is defined by the LIBPYTHON Makefile variable not being - # empty. 
On Windows, don't link extensions — LIBPYTHON won't be defined, - data['libpython']['link_extensions'] = bool(LIBPYTHON) - - if has_static_library: - data['libpython']['static'] = os.path.join(LIBDIR, LIBRARY) - - data['c_api']['headers'] = INCLUDEPY - if LIBPC: - data['c_api']['pkgconfig_path'] = LIBPC - - return data - - -def make_paths_relative(data: dict[str, Any], config_path: str | None = None) -> None: - # Make base_prefix relative to the config_path directory - if config_path: - data['base_prefix'] = relative_path(data['base_prefix'], - os.path.dirname(config_path)) - base_prefix = data['base_prefix'] - - # Update path values to make them relative to base_prefix - PATH_KEYS = ( - 'base_interpreter', - 'libpython.dynamic', - 'libpython.dynamic_stableabi', - 'libpython.static', - 'c_api.headers', - 'c_api.pkgconfig_path', - ) - for entry in PATH_KEYS: - *parents, child = entry.split('.') - # Get the key container object - try: - container = data - for part in parents: - container = container[part] - if child not in container: - raise KeyError(child) - current_path = container[child] - except KeyError: - continue - # Get the relative path - new_path = relative_path(current_path, base_prefix) - # Join '.' so that the path is formated as './path' instead of 'path' - new_path = os.path.join('.', new_path) - container[child] = new_path - - -def relative_path(path: str, base: str) -> str: - if os.name != 'nt': - return os.path.relpath(path, base) - - # There are no relative paths between drives on Windows. 
- path_drv, _ = os.path.splitdrive(path) - base_drv, _ = os.path.splitdrive(base) - if path_drv.lower() == base_drv.lower(): - return os.path.relpath(path, base) - - return path - - -def main() -> None: - parser = argparse.ArgumentParser(exit_on_error=False) - parser.add_argument('location') - parser.add_argument( - '--schema-version', - default='1.0', - help='Schema version of the build-details.json file to generate.', - ) - parser.add_argument( - '--relative-paths', - action='store_true', - help='Whether to specify paths as absolute, or as relative paths to ``base_prefix``.', - ) - parser.add_argument( - '--config-file-path', - default=None, - help='If specified, ``base_prefix`` will be set as a relative path to the given config file path.', - ) - - args = parser.parse_args() - - data = generate_data(args.schema_version) - if args.relative_paths: - make_paths_relative(data, args.config_file_path) - - json_output = json.dumps(data, indent=2) - with open(args.location, 'w', encoding='utf-8') as f: - f.write(json_output) - f.write('\n') - - -if __name__ == '__main__': - main()