update
This commit is contained in:
@@ -0,0 +1,248 @@
|
||||
"""Extensions to the 'distutils' for large or complex distributions"""
|
||||
# mypy: disable_error_code=override
|
||||
# Command.reinitialize_command has an extra **kw param that distutils doesn't have
|
||||
# Can't disable on the exact line because distutils doesn't exists on Python 3.12
|
||||
# and mypy isn't aware of distutils_hack, causing distutils.core.Command to be Any,
|
||||
# and a [unused-ignore] to be raised on 3.12+
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import functools
|
||||
import os
|
||||
import sys
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Mapping
|
||||
from typing import TYPE_CHECKING, TypeVar, overload
|
||||
|
||||
sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path]) # fmt: skip
|
||||
# workaround for #4476
|
||||
sys.modules.pop('backports', None)
|
||||
|
||||
import _distutils_hack.override # noqa: F401
|
||||
|
||||
from . import logging, monkey
|
||||
from .depends import Require
|
||||
from .discovery import PackageFinder, PEP420PackageFinder
|
||||
from .dist import Distribution
|
||||
from .extension import Extension
|
||||
from .version import __version__ as __version__
|
||||
from .warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
import distutils.core
|
||||
|
||||
__all__ = [
|
||||
'setup',
|
||||
'Distribution',
|
||||
'Command',
|
||||
'Extension',
|
||||
'Require',
|
||||
'SetuptoolsDeprecationWarning',
|
||||
'find_packages',
|
||||
'find_namespace_packages',
|
||||
]
|
||||
|
||||
_CommandT = TypeVar("_CommandT", bound="_Command")
|
||||
|
||||
bootstrap_install_from = None
|
||||
|
||||
find_packages = PackageFinder.find
|
||||
find_namespace_packages = PEP420PackageFinder.find
|
||||
|
||||
|
||||
def _install_setup_requires(attrs):
    """Ensure ``setup_requires`` dependencies from *attrs*/setup.cfg are fetched."""
    # Note: do not use `setuptools.Distribution` directly, as
    # our PEP 517 backend patch `distutils.core.Distribution`.
    class MinimalDistribution(distutils.core.Distribution):
        """
        A minimal version of a distribution for supporting the
        fetch_build_eggs interface.
        """

        def __init__(self, attrs: Mapping[str, object]) -> None:
            # Only the attributes needed for fetching build eggs are kept.
            _incl = 'dependency_links', 'setup_requires'
            filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
            super().__init__(filtered)
            # Prevent accidentally triggering discovery with incomplete set of attrs
            self.set_defaults._disable()

        def _get_project_config_files(self, filenames=None):
            """Ignore ``pyproject.toml``, they are not related to setup_requires"""
            try:
                cfg, _toml = super()._split_standard_project_metadata(filenames)
            except Exception:
                # Best effort: fall back to the raw filenames, still no TOML.
                return filenames, ()
            return cfg, ()

        def finalize_options(self):
            """
            Disable finalize_options to avoid building the working set.
            Ref #2158.
            """

    dist = MinimalDistribution(attrs)

    # Honor setup.cfg's options.
    dist.parse_config_files(ignore_option_errors=True)
    if dist.setup_requires:
        _fetch_build_eggs(dist)
|
||||
|
||||
|
||||
def _fetch_build_eggs(dist: Distribution):
|
||||
try:
|
||||
dist.fetch_build_eggs(dist.setup_requires)
|
||||
except Exception as ex:
|
||||
msg = """
|
||||
It is possible a package already installed in your system
|
||||
contains an version that is invalid according to PEP 440.
|
||||
You can try `pip install --use-pep517` as a workaround for this problem,
|
||||
or rely on a new virtual environment.
|
||||
|
||||
If the problem refers to a package that is not installed yet,
|
||||
please contact that package's maintainers or distributors.
|
||||
"""
|
||||
if "InvalidVersion" in ex.__class__.__name__:
|
||||
if hasattr(ex, "add_note"):
|
||||
ex.add_note(msg) # PEP 678
|
||||
else:
|
||||
dist.announce(f"\n{msg}\n")
|
||||
raise
|
||||
|
||||
|
||||
def setup(**attrs):
    """Run the setuptools-flavoured ``distutils.core.setup`` with *attrs*."""
    # Configure logging handlers before any build logic runs.
    logging.configure()
    # Make sure we have any requirements needed to interpret 'attrs'.
    _install_setup_requires(attrs)
    return distutils.core.setup(**attrs)
|
||||
|
||||
|
||||
setup.__doc__ = distutils.core.setup.__doc__
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962
|
||||
from distutils.core import Command as _Command
|
||||
else:
|
||||
_Command = monkey.get_unpatched(distutils.core.Command)
|
||||
|
||||
|
||||
class Command(_Command):
    """
    Setuptools internal actions are organized using a *command design pattern*.
    This means that each action (or group of closely related actions) executed during
    the build should be implemented as a ``Command`` subclass.

    These commands are abstractions and do not necessarily correspond to a command that
    can (or should) be executed via a terminal, in a CLI fashion (although historically
    they would).

    When creating a new command from scratch, custom defined classes **SHOULD** inherit
    from ``setuptools.Command`` and implement a few mandatory methods.
    Between these mandatory methods, are listed:
    :meth:`initialize_options`, :meth:`finalize_options` and :meth:`run`.

    A useful analogy for command classes is to think of them as subroutines with local
    variables called "options". The options are "declared" in :meth:`initialize_options`
    and "defined" (given their final values, aka "finalized") in :meth:`finalize_options`,
    both of which must be defined by every command class. The "body" of the subroutine,
    (where it does all the work) is the :meth:`run` method.
    Between :meth:`initialize_options` and :meth:`finalize_options`, ``setuptools`` may set
    the values for options/attributes based on user's input (or circumstance),
    which means that the implementation should be careful to not overwrite values in
    :meth:`finalize_options` unless necessary.

    Please note that other commands (or other parts of setuptools) may also overwrite
    the values of the command's options/attributes multiple times during the build
    process.
    Therefore it is important to consistently implement :meth:`initialize_options` and
    :meth:`finalize_options`. For example, all derived attributes (or attributes that
    depend on the value of other attributes) **SHOULD** be recomputed in
    :meth:`finalize_options`.

    When overwriting existing commands, custom defined classes **MUST** abide by the
    same APIs implemented by the original class. They also **SHOULD** inherit from the
    original class.
    """

    # When True, the command accepts trailing positional CLI arguments.
    command_consumes_arguments = False
    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution

    def __init__(self, dist: Distribution, **kw) -> None:
        """
        Construct the command for dist, updating
        vars(self) with any keyword parameters.
        """
        super().__init__(dist)
        vars(self).update(kw)

    @overload
    def reinitialize_command(
        self, command: str, reinit_subcommands: bool = False, **kw
    ) -> _Command: ...
    @overload
    def reinitialize_command(
        self, command: _CommandT, reinit_subcommands: bool = False, **kw
    ) -> _CommandT: ...
    def reinitialize_command(
        self, command: str | _Command, reinit_subcommands: bool = False, **kw
    ) -> _Command:
        # Delegate to distutils, then apply extra keyword overrides —
        # the `**kw` extension is the setuptools-specific addition.
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        vars(cmd).update(kw)
        return cmd  # pyright: ignore[reportReturnType] # pypa/distutils#307

    @abstractmethod
    def initialize_options(self) -> None:
        """
        Set or (reset) all options/attributes/caches used by the command
        to their default values. Note that these values may be overwritten during
        the build.
        """
        raise NotImplementedError

    @abstractmethod
    def finalize_options(self) -> None:
        """
        Set final values for all options/attributes used by the command.
        Most of the time, each option/attribute/cache should only be set if it does not
        have any value yet (e.g. ``if self.attr is None: self.attr = val``).
        """
        raise NotImplementedError

    @abstractmethod
    def run(self) -> None:
        """
        Execute the actions intended by the command.
        (Side effects **SHOULD** only take place when :meth:`run` is executed,
        for example, creating new files or writing to the terminal output).
        """
        raise NotImplementedError
|
||||
|
||||
|
||||
def _find_all_simple(path):
|
||||
"""
|
||||
Find all files under 'path'
|
||||
"""
|
||||
results = (
|
||||
os.path.join(base, file)
|
||||
for base, dirs, files in os.walk(path, followlinks=True)
|
||||
for file in files
|
||||
)
|
||||
return filter(os.path.isfile, results)
|
||||
|
||||
|
||||
def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    paths = _find_all_simple(dir)
    if dir != os.curdir:
        return list(paths)
    # For the current directory, report paths relative to it.
    return [os.path.relpath(p, start=dir) for p in paths]
|
||||
|
||||
|
||||
class sic(str):
    """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)"""
    # Marker subclass: carries no behavior of its own; consumers check the
    # type to know the value must not be normalized.
|
||||
|
||||
|
||||
# Apply monkey patches
|
||||
monkey.patch_all()
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,337 @@
|
||||
"""
|
||||
Handling of Core Metadata for Python packages (including reading and writing).
|
||||
|
||||
See: https://packaging.python.org/en/latest/specifications/core-metadata/
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import stat
|
||||
import textwrap
|
||||
from email import message_from_file
|
||||
from email.message import Message
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
from packaging.markers import Marker
|
||||
from packaging.requirements import Requirement
|
||||
from packaging.utils import canonicalize_name, canonicalize_version
|
||||
from packaging.version import Version
|
||||
|
||||
from . import _normalization, _reqs
|
||||
from ._static import is_static
|
||||
from .warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
from distutils.util import rfc822_escape
|
||||
|
||||
|
||||
def get_metadata_version(self):
    """Return the cached metadata version, defaulting to (and caching) 2.4."""
    if getattr(self, 'metadata_version', None) is None:
        # First access: pin the version we emit by default.
        self.metadata_version = Version('2.4')
    return self.metadata_version
|
||||
|
||||
|
||||
def rfc822_unescape(content: str) -> str:
    """Reverse RFC-822 escaping by removing leading whitespaces from content.

    The first line is stripped of leading whitespace; continuation lines are
    dedented as a group. An empty string is returned unchanged.
    """
    lines = content.splitlines()
    if not lines:
        # ''.splitlines() == [] — indexing lines[0] below would raise.
        return content
    if len(lines) == 1:
        return lines[0].lstrip()
    return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:]))))
|
||||
|
||||
|
||||
def _read_field_from_msg(msg: Message, field: str) -> str | None:
|
||||
"""Read Message header field."""
|
||||
value = msg[field]
|
||||
if value == 'UNKNOWN':
|
||||
return None
|
||||
return value
|
||||
|
||||
|
||||
def _read_field_unescaped_from_msg(msg: Message, field: str) -> str | None:
    """Read Message header field and apply rfc822_unescape."""
    value = _read_field_from_msg(msg, field)
    # Missing/UNKNOWN headers stay None; everything else is unescaped.
    return None if value is None else rfc822_unescape(value)
|
||||
|
||||
|
||||
def _read_list_from_msg(msg: Message, field: str) -> list[str] | None:
|
||||
"""Read Message header field and return all results as list."""
|
||||
values = msg.get_all(field, None)
|
||||
if values == []:
|
||||
return None
|
||||
return values
|
||||
|
||||
|
||||
def _read_payload_from_msg(msg: Message) -> str | None:
|
||||
value = str(msg.get_payload()).strip()
|
||||
if value == 'UNKNOWN' or not value:
|
||||
return None
|
||||
return value
|
||||
|
||||
|
||||
def read_pkg_file(self, file):
    """Reads the metadata values from a file object."""
    msg = message_from_file(file)

    self.metadata_version = Version(msg['metadata-version'])
    self.name = _read_field_from_msg(msg, 'name')
    self.version = _read_field_from_msg(msg, 'version')
    self.description = _read_field_from_msg(msg, 'summary')
    # we are filling author only.
    self.author = _read_field_from_msg(msg, 'author')
    self.maintainer = None
    self.author_email = _read_field_from_msg(msg, 'author-email')
    self.maintainer_email = None
    self.url = _read_field_from_msg(msg, 'home-page')
    self.download_url = _read_field_from_msg(msg, 'download-url')
    self.license = _read_field_unescaped_from_msg(msg, 'license')
    self.license_expression = _read_field_unescaped_from_msg(msg, 'license-expression')

    self.long_description = _read_field_unescaped_from_msg(msg, 'description')
    if self.long_description is None and self.metadata_version >= Version('2.1'):
        # Metadata 2.1+ may carry the long description in the message body
        # instead of a Description header.
        self.long_description = _read_payload_from_msg(msg)
    self.description = _read_field_from_msg(msg, 'summary')

    if 'keywords' in msg:
        # Keywords are serialized as a single comma-separated header.
        self.keywords = _read_field_from_msg(msg, 'keywords').split(',')

    self.platforms = _read_list_from_msg(msg, 'platform')
    self.classifiers = _read_list_from_msg(msg, 'classifier')

    # PEP 314 - these fields only exist in 1.1
    if self.metadata_version == Version('1.1'):
        self.requires = _read_list_from_msg(msg, 'requires')
        self.provides = _read_list_from_msg(msg, 'provides')
        self.obsoletes = _read_list_from_msg(msg, 'obsoletes')
    else:
        self.requires = None
        self.provides = None
        self.obsoletes = None

    self.license_files = _read_list_from_msg(msg, 'license-file')
|
||||
|
||||
|
||||
def single_line(val):
    """
    Quick and dirty validation for Summary pypa/setuptools#1390.
    """
    if '\n' not in val:
        return val
    # TODO: Replace with `raise ValueError("newlines not allowed")`
    # after reviewing #2893.
    SetuptoolsDeprecationWarning.emit(
        "Invalid config.",
        "newlines are not allowed in `summary` and will break in the future",
    )
    # due_date is undefined. Controversial change, there was a lot of push back.
    return val.strip().split('\n')[0]
|
||||
|
||||
|
||||
def write_pkg_info(self, base_dir):
    """Write the PKG-INFO file into the release tree."""
    final = os.path.join(base_dir, 'PKG-INFO')
    temp = ""
    try:
        # Use a temporary file while writing to avoid race conditions
        # (e.g. `importlib.metadata` reading `.egg-info/PKG-INFO`):
        with NamedTemporaryFile("w", encoding="utf-8", dir=base_dir, delete=False) as f:
            temp = f.name
            self.write_pkg_file(f)
        # NamedTemporaryFile is created 0600; open it up for group/other reads.
        mode = stat.S_IMODE(os.lstat(temp).st_mode)
        os.chmod(temp, mode | stat.S_IRGRP | stat.S_IROTH)
        os.replace(temp, final)  # atomic operation.
    finally:
        # Clean the leftover temp file on any failure before the replace.
        if temp and os.path.exists(temp):
            os.remove(temp)
|
||||
|
||||
|
||||
# Based on Python 3.5 version
def write_pkg_file(self, file):  # noqa: C901 # is too complex (14)  # FIXME
    """Write the PKG-INFO format data to a file object."""
    version = self.get_metadata_version()

    def write_field(key, value):
        # Emit one RFC 822-style header line.
        file.write(f"{key}: {value}\n")

    write_field('Metadata-Version', str(version))
    write_field('Name', self.get_name())
    write_field('Version', self.get_version())

    summary = self.get_description()
    if summary:
        # Summary must be a single line; `single_line` warns and truncates.
        write_field('Summary', single_line(summary))

    optional_fields = (
        ('Home-page', 'url'),
        ('Download-URL', 'download_url'),
        ('Author', 'author'),
        ('Author-email', 'author_email'),
        ('Maintainer', 'maintainer'),
        ('Maintainer-email', 'maintainer_email'),
    )

    for field, attr in optional_fields:
        attr_val = getattr(self, attr, None)
        if attr_val is not None:
            write_field(field, attr_val)

    # Prefer the SPDX License-Expression over the legacy License field.
    if license_expression := self.license_expression:
        write_field('License-Expression', license_expression)
    elif license := self.get_license():
        write_field('License', rfc822_escape(license))

    for label, url in self.project_urls.items():
        write_field('Project-URL', f'{label}, {url}')

    keywords = ','.join(self.get_keywords())
    if keywords:
        write_field('Keywords', keywords)

    platforms = self.get_platforms() or []
    for platform in platforms:
        write_field('Platform', platform)

    self._write_list(file, 'Classifier', self.get_classifiers())

    # PEP 314
    self._write_list(file, 'Requires', self.get_requires())
    self._write_list(file, 'Provides', self.get_provides())
    self._write_list(file, 'Obsoletes', self.get_obsoletes())

    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        write_field('Requires-Python', self.python_requires)

    # PEP 566
    if self.long_description_content_type:
        write_field('Description-Content-Type', self.long_description_content_type)

    safe_license_files = map(_safe_license_file, self.license_files or [])
    self._write_list(file, 'License-File', safe_license_files)
    _write_requirements(self, file)

    # Fields whose values were not statically declared are marked Dynamic.
    for field, attr in _POSSIBLE_DYNAMIC_FIELDS.items():
        if (val := getattr(self, attr, None)) and not is_static(val):
            write_field('Dynamic', field)

    long_description = self.get_long_description()
    if long_description:
        # The body (long description) follows the headers after a blank line.
        file.write(f"\n{long_description}")
        if not long_description.endswith("\n"):
            file.write("\n")
|
||||
|
||||
|
||||
def _write_requirements(self, file):
    """Emit Requires-Dist / Provides-Extra headers for the distribution.

    Returns the mapping of normalized ("safe") extra names to the raw
    spelling that produced them.
    """
    for req in _reqs.parse(self.install_requires):
        file.write(f"Requires-Dist: {req}\n")

    processed_extras = {}
    for augmented_extra, reqs in self.extras_require.items():
        # Historically, setuptools allows "augmented extras": `<extra>:<condition>`
        unsafe_extra, _, condition = augmented_extra.partition(":")
        unsafe_extra = unsafe_extra.strip()
        extra = _normalization.safe_extra(unsafe_extra)

        if extra:
            _write_provides_extra(file, processed_extras, extra, unsafe_extra)
        for req in _reqs.parse_strings(reqs):
            # Fold the extra name and any condition into the requirement marker.
            r = _include_extra(req, extra, condition.strip())
            file.write(f"Requires-Dist: {r}\n")

    return processed_extras
|
||||
|
||||
|
||||
def _include_extra(req: str, extra: str, condition: str) -> Requirement:
    """Return *req* with *extra*/*condition* merged into its marker."""
    r = Requirement(req)  # create a fresh object that can be modified
    clauses = []
    if r.marker:
        clauses.append(f"({r.marker})")
    if condition:
        clauses.append(f"({condition})")
    if extra:
        clauses.append(f"extra == {extra!r}")
    r.marker = Marker(" and ".join(clauses))
    return r
|
||||
|
||||
|
||||
def _write_provides_extra(file, processed_extras, safe, unsafe):
|
||||
previous = processed_extras.get(safe)
|
||||
if previous == unsafe:
|
||||
SetuptoolsDeprecationWarning.emit(
|
||||
'Ambiguity during "extra" normalization for dependencies.',
|
||||
f"""
|
||||
{previous!r} and {unsafe!r} normalize to the same value:\n
|
||||
{safe!r}\n
|
||||
In future versions, setuptools might halt the build process.
|
||||
""",
|
||||
see_url="https://peps.python.org/pep-0685/",
|
||||
)
|
||||
else:
|
||||
processed_extras[safe] = unsafe
|
||||
file.write(f"Provides-Extra: {safe}\n")
|
||||
|
||||
|
||||
# from pypa/distutils#244; needed only until that logic is always available
def get_fullname(self):
    """Return the canonical ``<name>-<version>`` identifier for this distribution."""
    return _distribution_fullname(self.get_name(), self.get_version())
|
||||
|
||||
|
||||
def _distribution_fullname(name: str, version: str) -> str:
    """
    >>> _distribution_fullname('setup.tools', '1.0-2')
    'setup_tools-1.0.post2'
    >>> _distribution_fullname('setup-tools', '1.2post2')
    'setup_tools-1.2.post2'
    >>> _distribution_fullname('setup-tools', '1.0-r2')
    'setup_tools-1.0.post2'
    >>> _distribution_fullname('setup.tools', '1.0.post')
    'setup_tools-1.0.post0'
    >>> _distribution_fullname('setup.tools', '1.0+ubuntu-1')
    'setup_tools-1.0+ubuntu.1'
    """
    # Normalize the name per PEP 503, then swap dashes for underscores so the
    # result is filesystem/sdist friendly; normalize the version per PEP 440.
    safe_name = canonicalize_name(name).replace('-', '_')
    safe_version = canonicalize_version(version, strip_trailing_zero=False)
    return f"{safe_name}-{safe_version}"
|
||||
|
||||
|
||||
def _safe_license_file(file):
|
||||
# XXX: Do we need this after the deprecation discussed in #4892, #4896??
|
||||
normalized = os.path.normpath(file).replace(os.sep, "/")
|
||||
if "../" in normalized:
|
||||
return os.path.basename(normalized) # Temporarily restore pre PEP639 behaviour
|
||||
return normalized
|
||||
|
||||
|
||||
_POSSIBLE_DYNAMIC_FIELDS = {
|
||||
# Core Metadata Field x related Distribution attribute
|
||||
"author": "author",
|
||||
"author-email": "author_email",
|
||||
"classifier": "classifiers",
|
||||
"description": "long_description",
|
||||
"description-content-type": "long_description_content_type",
|
||||
"download-url": "download_url",
|
||||
"home-page": "url",
|
||||
"keywords": "keywords",
|
||||
"license": "license",
|
||||
# XXX: License-File is complicated because the user gives globs that are expanded
|
||||
# during the build. Without special handling it is likely always
|
||||
# marked as Dynamic, which is an acceptable outcome according to:
|
||||
# https://github.com/pypa/setuptools/issues/4629#issuecomment-2331233677
|
||||
"license-file": "license_files",
|
||||
"license-expression": "license_expression", # PEP 639
|
||||
"maintainer": "maintainer",
|
||||
"maintainer-email": "maintainer_email",
|
||||
"obsoletes": "obsoletes",
|
||||
# "obsoletes-dist": "obsoletes_dist", # NOT USED
|
||||
"platform": "platforms",
|
||||
"project-url": "project_urls",
|
||||
"provides": "provides",
|
||||
# "provides-dist": "provides_dist", # NOT USED
|
||||
"provides-extra": "extras_require",
|
||||
"requires": "requires",
|
||||
"requires-dist": "install_requires",
|
||||
# "requires-external": "requires_external", # NOT USED
|
||||
"requires-python": "python_requires",
|
||||
"summary": "description",
|
||||
# "supported-platform": "supported_platforms", # NOT USED
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
import functools
|
||||
import operator
|
||||
|
||||
import packaging.requirements
|
||||
|
||||
|
||||
# from coherent.build.discovery
def extras_from_dep(dep):
    """Return the set of extra names referenced by *dep*'s environment marker."""
    try:
        markers = packaging.requirements.Requirement(dep).marker._markers
    except AttributeError:
        # No marker on the requirement at all.
        markers = ()
    return {
        marker[2].value
        for marker in markers
        if isinstance(marker, tuple) and marker[0].value == 'extra'
    }
|
||||
|
||||
|
||||
def extras_from_deps(deps):
    """
    >>> extras_from_deps(['requests'])
    set()
    >>> extras_from_deps(['pytest; extra == "test"'])
    {'test'}
    >>> sorted(extras_from_deps([
    ...     'requests',
    ...     'pytest; extra == "test"',
    ...     'pytest-cov; extra == "test"',
    ...     'sphinx; extra=="doc"']))
    ['doc', 'test']
    """
    all_extras = set()
    for dep in deps:
        all_extras |= extras_from_dep(dep)
    return all_extras
|
||||
@@ -0,0 +1,14 @@
|
||||
import importlib
|
||||
import sys
|
||||
|
||||
# This distutils fork tracks the hosting interpreter, so derive the module
# version from sys.version (everything before the first space).
__version__, _, _ = sys.version.partition(' ')


try:
    # Allow Debian and pkgsrc (only) to customize system
    # behavior. Ref pypa/distutils#2 and pypa/distutils#16.
    # This hook is deprecated and no other environments
    # should use it.
    importlib.import_module('_distutils_system_mod')
except ImportError:
    # The hook is optional; its absence is the normal case.
    pass
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,3 @@
|
||||
import logging
|
||||
|
||||
log = logging.getLogger()
|
||||
@@ -0,0 +1,12 @@
|
||||
import importlib
|
||||
import sys
|
||||
|
||||
|
||||
def bypass_compiler_fixup(cmd, args):
    # No-op stand-in for _osx_support.compiler_fixup on non-macOS platforms:
    # returns the compiler command unchanged.
    return cmd


if sys.platform == 'darwin':
    # On macOS, use the stdlib helper that rewrites compiler invocations
    # for the current toolchain.
    compiler_fixup = importlib.import_module('_osx_support').compiler_fixup
else:
    compiler_fixup = bypass_compiler_fixup
|
||||
@@ -0,0 +1,95 @@
|
||||
"""Timestamp comparison of files and groups of files."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import functools
|
||||
import os.path
|
||||
from collections.abc import Callable, Iterable
|
||||
from typing import Literal, TypeVar
|
||||
|
||||
from jaraco.functools import splat
|
||||
|
||||
from .compat.py39 import zip_strict
|
||||
from .errors import DistutilsFileError
|
||||
|
||||
_SourcesT = TypeVar(
|
||||
"_SourcesT", bound="str | bytes | os.PathLike[str] | os.PathLike[bytes]"
|
||||
)
|
||||
_TargetsT = TypeVar(
|
||||
"_TargetsT", bound="str | bytes | os.PathLike[str] | os.PathLike[bytes]"
|
||||
)
|
||||
|
||||
|
||||
def _newer(source, target):
|
||||
return not os.path.exists(target) or (
|
||||
os.path.getmtime(source) > os.path.getmtime(target)
|
||||
)
|
||||
|
||||
|
||||
def newer(
    source: str | bytes | os.PathLike[str] | os.PathLike[bytes],
    target: str | bytes | os.PathLike[str] | os.PathLike[bytes],
) -> bool:
    """
    Is source modified more recently than target.

    Returns True if 'source' is modified more recently than
    'target' or if 'target' does not exist.

    Raises DistutilsFileError if 'source' does not exist.
    """
    if os.path.exists(source):
        return _newer(source, target)
    raise DistutilsFileError(f"file {os.path.abspath(source)!r} does not exist")
|
||||
|
||||
|
||||
def newer_pairwise(
    sources: Iterable[_SourcesT],
    targets: Iterable[_TargetsT],
    newer: Callable[[_SourcesT, _TargetsT], bool] = newer,
) -> tuple[list[_SourcesT], list[_TargetsT]]:
    """
    Filter filenames where sources are newer than targets.

    Walk two filename iterables in parallel, testing if each source is newer
    than its corresponding target. Returns a pair of lists (sources,
    targets) where source is newer than target, according to the semantics
    of 'newer()'.
    """
    selected = [pair for pair in zip_strict(sources, targets) if newer(*pair)]
    if not selected:
        return [], []
    fresh_sources, fresh_targets = zip(*selected)
    return list(fresh_sources), list(fresh_targets)
|
||||
|
||||
|
||||
def newer_group(
    sources: Iterable[str | bytes | os.PathLike[str] | os.PathLike[bytes]],
    target: str | bytes | os.PathLike[str] | os.PathLike[bytes],
    missing: Literal["error", "ignore", "newer"] = "error",
) -> bool:
    """
    Is target out-of-date with respect to any file in sources.

    Return True if 'target' is out-of-date with respect to any file
    listed in 'sources'. In other words, if 'target' exists and is newer
    than every file in 'sources', return False; otherwise return True.
    ``missing`` controls how to handle a missing source file:

    - error (default): allow the ``stat()`` call to fail.
    - ignore: silently disregard any missing source files.
    - newer: treat missing source files as "target out of date". This
      mode is handy in "dry-run" mode: it will pretend to carry out
      commands that wouldn't work because inputs are missing, but
      that doesn't matter because dry-run won't run the commands.
    """

    def missing_as_newer(source):
        # In "newer" mode, a nonexistent source forces a rebuild.
        return missing == 'newer' and not os.path.exists(source)

    # In "ignore" mode, drop nonexistent sources before comparison.
    ignored = os.path.exists if missing == 'ignore' else None
    return not os.path.exists(target) or any(
        missing_as_newer(source) or _newer(source, target)
        for source in filter(ignored, sources)
    )
|
||||
|
||||
|
||||
newer_pairwise_group = functools.partial(newer_pairwise, newer=newer_group)
|
||||
@@ -0,0 +1,16 @@
|
||||
import warnings
|
||||
|
||||
from .compilers.C import msvc
|
||||
|
||||
__all__ = ["MSVCCompiler"]
|
||||
|
||||
MSVCCompiler = msvc.Compiler
|
||||
|
||||
|
||||
def __getattr__(name):
    # Module-level __getattr__ (PEP 562): keep the now-private helper
    # reachable with a warning, and fail loudly for anything else.
    if name == '_get_vc_env':
        warnings.warn(
            "_get_vc_env is private; find an alternative (pypa/distutils#340)"
        )
        return msvc._get_vc_env
    raise AttributeError(name)
|
||||
@@ -0,0 +1,294 @@
|
||||
"""distutils.archive_util
|
||||
|
||||
Utility functions for creating archive files (tarballs, zip files,
|
||||
that sort of thing)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Literal, overload
|
||||
|
||||
try:
|
||||
import zipfile
|
||||
except ImportError:
|
||||
zipfile = None
|
||||
|
||||
|
||||
from ._log import log
|
||||
from .dir_util import mkpath
|
||||
from .errors import DistutilsExecError
|
||||
from .spawn import spawn
|
||||
|
||||
try:
|
||||
from pwd import getpwnam
|
||||
except ImportError:
|
||||
getpwnam = None
|
||||
|
||||
try:
|
||||
from grp import getgrnam
|
||||
except ImportError:
|
||||
getgrnam = None
|
||||
|
||||
|
||||
def _get_gid(name):
|
||||
"""Returns a gid, given a group name."""
|
||||
if getgrnam is None or name is None:
|
||||
return None
|
||||
try:
|
||||
result = getgrnam(name)
|
||||
except KeyError:
|
||||
result = None
|
||||
if result is not None:
|
||||
return result[2]
|
||||
return None
|
||||
|
||||
|
||||
def _get_uid(name):
|
||||
"""Returns an uid, given a user name."""
|
||||
if getpwnam is None or name is None:
|
||||
return None
|
||||
try:
|
||||
result = getpwnam(name)
|
||||
except KeyError:
|
||||
result = None
|
||||
if result is not None:
|
||||
return result[2]
|
||||
return None
|
||||
|
||||
|
||||
def make_tarball(
    base_name: str,
    base_dir: str | os.PathLike[str],
    compress: Literal["gzip", "bzip2", "xz"] | None = "gzip",
    verbose: bool = False,
    dry_run: bool = False,
    owner: str | None = None,
    group: str | None = None,
) -> str:
    """Create a (possibly compressed) tar file of everything under 'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", "xz", or None.
    'owner'/'group' override the ownership recorded for every archive
    member; when omitted the current owner and group are kept.

    The output file is named 'base_name' + ".tar" plus the compression
    extension (".gz", ".bz2" or ".xz").  Returns the output filename.
    """
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz'}
    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: ''}

    if compress is not None and compress not in compress_ext:
        raise ValueError(
            "bad value for 'compress': must be None, 'gzip', 'bzip2', 'xz'"
        )

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _assign_ownership(tarinfo):
        # Applied to every member so the archive carries the requested
        # owner/group instead of the on-disk ones.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        with tarfile.open(archive_name, f'w|{tar_compression[compress]}') as archive:
            archive.add(base_dir, filter=_assign_ownership)

    return archive_name
|
||||
|
||||
|
||||
def make_zipfile(  # noqa: C901
    base_name: str,
    base_dir: str | os.PathLike[str],
    verbose: bool = False,
    dry_run: bool = False,
) -> str:
    """Create a zip file of everything under 'base_dir'.

    The output is named 'base_name' + ".zip".  Uses the 'zipfile'
    module when importable, otherwise shells out to the InfoZIP "zip"
    utility; raises DistutilsExecError when neither is available.
    Returns the name of the output zip file.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    if zipfile is None:
        # No 'zipfile' module: fall back to an external 'zip' command.
        zipoptions = "-r" if verbose else "-rq"
        try:
            spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(
                f"unable to create zip file '{zip_filename}': "
                "could neither import the 'zipfile' module nor "
                "find a standalone zip utility"
            )
        return zip_filename

    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    if not dry_run:
        try:
            archive = zipfile.ZipFile(
                zip_filename, "w", compression=zipfile.ZIP_DEFLATED
            )
        except RuntimeError:
            # Interpreter built without zlib: store members uncompressed.
            archive = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED)

        def _add(path):
            archive.write(path, path)
            log.info("adding '%s'", path)

        with archive:
            if base_dir != os.curdir:
                # Record the root directory entry itself.
                _add(os.path.normpath(os.path.join(base_dir, '')))
            for dirpath, dirnames, filenames in os.walk(base_dir):
                for entry in dirnames:
                    _add(os.path.normpath(os.path.join(dirpath, entry, '')))
                for entry in filenames:
                    full = os.path.normpath(os.path.join(dirpath, entry))
                    if os.path.isfile(full):
                        _add(full)

    return zip_filename
|
||||
|
||||
|
||||
# Registry of supported archive kinds: maps a format name to
# (creation function, extra keyword arguments, human-readable description).
# Note 'ztar' passes compress='compress', which make_tarball rejects.
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
    'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar': (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (make_zipfile, [], "ZIP file"),
}
|
||||
|
||||
|
||||
def check_archive_formats(formats):
    """Return the first format in *formats* that is not a known
    ARCHIVE_FORMATS key, or None when every format is known."""
    unknown = (fmt for fmt in formats if fmt not in ARCHIVE_FORMATS)
    return next(unknown, None)
|
||||
|
||||
|
||||
@overload
def make_archive(
    base_name: str,
    format: str,
    root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
    base_dir: str | None = None,
    verbose: bool = False,
    dry_run: bool = False,
    owner: str | None = None,
    group: str | None = None,
) -> str: ...
@overload
def make_archive(
    base_name: str | os.PathLike[str],
    format: str,
    root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes],
    base_dir: str | None = None,
    verbose: bool = False,
    dry_run: bool = False,
    owner: str | None = None,
    group: str | None = None,
) -> str: ...
def make_archive(
    base_name: str | os.PathLike[str],
    format: str,
    root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
    base_dir: str | None = None,
    verbose: bool = False,
    dry_run: bool = False,
    owner: str | None = None,
    group: str | None = None,
) -> str:
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "gztar",
    "bztar", "xztar", or "ztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.

    Raises ValueError for an unknown 'format'.
    """
    # Validate 'format' *before* any chdir: previously an unknown format
    # raised after os.chdir(root_dir) and outside the restoring 'finally',
    # leaving the process stranded in a changed working directory.
    # (shutil.make_archive validates in the same order.)
    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError(f"unknown archive format '{format}'")

    func = format_info[0]
    kwargs = {'dry_run': dry_run}
    kwargs.update(format_info[1])

    if format != 'zip':
        # Only the tar-based writers understand ownership overrides.
        kwargs['owner'] = owner
        kwargs['group'] = group

    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        # Resolve before the chdir so the archive lands where the caller
        # expects, not relative to root_dir.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        # Always restore the original cwd, even when archiving failed.
        if root_dir is not None:
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
|
||||
@@ -0,0 +1,26 @@
|
||||
from .compat.numpy import (  # noqa: F401
    _default_compilers,
    compiler_class,
)
from .compilers.C import base
from .compilers.C.base import (
    gen_lib_options,
    gen_preprocess_options,
    get_default_compiler,
    new_compiler,
    show_compilers,
)
from .compilers.C.errors import CompileError, LinkError

__all__ = [
    'CompileError',
    'LinkError',
    'gen_lib_options',
    'gen_preprocess_options',
    'get_default_compiler',
    'new_compiler',
    'show_compilers',
]


# Backward-compatible alias for the historical distutils class name.
CCompiler = base.Compiler
|
||||
@@ -0,0 +1,554 @@
|
||||
"""distutils.cmd
|
||||
|
||||
Provides the Command class, the base class for the command classes
|
||||
in the distutils.command package.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Callable, MutableSequence
|
||||
from typing import TYPE_CHECKING, Any, ClassVar, TypeVar, overload
|
||||
|
||||
from . import _modified, archive_util, dir_util, file_util, util
|
||||
from ._log import log
|
||||
from .errors import DistutilsOptionError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# type-only import because of mutual dependence between these classes
|
||||
from distutils.dist import Distribution
|
||||
|
||||
from typing_extensions import TypeVarTuple, Unpack
|
||||
|
||||
_Ts = TypeVarTuple("_Ts")
|
||||
|
||||
_StrPathT = TypeVar("_StrPathT", bound="str | os.PathLike[str]")
|
||||
_BytesPathT = TypeVar("_BytesPathT", bound="bytes | os.PathLike[bytes]")
|
||||
_CommandT = TypeVar("_CommandT", bound="Command")
|
||||
|
||||
|
||||
class Command:
|
||||
"""Abstract base class for defining command classes, the "worker bees"
|
||||
of the Distutils. A useful analogy for command classes is to think of
|
||||
them as subroutines with local variables called "options". The options
|
||||
are "declared" in 'initialize_options()' and "defined" (given their
|
||||
final values, aka "finalized") in 'finalize_options()', both of which
|
||||
must be defined by every command class. The distinction between the
|
||||
two is necessary because option values might come from the outside
|
||||
world (command line, config file, ...), and any options dependent on
|
||||
other options must be computed *after* these outside influences have
|
||||
been processed -- hence 'finalize_options()'. The "body" of the
|
||||
subroutine, where it does all its work based on the values of its
|
||||
options, is the 'run()' method, which must also be implemented by every
|
||||
command class.
|
||||
"""
|
||||
|
||||
# 'sub_commands' formalizes the notion of a "family" of commands,
|
||||
# eg. "install" as the parent with sub-commands "install_lib",
|
||||
# "install_headers", etc. The parent of a family of commands
|
||||
# defines 'sub_commands' as a class attribute; it's a list of
|
||||
# (command_name : string, predicate : unbound_method | string | None)
|
||||
# tuples, where 'predicate' is a method of the parent command that
|
||||
# determines whether the corresponding command is applicable in the
|
||||
# current situation. (Eg. we "install_headers" is only applicable if
|
||||
# we have any C header files to install.) If 'predicate' is None,
|
||||
# that command is always applicable.
|
||||
#
|
||||
# 'sub_commands' is usually defined at the *end* of a class, because
|
||||
# predicates can be unbound methods, so they must already have been
|
||||
# defined. The canonical example is the "install" command.
|
||||
sub_commands: ClassVar[ # Any to work around variance issues
|
||||
list[tuple[str, Callable[[Any], bool] | None]]
|
||||
] = []
|
||||
|
||||
user_options: ClassVar[
|
||||
# Specifying both because list is invariant. Avoids mypy override assignment issues
|
||||
list[tuple[str, str, str]] | list[tuple[str, str | None, str]]
|
||||
] = []
|
||||
|
||||
# -- Creation/initialization methods -------------------------------
|
||||
|
||||
def __init__(self, dist: Distribution) -> None:
|
||||
"""Create and initialize a new Command object. Most importantly,
|
||||
invokes the 'initialize_options()' method, which is the real
|
||||
initializer and depends on the actual command being
|
||||
instantiated.
|
||||
"""
|
||||
# late import because of mutual dependence between these classes
|
||||
from distutils.dist import Distribution
|
||||
|
||||
if not isinstance(dist, Distribution):
|
||||
raise TypeError("dist must be a Distribution instance")
|
||||
if self.__class__ is Command:
|
||||
raise RuntimeError("Command is an abstract class")
|
||||
|
||||
self.distribution = dist
|
||||
self.initialize_options()
|
||||
|
||||
# Per-command versions of the global flags, so that the user can
|
||||
# customize Distutils' behaviour command-by-command and let some
|
||||
# commands fall back on the Distribution's behaviour. None means
|
||||
# "not defined, check self.distribution's copy", while 0 or 1 mean
|
||||
# false and true (duh). Note that this means figuring out the real
|
||||
# value of each flag is a touch complicated -- hence "self._dry_run"
|
||||
# will be handled by __getattr__, below.
|
||||
# XXX This needs to be fixed.
|
||||
self._dry_run = None
|
||||
|
||||
# verbose is largely ignored, but needs to be set for
|
||||
# backwards compatibility (I think)?
|
||||
self.verbose = dist.verbose
|
||||
|
||||
# Some commands define a 'self.force' option to ignore file
|
||||
# timestamps, but methods defined *here* assume that
|
||||
# 'self.force' exists for all commands. So define it here
|
||||
# just to be safe.
|
||||
self.force = None
|
||||
|
||||
# The 'help' flag is just used for command-line parsing, so
|
||||
# none of that complicated bureaucracy is needed.
|
||||
self.help = False
|
||||
|
||||
# 'finalized' records whether or not 'finalize_options()' has been
|
||||
# called. 'finalize_options()' itself should not pay attention to
|
||||
# this flag: it is the business of 'ensure_finalized()', which
|
||||
# always calls 'finalize_options()', to respect/update it.
|
||||
self.finalized = False
|
||||
|
||||
# XXX A more explicit way to customize dry_run would be better.
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'dry_run':
|
||||
myval = getattr(self, "_" + attr)
|
||||
if myval is None:
|
||||
return getattr(self.distribution, attr)
|
||||
else:
|
||||
return myval
|
||||
else:
|
||||
raise AttributeError(attr)
|
||||
|
||||
def ensure_finalized(self) -> None:
|
||||
if not self.finalized:
|
||||
self.finalize_options()
|
||||
self.finalized = True
|
||||
|
||||
# Subclasses must define:
|
||||
# initialize_options()
|
||||
# provide default values for all options; may be customized by
|
||||
# setup script, by options from config file(s), or by command-line
|
||||
# options
|
||||
# finalize_options()
|
||||
# decide on the final values for all options; this is called
|
||||
# after all possible intervention from the outside world
|
||||
# (command-line, option file, etc.) has been processed
|
||||
# run()
|
||||
# run the command: do whatever it is we're here to do,
|
||||
# controlled by the command's various option values
|
||||
|
||||
@abstractmethod
|
||||
def initialize_options(self) -> None:
|
||||
"""Set default values for all the options that this command
|
||||
supports. Note that these defaults may be overridden by other
|
||||
commands, by the setup script, by config files, or by the
|
||||
command-line. Thus, this is not the place to code dependencies
|
||||
between options; generally, 'initialize_options()' implementations
|
||||
are just a bunch of "self.foo = None" assignments.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
f"abstract method -- subclass {self.__class__} must override"
|
||||
)
|
||||
|
||||
@abstractmethod
|
||||
def finalize_options(self) -> None:
|
||||
"""Set final values for all the options that this command supports.
|
||||
This is always called as late as possible, ie. after any option
|
||||
assignments from the command-line or from other commands have been
|
||||
done. Thus, this is the place to code option dependencies: if
|
||||
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
|
||||
long as 'foo' still has the same value it was assigned in
|
||||
'initialize_options()'.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
f"abstract method -- subclass {self.__class__} must override"
|
||||
)
|
||||
|
||||
def dump_options(self, header=None, indent=""):
|
||||
from distutils.fancy_getopt import longopt_xlate
|
||||
|
||||
if header is None:
|
||||
header = f"command options for '{self.get_command_name()}':"
|
||||
self.announce(indent + header, level=logging.INFO)
|
||||
indent = indent + " "
|
||||
for option, _, _ in self.user_options:
|
||||
option = option.translate(longopt_xlate)
|
||||
if option[-1] == "=":
|
||||
option = option[:-1]
|
||||
value = getattr(self, option)
|
||||
self.announce(indent + f"{option} = {value}", level=logging.INFO)
|
||||
|
||||
@abstractmethod
|
||||
def run(self) -> None:
|
||||
"""A command's raison d'etre: carry out the action it exists to
|
||||
perform, controlled by the options initialized in
|
||||
'initialize_options()', customized by other commands, the setup
|
||||
script, the command-line, and config files, and finalized in
|
||||
'finalize_options()'. All terminal output and filesystem
|
||||
interaction should be done by 'run()'.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
f"abstract method -- subclass {self.__class__} must override"
|
||||
)
|
||||
|
||||
    def announce(self, msg: object, level: int = logging.DEBUG) -> None:
        """Log *msg* at the given logging *level* (default DEBUG)."""
        log.log(level, msg)
|
||||
|
||||
def debug_print(self, msg: object) -> None:
|
||||
"""Print 'msg' to stdout if the global DEBUG (taken from the
|
||||
DISTUTILS_DEBUG environment variable) flag is true.
|
||||
"""
|
||||
from distutils.debug import DEBUG
|
||||
|
||||
if DEBUG:
|
||||
print(msg)
|
||||
sys.stdout.flush()
|
||||
|
||||
# -- Option validation methods -------------------------------------
|
||||
# (these are very handy in writing the 'finalize_options()' method)
|
||||
#
|
||||
# NB. the general philosophy here is to ensure that a particular option
|
||||
# value meets certain type and value constraints. If not, we try to
|
||||
# force it into conformance (eg. if we expect a list but have a string,
|
||||
# split the string on comma and/or whitespace). If we can't force the
|
||||
# option into conformance, raise DistutilsOptionError. Thus, command
|
||||
# classes need do nothing more than (eg.)
|
||||
# self.ensure_string_list('foo')
|
||||
# and they can be guaranteed that thereafter, self.foo will be
|
||||
# a list of strings.
|
||||
|
||||
def _ensure_stringlike(self, option, what, default=None):
|
||||
val = getattr(self, option)
|
||||
if val is None:
|
||||
setattr(self, option, default)
|
||||
return default
|
||||
elif not isinstance(val, str):
|
||||
raise DistutilsOptionError(f"'{option}' must be a {what} (got `{val}`)")
|
||||
return val
|
||||
|
||||
def ensure_string(self, option: str, default: str | None = None) -> None:
|
||||
"""Ensure that 'option' is a string; if not defined, set it to
|
||||
'default'.
|
||||
"""
|
||||
self._ensure_stringlike(option, "string", default)
|
||||
|
||||
def ensure_string_list(self, option: str) -> None:
|
||||
r"""Ensure that 'option' is a list of strings. If 'option' is
|
||||
currently a string, we split it either on /,\s*/ or /\s+/, so
|
||||
"foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
|
||||
["foo", "bar", "baz"].
|
||||
"""
|
||||
val = getattr(self, option)
|
||||
if val is None:
|
||||
return
|
||||
elif isinstance(val, str):
|
||||
setattr(self, option, re.split(r',\s*|\s+', val))
|
||||
else:
|
||||
if isinstance(val, list):
|
||||
ok = all(isinstance(v, str) for v in val)
|
||||
else:
|
||||
ok = False
|
||||
if not ok:
|
||||
raise DistutilsOptionError(
|
||||
f"'{option}' must be a list of strings (got {val!r})"
|
||||
)
|
||||
|
||||
    def _ensure_tested_string(self, option, tester, what, error_fmt, default=None):
        # Like _ensure_stringlike, but additionally validate the value
        # with *tester*; *error_fmt* receives the offending value via %s.
        val = self._ensure_stringlike(option, what, default)
        if val is not None and not tester(val):
            raise DistutilsOptionError(
                ("error in '%s' option: " + error_fmt) % (option, val)
            )

    def ensure_filename(self, option: str) -> None:
        """Ensure that 'option' is the name of an existing file."""
        self._ensure_tested_string(
            option, os.path.isfile, "filename", "'%s' does not exist or is not a file"
        )

    def ensure_dirname(self, option: str) -> None:
        """Ensure that 'option' is the name of an existing directory."""
        self._ensure_tested_string(
            option,
            os.path.isdir,
            "directory name",
            "'%s' does not exist or is not a directory",
        )
|
||||
|
||||
# -- Convenience methods for commands ------------------------------
|
||||
|
||||
def get_command_name(self) -> str:
|
||||
if hasattr(self, 'command_name'):
|
||||
return self.command_name
|
||||
else:
|
||||
return self.__class__.__name__
|
||||
|
||||
def set_undefined_options(
|
||||
self, src_cmd: str, *option_pairs: tuple[str, str]
|
||||
) -> None:
|
||||
"""Set the values of any "undefined" options from corresponding
|
||||
option values in some other command object. "Undefined" here means
|
||||
"is None", which is the convention used to indicate that an option
|
||||
has not been changed between 'initialize_options()' and
|
||||
'finalize_options()'. Usually called from 'finalize_options()' for
|
||||
options that depend on some other command rather than another
|
||||
option of the same command. 'src_cmd' is the other command from
|
||||
which option values will be taken (a command object will be created
|
||||
for it if necessary); the remaining arguments are
|
||||
'(src_option,dst_option)' tuples which mean "take the value of
|
||||
'src_option' in the 'src_cmd' command object, and copy it to
|
||||
'dst_option' in the current command object".
|
||||
"""
|
||||
# Option_pairs: list of (src_option, dst_option) tuples
|
||||
src_cmd_obj = self.distribution.get_command_obj(src_cmd)
|
||||
src_cmd_obj.ensure_finalized()
|
||||
for src_option, dst_option in option_pairs:
|
||||
if getattr(self, dst_option) is None:
|
||||
setattr(self, dst_option, getattr(src_cmd_obj, src_option))
|
||||
|
||||
# NOTE: Because distutils is private to Setuptools and not all commands are exposed here,
|
||||
# not every possible command is enumerated in the signature.
|
||||
def get_finalized_command(self, command: str, create: bool = True) -> Command:
|
||||
"""Wrapper around Distribution's 'get_command_obj()' method: find
|
||||
(create if necessary and 'create' is true) the command object for
|
||||
'command', call its 'ensure_finalized()' method, and return the
|
||||
finalized command object.
|
||||
"""
|
||||
cmd_obj = self.distribution.get_command_obj(command, create)
|
||||
cmd_obj.ensure_finalized()
|
||||
return cmd_obj
|
||||
|
||||
    # XXX rename to 'get_reinitialized_command()'? (should do the
    # same in dist.py, if so)
    @overload
    def reinitialize_command(
        self, command: str, reinit_subcommands: bool = False
    ) -> Command: ...
    @overload
    def reinitialize_command(
        self, command: _CommandT, reinit_subcommands: bool = False
    ) -> _CommandT: ...
    def reinitialize_command(
        self, command: str | Command, reinit_subcommands=False
    ) -> Command:
        # Delegates to the Distribution, which resets the command's
        # options so they can be re-finalized with new values.
        return self.distribution.reinitialize_command(command, reinit_subcommands)
|
||||
|
||||
    def run_command(self, command: str) -> None:
        """Run some other command: uses the 'run_command()' method of
        Distribution, which creates and finalizes the command object if
        necessary and then invokes its 'run()' method.
        """
        # Delegation keeps run-once bookkeeping in the Distribution.
        self.distribution.run_command(command)
|
||||
|
||||
def get_sub_commands(self) -> list[str]:
|
||||
"""Determine the sub-commands that are relevant in the current
|
||||
distribution (ie., that need to be run). This is based on the
|
||||
'sub_commands' class attribute: each tuple in that list may include
|
||||
a method that we call to determine if the subcommand needs to be
|
||||
run for the current distribution. Return a list of command names.
|
||||
"""
|
||||
commands = []
|
||||
for cmd_name, method in self.sub_commands:
|
||||
if method is None or method(self):
|
||||
commands.append(cmd_name)
|
||||
return commands
|
||||
|
||||
    # -- External world manipulation -----------------------------------

    def warn(self, msg: object) -> None:
        """Log *msg* as a warning, prefixed with this command's name."""
        log.warning("warning: %s: %s\n", self.get_command_name(), msg)
|
||||
|
||||
    def execute(
        self,
        func: Callable[[Unpack[_Ts]], object],
        args: tuple[Unpack[_Ts]],
        msg: object = None,
        level: int = 1,
    ) -> None:
        # Run func(*args), honouring the command's dry-run flag.
        # 'level' is accepted for backward compatibility but not used here.
        util.execute(func, args, msg, dry_run=self.dry_run)

    def mkpath(self, name: str, mode: int = 0o777) -> None:
        # Create a directory (and any missing ancestors), honouring dry-run.
        dir_util.mkpath(name, mode, dry_run=self.dry_run)
|
||||
|
||||
    @overload
    def copy_file(
        self,
        infile: str | os.PathLike[str],
        outfile: _StrPathT,
        preserve_mode: bool = True,
        preserve_times: bool = True,
        link: str | None = None,
        level: int = 1,
    ) -> tuple[_StrPathT | str, bool]: ...
    @overload
    def copy_file(
        self,
        infile: bytes | os.PathLike[bytes],
        outfile: _BytesPathT,
        preserve_mode: bool = True,
        preserve_times: bool = True,
        link: str | None = None,
        level: int = 1,
    ) -> tuple[_BytesPathT | bytes, bool]: ...
    def copy_file(
        self,
        infile: str | os.PathLike[str] | bytes | os.PathLike[bytes],
        outfile: str | os.PathLike[str] | bytes | os.PathLike[bytes],
        preserve_mode: bool = True,
        preserve_times: bool = True,
        link: str | None = None,
        level: int = 1,
    ) -> tuple[str | os.PathLike[str] | bytes | os.PathLike[bytes], bool]:
        """Copy a file respecting verbose, dry-run and force flags.  (The
        former two default to whatever is in the Distribution object, and
        the latter defaults to false for commands that don't define it.)"""
        # 'level' is accepted for backward compatibility but not used here.
        return file_util.copy_file(
            infile,
            outfile,
            preserve_mode,
            preserve_times,
            not self.force,  # force disables the "only if newer" check
            link,
            dry_run=self.dry_run,
        )

    def copy_tree(
        self,
        infile: str | os.PathLike[str],
        outfile: str,
        preserve_mode: bool = True,
        preserve_times: bool = True,
        preserve_symlinks: bool = False,
        level: int = 1,
    ) -> list[str]:
        """Copy an entire directory tree respecting verbose, dry-run,
        and force flags.
        """
        return dir_util.copy_tree(
            infile,
            outfile,
            preserve_mode,
            preserve_times,
            preserve_symlinks,
            not self.force,  # force disables the "only if newer" check
            dry_run=self.dry_run,
        )

    @overload
    def move_file(
        self, src: str | os.PathLike[str], dst: _StrPathT, level: int = 1
    ) -> _StrPathT | str: ...
    @overload
    def move_file(
        self, src: bytes | os.PathLike[bytes], dst: _BytesPathT, level: int = 1
    ) -> _BytesPathT | bytes: ...
    def move_file(
        self,
        src: str | os.PathLike[str] | bytes | os.PathLike[bytes],
        dst: str | os.PathLike[str] | bytes | os.PathLike[bytes],
        level: int = 1,
    ) -> str | os.PathLike[str] | bytes | os.PathLike[bytes]:
        """Move a file respecting dry-run flag."""
        return file_util.move_file(src, dst, dry_run=self.dry_run)

    def spawn(
        self, cmd: MutableSequence[str], search_path: bool = True, level: int = 1
    ) -> None:
        """Spawn an external command respecting dry-run flag."""
        from distutils.spawn import spawn

        spawn(cmd, search_path, dry_run=self.dry_run)
|
||||
|
||||
    @overload
    def make_archive(
        self,
        base_name: str,
        format: str,
        root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    @overload
    def make_archive(
        self,
        base_name: str | os.PathLike[str],
        format: str,
        root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes],
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    def make_archive(
        self,
        base_name: str | os.PathLike[str],
        format: str,
        root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str:
        # Delegate to archive_util.make_archive, honouring this
        # command's dry-run flag; see that function for parameter docs.
        return archive_util.make_archive(
            base_name,
            format,
            root_dir,
            base_dir,
            dry_run=self.dry_run,
            owner=owner,
            group=group,
        )
|
||||
|
||||
def make_file(
|
||||
self,
|
||||
infiles: str | list[str] | tuple[str, ...],
|
||||
outfile: str | os.PathLike[str] | bytes | os.PathLike[bytes],
|
||||
func: Callable[[Unpack[_Ts]], object],
|
||||
args: tuple[Unpack[_Ts]],
|
||||
exec_msg: object = None,
|
||||
skip_msg: object = None,
|
||||
level: int = 1,
|
||||
) -> None:
|
||||
"""Special case of 'execute()' for operations that process one or
|
||||
more input files and generate one output file. Works just like
|
||||
'execute()', except the operation is skipped and a different
|
||||
message printed if 'outfile' already exists and is newer than all
|
||||
files listed in 'infiles'. If the command defined 'self.force',
|
||||
and it is true, then the command is unconditionally run -- does no
|
||||
timestamp checks.
|
||||
"""
|
||||
if skip_msg is None:
|
||||
skip_msg = f"skipping {outfile} (inputs unchanged)"
|
||||
|
||||
# Allow 'infiles' to be a single string
|
||||
if isinstance(infiles, str):
|
||||
infiles = (infiles,)
|
||||
elif not isinstance(infiles, (list, tuple)):
|
||||
raise TypeError("'infiles' must be a string, or a list or tuple of strings")
|
||||
|
||||
if exec_msg is None:
|
||||
exec_msg = "generating {} from {}".format(outfile, ', '.join(infiles))
|
||||
|
||||
# If 'outfile' must be regenerated (either because it doesn't
|
||||
# exist, is out-of-date, or the 'force' flag is true) then
|
||||
# perform the action that presumably regenerates it
|
||||
if self.force or _modified.newer_group(infiles, outfile):
|
||||
self.execute(func, args, exec_msg, level)
|
||||
# Otherwise, print the "skip" message
|
||||
else:
|
||||
log.debug(skip_msg)
|
||||
@@ -0,0 +1,23 @@
|
||||
"""distutils.command
|
||||
|
||||
Package containing implementation of all the standard Distutils
|
||||
commands."""
|
||||
|
||||
__all__ = [
|
||||
'build',
|
||||
'build_py',
|
||||
'build_ext',
|
||||
'build_clib',
|
||||
'build_scripts',
|
||||
'clean',
|
||||
'install',
|
||||
'install_lib',
|
||||
'install_headers',
|
||||
'install_scripts',
|
||||
'install_data',
|
||||
'sdist',
|
||||
'bdist',
|
||||
'bdist_dumb',
|
||||
'bdist_rpm',
|
||||
'check',
|
||||
]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,54 @@
|
||||
"""
|
||||
Backward compatibility for homebrew builds on macOS.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import sysconfig
|
||||
|
||||
|
||||
@functools.lru_cache
def enabled():
    """
    Report whether the Homebrew scheme override should be active.

    Only enabled for Python 3.9 framework homebrew builds, and never
    inside a virtualenv or during an ensurepip run.
    """
    is_py39 = (3, 9) < sys.version_info < (3, 10)
    is_framework = sys.platform == 'darwin' and sys._framework
    is_homebrew = "Cellar" in sysconfig.get_config_var('projectbase')
    in_venv = sys.prefix != sys.base_prefix
    under_ensurepip = os.environ.get("ENSUREPIP_OPTIONS")
    return (
        is_py39
        and is_framework
        and is_homebrew
        and not in_venv
        and not under_ensurepip
    )
|
||||
|
||||
|
||||
# sysconfig-style install scheme describing Homebrew's macOS framework
# layout: interpreter files under the framework, site-packages/scripts/data
# under the Homebrew prefix.
schemes = {
    'osx_framework_library': {
        'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
        'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
        'purelib': '{homebrew_prefix}/lib/python{py_version_short}/site-packages',
        'platlib': '{homebrew_prefix}/{platlibdir}/python{py_version_short}/site-packages',
        'include': '{installed_base}/include/python{py_version_short}{abiflags}',
        'platinclude': '{installed_platbase}/include/python{py_version_short}{abiflags}',
        'scripts': '{homebrew_prefix}/bin',
        'data': '{homebrew_prefix}',
    }
}
|
||||
|
||||
|
||||
@functools.lru_cache
def vars():
    """
    Return the extra config vars to inject (just ``homebrew_prefix``),
    or an empty mapping when the override is not enabled.
    """
    if not enabled():
        return {}
    # Ask Homebrew itself where it is installed.
    prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip()
    return {'homebrew_prefix': prefix}
|
||||
|
||||
|
||||
def scheme(name):
    """
    Override the selected scheme for posix_prefix.
    """
    use_override = enabled() and name.endswith('_prefix')
    return 'osx_framework_library' if use_override else name
|
||||
@@ -0,0 +1,167 @@
|
||||
"""distutils.command.bdist
|
||||
|
||||
Implements the Distutils 'bdist' command (create a built [binary]
|
||||
distribution)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import warnings
|
||||
from collections.abc import Callable
|
||||
from typing import TYPE_CHECKING, ClassVar
|
||||
|
||||
from ..core import Command
|
||||
from ..errors import DistutilsOptionError, DistutilsPlatformError
|
||||
from ..util import get_platform
|
||||
|
||||
if TYPE_CHECKING:
    from typing_extensions import deprecated
else:
    # Runtime stand-in: ``typing_extensions`` may be absent outside type
    # checking, so ``deprecated`` degrades to a no-op decorator that
    # returns the function unchanged.
    def deprecated(message):
        return lambda fn: fn
|
||||
|
||||
|
||||
def show_formats():
    """Print list of available formats (arguments to "--format" option)."""
    from ..fancy_getopt import FancyGetopt

    # Build a FancyGetopt option table from the registered formats so the
    # help text lines up like regular option help.
    entries = []
    for name, (_command, description) in bdist.format_commands.items():
        entries.append(("formats=" + name, None, description))
    FancyGetopt(entries).print_help("List of available distribution formats:")
|
||||
|
||||
|
||||
class ListCompat(dict[str, tuple[str, str]]):
    # adapter to allow for Setuptools compatibility in format_commands
    @deprecated("format_commands is now a dict. append is deprecated.")
    def append(self, item: object) -> None:
        # Deliberately discards *item*: the dict is the source of truth now.
        msg = "format_commands is now a dict. append is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
||||
|
||||
|
||||
class bdist(Command):
    """Create a built (binary) distribution.

    This command is a dispatcher: each name given via ``--formats`` is
    mapped through ``format_commands`` to the sub-command that implements
    it (``bdist_dumb``, ``bdist_rpm``, ...); the sub-commands are then
    reinitialized and run in order.
    """

    description = "create a built (binary) distribution"

    user_options = [
        ('bdist-base=', 'b', "temporary directory for creating built distributions"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            f"[default: {get_platform()}]",
        ),
        ('formats=', None, "formats for distribution (comma-separated list)"),
        (
            'dist-dir=',
            'd',
            "directory to put final built distributions in [default: dist]",
        ),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    boolean_options: ClassVar[list[str]] = ['skip-build']

    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
        ('help-formats', None, "lists available distribution formats", show_formats),
    ]

    # The following commands do not take a format option from bdist
    no_format_option: ClassVar[tuple[str, ...]] = ('bdist_rpm',)

    # This won't do in reality: will need to distinguish RPM-ish Linux,
    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
    default_format: ClassVar[dict[str, str]] = {'posix': 'gztar', 'nt': 'zip'}

    # Define commands in preferred order for the --help-formats option
    format_commands = ListCompat({
        'rpm': ('bdist_rpm', "RPM distribution"),
        'gztar': ('bdist_dumb', "gzip'ed tar file"),
        'bztar': ('bdist_dumb', "bzip2'ed tar file"),
        'xztar': ('bdist_dumb', "xz'ed tar file"),
        'ztar': ('bdist_dumb', "compressed tar file"),
        'tar': ('bdist_dumb', "tar file"),
        'zip': ('bdist_dumb', "ZIP file"),
    })

    # for compatibility until consumers only reference format_commands
    format_command = format_commands

    def initialize_options(self):
        """Set every option to its pre-parse default."""
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = False
        self.group = None
        self.owner = None

    def finalize_options(self) -> None:
        """Derive unset options from the 'build' command and defaults.

        Raises DistutilsPlatformError when no default archive format is
        known for the current ``os.name``.
        """
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                self.plat_name = get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name

        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name)

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create built distributions "
                    f"on platform {os.name}"
                )

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self) -> None:
        """Resolve each requested format to a sub-command and run them all.

        Raises DistutilsOptionError for an unknown format name (validated
        up-front so nothing runs if any format is invalid).
        """
        # Figure out which sub-commands we need to run.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_commands[format][0])
            except KeyError:
                raise DistutilsOptionError(f"invalid format '{format}'")

        # Reinitialize and run each command.  enumerate(zip(...)) keeps the
        # command/format pairing explicit instead of indexing by position.
        for i, (cmd_name, fmt) in enumerate(zip(commands, self.formats)):
            sub_cmd = self.reinitialize_command(cmd_name)
            if cmd_name not in self.no_format_option:
                sub_cmd.format = fmt

            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group

            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i + 1 :]:
                sub_cmd.keep_temp = True
            self.run_command(cmd_name)
|
||||
@@ -0,0 +1,141 @@
|
||||
"""distutils.command.bdist_dumb
|
||||
|
||||
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
|
||||
distribution -- i.e., just an archive to be unpacked under $prefix or
|
||||
$exec_prefix)."""
|
||||
|
||||
import os
|
||||
from distutils._log import log
|
||||
from typing import ClassVar
|
||||
|
||||
from ..core import Command
|
||||
from ..dir_util import ensure_relative, remove_tree
|
||||
from ..errors import DistutilsPlatformError
|
||||
from ..sysconfig import get_python_version
|
||||
from ..util import get_platform
|
||||
|
||||
|
||||
class bdist_dumb(Command):
    """Create a "dumb" built distribution.

    Installs the project into a pseudo-install tree under a temporary
    root, then archives that tree; the archive is meant to be unpacked
    directly under $prefix or $exec_prefix.
    """

    description = "create a \"dumb\" built distribution"

    user_options = [
        ('bdist-dir=', 'd', "temporary directory for creating the distribution"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            f"[default: {get_platform()}]",
        ),
        (
            'format=',
            'f',
            "archive format to create (tar, gztar, bztar, xztar, ztar, zip)",
        ),
        (
            'keep-temp',
            'k',
            "keep the pseudo-installation tree around after creating the distribution archive",
        ),
        ('dist-dir=', 'd', "directory to put final built distributions in"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'relative',
            None,
            "build the archive using relative paths [default: false]",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    boolean_options: ClassVar[list[str]] = ['keep-temp', 'skip-build', 'relative']

    # Default archive format per os.name; used when --format is not given.
    default_format = {'posix': 'gztar', 'nt': 'zip'}

    def initialize_options(self):
        """Set every option to its pre-parse default."""
        self.bdist_dir = None
        self.plat_name = None
        self.format = None
        self.keep_temp = False
        self.dist_dir = None
        # None (not False) so set_undefined_options can inherit from 'bdist'.
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None

    def finalize_options(self):
        """Fill in unset options from the 'bdist' command and defaults.

        Raises DistutilsPlatformError when no default archive format is
        known for the current platform.
        """
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'dumb')

        if self.format is None:
            try:
                self.format = self.default_format[os.name]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create dumb built distributions "
                    f"on platform {os.name}"
                )

        # Inherit anything still unset from the parent 'bdist' command.
        self.set_undefined_options(
            'bdist',
            ('dist_dir', 'dist_dir'),
            ('plat_name', 'plat_name'),
            ('skip_build', 'skip_build'),
        )

    def run(self):
        """Build, pseudo-install under bdist_dir, and archive the result.

        Appends an ('bdist_dumb', pyversion, filename) entry to
        ``self.distribution.dist_files`` for the created archive.
        """
        if not self.skip_build:
            self.run_command('build')

        # Redirect 'install' into the temporary tree instead of the system.
        install = self.reinitialize_command('install', reinit_subcommands=True)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = False

        log.info("installing to %s", self.bdist_dir)
        self.run_command('install')

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        archive_basename = f"{self.distribution.get_fullname()}.{self.plat_name}"

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            # Relative archives only work when pure and platform-specific
            # files share one base directory.
            if self.distribution.has_ext_modules() and (
                install.install_base != install.install_platbase
            ):
                raise DistutilsPlatformError(
                    "can't make a dumb built distribution where "
                    f"base and platbase are different ({install.install_base!r}, {install.install_platbase!r})"
                )
            else:
                archive_root = os.path.join(
                    self.bdist_dir, ensure_relative(install.install_base)
                )

        # Make the archive
        filename = self.make_archive(
            pseudoinstall_root,
            self.format,
            root_dir=archive_root,
            owner=self.owner,
            group=self.group,
        )
        # Tag the archive with the Python version only if it contains
        # compiled extensions; pure distributions work on 'any' version.
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_dumb', pyversion, filename))

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
||||
@@ -0,0 +1,598 @@
|
||||
"""distutils.command.bdist_rpm
|
||||
|
||||
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
|
||||
distributions)."""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from distutils._log import log
|
||||
from typing import ClassVar
|
||||
|
||||
from ..core import Command
|
||||
from ..debug import DEBUG
|
||||
from ..errors import (
|
||||
DistutilsExecError,
|
||||
DistutilsFileError,
|
||||
DistutilsOptionError,
|
||||
DistutilsPlatformError,
|
||||
)
|
||||
from ..file_util import write_file
|
||||
from ..sysconfig import get_python_version
|
||||
|
||||
|
||||
class bdist_rpm(Command):
|
||||
description = "create an RPM distribution"
|
||||
|
||||
user_options = [
|
||||
('bdist-base=', None, "base directory for creating built distributions"),
|
||||
(
|
||||
'rpm-base=',
|
||||
None,
|
||||
"base directory for creating RPMs (defaults to \"rpm\" under "
|
||||
"--bdist-base; must be specified for RPM 2)",
|
||||
),
|
||||
(
|
||||
'dist-dir=',
|
||||
'd',
|
||||
"directory to put final RPM files in (and .spec files if --spec-only)",
|
||||
),
|
||||
(
|
||||
'python=',
|
||||
None,
|
||||
"path to Python interpreter to hard-code in the .spec file "
|
||||
"[default: \"python\"]",
|
||||
),
|
||||
(
|
||||
'fix-python',
|
||||
None,
|
||||
"hard-code the exact path to the current Python interpreter in "
|
||||
"the .spec file",
|
||||
),
|
||||
('spec-only', None, "only regenerate spec file"),
|
||||
('source-only', None, "only generate source RPM"),
|
||||
('binary-only', None, "only generate binary RPM"),
|
||||
('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"),
|
||||
# More meta-data: too RPM-specific to put in the setup script,
|
||||
# but needs to go in the .spec file -- so we make these options
|
||||
# to "bdist_rpm". The idea is that packagers would put this
|
||||
# info in setup.cfg, although they are of course free to
|
||||
# supply it on the command line.
|
||||
(
|
||||
'distribution-name=',
|
||||
None,
|
||||
"name of the (Linux) distribution to which this "
|
||||
"RPM applies (*not* the name of the module distribution!)",
|
||||
),
|
||||
('group=', None, "package classification [default: \"Development/Libraries\"]"),
|
||||
('release=', None, "RPM release number"),
|
||||
('serial=', None, "RPM serial number"),
|
||||
(
|
||||
'vendor=',
|
||||
None,
|
||||
"RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
|
||||
"[default: maintainer or author from setup script]",
|
||||
),
|
||||
(
|
||||
'packager=',
|
||||
None,
|
||||
"RPM packager (eg. \"Jane Doe <jane@example.net>\") [default: vendor]",
|
||||
),
|
||||
('doc-files=', None, "list of documentation files (space or comma-separated)"),
|
||||
('changelog=', None, "RPM changelog"),
|
||||
('icon=', None, "name of icon file"),
|
||||
('provides=', None, "capabilities provided by this package"),
|
||||
('requires=', None, "capabilities required by this package"),
|
||||
('conflicts=', None, "capabilities which conflict with this package"),
|
||||
('build-requires=', None, "capabilities required to build this package"),
|
||||
('obsoletes=', None, "capabilities made obsolete by this package"),
|
||||
('no-autoreq', None, "do not automatically calculate dependencies"),
|
||||
# Actions to take when building RPM
|
||||
('keep-temp', 'k', "don't clean up RPM build directory"),
|
||||
('no-keep-temp', None, "clean up RPM build directory [default]"),
|
||||
(
|
||||
'use-rpm-opt-flags',
|
||||
None,
|
||||
"compile with RPM_OPT_FLAGS when building from source RPM",
|
||||
),
|
||||
('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"),
|
||||
('rpm3-mode', None, "RPM 3 compatibility mode (default)"),
|
||||
('rpm2-mode', None, "RPM 2 compatibility mode"),
|
||||
# Add the hooks necessary for specifying custom scripts
|
||||
('prep-script=', None, "Specify a script for the PREP phase of RPM building"),
|
||||
('build-script=', None, "Specify a script for the BUILD phase of RPM building"),
|
||||
(
|
||||
'pre-install=',
|
||||
None,
|
||||
"Specify a script for the pre-INSTALL phase of RPM building",
|
||||
),
|
||||
(
|
||||
'install-script=',
|
||||
None,
|
||||
"Specify a script for the INSTALL phase of RPM building",
|
||||
),
|
||||
(
|
||||
'post-install=',
|
||||
None,
|
||||
"Specify a script for the post-INSTALL phase of RPM building",
|
||||
),
|
||||
(
|
||||
'pre-uninstall=',
|
||||
None,
|
||||
"Specify a script for the pre-UNINSTALL phase of RPM building",
|
||||
),
|
||||
(
|
||||
'post-uninstall=',
|
||||
None,
|
||||
"Specify a script for the post-UNINSTALL phase of RPM building",
|
||||
),
|
||||
('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"),
|
||||
(
|
||||
'verify-script=',
|
||||
None,
|
||||
"Specify a script for the VERIFY phase of the RPM build",
|
||||
),
|
||||
# Allow a packager to explicitly force an architecture
|
||||
('force-arch=', None, "Force an architecture onto the RPM build process"),
|
||||
('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"),
|
||||
]
|
||||
|
||||
boolean_options: ClassVar[list[str]] = [
|
||||
'keep-temp',
|
||||
'use-rpm-opt-flags',
|
||||
'rpm3-mode',
|
||||
'no-autoreq',
|
||||
'quiet',
|
||||
]
|
||||
|
||||
negative_opt: ClassVar[dict[str, str]] = {
|
||||
'no-keep-temp': 'keep-temp',
|
||||
'no-rpm-opt-flags': 'use-rpm-opt-flags',
|
||||
'rpm2-mode': 'rpm3-mode',
|
||||
}
|
||||
|
||||
def initialize_options(self):
|
||||
self.bdist_base = None
|
||||
self.rpm_base = None
|
||||
self.dist_dir = None
|
||||
self.python = None
|
||||
self.fix_python = None
|
||||
self.spec_only = None
|
||||
self.binary_only = None
|
||||
self.source_only = None
|
||||
self.use_bzip2 = None
|
||||
|
||||
self.distribution_name = None
|
||||
self.group = None
|
||||
self.release = None
|
||||
self.serial = None
|
||||
self.vendor = None
|
||||
self.packager = None
|
||||
self.doc_files = None
|
||||
self.changelog = None
|
||||
self.icon = None
|
||||
|
||||
self.prep_script = None
|
||||
self.build_script = None
|
||||
self.install_script = None
|
||||
self.clean_script = None
|
||||
self.verify_script = None
|
||||
self.pre_install = None
|
||||
self.post_install = None
|
||||
self.pre_uninstall = None
|
||||
self.post_uninstall = None
|
||||
self.prep = None
|
||||
self.provides = None
|
||||
self.requires = None
|
||||
self.conflicts = None
|
||||
self.build_requires = None
|
||||
self.obsoletes = None
|
||||
|
||||
self.keep_temp = False
|
||||
self.use_rpm_opt_flags = True
|
||||
self.rpm3_mode = True
|
||||
self.no_autoreq = False
|
||||
|
||||
self.force_arch = None
|
||||
self.quiet = False
|
||||
|
||||
def finalize_options(self) -> None:
    """Validate option combinations and fill in unset options.

    Raises DistutilsOptionError for contradictory options and
    DistutilsPlatformError on non-POSIX platforms.
    """
    self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
    if self.rpm_base is None:
        # RPM 2 has no way to relocate the build tree, so the caller must
        # supply --rpm-base explicitly in that mode.
        if not self.rpm3_mode:
            raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode")
        self.rpm_base = os.path.join(self.bdist_base, "rpm")

    if self.python is None:
        if self.fix_python:
            # Hard-code the interpreter currently running this build.
            self.python = sys.executable
        else:
            self.python = "python3"
    elif self.fix_python:
        raise DistutilsOptionError(
            "--python and --fix-python are mutually exclusive options"
        )

    if os.name != 'posix':
        raise DistutilsPlatformError(
            f"don't know how to create RPM distributions on platform {os.name}"
        )
    if self.binary_only and self.source_only:
        raise DistutilsOptionError(
            "cannot supply both '--source-only' and '--binary-only'"
        )

    # don't pass CFLAGS to pure python distributions
    if not self.distribution.has_ext_modules():
        self.use_rpm_opt_flags = False

    self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
    self.finalize_package_data()
|
||||
|
||||
def finalize_package_data(self) -> None:
|
||||
self.ensure_string('group', "Development/Libraries")
|
||||
self.ensure_string(
|
||||
'vendor',
|
||||
f"{self.distribution.get_contact()} <{self.distribution.get_contact_email()}>",
|
||||
)
|
||||
self.ensure_string('packager')
|
||||
self.ensure_string_list('doc_files')
|
||||
if isinstance(self.doc_files, list):
|
||||
for readme in ('README', 'README.txt'):
|
||||
if os.path.exists(readme) and readme not in self.doc_files:
|
||||
self.doc_files.append(readme)
|
||||
|
||||
self.ensure_string('release', "1")
|
||||
self.ensure_string('serial') # should it be an int?
|
||||
|
||||
self.ensure_string('distribution_name')
|
||||
|
||||
self.ensure_string('changelog')
|
||||
# Format changelog correctly
|
||||
self.changelog = self._format_changelog(self.changelog)
|
||||
|
||||
self.ensure_filename('icon')
|
||||
|
||||
self.ensure_filename('prep_script')
|
||||
self.ensure_filename('build_script')
|
||||
self.ensure_filename('install_script')
|
||||
self.ensure_filename('clean_script')
|
||||
self.ensure_filename('verify_script')
|
||||
self.ensure_filename('pre_install')
|
||||
self.ensure_filename('post_install')
|
||||
self.ensure_filename('pre_uninstall')
|
||||
self.ensure_filename('post_uninstall')
|
||||
|
||||
# XXX don't forget we punted on summaries and descriptions -- they
|
||||
# should be handled here eventually!
|
||||
|
||||
# Now *this* is some meta-data that belongs in the setup script...
|
||||
self.ensure_string_list('provides')
|
||||
self.ensure_string_list('requires')
|
||||
self.ensure_string_list('conflicts')
|
||||
self.ensure_string_list('build_requires')
|
||||
self.ensure_string_list('obsoletes')
|
||||
|
||||
self.ensure_string('force_arch')
|
||||
|
||||
def run(self) -> None: # noqa: C901
|
||||
if DEBUG:
|
||||
print("before _get_package_data():")
|
||||
print("vendor =", self.vendor)
|
||||
print("packager =", self.packager)
|
||||
print("doc_files =", self.doc_files)
|
||||
print("changelog =", self.changelog)
|
||||
|
||||
# make directories
|
||||
if self.spec_only:
|
||||
spec_dir = self.dist_dir
|
||||
self.mkpath(spec_dir)
|
||||
else:
|
||||
rpm_dir = {}
|
||||
for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
|
||||
rpm_dir[d] = os.path.join(self.rpm_base, d)
|
||||
self.mkpath(rpm_dir[d])
|
||||
spec_dir = rpm_dir['SPECS']
|
||||
|
||||
# Spec file goes into 'dist_dir' if '--spec-only specified',
|
||||
# build/rpm.<plat> otherwise.
|
||||
spec_path = os.path.join(spec_dir, f"{self.distribution.get_name()}.spec")
|
||||
self.execute(
|
||||
write_file, (spec_path, self._make_spec_file()), f"writing '{spec_path}'"
|
||||
)
|
||||
|
||||
if self.spec_only: # stop if requested
|
||||
return
|
||||
|
||||
# Make a source distribution and copy to SOURCES directory with
|
||||
# optional icon.
|
||||
saved_dist_files = self.distribution.dist_files[:]
|
||||
sdist = self.reinitialize_command('sdist')
|
||||
if self.use_bzip2:
|
||||
sdist.formats = ['bztar']
|
||||
else:
|
||||
sdist.formats = ['gztar']
|
||||
self.run_command('sdist')
|
||||
self.distribution.dist_files = saved_dist_files
|
||||
|
||||
source = sdist.get_archive_files()[0]
|
||||
source_dir = rpm_dir['SOURCES']
|
||||
self.copy_file(source, source_dir)
|
||||
|
||||
if self.icon:
|
||||
if os.path.exists(self.icon):
|
||||
self.copy_file(self.icon, source_dir)
|
||||
else:
|
||||
raise DistutilsFileError(f"icon file '{self.icon}' does not exist")
|
||||
|
||||
# build package
|
||||
log.info("building RPMs")
|
||||
rpm_cmd = ['rpmbuild']
|
||||
|
||||
if self.source_only: # what kind of RPMs?
|
||||
rpm_cmd.append('-bs')
|
||||
elif self.binary_only:
|
||||
rpm_cmd.append('-bb')
|
||||
else:
|
||||
rpm_cmd.append('-ba')
|
||||
rpm_cmd.extend(['--define', f'__python {self.python}'])
|
||||
if self.rpm3_mode:
|
||||
rpm_cmd.extend(['--define', f'_topdir {os.path.abspath(self.rpm_base)}'])
|
||||
if not self.keep_temp:
|
||||
rpm_cmd.append('--clean')
|
||||
|
||||
if self.quiet:
|
||||
rpm_cmd.append('--quiet')
|
||||
|
||||
rpm_cmd.append(spec_path)
|
||||
# Determine the binary rpm names that should be built out of this spec
|
||||
# file
|
||||
# Note that some of these may not be really built (if the file
|
||||
# list is empty)
|
||||
nvr_string = "%{name}-%{version}-%{release}"
|
||||
src_rpm = nvr_string + ".src.rpm"
|
||||
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
|
||||
q_cmd = rf"rpm -q --qf '{src_rpm} {non_src_rpm}\n' --specfile '{spec_path}'"
|
||||
|
||||
out = os.popen(q_cmd)
|
||||
try:
|
||||
binary_rpms = []
|
||||
source_rpm = None
|
||||
while True:
|
||||
line = out.readline()
|
||||
if not line:
|
||||
break
|
||||
ell = line.strip().split()
|
||||
assert len(ell) == 2
|
||||
binary_rpms.append(ell[1])
|
||||
# The source rpm is named after the first entry in the spec file
|
||||
if source_rpm is None:
|
||||
source_rpm = ell[0]
|
||||
|
||||
status = out.close()
|
||||
if status:
|
||||
raise DistutilsExecError(f"Failed to execute: {q_cmd!r}")
|
||||
|
||||
finally:
|
||||
out.close()
|
||||
|
||||
self.spawn(rpm_cmd)
|
||||
|
||||
if not self.dry_run:
|
||||
if self.distribution.has_ext_modules():
|
||||
pyversion = get_python_version()
|
||||
else:
|
||||
pyversion = 'any'
|
||||
|
||||
if not self.binary_only:
|
||||
srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
|
||||
assert os.path.exists(srpm)
|
||||
self.move_file(srpm, self.dist_dir)
|
||||
filename = os.path.join(self.dist_dir, source_rpm)
|
||||
self.distribution.dist_files.append(('bdist_rpm', pyversion, filename))
|
||||
|
||||
if not self.source_only:
|
||||
for rpm in binary_rpms:
|
||||
rpm = os.path.join(rpm_dir['RPMS'], rpm)
|
||||
if os.path.exists(rpm):
|
||||
self.move_file(rpm, self.dist_dir)
|
||||
filename = os.path.join(self.dist_dir, os.path.basename(rpm))
|
||||
self.distribution.dist_files.append((
|
||||
'bdist_rpm',
|
||||
pyversion,
|
||||
filename,
|
||||
))
|
||||
|
||||
def _dist_path(self, path):
    """Return *path*'s basename relocated into the distribution directory."""
    basename = os.path.basename(path)
    return os.path.join(self.dist_dir, basename)
|
||||
|
||||
def _make_spec_file(self): # noqa: C901
|
||||
"""Generate the text of an RPM spec file and return it as a
|
||||
list of strings (one per line).
|
||||
"""
|
||||
# definitions and headers
|
||||
spec_file = [
|
||||
'%define name ' + self.distribution.get_name(),
|
||||
'%define version ' + self.distribution.get_version().replace('-', '_'),
|
||||
'%define unmangled_version ' + self.distribution.get_version(),
|
||||
'%define release ' + self.release.replace('-', '_'),
|
||||
'',
|
||||
'Summary: ' + (self.distribution.get_description() or "UNKNOWN"),
|
||||
]
|
||||
|
||||
# Workaround for #14443 which affects some RPM based systems such as
|
||||
# RHEL6 (and probably derivatives)
|
||||
vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
|
||||
# Generate a potential replacement value for __os_install_post (whilst
|
||||
# normalizing the whitespace to simplify the test for whether the
|
||||
# invocation of brp-python-bytecompile passes in __python):
|
||||
vendor_hook = '\n'.join([
|
||||
f' {line.strip()} \\' for line in vendor_hook.splitlines()
|
||||
])
|
||||
problem = "brp-python-bytecompile \\\n"
|
||||
fixed = "brp-python-bytecompile %{__python} \\\n"
|
||||
fixed_hook = vendor_hook.replace(problem, fixed)
|
||||
if fixed_hook != vendor_hook:
|
||||
spec_file.append('# Workaround for https://bugs.python.org/issue14443')
|
||||
spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
|
||||
|
||||
# put locale summaries into spec file
|
||||
# XXX not supported for now (hard to put a dictionary
|
||||
# in a config file -- arg!)
|
||||
# for locale in self.summaries.keys():
|
||||
# spec_file.append('Summary(%s): %s' % (locale,
|
||||
# self.summaries[locale]))
|
||||
|
||||
spec_file.extend([
|
||||
'Name: %{name}',
|
||||
'Version: %{version}',
|
||||
'Release: %{release}',
|
||||
])
|
||||
|
||||
# XXX yuck! this filename is available from the "sdist" command,
|
||||
# but only after it has run: and we create the spec file before
|
||||
# running "sdist", in case of --spec-only.
|
||||
if self.use_bzip2:
|
||||
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
|
||||
else:
|
||||
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
|
||||
|
||||
spec_file.extend([
|
||||
'License: ' + (self.distribution.get_license() or "UNKNOWN"),
|
||||
'Group: ' + self.group,
|
||||
'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
|
||||
'Prefix: %{_prefix}',
|
||||
])
|
||||
|
||||
if not self.force_arch:
|
||||
# noarch if no extension modules
|
||||
if not self.distribution.has_ext_modules():
|
||||
spec_file.append('BuildArch: noarch')
|
||||
else:
|
||||
spec_file.append(f'BuildArch: {self.force_arch}')
|
||||
|
||||
for field in (
|
||||
'Vendor',
|
||||
'Packager',
|
||||
'Provides',
|
||||
'Requires',
|
||||
'Conflicts',
|
||||
'Obsoletes',
|
||||
):
|
||||
val = getattr(self, field.lower())
|
||||
if isinstance(val, list):
|
||||
spec_file.append('{}: {}'.format(field, ' '.join(val)))
|
||||
elif val is not None:
|
||||
spec_file.append(f'{field}: {val}')
|
||||
|
||||
if self.distribution.get_url():
|
||||
spec_file.append('Url: ' + self.distribution.get_url())
|
||||
|
||||
if self.distribution_name:
|
||||
spec_file.append('Distribution: ' + self.distribution_name)
|
||||
|
||||
if self.build_requires:
|
||||
spec_file.append('BuildRequires: ' + ' '.join(self.build_requires))
|
||||
|
||||
if self.icon:
|
||||
spec_file.append('Icon: ' + os.path.basename(self.icon))
|
||||
|
||||
if self.no_autoreq:
|
||||
spec_file.append('AutoReq: 0')
|
||||
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%description',
|
||||
self.distribution.get_long_description() or "",
|
||||
])
|
||||
|
||||
# put locale descriptions into spec file
|
||||
# XXX again, suppressed because config file syntax doesn't
|
||||
# easily support this ;-(
|
||||
# for locale in self.descriptions.keys():
|
||||
# spec_file.extend([
|
||||
# '',
|
||||
# '%description -l ' + locale,
|
||||
# self.descriptions[locale],
|
||||
# ])
|
||||
|
||||
# rpm scripts
|
||||
# figure out default build script
|
||||
def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}"
|
||||
def_build = f"{def_setup_call} build"
|
||||
if self.use_rpm_opt_flags:
|
||||
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
|
||||
|
||||
# insert contents of files
|
||||
|
||||
# XXX this is kind of misleading: user-supplied options are files
|
||||
# that we open and interpolate into the spec file, but the defaults
|
||||
# are just text that we drop in as-is. Hmmm.
|
||||
|
||||
install_cmd = f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'
|
||||
|
||||
script_options = [
|
||||
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
|
||||
('build', 'build_script', def_build),
|
||||
('install', 'install_script', install_cmd),
|
||||
('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
|
||||
('verifyscript', 'verify_script', None),
|
||||
('pre', 'pre_install', None),
|
||||
('post', 'post_install', None),
|
||||
('preun', 'pre_uninstall', None),
|
||||
('postun', 'post_uninstall', None),
|
||||
]
|
||||
|
||||
for rpm_opt, attr, default in script_options:
|
||||
# Insert contents of file referred to, if no file is referred to
|
||||
# use 'default' as contents of script
|
||||
val = getattr(self, attr)
|
||||
if val or default:
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%' + rpm_opt,
|
||||
])
|
||||
if val:
|
||||
with open(val) as f:
|
||||
spec_file.extend(f.read().split('\n'))
|
||||
else:
|
||||
spec_file.append(default)
|
||||
|
||||
# files section
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%files -f INSTALLED_FILES',
|
||||
'%defattr(-,root,root)',
|
||||
])
|
||||
|
||||
if self.doc_files:
|
||||
spec_file.append('%doc ' + ' '.join(self.doc_files))
|
||||
|
||||
if self.changelog:
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%changelog',
|
||||
])
|
||||
spec_file.extend(self.changelog)
|
||||
|
||||
return spec_file
|
||||
|
||||
def _format_changelog(self, changelog):
|
||||
"""Format the changelog correctly and convert it to a list of strings"""
|
||||
if not changelog:
|
||||
return changelog
|
||||
new_changelog = []
|
||||
for line in changelog.strip().split('\n'):
|
||||
line = line.strip()
|
||||
if line[0] == '*':
|
||||
new_changelog.extend(['', line])
|
||||
elif line[0] == '-':
|
||||
new_changelog.append(line)
|
||||
else:
|
||||
new_changelog.append(' ' + line)
|
||||
|
||||
# strip trailing newline inserted by first changelog entry
|
||||
if not new_changelog[0]:
|
||||
del new_changelog[0]
|
||||
|
||||
return new_changelog
|
||||
@@ -0,0 +1,156 @@
|
||||
"""distutils.command.build
|
||||
|
||||
Implements the Distutils 'build' command."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sysconfig
|
||||
from collections.abc import Callable
|
||||
from typing import ClassVar
|
||||
|
||||
from ..ccompiler import show_compilers
|
||||
from ..core import Command
|
||||
from ..errors import DistutilsOptionError
|
||||
from ..util import get_platform
|
||||
|
||||
|
||||
class build(Command):
    """Top-level 'build' command.

    Does no compilation itself: run() dispatches to whichever of the
    build_py / build_clib / build_ext / build_scripts sub-commands is
    needed (see ``sub_commands``), after finalize_options() has resolved
    the shared build directories they all inherit.
    """

    description = "build everything needed to install"

    user_options = [
        ('build-base=', 'b', "base directory for build library"),
        ('build-purelib=', None, "build directory for platform-neutral distributions"),
        ('build-platlib=', None, "build directory for platform-specific distributions"),
        (
            'build-lib=',
            None,
            "build directory for all distributions (defaults to either build-purelib or build-platlib)",
        ),
        ('build-scripts=', None, "build directory for scripts"),
        ('build-temp=', 't', "temporary build directory"),
        (
            'plat-name=',
            'p',
            f"platform name to build for, if supported [default: {get_platform()}]",
        ),
        ('compiler=', 'c', "specify the compiler type"),
        ('parallel=', 'j', "number of parallel build jobs"),
        ('debug', 'g', "compile extensions and libraries with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e', "specify final destination interpreter path (build.py)"),
    ]

    boolean_options: ClassVar[list[str]] = ['debug', 'force']

    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        """Set every option to its 'unset' default."""
        self.build_base = 'build'
        # these are decided only after 'build_base' has its final value
        # (unless overridden by the user or client)
        self.build_purelib = None
        self.build_platlib = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.compiler = None
        self.plat_name = None
        self.debug = None
        self.force = False
        self.executable = None
        self.parallel = None

    def finalize_options(self) -> None:  # noqa: C901
        """Resolve build directories and platform-dependent defaults.

        Raises DistutilsOptionError if --plat-name is given on a
        non-Windows platform or --parallel is not an integer.
        """
        if self.plat_name is None:
            self.plat_name = get_platform()
        else:
            # plat-name only supported for windows (other platforms are
            # supported via ./configure flags, if at all). Avoid misleading
            # other platforms.
            if os.name != 'nt':
                raise DistutilsOptionError(
                    "--plat-name only supported on Windows (try "
                    "using './configure --help' on your platform)"
                )

        plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}"

        # Python 3.13+ with --disable-gil shouldn't share build directories
        if sysconfig.get_config_var('Py_GIL_DISABLED'):
            plat_specifier += 't'

        # Make it so Python 2.x and Python 2.x with --with-pydebug don't
        # share the same build directories. Doing so confuses the build
        # process for C modules
        if hasattr(sys, 'gettotalrefcount'):
            plat_specifier += '-pydebug'

        # 'build_purelib' and 'build_platlib' just default to 'lib' and
        # 'lib.<plat>' under the base build directory. We only use one of
        # them for a given distribution, though --
        if self.build_purelib is None:
            self.build_purelib = os.path.join(self.build_base, 'lib')
        if self.build_platlib is None:
            self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier)

        # 'build_lib' is the actual directory that we will use for this
        # particular module distribution -- if user didn't supply it, pick
        # one of 'build_purelib' or 'build_platlib'.
        if self.build_lib is None:
            if self.distribution.has_ext_modules():
                self.build_lib = self.build_platlib
            else:
                self.build_lib = self.build_purelib

        # 'build_temp' -- temporary directory for compiler turds,
        # "build/temp.<plat>"
        if self.build_temp is None:
            self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier)
        if self.build_scripts is None:
            self.build_scripts = os.path.join(
                self.build_base,
                f'scripts-{sys.version_info.major}.{sys.version_info.minor}',
            )

        if self.executable is None and sys.executable:
            self.executable = os.path.normpath(sys.executable)

        if isinstance(self.parallel, str):
            try:
                self.parallel = int(self.parallel)
            except ValueError:
                raise DistutilsOptionError("parallel should be an integer")

    def run(self) -> None:
        """Run every applicable build sub-command (see ``sub_commands``)."""
        # Run all relevant sub-commands. This will be some subset of:
        #  - build_py      - pure Python modules
        #  - build_clib    - standalone C libraries
        #  - build_ext     - Python extensions
        #  - build_scripts - (Python) scripts
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

    # -- Predicates for the sub-command list ---------------------------

    def has_pure_modules(self):
        return self.distribution.has_pure_modules()

    def has_c_libraries(self):
        return self.distribution.has_c_libraries()

    def has_ext_modules(self):
        return self.distribution.has_ext_modules()

    def has_scripts(self):
        return self.distribution.has_scripts()

    # Each sub-command runs only when its predicate reports work to do.
    sub_commands = [
        ('build_py', has_pure_modules),
        ('build_clib', has_c_libraries),
        ('build_ext', has_ext_modules),
        ('build_scripts', has_scripts),
    ]
|
||||
@@ -0,0 +1,201 @@
|
||||
"""distutils.command.build_clib
|
||||
|
||||
Implements the Distutils 'build_clib' command, to build a C/C++ library
|
||||
that is included in the module distribution and needed by an extension
|
||||
module."""
|
||||
|
||||
# XXX this module has *lots* of code ripped-off quite transparently from
|
||||
# build_ext.py -- not surprisingly really, as the work required to build
|
||||
# a static library from a collection of C source files is not really all
|
||||
# that different from what's required to build a shared object file from
|
||||
# a collection of C source files. Nevertheless, I haven't done the
|
||||
# necessary refactoring to account for the overlap in code between the
|
||||
# two modules, mainly because a number of subtle details changed in the
|
||||
# cut 'n paste. Sigh.
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from collections.abc import Callable
|
||||
from distutils._log import log
|
||||
from typing import ClassVar
|
||||
|
||||
from ..ccompiler import new_compiler, show_compilers
|
||||
from ..core import Command
|
||||
from ..errors import DistutilsSetupError
|
||||
from ..sysconfig import customize_compiler
|
||||
|
||||
|
||||
class build_clib(Command):
    """Build static C/C++ libraries consumed by extension modules.

    The libraries are compiled into ``build_temp`` (shared with the
    'build' command) and archived there; build_ext later adds that
    directory to its library search path.
    """

    description = "build C/C++ libraries used by Python extensions"

    user_options: ClassVar[list[tuple[str, str, str]]] = [
        ('build-clib=', 'b', "directory to build C/C++ libraries to"),
        ('build-temp=', 't', "directory to put temporary build by-products"),
        ('debug', 'g', "compile with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c', "specify the compiler type"),
    ]

    boolean_options: ClassVar[list[str]] = ['debug', 'force']

    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        """Set every option to its 'unset' default."""
        self.build_clib = None
        self.build_temp = None

        # List of libraries to build
        self.libraries = None

        # Compilation options for all libraries
        self.include_dirs = None
        self.define = None
        self.undef = None
        self.debug = None
        self.force = False
        self.compiler = None

    def finalize_options(self) -> None:
        """Inherit defaults from 'build' and validate the library list."""
        # This might be confusing: both build-clib and build-temp default
        # to build-temp as defined by the "build" command. This is because
        # I think that C libraries are really just temporary build
        # by-products, at least from the point of view of building Python
        # extensions -- but I want to keep my options open.
        self.set_undefined_options(
            'build',
            ('build_temp', 'build_clib'),
            ('build_temp', 'build_temp'),
            ('compiler', 'compiler'),
            ('debug', 'debug'),
            ('force', 'force'),
        )

        self.libraries = self.distribution.libraries
        if self.libraries:
            self.check_library_list(self.libraries)

        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # XXX same as for build_ext -- what about 'self.define' and
        # 'self.undef' ?

    def run(self) -> None:
        """Compile and archive every configured library; no-op if none."""
        if not self.libraries:
            return

        self.compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        customize_compiler(self.compiler)

        # Propagate command-wide compile options to the compiler object
        # so they apply to every library built below.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for name, value in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)

        self.build_libraries(self.libraries)

    def check_library_list(self, libraries) -> None:
        """Ensure that the list of libraries is valid.

        `library` is presumably provided as a command option 'libraries'.
        This method checks that it is a list of 2-tuples, where the tuples
        are (library_name, build_info_dict).

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(libraries, list):
            raise DistutilsSetupError("'libraries' option must be a list of tuples")

        for lib in libraries:
            # A 2-item list has always been tolerated alongside a 2-tuple,
            # so accept both -- but reject any other shape here.  (The
            # previous check used 'and', which let wrong-length tuples
            # through validation only to crash later with a bare
            # ValueError at the unpack below.)
            if not isinstance(lib, (tuple, list)) or len(lib) != 2:
                raise DistutilsSetupError(
                    "each element of 'libraries' must be a 2-tuple"
                )

            name, build_info = lib

            if not isinstance(name, str):
                raise DistutilsSetupError(
                    "first element of each tuple in 'libraries' "
                    "must be a string (the library name)"
                )

            if '/' in name or (os.sep != '/' and os.sep in name):
                raise DistutilsSetupError(
                    f"bad library name '{lib[0]}': may not contain directory separators"
                )

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                    "second element of each tuple in 'libraries' "
                    "must be a dictionary (build info)"
                )

    def get_library_names(self):
        """Return the names of all configured libraries, or None."""
        # Assume the library list is valid -- 'check_library_list()' is
        # called from 'finalize_options()', so it should be!
        if not self.libraries:
            return None

        return [lib_name for lib_name, _build_info in self.libraries]

    def get_source_files(self):
        """Return every source file of every library (for sdist)."""
        self.check_library_list(self.libraries)
        filenames = []
        for lib_name, build_info in self.libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    f"in 'libraries' option (library '{lib_name}'), "
                    "'sources' must be present and must be "
                    "a list of source filenames"
                )

            filenames.extend(sources)
        return filenames

    def build_libraries(self, libraries) -> None:
        """Compile and create a static archive for each (name, info) pair."""
        for lib_name, build_info in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    f"in 'libraries' option (library '{lib_name}'), "
                    "'sources' must be present and must be "
                    "a list of source filenames"
                )
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # First, compile the source code to object files in the library
            # directory. (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(
                sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=include_dirs,
                debug=self.debug,
            )

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive. Whatever.)
            self.compiler.create_static_lib(
                objects, lib_name, output_dir=self.build_clib, debug=self.debug
            )
|
||||
@@ -0,0 +1,812 @@
|
||||
"""distutils.command.build_ext
|
||||
|
||||
Implements the Distutils 'build_ext' command, for building extension
|
||||
modules (currently limited to C extensions, should accommodate C++
|
||||
extensions ASAP)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Callable
|
||||
from distutils._log import log
|
||||
from site import USER_BASE
|
||||
from typing import ClassVar
|
||||
|
||||
from .._modified import newer_group
|
||||
from ..ccompiler import new_compiler, show_compilers
|
||||
from ..core import Command
|
||||
from ..errors import (
|
||||
CCompilerError,
|
||||
CompileError,
|
||||
DistutilsError,
|
||||
DistutilsOptionError,
|
||||
DistutilsPlatformError,
|
||||
DistutilsSetupError,
|
||||
)
|
||||
from ..extension import Extension
|
||||
from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
|
||||
from ..util import get_platform, is_freethreaded, is_mingw
|
||||
|
||||
# An extension name is just a dot-separated list of Python NAMEs (ie.
|
||||
# the same as a fully-qualified module name).
|
||||
extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
|
||||
|
||||
|
||||
class build_ext(Command):
    """Build C/C++ extension modules: compile and link into the build dir."""

    description = "build C/C++ extensions (compile/link to build directory)"

    # XXX thoughts on how to deal with complex command-line options like
    # these, i.e. how to make it so fancy_getopt can suck them off the
    # command line and make it look like setup.py defined the appropriate
    # lists of tuples of what-have-you.
    # - each command needs a callback to process its command-line options
    # - Command.__init__() needs access to its share of the whole
    #   command line (must ultimately come from
    #   Distribution.parse_command_line())
    # - it then calls the current command class' option-parsing
    #   callback to deal with weird options like -D, which have to
    #   parse the option text and churn out some custom data
    #   structure
    # - that data structure (in this case, a list of 2-tuples)
    #   will then be present in the command object by the time
    #   we get to finalize_options() (i.e. the constructor
    #   takes care of both command-line and client options
    #   in between initialize_options() and finalize_options())

    # Common suffix for the help text of every path-list option below.
    sep_by = f" (separated by '{os.pathsep}')"
    user_options = [
        ('build-lib=', 'b', "directory for compiled extension modules"),
        ('build-temp=', 't', "directory for temporary files (build by-products)"),
        (
            'plat-name=',
            'p',
            "platform name to cross-compile for, if supported "
            f"[default: {get_platform()}]",
        ),
        (
            'inplace',
            'i',
            "ignore build-lib and put compiled extensions into the source "
            "directory alongside your pure Python modules",
        ),
        (
            'include-dirs=',
            'I',
            "list of directories to search for header files" + sep_by,
        ),
        ('define=', 'D', "C preprocessor macros to define"),
        ('undef=', 'U', "C preprocessor macros to undefine"),
        ('libraries=', 'l', "external C libraries to link with"),
        (
            'library-dirs=',
            'L',
            "directories to search for external C libraries" + sep_by,
        ),
        ('rpath=', 'R', "directories to search for shared C libraries at runtime"),
        ('link-objects=', 'O', "extra explicit link objects to include in the link"),
        ('debug', 'g', "compile/link with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c', "specify the compiler type"),
        ('parallel=', 'j', "number of parallel build jobs"),
        ('swig-cpp', None, "make SWIG create C++ files (default is C)"),
        ('swig-opts=', None, "list of SWIG command line options"),
        ('swig=', None, "path to the SWIG executable"),
        ('user', None, "add user include, library and rpath"),
    ]

    # Options that are flags (no argument) rather than name=value.
    boolean_options: ClassVar[list[str]] = [
        'inplace',
        'debug',
        'force',
        'swig-cpp',
        'user',
    ]

    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]
|
||||
|
||||
def initialize_options(self):
    """Set every option to its 'unset' default.

    Real values are filled in by finalize_options(), mostly inherited
    from the 'build' command via set_undefined_options().
    """
    self.extensions = None
    self.build_lib = None
    self.plat_name = None
    self.build_temp = None
    self.inplace = False
    self.package = None

    # Compile/link inputs, all settable from the command line.
    self.include_dirs = None
    self.define = None
    self.undef = None
    self.libraries = None
    self.library_dirs = None
    self.rpath = None
    self.link_objects = None
    self.debug = None
    self.force = None
    self.compiler = None
    # SWIG-related options (see swig_sources in the build machinery).
    self.swig = None
    self.swig_cpp = None
    self.swig_opts = None
    self.user = None
    self.parallel = None
|
||||
|
||||
@staticmethod
|
||||
def _python_lib_dir(sysconfig):
|
||||
"""
|
||||
Resolve Python's library directory for building extensions
|
||||
that rely on a shared Python library.
|
||||
|
||||
See python/cpython#44264 and python/cpython#48686
|
||||
"""
|
||||
if not sysconfig.get_config_var('Py_ENABLE_SHARED'):
|
||||
return
|
||||
|
||||
if sysconfig.python_build:
|
||||
yield '.'
|
||||
return
|
||||
|
||||
if sys.platform == 'zos':
|
||||
# On z/OS, a user is not required to install Python to
|
||||
# a predetermined path, but can use Python portably
|
||||
installed_dir = sysconfig.get_config_var('base')
|
||||
lib_dir = sysconfig.get_config_var('platlibdir')
|
||||
yield os.path.join(installed_dir, lib_dir)
|
||||
else:
|
||||
# building third party extensions
|
||||
yield sysconfig.get_config_var('LIBDIR')
|
||||
|
||||
def finalize_options(self) -> None:  # noqa: C901
    """Resolve all compile/link options.

    Order matters throughout: defaults are inherited from 'build',
    then Python's own include/library directories are appended (after
    user-supplied ones, so local dirs take precedence), then
    platform-specific directories, then string-valued options are
    split into the list forms the compiler object expects.

    Raises DistutilsOptionError if --parallel is not an integer.
    """
    from distutils import sysconfig

    # Inherit whatever the user did not set directly from 'build'.
    self.set_undefined_options(
        'build',
        ('build_lib', 'build_lib'),
        ('build_temp', 'build_temp'),
        ('compiler', 'compiler'),
        ('debug', 'debug'),
        ('force', 'force'),
        ('parallel', 'parallel'),
        ('plat_name', 'plat_name'),
    )

    if self.package is None:
        self.package = self.distribution.ext_package

    self.extensions = self.distribution.ext_modules

    # Make sure Python's include directories (for Python.h, pyconfig.h,
    # etc.) are in the include search path.
    py_include = sysconfig.get_python_inc()
    plat_py_include = sysconfig.get_python_inc(plat_specific=True)
    if self.include_dirs is None:
        self.include_dirs = self.distribution.include_dirs or []
    if isinstance(self.include_dirs, str):
        self.include_dirs = self.include_dirs.split(os.pathsep)

    # If in a virtualenv, add its include directory
    # Issue 16116
    if sys.exec_prefix != sys.base_exec_prefix:
        self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))

    # Put the Python "system" include dir at the end, so that
    # any local include dirs take precedence.
    self.include_dirs.extend(py_include.split(os.path.pathsep))
    if plat_py_include != py_include:
        self.include_dirs.extend(plat_py_include.split(os.path.pathsep))

    self.ensure_string_list('libraries')
    self.ensure_string_list('link_objects')

    # Life is easier if we're not forever checking for None, so
    # simplify these options to empty lists if unset
    if self.libraries is None:
        self.libraries = []
    if self.library_dirs is None:
        self.library_dirs = []
    elif isinstance(self.library_dirs, str):
        self.library_dirs = self.library_dirs.split(os.pathsep)

    if self.rpath is None:
        self.rpath = []
    elif isinstance(self.rpath, str):
        self.rpath = self.rpath.split(os.pathsep)

    # for extensions under windows use different directories
    # for Release and Debug builds.
    # also Python's library directory must be appended to library_dirs
    if os.name == 'nt' and not is_mingw():
        # the 'libs' directory is for binary installs - we assume that
        # must be the *native* platform. But we don't really support
        # cross-compiling via a binary install anyway, so we let it go.
        self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
        if sys.base_exec_prefix != sys.prefix:  # Issue 16116
            self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
        if self.debug:
            self.build_temp = os.path.join(self.build_temp, "Debug")
        else:
            self.build_temp = os.path.join(self.build_temp, "Release")

        # Append the source distribution include and library directories,
        # this allows distutils on windows to work in the source tree
        self.include_dirs.append(os.path.dirname(get_config_h_filename()))
        self.library_dirs.append(sys.base_exec_prefix)

        # Use the .lib files for the correct architecture
        if self.plat_name == 'win32':
            suffix = 'win32'
        else:
            # win-amd64
            suffix = self.plat_name[4:]
        new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
        if suffix:
            new_lib = os.path.join(new_lib, suffix)
        self.library_dirs.append(new_lib)

    # For extensions under Cygwin, Python's library directory must be
    # appended to library_dirs
    if sys.platform[:6] == 'cygwin':
        if not sysconfig.python_build:
            # building third party extensions
            self.library_dirs.append(
                os.path.join(
                    sys.prefix, "lib", "python" + get_python_version(), "config"
                )
            )
        else:
            # building python standard extensions
            self.library_dirs.append('.')

    # Shared-libpython directory, if any (see _python_lib_dir).
    self.library_dirs.extend(self._python_lib_dir(sysconfig))

    # The argument parsing will result in self.define being a string, but
    # it has to be a list of 2-tuples. All the preprocessor symbols
    # specified by the 'define' option will be set to '1'. Multiple
    # symbols can be separated with commas.

    if self.define:
        defines = self.define.split(',')
        self.define = [(symbol, '1') for symbol in defines]

    # The option for macros to undefine is also a string from the
    # option parsing, but has to be a list. Multiple symbols can also
    # be separated with commas here.
    if self.undef:
        self.undef = self.undef.split(',')

    if self.swig_opts is None:
        self.swig_opts = []
    else:
        self.swig_opts = self.swig_opts.split(' ')

    # Finally add the user include and library directories if requested
    if self.user:
        user_include = os.path.join(USER_BASE, "include")
        user_lib = os.path.join(USER_BASE, "lib")
        if os.path.isdir(user_include):
            self.include_dirs.append(user_include)
        if os.path.isdir(user_lib):
            self.library_dirs.append(user_lib)
            self.rpath.append(user_lib)

    if isinstance(self.parallel, str):
        try:
            self.parallel = int(self.parallel)
        except ValueError:
            raise DistutilsOptionError("parallel should be an integer")
|
||||
|
||||
def run(self) -> None:  # noqa: C901
    """Set up the CCompiler object and build every extension.

    No-op when there are no extensions.  The compiler must receive all
    option state (include dirs, macros, libraries, ...) before
    build_extensions() is called at the end.
    """
    # 'self.extensions', as supplied by setup.py, is a list of
    # Extension instances. See the documentation for Extension (in
    # distutils.extension) for details.
    #
    # For backwards compatibility with Distutils 0.8.2 and earlier, we
    # also allow the 'extensions' list to be a list of tuples:
    #    (ext_name, build_info)
    # where build_info is a dictionary containing everything that
    # Extension instances do except the name, with a few things being
    # differently named. We convert these 2-tuples to Extension
    # instances as needed.

    if not self.extensions:
        return

    # If we were asked to build any C/C++ libraries, make sure that the
    # directory where we put them is in the library search path for
    # linking extensions.
    if self.distribution.has_c_libraries():
        build_clib = self.get_finalized_command('build_clib')
        self.libraries.extend(build_clib.get_library_names() or [])
        self.library_dirs.append(build_clib.build_clib)

    # Setup the CCompiler object that we'll use to do all the
    # compiling and linking
    self.compiler = new_compiler(
        compiler=self.compiler,
        verbose=self.verbose,
        dry_run=self.dry_run,
        force=self.force,
    )
    customize_compiler(self.compiler)
    # If we are cross-compiling, init the compiler now (if we are not
    # cross-compiling, init would not hurt, but people may rely on
    # late initialization of compiler even if they shouldn't...)
    if os.name == 'nt' and self.plat_name != get_platform():
        self.compiler.initialize(self.plat_name)

    # The official Windows free threaded Python installer doesn't set
    # Py_GIL_DISABLED because its pyconfig.h is shared with the
    # default build, so define it here (pypa/setuptools#4662).
    if os.name == 'nt' and is_freethreaded():
        self.compiler.define_macro('Py_GIL_DISABLED', '1')

    # And make sure that any compile/link-related options (which might
    # come from the command-line or from the setup script) are set in
    # that CCompiler object -- that way, they automatically apply to
    # all compiling and linking done here.
    if self.include_dirs is not None:
        self.compiler.set_include_dirs(self.include_dirs)
    if self.define is not None:
        # 'define' option is a list of (name,value) tuples
        for name, value in self.define:
            self.compiler.define_macro(name, value)
    if self.undef is not None:
        for macro in self.undef:
            self.compiler.undefine_macro(macro)
    if self.libraries is not None:
        self.compiler.set_libraries(self.libraries)
    if self.library_dirs is not None:
        self.compiler.set_library_dirs(self.library_dirs)
    if self.rpath is not None:
        self.compiler.set_runtime_library_dirs(self.rpath)
    if self.link_objects is not None:
        self.compiler.set_link_objects(self.link_objects)

    # Now actually compile and link everything.
    self.build_extensions()
|
||||
|
||||
def check_extensions_list(self, extensions) -> None:  # noqa: C901
    """Ensure that the list of extensions (presumably provided as a
    command option 'extensions') is valid, i.e. it is a list of
    Extension objects. We also support the old-style list of 2-tuples,
    where the tuples are (ext_name, build_info), which are converted to
    Extension instances here.

    Raise DistutilsSetupError if the structure is invalid anywhere;
    just returns otherwise.

    Note: mutates ``extensions`` in place, replacing legacy tuples with
    equivalent Extension instances.
    """
    if not isinstance(extensions, list):
        raise DistutilsSetupError(
            "'ext_modules' option must be a list of Extension instances"
        )

    for i, ext in enumerate(extensions):
        if isinstance(ext, Extension):
            continue  # OK! (assume type-checking done
            # by Extension constructor)

        if not isinstance(ext, tuple) or len(ext) != 2:
            raise DistutilsSetupError(
                "each element of 'ext_modules' option must be an "
                "Extension instance or 2-tuple"
            )

        ext_name, build_info = ext

        log.warning(
            "old-style (ext_name, build_info) tuple found in "
            "ext_modules for extension '%s' "
            "-- please convert to Extension instance",
            ext_name,
        )

        if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)):
            raise DistutilsSetupError(
                "first element of each tuple in 'ext_modules' "
                "must be the extension name (a string)"
            )

        if not isinstance(build_info, dict):
            raise DistutilsSetupError(
                "second element of each tuple in 'ext_modules' "
                "must be a dictionary (build info)"
            )

        # OK, the (ext_name, build_info) dict is type-safe: convert it
        # to an Extension instance.
        ext = Extension(ext_name, build_info['sources'])

        # Easy stuff: one-to-one mapping from dict elements to
        # instance attributes.
        for key in (
            'include_dirs',
            'library_dirs',
            'libraries',
            'extra_objects',
            'extra_compile_args',
            'extra_link_args',
        ):
            val = build_info.get(key)
            if val is not None:
                setattr(ext, key, val)

        # Medium-easy stuff: same syntax/semantics, different names.
        ext.runtime_library_dirs = build_info.get('rpath')
        if 'def_file' in build_info:
            log.warning("'def_file' element of build info dict no longer supported")

        # Non-trivial stuff: 'macros' split into 'define_macros'
        # and 'undef_macros'.
        macros = build_info.get('macros')
        if macros:
            ext.define_macros = []
            ext.undef_macros = []
            for macro in macros:
                if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                    raise DistutilsSetupError(
                        "'macros' element of build info dict must be 1- or 2-tuple"
                    )
                if len(macro) == 1:
                    ext.undef_macros.append(macro[0])
                elif len(macro) == 2:
                    ext.define_macros.append(macro)

        # Replace the legacy tuple in place with the Extension instance.
        extensions[i] = ext
|
||||
|
||||
def get_source_files(self):
|
||||
self.check_extensions_list(self.extensions)
|
||||
filenames = []
|
||||
|
||||
# Wouldn't it be neat if we knew the names of header files too...
|
||||
for ext in self.extensions:
|
||||
filenames.extend(ext.sources)
|
||||
return filenames
|
||||
|
||||
def get_outputs(self):
|
||||
# Sanity check the 'extensions' list -- can't assume this is being
|
||||
# done in the same run as a 'build_extensions()' call (in fact, we
|
||||
# can probably assume that it *isn't*!).
|
||||
self.check_extensions_list(self.extensions)
|
||||
|
||||
# And build the list of output (built) filenames. Note that this
|
||||
# ignores the 'inplace' flag, and assumes everything goes in the
|
||||
# "build" tree.
|
||||
return [self.get_ext_fullpath(ext.name) for ext in self.extensions]
|
||||
|
||||
def build_extensions(self) -> None:
|
||||
# First, sanity-check the 'extensions' list
|
||||
self.check_extensions_list(self.extensions)
|
||||
if self.parallel:
|
||||
self._build_extensions_parallel()
|
||||
else:
|
||||
self._build_extensions_serial()
|
||||
|
||||
def _build_extensions_parallel(self):
|
||||
workers = self.parallel
|
||||
if self.parallel is True:
|
||||
workers = os.cpu_count() # may return None
|
||||
try:
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
except ImportError:
|
||||
workers = None
|
||||
|
||||
if workers is None:
|
||||
self._build_extensions_serial()
|
||||
return
|
||||
|
||||
with ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
futures = [
|
||||
executor.submit(self.build_extension, ext) for ext in self.extensions
|
||||
]
|
||||
for ext, fut in zip(self.extensions, futures):
|
||||
with self._filter_build_errors(ext):
|
||||
fut.result()
|
||||
|
||||
def _build_extensions_serial(self):
|
||||
for ext in self.extensions:
|
||||
with self._filter_build_errors(ext):
|
||||
self.build_extension(ext)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _filter_build_errors(self, ext):
|
||||
try:
|
||||
yield
|
||||
except (CCompilerError, DistutilsError, CompileError) as e:
|
||||
if not ext.optional:
|
||||
raise
|
||||
self.warn(f'building extension "{ext.name}" failed: {e}')
|
||||
|
||||
    def build_extension(self, ext) -> None:
        """Compile and link a single Extension into its shared-object file.

        Raises DistutilsSetupError when 'ext.sources' is missing or not a
        list/tuple.  Skips the build when neither the sources nor their
        dependencies are newer than the existing output, unless
        'self.force' is set.
        """
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                f"in 'ext_modules' option (extension '{ext.name}'), "
                "'sources' must be present and must be "
                "a list of source filenames"
            )
        # sort to make the resulting .so file build reproducible
        sources = sorted(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        # Dependency check covers both the sources and any 'depends' files.
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        # 1-tuples in 'macros' mean "undefine"; 2-tuples mean "define".
        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef,))

        objects = self.compiler.compile(
            sources,
            output_dir=self.build_temp,
            macros=macros,
            include_dirs=ext.include_dirs,
            debug=self.debug,
            extra_postargs=extra_args,
            depends=ext.depends,
        )

        # XXX outdated variable, kept here in case third-part code
        # needs it.
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        # Note: 'extra_args' is reused for the link step from here on.
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects,
            ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language,
        )
def swig_sources(self, sources, extension):
|
||||
"""Walk the list of source files in 'sources', looking for SWIG
|
||||
interface (.i) files. Run SWIG on all that are found, and
|
||||
return a modified 'sources' list with SWIG source files replaced
|
||||
by the generated C (or C++) files.
|
||||
"""
|
||||
new_sources = []
|
||||
swig_sources = []
|
||||
swig_targets = {}
|
||||
|
||||
# XXX this drops generated C/C++ files into the source tree, which
|
||||
# is fine for developers who want to distribute the generated
|
||||
# source -- but there should be an option to put SWIG output in
|
||||
# the temp dir.
|
||||
|
||||
if self.swig_cpp:
|
||||
log.warning("--swig-cpp is deprecated - use --swig-opts=-c++")
|
||||
|
||||
if (
|
||||
self.swig_cpp
|
||||
or ('-c++' in self.swig_opts)
|
||||
or ('-c++' in extension.swig_opts)
|
||||
):
|
||||
target_ext = '.cpp'
|
||||
else:
|
||||
target_ext = '.c'
|
||||
|
||||
for source in sources:
|
||||
(base, ext) = os.path.splitext(source)
|
||||
if ext == ".i": # SWIG interface file
|
||||
new_sources.append(base + '_wrap' + target_ext)
|
||||
swig_sources.append(source)
|
||||
swig_targets[source] = new_sources[-1]
|
||||
else:
|
||||
new_sources.append(source)
|
||||
|
||||
if not swig_sources:
|
||||
return new_sources
|
||||
|
||||
swig = self.swig or self.find_swig()
|
||||
swig_cmd = [swig, "-python"]
|
||||
swig_cmd.extend(self.swig_opts)
|
||||
if self.swig_cpp:
|
||||
swig_cmd.append("-c++")
|
||||
|
||||
# Do not override commandline arguments
|
||||
if not self.swig_opts:
|
||||
swig_cmd.extend(extension.swig_opts)
|
||||
|
||||
for source in swig_sources:
|
||||
target = swig_targets[source]
|
||||
log.info("swigging %s to %s", source, target)
|
||||
self.spawn(swig_cmd + ["-o", target, source])
|
||||
|
||||
return new_sources
|
||||
|
||||
def find_swig(self):
|
||||
"""Return the name of the SWIG executable. On Unix, this is
|
||||
just "swig" -- it should be in the PATH. Tries a bit harder on
|
||||
Windows.
|
||||
"""
|
||||
if os.name == "posix":
|
||||
return "swig"
|
||||
elif os.name == "nt":
|
||||
# Look for SWIG in its standard installation directory on
|
||||
# Windows (or so I presume!). If we find it there, great;
|
||||
# if not, act like Unix and assume it's in the PATH.
|
||||
for vers in ("1.3", "1.2", "1.1"):
|
||||
fn = os.path.join(f"c:\\swig{vers}", "swig.exe")
|
||||
if os.path.isfile(fn):
|
||||
return fn
|
||||
else:
|
||||
return "swig.exe"
|
||||
else:
|
||||
raise DistutilsPlatformError(
|
||||
f"I don't know how to find (much less run) SWIG on platform '{os.name}'"
|
||||
)
|
||||
|
||||
# -- Name generators -----------------------------------------------
|
||||
# (extension names, filenames, whatever)
|
||||
def get_ext_fullpath(self, ext_name: str) -> str:
|
||||
"""Returns the path of the filename for a given extension.
|
||||
|
||||
The file is located in `build_lib` or directly in the package
|
||||
(inplace option).
|
||||
"""
|
||||
fullname = self.get_ext_fullname(ext_name)
|
||||
modpath = fullname.split('.')
|
||||
filename = self.get_ext_filename(modpath[-1])
|
||||
|
||||
if not self.inplace:
|
||||
# no further work needed
|
||||
# returning :
|
||||
# build_dir/package/path/filename
|
||||
filename = os.path.join(*modpath[:-1] + [filename])
|
||||
return os.path.join(self.build_lib, filename)
|
||||
|
||||
# the inplace option requires to find the package directory
|
||||
# using the build_py command for that
|
||||
package = '.'.join(modpath[0:-1])
|
||||
build_py = self.get_finalized_command('build_py')
|
||||
package_dir = os.path.abspath(build_py.get_package_dir(package))
|
||||
|
||||
# returning
|
||||
# package_dir/filename
|
||||
return os.path.join(package_dir, filename)
|
||||
|
||||
def get_ext_fullname(self, ext_name: str) -> str:
|
||||
"""Returns the fullname of a given extension name.
|
||||
|
||||
Adds the `package.` prefix"""
|
||||
if self.package is None:
|
||||
return ext_name
|
||||
else:
|
||||
return self.package + '.' + ext_name
|
||||
|
||||
def get_ext_filename(self, ext_name: str) -> str:
|
||||
r"""Convert the name of an extension (eg. "foo.bar") into the name
|
||||
of the file from which it will be loaded (eg. "foo/bar.so", or
|
||||
"foo\bar.pyd").
|
||||
"""
|
||||
from ..sysconfig import get_config_var
|
||||
|
||||
ext_path = ext_name.split('.')
|
||||
ext_suffix = get_config_var('EXT_SUFFIX')
|
||||
return os.path.join(*ext_path) + ext_suffix
|
||||
|
||||
def get_export_symbols(self, ext: Extension) -> list[str]:
|
||||
"""Return the list of symbols that a shared extension has to
|
||||
export. This either uses 'ext.export_symbols' or, if it's not
|
||||
provided, "PyInit_" + module_name. Only relevant on Windows, where
|
||||
the .pyd file (DLL) must export the module "PyInit_" function.
|
||||
"""
|
||||
name = self._get_module_name_for_symbol(ext)
|
||||
try:
|
||||
# Unicode module name support as defined in PEP-489
|
||||
# https://peps.python.org/pep-0489/#export-hook-name
|
||||
name.encode('ascii')
|
||||
except UnicodeEncodeError:
|
||||
suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii')
|
||||
else:
|
||||
suffix = "_" + name
|
||||
|
||||
initfunc_name = "PyInit" + suffix
|
||||
if initfunc_name not in ext.export_symbols:
|
||||
ext.export_symbols.append(initfunc_name)
|
||||
return ext.export_symbols
|
||||
|
||||
def _get_module_name_for_symbol(self, ext):
|
||||
# Package name should be used for `__init__` modules
|
||||
# https://github.com/python/cpython/issues/80074
|
||||
# https://github.com/pypa/setuptools/issues/4826
|
||||
parts = ext.name.split(".")
|
||||
if parts[-1] == "__init__" and len(parts) >= 2:
|
||||
return parts[-2]
|
||||
return parts[-1]
|
||||
|
||||
    def get_libraries(self, ext: Extension) -> list[str]:  # noqa: C901
        """Return the list of libraries to link against when building a
        shared extension.  On most platforms, this is just 'ext.libraries';
        on Windows, we add the Python library (eg. python20.dll).
        """
        # The python library is always needed on Windows.  For MSVC, this
        # is redundant, since the library is mentioned in a pragma in
        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
        # to need it mentioned explicitly, though, so that's what we do.
        # Append '_d' to the python import library on debug builds.
        if sys.platform == "win32" and not is_mingw():
            from .._msvccompiler import MSVCCompiler

            if not isinstance(self.compiler, MSVCCompiler):
                template = "python%d%d"
                if self.debug:
                    template = template + '_d'
                # Derive major/minor from sys.hexversion (e.g. "python312").
                pythonlib = template % (
                    sys.hexversion >> 24,
                    (sys.hexversion >> 16) & 0xFF,
                )
                # don't extend ext.libraries, it may be shared with other
                # extensions, it is a reference to the original list
                return ext.libraries + [pythonlib]
        else:
            # On Android only the main executable and LD_PRELOADs are considered
            # to be RTLD_GLOBAL, all the dependencies of the main executable
            # remain RTLD_LOCAL and so the shared libraries must be linked with
            # libpython when python is built with a shared python library (issue
            # bpo-21536).
            # On Cygwin (and if required, other POSIX-like platforms based on
            # Windows like MinGW) it is simply necessary that all symbols in
            # shared libraries are resolved at link time.
            from ..sysconfig import get_config_var

            link_libpython = False
            if get_config_var('Py_ENABLE_SHARED'):
                # A native build on an Android device or on Cygwin
                if hasattr(sys, 'getandroidapilevel'):
                    link_libpython = True
                elif sys.platform == 'cygwin' or is_mingw():
                    link_libpython = True
                elif '_PYTHON_HOST_PLATFORM' in os.environ:
                    # We are cross-compiling for one of the relevant platforms
                    if get_config_var('ANDROID_API_LEVEL') != 0:
                        link_libpython = True
                    elif get_config_var('MACHDEP') == 'cygwin':
                        link_libpython = True

            if link_libpython:
                # LDVERSION includes any ABI flags (e.g. "3.12d").
                ldversion = get_config_var('LDVERSION')
                return ext.libraries + ['python' + ldversion]

        # Default: no implicit Python library on this platform.
        return ext.libraries
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user