forked from Raiza.dev/EliteBot
Cleaned up the directories
This commit is contained in:
parent
f708506d68
commit
a683fcffea
1340 changed files with 554582 additions and 6840 deletions
|
@ -0,0 +1,12 @@
|
|||
from distutils.command.bdist import bdist
import sys

# Register the 'egg' format with distutils' `bdist` command so that
# `python setup.py bdist --formats=egg` dispatches to setuptools' bdist_egg.
if 'egg' not in bdist.format_commands:
    try:
        # Newer (setuptools-vendored) distutils: `format_commands` supports
        # item assignment mapping format name -> (command name, description).
        bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
    except TypeError:
        # For backward compatibility with older distutils (stdlib), where
        # `format_commands` is a plain list and the mapping lives in
        # the separate `format_command` dict.
        bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
        bdist.format_commands.append('egg')

# Don't leak helper names into this module's namespace.
del bdist, sys
|
|
@ -0,0 +1,78 @@
|
|||
from distutils.errors import DistutilsOptionError
|
||||
|
||||
from setuptools.command.setopt import edit_config, option_base, config_file
|
||||
|
||||
|
||||
def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    # Quote whenever the argument contains shell-significant characters or
    # any whitespace; otherwise it is safe to pass through unchanged.
    special_chars = ('"', "'", "\\", "#")
    needs_quoting = any(ch in arg for ch in special_chars) or arg.split() != [arg]
    return repr(arg) if needs_quoting else arg
|
||||
|
||||
|
||||
class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    # Tell distutils to hand every remaining command-line token to this
    # command as positional arguments (self.args) instead of parsing them.
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        # Distutils contract: set every option to its "unset" default.
        option_base.initialize_options(self)
        self.args = None  # positional args: alias name [+ command words]
        self.remove = None  # --remove / -r flag

    def finalize_options(self):
        # Distutils contract: validate option combinations before run().
        option_base.finalize_options(self)
        if self.remove and len(self.args) != 1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when "
                "using --remove"
            )

    def run(self):
        """List, show, define or remove aliases depending on the arguments."""
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            # No arguments: print every known alias and exit.
            print("Command Aliases")
            print("---------------")
            for alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
            return

        elif len(self.args) == 1:
            # One argument: either remove that alias or show its definition.
            (alias,) = self.args
            if self.remove:
                # A value of None below tells edit_config to delete the entry.
                command = None
            elif alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
                return
            else:
                print("No alias definition found for %r" % alias)
                return
        else:
            # Two or more arguments: define `alias` as the remaining words,
            # quoted so they survive a later shlex.split().
            alias = self.args[0]
            command = ' '.join(map(shquote, self.args[1:]))

        # Persist the definition (or removal) to the selected config file.
        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
|
||||
|
||||
|
||||
def format_alias(name, aliases):
    """Render one alias definition as a ``setup.py alias`` command line.

    ``aliases[name]`` is a ``(source, command)`` pair; the source (which
    config file the alias came from) is shown as the matching command-line
    option prefix.
    """
    source, command = aliases[name]
    # Check the known config-file levels in a fixed order; fall back to an
    # explicit --filename option for anything else.
    for level, prefix in (
        ('global', '--global-config '),
        ('user', '--user-config '),
        ('local', ''),
    ):
        if source == config_file(level):
            source = prefix
            break
    else:
        source = '--filename=%r' % source
    return source + name + ' ' + command
|
|
@ -0,0 +1,464 @@
|
|||
"""setuptools.command.bdist_egg
|
||||
|
||||
Build .egg distributions"""
|
||||
|
||||
from distutils.dir_util import remove_tree, mkpath
|
||||
from distutils import log
|
||||
from types import CodeType
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import textwrap
|
||||
import marshal
|
||||
|
||||
from setuptools.extension import Library
|
||||
from setuptools import Command
|
||||
from .._path import ensure_directory
|
||||
|
||||
from sysconfig import get_path, get_python_version
|
||||
|
||||
|
||||
def _get_purelib():
|
||||
return get_path("purelib")
|
||||
|
||||
|
||||
def strip_module(filename):
    """Drop any extension and a trailing ``module`` suffix from *filename*."""
    base = os.path.splitext(filename)[0] if '.' in filename else filename
    if base.endswith('module'):
        base = base[: -len('module')]
    return base
|
||||
|
||||
|
||||
def sorted_walk(dir):
    """Like :func:`os.walk`, but with entries yielded in sorted order.

    Sorting ``dirs`` and ``files`` in place makes the traversal reproducible,
    independent of the filesystem's nondeterministic readdir order (the
    in-place sort of ``dirs`` also controls which subdirectories os.walk
    descends into, and in what order).
    """
    for entry in os.walk(dir):
        entry[1].sort()
        entry[2].sort()
        yield entry
|
||||
|
||||
|
||||
def write_stub(resource, pyfile):
    """Write a stub loader module to *pyfile*.

    The stub, when imported, locates the real extension *resource* via
    ``pkg_resources`` and executes it in place of itself.
    """
    template = textwrap.dedent(
        """
        def __bootstrap__():
            global __bootstrap__, __loader__, __file__
            import sys, pkg_resources, importlib.util
            __file__ = pkg_resources.resource_filename(__name__, %r)
            __loader__ = None; del __bootstrap__, __loader__
            spec = importlib.util.spec_from_file_location(__name__,__file__)
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
        __bootstrap__()
        """
    ).lstrip()
    contents = template % resource
    with open(pyfile, 'w') as stub_file:
        stub_file.write(contents)
|
||||
|
||||
|
||||
class bdist_egg(Command):
    """setuptools command that builds a ``.egg`` distribution archive."""

    description = "create an \"egg\" distribution"

    user_options = [
        ('bdist-dir=', 'b', "temporary directory for creating the distribution"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(by default uses `pkg_resources.get_build_platform()`)",
        ),
        ('exclude-source-files', None, "remove all .py files from the generated egg"),
        (
            'keep-temp',
            'k',
            "keep the pseudo-installation tree around after "
            + "creating the distribution archive",
        ),
        ('dist-dir=', 'd', "directory to put final built distributions in"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']

    def initialize_options(self):
        # Distutils contract: all options start unset; finalize_options
        # fills in the defaults from related commands.
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        self.exclude_source_files = None

    def finalize_options(self):
        """Resolve unset options from related commands and compute the
        output egg path."""
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info

        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')

        if self.plat_name is None:
            from pkg_resources import get_build_platform

            self.plat_name = get_build_platform()

        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

        if self.egg_output is None:
            # Compute filename of the output egg
            basename = ei_cmd._get_egg_basename(
                py_version=get_python_version(),
                # only embed a platform tag when native extensions exist
                platform=self.distribution.has_ext_modules() and self.plat_name,
            )

            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir

        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
        # Temporarily swap out data_files with a rewritten copy.
        old, self.distribution.data_files = self.distribution.data_files, []

        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                        site_packages + os.sep
                    ):
                        # Rebase absolute site-packages targets to relative
                        # paths so they land inside the egg build tree.
                        item = realpath[len(site_packages) + 1 :], item[1]
                    # XXX else: raise ???
            self.distribution.data_files.append(item)

        try:
            log.info("installing package data to %s", self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            # Always restore the distribution's original data_files list.
            self.distribution.data_files = old

    def get_outputs(self):
        # The only output of this command is the egg archive itself.
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        for dirname in INSTALL_DIRECTORY_ATTRS:
            # Point every install-directory option at the egg build tree.
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def run(self):  # noqa: C901 # is too complex (14) # FIXME
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s", self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        instcmd.root = None
        if self.distribution.has_c_libraries() and not self.skip_build:
            self.run_command('build_clib')
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root

        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        # Generate a .py stub loader next to every native extension.
        for p, ext_name in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s", ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            # Normalize to forward slashes for the native_libs.txt listing.
            ext_outputs[p] = ext_name.replace(os.sep, '/')

        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()

        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s", script_dir)
            self.call_command('install_scripts', install_dir=script_dir, no_ep=1)

        self.copy_metadata_to(egg_info)
        native_libs = os.path.join(egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s", native_libs)
            if not self.dry_run:
                # NOTE(review): file handle is closed manually rather than
                # via a `with` block; left as-is to preserve exact behavior.
                ensure_directory(native_libs)
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            # No native outputs this time: drop a stale listing if present.
            log.info("removing %s", native_libs)
            if not self.dry_run:
                os.unlink(native_libs)

        write_safety_flag(os.path.join(archive_root, 'EGG-INFO'), self.zip_safe())

        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )

        if self.exclude_source_files:
            self.zap_pyfiles()

        # Make the archive
        make_zipfile(
            self.egg_output,
            archive_root,
            verbose=self.verbose,
            dry_run=self.dry_run,
            mode=self.gen_header(),
        )
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_egg', get_python_version(), self.egg_output)
        )

    def zap_pyfiles(self):
        """Delete .py sources from the build tree and lift .pyc files out of
        __pycache__ (used for --exclude-source-files eggs)."""
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                path = os.path.join(base, name)

                if name.endswith('.py'):
                    log.debug("Deleting %s", path)
                    os.unlink(path)

                if base.endswith('__pycache__'):
                    path_old = path

                    # e.g. "mod.cpython-311.pyc" -> name="mod", magic tag
                    pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                    m = re.match(pattern, name)
                    # NOTE(review): `m` is assumed to match; a file in
                    # __pycache__ not following the tag pattern would raise
                    # AttributeError here — confirm intended.
                    path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
                    log.info("Renaming file from [%s] to [%s]" % (path_old, path_new))
                    try:
                        # Replace any pre-existing destination file.
                        os.remove(path_new)
                    except OSError:
                        pass
                    os.rename(path_old, path_new)

    def zip_safe(self):
        """Return the distribution's zip_safe flag, scanning the build tree
        to guess when the flag was not set explicitly."""
        safe = getattr(self.distribution, 'zip_safe', None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)

    def gen_header(self):
        # Mode passed through to make_zipfile; 'w' creates a fresh archive.
        return 'w'

    def copy_metadata_to(self, target_dir):
        "Copy metadata (egg info) to the target_dir"
        # normalize the path (so that a forward-slash in egg_info will
        # match using startswith below)
        norm_egg_info = os.path.normpath(self.egg_info)
        prefix = os.path.join(norm_egg_info, '')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix) :])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""

        all_outputs = []
        ext_outputs = []

        # Map each walked directory to its path relative to bdist_dir
        # (with a trailing '/'), so outputs can be reported relatively.
        paths = {self.bdist_dir: ''}
        for base, dirs, files in sorted_walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base] + filename)
            for filename in dirs:
                paths[os.path.join(base, filename)] = paths[base] + filename + '/'

        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                # Shared libraries (Library instances) are not stub-wrapped.
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)

        return all_outputs, ext_outputs
|
||||
|
||||
|
||||
# Filename extensions treated as native (compiled) extension modules.
# Stored as dict keys purely for fast membership testing.
NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
|
||||
|
||||
|
||||
def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the metadata directory"""
    walker = sorted_walk(egg_dir)
    # The first entry is the egg root; prune EGG-INFO there so the metadata
    # tree is neither descended into nor reported.
    top, subdirs, filenames = next(walker)
    if 'EGG-INFO' in subdirs:
        subdirs.remove('EGG-INFO')
    yield top, subdirs, filenames
    yield from walker
|
||||
|
||||
|
||||
def analyze_egg(egg_dir, stubs):
    """Guess zip-safety of an unpacked egg by scanning its compiled modules."""
    # An explicit flag file in EGG-INFO overrides any scanning.
    for flag, fn in safety_flags.items():
        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
            return flag
    if not can_scan():
        return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith(('.py', '.pyw')):
                continue
            elif name.endswith(('.pyc', '.pyo')):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe
|
||||
|
||||
|
||||
def write_safety_flag(egg_dir, safe):
    """Write or remove the zip-safety flag file(s) in *egg_dir*.

    ``safe`` may be True (zip-safe), False (not-zip-safe) or None (unknown).
    After the call, at most one of the two marker files exists: the one
    matching ``bool(safe)`` when ``safe`` is set, none when it is None.
    """
    for flag, fn in safety_flags.items():
        fn = os.path.join(egg_dir, fn)
        if os.path.exists(fn):
            # Remove a stale or contradictory marker.
            if safe is None or bool(safe) != flag:
                os.unlink(fn)
        elif safe is not None and bool(safe) == flag:
            # Fix: use a context manager so the handle is closed even if
            # the write raises (the original leaked the handle on error).
            with open(fn, 'wt') as f:
                f.write('\n')


# Mapping of zip-safety verdict -> marker filename stored in EGG-INFO.
safety_flags = {
    True: 'zip-safe',
    False: 'not-zip-safe',
}
|
||||
|
||||
|
||||
def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff.

    Reads the marshalled code object out of the ``.pyc``/``.pyo`` file
    ``base/name`` and reports False (unsafe) when it references
    ``__file__``/``__path__`` or filesystem-dependent ``inspect`` helpers.
    Stub loaders generated for extension modules are always considered safe.
    """

    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1 :].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if sys.version_info < (3, 7):
        skip = 12  # skip magic & date & file size
    else:
        skip = 16  # skip magic & reserved? & date & file size
    # Use a context manager so the file is closed even if marshal fails.
    with open(filename, 'rb') as f:
        f.read(skip)
        code = marshal.load(f)
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        for bad in [
            'getsource',
            'getabsfile',
            'getsourcefile',
            # Bug fix: the original wrote `'getfile' 'getsourcelines',` —
            # a missing comma concatenated them into the bogus symbol
            # 'getfilegetsourcelines', so neither name was ever detected.
            'getfile',
            'getsourcelines',
            'findsource',
            'getcomments',
            'getframeinfo',
            'getinnerframes',
            'getouterframes',
            'stack',
            'trace',
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe


def iter_symbols(code):
    """Yield names and strings used by `code` and its nested code objects"""
    for name in code.co_names:
        yield name
    for const in code.co_consts:
        if isinstance(const, str):
            yield const
        elif isinstance(const, CodeType):
            # Recurse into nested functions/classes/comprehensions.
            for name in iter_symbols(const):
                yield name
|
||||
|
||||
|
||||
def can_scan():
    """Return True when compiled-bytecode scanning works on this platform."""
    scannable = not sys.platform.startswith('java') and sys.platform != 'cli'
    if scannable:
        # CPython, PyPy, etc.
        return True
    # Jython / IronPython: no CPython-format bytecode to inspect.
    log.warn("Unable to analyze compiled code on this platform.")
    log.warn(
        "Please ask the author to include a 'zip_safe'"
        " setting (either True or False) in the package's setup.py"
    )
|
||||
|
||||
|
||||
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
# (see bdist_egg.call_command, which defaults each of these to bdist_dir).
INSTALL_DIRECTORY_ATTRS = ['install_lib', 'install_dir', 'install_data', 'install_base']
|
||||
|
||||
|
||||
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, mode='w'):
    """Create a zip file from all the files under 'base_dir'.

    The archive is written to *zip_filename* using the ``zipfile`` module;
    entries are stored relative to *base_dir* and added in reproducible
    (sorted) order.  When *dry_run* is true, only the log output is
    produced.  Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def add_entries(archive, dirname, names):
        # Add each regular file under `dirname`, relative to base_dir.
        for entry in names:
            full = os.path.normpath(os.path.join(dirname, entry))
            if not os.path.isfile(full):
                continue
            relative = full[len(base_dir) + 1 :]
            if not dry_run:
                archive.write(full, relative)
            log.debug("adding '%s'", relative)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if dry_run:
        for dirname, dirs, files in sorted_walk(base_dir):
            add_entries(None, dirname, files)
    else:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        for dirname, dirs, files in sorted_walk(base_dir):
            add_entries(z, dirname, files)
        z.close()
    return zip_filename
|
|
@ -0,0 +1,40 @@
|
|||
import distutils.command.bdist_rpm as orig
|
||||
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
|
||||
class bdist_rpm(orig.bdist_rpm):
    """
    Override the default bdist_rpm behavior to do the following:

    1. Run egg_info to ensure the name and version are properly calculated.
    2. Always run 'install' using --single-version-externally-managed to
       disable eggs in RPM distributions.
    """

    def run(self):
        # Warn users that this command is scheduled for removal.
        SetuptoolsDeprecationWarning.emit(
            "Deprecated command",
            """
            bdist_rpm is deprecated and will be removed in a future version.
            Use bdist_wheel (wheel packages) instead.
            """,
            see_url="https://github.com/pypa/setuptools/issues/1988",
            due_date=(2023, 10, 30),  # Deprecation introduced in 22 Oct 2021.
        )

        # ensure distro name is up-to-date
        self.run_command('egg_info')

        orig.bdist_rpm.run(self)

    def _make_spec_file(self):
        # Rewrite the distutils-generated spec file so that installation is
        # single-version (no egg) and %setup unpacks the unmangled version.
        spec = orig.bdist_rpm._make_spec_file(self)
        spec = [
            line.replace(
                "setup.py install ",
                "setup.py install --single-version-externally-managed ",
            ).replace("%setup", "%setup -n %{name}-%{unmangled_version}")
            for line in spec
        ]
        return spec
|
|
@ -0,0 +1,149 @@
|
|||
import sys
|
||||
from typing import TYPE_CHECKING, List, Dict
|
||||
from distutils.command.build import build as _build
|
||||
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
if sys.version_info >= (3, 8):
    # Protocol is in the stdlib typing module since Python 3.8.
    from typing import Protocol
elif TYPE_CHECKING:
    # During static type checking on older Pythons, use typing_extensions.
    from typing_extensions import Protocol
else:
    # Runtime fallback on < 3.8: a plain ABC keeps the class statement
    # below valid (no structural typing support, but none is needed).
    from abc import ABC as Protocol


# Subcommands distutils' `build` command ships with; anything beyond these
# indicates someone extended distutils' build directly (see build class).
_ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
|
||||
|
||||
|
||||
class build(_build):
    """setuptools' drop-in replacement for distutils' ``build`` command."""

    # copy to avoid sharing the object with parent class
    sub_commands = _build.sub_commands[:]

    def get_sub_commands(self):
        """Return the subcommands to run, warning when third parties extended
        distutils' ``build`` directly instead of setuptools'."""
        subcommands = {cmd[0] for cmd in _build.sub_commands}
        if subcommands - _ORIGINAL_SUBCOMMANDS:
            SetuptoolsDeprecationWarning.emit(
                "Direct usage of `distutils` commands",
                """
                It seems that you are using `distutils.command.build` to add
                new subcommands. Using `distutils` directly is considered deprecated,
                please use `setuptools.command.build`.
                """,
                due_date=(2023, 12, 13),  # Warning introduced in 13 Jun 2022.
                see_url="https://peps.python.org/pep-0632/",
            )
            # Mirror the externally-extended distutils list so the added
            # subcommands still run.
            self.sub_commands = _build.sub_commands
        return super().get_sub_commands()
|
||||
|
||||
|
||||
class SubCommand(Protocol):
    """In order to support editable installations (see :pep:`660`) all
    build subcommands **SHOULD** implement this protocol. They also **MUST** inherit
    from ``setuptools.Command``.

    When creating an :pep:`editable wheel <660>`, ``setuptools`` will try to evaluate
    custom ``build`` subcommands using the following procedure:

    1. ``setuptools`` will set the ``editable_mode`` attribute to ``True``
    2. ``setuptools`` will execute the ``run()`` command.

       .. important::
          Subcommands **SHOULD** take advantage of ``editable_mode=True`` to adequate
          its behaviour or perform optimisations.

          For example, if a subcommand doesn't need to generate an extra file and
          all it does is to copy a source file into the build directory,
          ``run()`` **SHOULD** simply "early return".

          Similarly, if the subcommand creates files that would be placed alongside
          Python files in the final distribution, during an editable install
          the command **SHOULD** generate these files "in place" (i.e. write them to
          the original source directory, instead of using the build directory).
          Note that ``get_output_mapping()`` should reflect that and include mappings
          for "in place" builds accordingly.

    3. ``setuptools`` use any knowledge it can derive from the return values of
       ``get_outputs()`` and ``get_output_mapping()`` to create an editable wheel.
       When relevant ``setuptools`` **MAY** attempt to use file links based on the value
       of ``get_output_mapping()``. Alternatively, ``setuptools`` **MAY** attempt to use
       :doc:`import hooks <python:reference/import>` to redirect any attempt to import
       to the directory with the original source code and other files built in place.

    Please note that custom sub-commands **SHOULD NOT** rely on ``run()`` being
    executed (or not) to provide correct return values for ``get_outputs()``,
    ``get_output_mapping()`` or ``get_source_files()``. The ``get_*`` methods should
    work independently of ``run()``.
    """

    editable_mode: bool = False
    """Boolean flag that will be set to ``True`` when setuptools is used for an
    editable installation (see :pep:`660`).
    Implementations **SHOULD** explicitly set the default value of this attribute to
    ``False``.
    When subcommands run, they can use this flag to perform optimizations or change
    their behaviour accordingly.
    """

    build_lib: str
    """String representing the directory where the build artifacts should be stored,
    e.g. ``build/lib``.
    For example, if a distribution wants to provide a Python module named ``pkg.mod``,
    then a corresponding file should be written to ``{build_lib}/package/module.py``.
    A way of thinking about this is that the files saved under ``build_lib``
    would be eventually copied to one of the directories in :obj:`site.PREFIXES`
    upon installation.

    A command that produces platform-independent files (e.g. compiling text templates
    into Python functions), **CAN** initialize ``build_lib`` by copying its value from
    the ``build_py`` command. On the other hand, a command that produces
    platform-specific files **CAN** initialize ``build_lib`` by copying its value from
    the ``build_ext`` command. In general this is done inside the ``finalize_options``
    method with the help of the ``set_undefined_options`` command::

        def finalize_options(self):
            self.set_undefined_options("build_py", ("build_lib", "build_lib"))
            ...
    """

    def initialize_options(self):
        """(Required by the original :class:`setuptools.Command` interface)"""

    def finalize_options(self):
        """(Required by the original :class:`setuptools.Command` interface)"""

    def run(self):
        """(Required by the original :class:`setuptools.Command` interface)"""

    def get_source_files(self) -> List[str]:
        """
        Return a list of all files that are used by the command to create the expected
        outputs.
        For example, if your build command transpiles Java files into Python, you should
        list here all the Java files.
        The primary purpose of this function is to help populating the ``sdist``
        with all the files necessary to build the distribution.
        All files should be strings relative to the project root directory.
        """

    def get_outputs(self) -> List[str]:
        """
        Return a list of files intended for distribution as they would have been
        produced by the build.
        These files should be strings in the form of
        ``"{build_lib}/destination/file/path"``.

        .. note::
           The return value of ``get_output()`` should include all files used as keys
           in ``get_output_mapping()`` plus files that are generated during the build
           and don't correspond to any source file already present in the project.
        """

    def get_output_mapping(self) -> Dict[str, str]:
        """
        Return a mapping between destination files as they would be produced by the
        build (dict keys) into the respective existing (source) files (dict values).
        Existing (source) files should be represented as strings relative to the project
        root directory.
        Destination files should be strings in the form of
        ``"{build_lib}/destination/file/path"``.
        """
|
|
@ -0,0 +1,99 @@
|
|||
import distutils.command.build_clib as orig
|
||||
from distutils.errors import DistutilsSetupError
|
||||
from distutils import log
|
||||
from setuptools.dep_util import newer_pairwise_group
|
||||
|
||||
|
||||
class build_clib(orig.build_clib):
    """
    Override the default build_clib behaviour to do the following:

    1. Implement a rudimentary timestamp-based dependency system
       so 'compile()' doesn't run every time.
    2. Add more keys to the 'build_info' dictionary:
        * obj_deps - specify dependencies for each object compiled.
                     this should be a dictionary mapping a key
                     with the source filename to a list of
                     dependencies. Use an empty string for global
                     dependencies.
        * cflags   - specify a list of additional flags to pass to
                     the compiler.
    """

    def build_libraries(self, libraries):
        """Compile and archive each ``(lib_name, build_info)`` pair,
        recompiling only when a dependency is newer than its object file."""
        for lib_name, build_info in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name
                )
            # Sort for a deterministic compile order.
            sources = sorted(list(sources))

            log.info("building '%s' library", lib_name)

            # Make sure everything is the correct type.
            # obj_deps should be a dictionary of keys as sources
            # and a list/tuple of files that are its dependencies.
            obj_deps = build_info.get('obj_deps', dict())
            if not isinstance(obj_deps, dict):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name
                )
            dependencies = []

            # Get the global dependencies that are specified by the '' key.
            # These will go into every source's dependency list.
            global_deps = obj_deps.get('', list())
            if not isinstance(global_deps, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name
                )

            # Build the list to be used by newer_pairwise_group
            # each source will be auto-added to its dependencies.
            for source in sources:
                src_deps = [source]
                src_deps.extend(global_deps)
                extra_deps = obj_deps.get(source, list())
                if not isinstance(extra_deps, (list, tuple)):
                    raise DistutilsSetupError(
                        "in 'libraries' option (library '%s'), "
                        "'obj_deps' must be a dictionary of "
                        "type 'source: list'" % lib_name
                    )
                src_deps.extend(extra_deps)
                dependencies.append(src_deps)

            expected_objects = self.compiler.object_filenames(
                sources,
                output_dir=self.build_temp,
            )

            # Skip compilation entirely when no object is out of date.
            if newer_pairwise_group(dependencies, expected_objects) != ([], []):
                # First, compile the source code to object files in the library
                # directory. (This should probably change to putting object
                # files in a temporary build directory.)
                macros = build_info.get('macros')
                include_dirs = build_info.get('include_dirs')
                cflags = build_info.get('cflags')
                self.compiler.compile(
                    sources,
                    output_dir=self.build_temp,
                    macros=macros,
                    include_dirs=include_dirs,
                    extra_postargs=cflags,
                    debug=self.debug,
                )

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive. Whatever.)
            self.compiler.create_static_lib(
                expected_objects, lib_name, output_dir=self.build_clib, debug=self.debug
            )
|
|
@ -0,0 +1,456 @@
|
|||
import os
|
||||
import sys
|
||||
import itertools
|
||||
from importlib.machinery import EXTENSION_SUFFIXES
|
||||
from importlib.util import cache_from_source as _compiled_file_name
|
||||
from typing import Dict, Iterator, List, Tuple
|
||||
from pathlib import Path
|
||||
|
||||
from distutils.command.build_ext import build_ext as _du_build_ext
|
||||
from distutils.ccompiler import new_compiler
|
||||
from distutils.sysconfig import customize_compiler, get_config_var
|
||||
from distutils import log
|
||||
|
||||
from setuptools.errors import BaseError
|
||||
from setuptools.extension import Extension, Library
|
||||
|
||||
try:
    # Attempt to use Cython for building extensions, if available
    from Cython.Distutils.build_ext import build_ext as _build_ext

    # Additionally, assert that the compiler module will load
    # also. Ref #1229.
    __import__('Cython.Compiler.Main')
except ImportError:
    # Cython not installed: fall back to plain distutils build_ext.
    _build_ext = _du_build_ext

# make sure _config_vars is initialized
# (distutils populates the cache lazily on first lookup)
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS  # noqa
|
||||
|
||||
|
||||
def _customize_compiler_for_shlib(compiler):
    """Apply platform customization to *compiler* for building shared libs.

    On macOS the pyconfig.h build variables are patched (LDSHARED, CCSHARED,
    SO) before delegating to distutils' ``customize_compiler``, then restored;
    on every other platform this is a plain pass-through.  The patching is
    process-global (``_CONFIG_VARS`` is a module-level dict shared with
    distutils), hence the careful copy/restore in ``try``/``finally``.
    """
    if sys.platform == "darwin":
        # building .dylib requires additional compiler flags on OSX; here we
        # temporarily substitute the pyconfig.h variables so that distutils'
        # 'customize_compiler' uses them before we build the shared libraries.
        tmp = _CONFIG_VARS.copy()
        try:
            # XXX Help! I don't have any idea whether these are right...
            _CONFIG_VARS[
                'LDSHARED'
            ] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
            _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
            _CONFIG_VARS['SO'] = ".dylib"
            customize_compiler(compiler)
        finally:
            # Restore the shared config-vars dict exactly as it was.
            _CONFIG_VARS.clear()
            _CONFIG_VARS.update(tmp)
    else:
        customize_compiler(compiler)
|
||||
|
||||
|
||||
# Platform capability flags used by the stub-loader machinery below.
# have_rtld: the (Python 2-era) `dl` module exposes RTLD_NOW, so stub
#            loaders can tweak dlopen flags.
# use_stubs: whether extensions linking to in-package shared libs need a
#            Python stub loader.
# libtype:   how Library objects are linked ('shared' here; flipped to
#            'static' further down when stubs are unavailable).
have_rtld = False
use_stubs = False
libtype = 'shared'

if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        import dl

        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
    except ImportError:
        # `dl` is unavailable on modern Pythons; keep the defaults.
        pass
|
||||
|
||||
|
||||
def if_dl(s):
    """Return *s* when RTLD support was detected, else the empty string."""
    if have_rtld:
        return s
    return ''
|
||||
|
||||
|
||||
def get_abi3_suffix():
    """Return the file extension for an abi3-compliant Extension()"""
    # '.abi3' marks the limited-API suffix on Unix; '.pyd' is the
    # Windows equivalent.  Falls back to None when neither is present.
    candidates = (
        candidate
        for candidate in EXTENSION_SUFFIXES
        if '.abi3' in candidate or candidate == '.pyd'
    )
    return next(candidates, None)
|
||||
|
||||
|
||||
class build_ext(_build_ext):
    """setuptools' extension builder.

    Extends distutils/Cython ``build_ext`` with: shared-library (``Library``)
    support via a dedicated compiler, stub loaders for extensions that link
    to in-package dynamic libs, inplace/editable-mode copying, and
    output-mapping APIs used by editable installs.
    """

    # Set by the `editable_wheel` machinery; forces `inplace` (see
    # finalize_options).
    editable_mode: bool = False
    inplace: bool = False

    def run(self):
        """Build extensions in build directory, then copy if --inplace"""
        # Temporarily disable inplace so the base class builds into
        # build_lib; the copy-back is done explicitly afterwards.
        old_inplace, self.inplace = self.inplace, 0
        _build_ext.run(self)
        self.inplace = old_inplace
        if old_inplace:
            self.copy_extensions_to_source()

    def _get_inplace_equivalent(self, build_py, ext: Extension) -> Tuple[str, str]:
        """Return ``(inplace_file, regular_file)`` paths for *ext*."""
        fullname = self.get_ext_fullname(ext.name)
        filename = self.get_ext_filename(fullname)
        modpath = fullname.split('.')
        package = '.'.join(modpath[:-1])
        package_dir = build_py.get_package_dir(package)
        inplace_file = os.path.join(package_dir, os.path.basename(filename))
        regular_file = os.path.join(self.build_lib, filename)
        return (inplace_file, regular_file)

    def copy_extensions_to_source(self):
        """Copy built artifacts from build_lib back into the source tree."""
        build_py = self.get_finalized_command('build_py')
        for ext in self.extensions:
            inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)

            # Always copy, even if source is older than destination, to ensure
            # that the right extensions for the current Python/platform are
            # used.
            if os.path.exists(regular_file) or not ext.optional:
                self.copy_file(regular_file, inplace_file, level=self.verbose)

            if ext._needs_stub:
                inplace_stub = self._get_equivalent_stub(ext, inplace_file)
                self._write_stub_file(inplace_stub, ext, compile=True)
                # Always compile stub and remove the original (leave the cache behind)
                # (this behaviour was observed in previous iterations of the code)

    def _get_equivalent_stub(self, ext: Extension, output_file: str) -> str:
        """Return the ``.py`` stub path that sits next to *output_file*."""
        dir_ = os.path.dirname(output_file)
        _, _, name = ext.name.rpartition(".")
        return f"{os.path.join(dir_, name)}.py"

    def _get_output_mapping(self) -> Iterator[Tuple[str, str]]:
        """Yield ``(build_artifact, inplace_destination)`` pairs.

        Only meaningful in inplace mode; yields nothing otherwise.
        """
        if not self.inplace:
            return

        build_py = self.get_finalized_command('build_py')
        opt = self.get_finalized_command('install_lib').optimize or ""

        for ext in self.extensions:
            inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
            yield (regular_file, inplace_file)

            if ext._needs_stub:
                # This version of `build_ext` always builds artifacts in another dir,
                # when "inplace=True" is given it just copies them back.
                # This is done in the `copy_extensions_to_source` function, which
                # always compile stub files via `_compile_and_remove_stub`.
                # At the end of the process, a `.pyc` stub file is created without the
                # corresponding `.py`.

                inplace_stub = self._get_equivalent_stub(ext, inplace_file)
                regular_stub = self._get_equivalent_stub(ext, regular_file)
                inplace_cache = _compiled_file_name(inplace_stub, optimization=opt)
                output_cache = _compiled_file_name(regular_stub, optimization=opt)
                yield (output_cache, inplace_cache)

    def get_ext_filename(self, fullname):
        """Compute the output filename for *fullname*.

        Honors the SETUPTOOLS_EXT_SUFFIX env override, the abi3 limited-API
        suffix, Library (shared-lib) naming, and the ``dl-`` prefix used by
        stub-loaded extensions.
        """
        so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
        if so_ext:
            filename = os.path.join(*fullname.split('.')) + so_ext
        else:
            filename = _build_ext.get_ext_filename(self, fullname)
            so_ext = get_config_var('EXT_SUFFIX')

        if fullname in self.ext_map:
            ext = self.ext_map[fullname]
            use_abi3 = getattr(ext, 'py_limited_api') and get_abi3_suffix()
            if use_abi3:
                # Swap the full EXT_SUFFIX for the stable-ABI suffix.
                filename = filename[: -len(so_ext)]
                so_ext = get_abi3_suffix()
                filename = filename + so_ext
            if isinstance(ext, Library):
                fn, ext = os.path.splitext(filename)
                return self.shlib_compiler.library_filename(fn, libtype)
            elif use_stubs and ext._links_to_dynamic:
                d, fn = os.path.split(filename)
                return os.path.join(d, 'dl-' + fn)
        return filename

    def initialize_options(self):
        """Reset command state before option parsing."""
        _build_ext.initialize_options(self)
        self.shlib_compiler = None
        self.shlibs = []
        self.ext_map = {}
        self.editable_mode = False

    def finalize_options(self):
        """Resolve extensions, set up shared-lib support and stub metadata."""
        _build_ext.finalize_options(self)
        self.extensions = self.extensions or []
        self.check_extensions_list(self.extensions)
        self.shlibs = [ext for ext in self.extensions if isinstance(ext, Library)]
        if self.shlibs:
            self.setup_shlib_compiler()
        for ext in self.extensions:
            ext._full_name = self.get_ext_fullname(ext.name)
        for ext in self.extensions:
            fullname = ext._full_name
            self.ext_map[fullname] = ext

            # distutils 3.1 will also ask for module names
            # XXX what to do with conflicts?
            self.ext_map[fullname.split('.')[-1]] = ext

            ltd = self.shlibs and self.links_to_dynamic(ext) or False
            ns = ltd and use_stubs and not isinstance(ext, Library)
            ext._links_to_dynamic = ltd
            ext._needs_stub = ns
            filename = ext._file_name = self.get_ext_filename(fullname)
            libdir = os.path.dirname(os.path.join(self.build_lib, filename))
            if ltd and libdir not in ext.library_dirs:
                ext.library_dirs.append(libdir)
            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
                ext.runtime_library_dirs.append(os.curdir)

        if self.editable_mode:
            self.inplace = True

    def setup_shlib_compiler(self):
        """Create and configure the compiler used for Library objects."""
        compiler = self.shlib_compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        _customize_compiler_for_shlib(compiler)

        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for name, value in self.define:
                compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)
        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)

        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)

    def get_export_symbols(self, ext):
        """Libraries export their declared symbols; others use the default."""
        if isinstance(ext, Library):
            return ext.export_symbols
        return _build_ext.get_export_symbols(self, ext)

    def build_extension(self, ext):
        """Build one extension, swapping in the shlib compiler for Libraries."""
        ext._convert_pyx_sources_to_lang()
        _compiler = self.compiler
        try:
            if isinstance(ext, Library):
                self.compiler = self.shlib_compiler
            _build_ext.build_extension(self, ext)
            if ext._needs_stub:
                build_lib = self.get_finalized_command('build_py').build_lib
                self.write_stub(build_lib, ext)
        finally:
            # Always restore the regular compiler, even on failure.
            self.compiler = _compiler

    def links_to_dynamic(self, ext):
        """Return true if 'ext' links to a dynamic lib in the same package"""
        # XXX this should check to ensure the lib is actually being built
        # XXX as dynamic, and not just using a locally-found version or a
        # XXX static-compiled version
        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
        pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
        return any(pkg + libname in libnames for libname in ext.libraries)

    def get_source_files(self) -> List[str]:
        """Base sources plus in-project ``depends`` entries (for sdists)."""
        return [*_build_ext.get_source_files(self), *self._get_internal_depends()]

    def _get_internal_depends(self) -> Iterator[str]:
        """Yield ``ext.depends`` that are contained by the project directory"""
        project_root = Path(self.distribution.src_root or os.curdir).resolve()
        depends = (dep for ext in self.extensions for dep in ext.depends)

        def skip(orig_path: str, reason: str) -> None:
            # Log (not fail) when a dependency cannot be included.
            log.info(
                "dependency %s won't be automatically "
                "included in the manifest: the path %s",
                orig_path,
                reason,
            )

        for dep in depends:
            path = Path(dep)

            if path.is_absolute():
                skip(dep, "must be relative")
                continue

            if ".." in path.parts:
                skip(dep, "can't have `..` segments")
                continue

            try:
                resolved = (project_root / path).resolve(strict=True)
            except OSError:
                skip(dep, "doesn't exist")
                continue

            try:
                resolved.relative_to(project_root)
            except ValueError:
                skip(dep, "must be inside the project root")
                continue

            yield path.as_posix()

    def get_outputs(self) -> List[str]:
        """See :class:`setuptools.commands.build.SubCommand`"""
        if self.inplace:
            return list(self.get_output_mapping().keys())
        return sorted(_build_ext.get_outputs(self) + self.__get_stubs_outputs())

    def get_output_mapping(self) -> Dict[str, str]:
        """See :class:`setuptools.commands.build.SubCommand`"""
        mapping = self._get_output_mapping()
        return dict(sorted(mapping, key=lambda x: x[0]))

    def __get_stubs_outputs(self):
        """Return every stub-related file this command will emit."""
        # assemble the base name for each extension that needs a stub
        ns_ext_bases = (
            os.path.join(self.build_lib, *ext._full_name.split('.'))
            for ext in self.extensions
            if ext._needs_stub
        )
        # pair each base with the extension
        pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
        return list(base + fnext for base, fnext in pairs)

    def __get_output_extensions(self):
        """Yield the file extensions stubs are written/compiled to."""
        yield '.py'
        yield '.pyc'
        if self.get_finalized_command('build_py').optimize:
            yield '.pyo'

    def write_stub(self, output_dir, ext, compile=False):
        """Write *ext*'s stub loader under *output_dir*."""
        stub_file = os.path.join(output_dir, *ext._full_name.split('.')) + '.py'
        self._write_stub_file(stub_file, ext, compile)

    def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
        """Write a ``__bootstrap__`` loader module that imports the real
        binary extension at runtime; optionally byte-compile and remove it."""
        log.info("writing stub loader for %s to %s", ext._full_name, stub_file)
        if compile and os.path.exists(stub_file):
            raise BaseError(stub_file + " already exists! Please delete.")
        if not self.dry_run:
            f = open(stub_file, 'w')
            f.write(
                '\n'.join(
                    [
                        "def __bootstrap__():",
                        "   global __bootstrap__, __file__, __loader__",
                        "   import sys, os, pkg_resources, importlib.util"
                        + if_dl(", dl"),
                        "   __file__ = pkg_resources.resource_filename"
                        "(__name__,%r)" % os.path.basename(ext._file_name),
                        "   del __bootstrap__",
                        "   if '__loader__' in globals():",
                        "       del __loader__",
                        if_dl("   old_flags = sys.getdlopenflags()"),
                        "   old_dir = os.getcwd()",
                        "   try:",
                        "     os.chdir(os.path.dirname(__file__))",
                        if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
                        "     spec = importlib.util.spec_from_file_location(",
                        "                __name__, __file__)",
                        "     mod = importlib.util.module_from_spec(spec)",
                        "     spec.loader.exec_module(mod)",
                        "   finally:",
                        if_dl("     sys.setdlopenflags(old_flags)"),
                        "     os.chdir(old_dir)",
                        "__bootstrap__()",
                        "",  # terminal \n
                    ]
                )
            )
            f.close()
        if compile:
            self._compile_and_remove_stub(stub_file)

    def _compile_and_remove_stub(self, stub_file: str):
        """Byte-compile *stub_file* (and its optimized form), then delete it."""
        from distutils.util import byte_compile

        byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run)
        optimize = self.get_finalized_command('install_lib').optimize
        if optimize > 0:
            byte_compile(
                [stub_file], optimize=optimize, force=True, dry_run=self.dry_run
            )
        if os.path.exists(stub_file) and not self.dry_run:
            os.unlink(stub_file)
|
||||
|
||||
|
||||
# `link_shared_object` is bound onto the shlib compiler instance in
# `build_ext.setup_shlib_compiler` (via `__get__`).  Which variant is
# defined depends on whether stub loaders are usable on this platform.
if use_stubs or os.name == 'nt':
    # Build shared libraries
    #
    def link_shared_object(
        self,
        objects,
        output_libname,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        # Straight positional pass-through to CCompiler.link with the
        # SHARED_LIBRARY target type.
        self.link(
            self.SHARED_LIBRARY,
            objects,
            output_libname,
            output_dir,
            libraries,
            library_dirs,
            runtime_library_dirs,
            export_symbols,
            debug,
            extra_preargs,
            extra_postargs,
            build_temp,
            target_lang,
        )

else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(
        self,
        objects,
        output_libname,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        # libraries=None, library_dirs=None, runtime_library_dirs=None,
        # export_symbols=None, extra_preargs=None, extra_postargs=None,
        # build_temp=None

        assert output_dir is None  # distutils build_ext doesn't pass this
        output_dir, filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(objects, basename, output_dir, debug, target_lang)
|
|
@ -0,0 +1,389 @@
|
|||
from functools import partial
|
||||
from glob import glob
|
||||
from distutils.util import convert_path
|
||||
import distutils.command.build_py as orig
|
||||
import os
|
||||
import fnmatch
|
||||
import textwrap
|
||||
import io
|
||||
import distutils.errors
|
||||
import itertools
|
||||
import stat
|
||||
from pathlib import Path
|
||||
from typing import Dict, Iterable, Iterator, List, Optional, Tuple
|
||||
|
||||
from ..extern.more_itertools import unique_everseen
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
|
||||
def make_writable(target):
    """Add the owner-write permission bit to *target*, keeping other bits."""
    current_mode = os.stat(target).st_mode
    os.chmod(target, current_mode | stat.S_IWRITE)
|
||||
|
||||
|
||||
class build_py(orig.build_py):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """

    # Set by editable-install machinery; suppresses the actual build in run().
    editable_mode: bool = False
    existing_egg_info_dir: Optional[str] = None  #: Private API, internal use only.

    def finalize_options(self):
        """Finish option setup and reset the lazy data-file cache."""
        orig.build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = self.distribution.exclude_package_data or {}
        if 'data_files' in self.__dict__:
            # Drop any cached value so __getattr__ recomputes it lazily.
            del self.__dict__['data_files']
        self.__updated_files = []

    def copy_file(
        self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
    ):
        # Overwrite base class to allow using links
        if link:
            # Hard/symlinking requires fully resolved paths.
            infile = str(Path(infile).resolve())
            outfile = str(Path(outfile).resolve())
        return super().copy_file(
            infile, outfile, preserve_mode, preserve_times, link, level
        )

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not (self.py_modules or self.packages) or self.editable_mode:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self, attr):
        "lazily compute data files"
        if attr == 'data_files':
            self.data_files = self._get_data_files()
            return self.data_files
        return orig.build_py.__getattr__(self, attr)

    def build_module(self, module, module_file, package):
        """Build one module, recording it when the copy actually happened."""
        outfile, copied = orig.build_py.build_module(self, module, module_file, package)
        if copied:
            self.__updated_files.append(outfile)
        return outfile, copied

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def get_data_files_without_manifest(self):
        """
        Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
        but without triggering any attempt to analyze or build the manifest.
        """
        # Prevent eventual errors from unset `manifest_files`
        # (that would otherwise be set by `analyze_manifest`)
        self.__dict__.setdefault('manifest_files', {})
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def _get_pkg_data_files(self, package):
        """Build one '(package, src_dir, build_dir, filenames)' tuple."""
        # Locate package source directory
        src_dir = self.get_package_dir(package)

        # Compute package build directory
        build_dir = os.path.join(*([self.build_lib] + package.split('.')))

        # Strip directory from globbed filenames
        filenames = [
            os.path.relpath(file, src_dir)
            for file in self.find_data_files(package, src_dir)
        ]
        return package, src_dir, build_dir, filenames

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        patterns = self._get_platform_patterns(
            self.package_data,
            package,
            src_dir,
        )
        globs_expanded = map(partial(glob, recursive=True), patterns)
        # flatten the expanded globs into an iterable of matches
        globs_matches = itertools.chain.from_iterable(globs_expanded)
        glob_files = filter(os.path.isfile, globs_matches)
        files = itertools.chain(
            self.manifest_files.get(package, []),
            glob_files,
        )
        return self.exclude_data_files(package, src_dir, files)

    def get_outputs(self, include_bytecode=1) -> List[str]:
        """See :class:`setuptools.commands.build.SubCommand`"""
        if self.editable_mode:
            return list(self.get_output_mapping().keys())
        return super().get_outputs(include_bytecode)

    def get_output_mapping(self) -> Dict[str, str]:
        """See :class:`setuptools.commands.build.SubCommand`"""
        mapping = itertools.chain(
            self._get_package_data_output_mapping(),
            self._get_module_mapping(),
        )
        return dict(sorted(mapping, key=lambda x: x[0]))

    def _get_module_mapping(self) -> Iterator[Tuple[str, str]]:
        """Iterate over all modules producing (dest, src) pairs."""
        for package, module, module_file in self.find_all_modules():
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            yield (filename, module_file)

    def _get_package_data_output_mapping(self) -> Iterator[Tuple[str, str]]:
        """Iterate over package data producing (dest, src) pairs."""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                srcfile = os.path.join(src_dir, filename)
                yield (target, srcfile)

    def build_package_data(self):
        """Copy data files into build directory"""
        for target, srcfile in self._get_package_data_output_mapping():
            self.mkpath(os.path.dirname(target))
            _outf, _copied = self.copy_file(srcfile, target)
            # Copied files may inherit a read-only bit from the source.
            make_writable(target)

    def analyze_manifest(self):
        """Populate ``manifest_files`` with per-package data files found via
        the egg-info SOURCES.txt (or a pre-existing egg-info dir)."""
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        if (
            getattr(self, 'existing_egg_info_dir', None)
            and Path(self.existing_egg_info_dir, "SOURCES.txt").exists()
        ):
            # Reuse a previously generated manifest instead of re-running
            # egg_info.
            egg_info_dir = self.existing_egg_info_dir
            manifest = Path(egg_info_dir, "SOURCES.txt")
            files = manifest.read_text(encoding="utf-8").splitlines()
        else:
            self.run_command('egg_info')
            ei_cmd = self.get_finalized_command('egg_info')
            egg_info_dir = ei_cmd.egg_info
            files = ei_cmd.filelist.files

        check = _IncludePackageDataAbuse()
        for path in self._filter_build_files(files, egg_info_dir):
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            # Walk up the directory tree until we hit a known package dir.
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if f == oldf:
                    if check.is_module(f):
                        continue  # it's a module, not data
                else:
                    importable = check.importable_subpackage(src_dirs[d], f)
                    if importable:
                        check.warn(importable)
                mf.setdefault(src_dirs[d], []).append(path)

    def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[str]:
        """
        ``build_meta`` may try to create egg_info outside of the project directory,
        and this can be problematic for certain plugins (reported in issue #3500).

        Extensions might also include between their sources files created on the
        ``build_lib`` and ``build_temp`` directories.

        This function should filter this case of invalid files out.
        """
        build = self.get_finalized_command("build")
        build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base)
        norm_dirs = [os.path.normpath(p) for p in build_dirs if p]

        for file in files:
            norm_path = os.path.normpath(file)
            if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs):
                yield file

    def get_data_files(self):
        pass  # Lazily compute data files in _get_data_files() function.

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            # Memoized: each package is checked at most once.
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with io.open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                "Namespace package problem: %s is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n"' % (package,)
            )
        return init_py

    def initialize_options(self):
        """Reset command state before option parsing."""
        self.packages_checked = {}
        orig.build_py.initialize_options(self)
        self.editable_mode = False
        self.existing_egg_info_dir = None

    def get_package_dir(self, package):
        """Resolve *package*'s source dir, honoring ``src_root`` if set."""
        res = orig.build_py.get_package_dir(self, package)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (fn for fn in files if fn not in bad)
        # ditch dupes
        return list(unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )
|
||||
|
||||
|
||||
def assert_relative(path):
    """Return *path* unchanged when it is relative; abort the build otherwise.

    Raises ``DistutilsSetupError`` with an explanatory message for any
    absolute path, since setup() arguments must be project-relative.
    """
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError

    template = textwrap.dedent(
        """
        Error: setup script specifies an absolute path:

            %s

        setup() arguments must *always* be /-separated paths relative to the
        setup.py directory, *never* absolute paths.
        """
    ).lstrip()
    raise DistutilsSetupError(template % path)
|
||||
|
||||
|
||||
class _IncludePackageDataAbuse:
    """Inform users that package or module is included as 'data file'"""

    class _Warning(SetuptoolsDeprecationWarning):
        # NOTE: _SUMMARY/_DETAILS are runtime template strings consumed by
        # the SetuptoolsDeprecationWarning machinery ({importable!r} is
        # filled in by .emit()); do not edit their wording casually.
        _SUMMARY = """
        Package {importable!r} is absent from the `packages` configuration.
        """

        _DETAILS = """
        ############################
        # Package would be ignored #
        ############################
        Python recognizes {importable!r} as an importable package[^1],
        but it is absent from setuptools' `packages` configuration.

        This leads to an ambiguous overall configuration. If you want to distribute this
        package, please make sure that {importable!r} is explicitly added
        to the `packages` configuration field.

        Alternatively, you can also rely on setuptools' discovery methods
        (for example by using `find_namespace_packages(...)`/`find_namespace:`
        instead of `find_packages(...)`/`find:`).

        You can read more about "package discovery" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html

        If you don't want {importable!r} to be distributed and are
        already explicitly excluding {importable!r} via
        `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
        you can try to use `exclude_package_data`, or `include-package-data=False` in
        combination with a more fine grained `package-data` configuration.

        You can read more about "package data files" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/datafiles.html


        [^1]: For Python, any directory (with suitable naming) can be imported,
              even if it does not contain any `.py` files.
              On the other hand, currently there is no concept of package data
              directory, all directories are treated like packages.
        """
        # _DUE_DATE: still not defined as this is particularly controversial.
        # Warning initially introduced in May 2022. See issue #3340 for discussion.

    def __init__(self):
        # Track importable names already reported, so each is warned once.
        self._already_warned = set()

    def is_module(self, file):
        """Return True when *file* looks like a plain Python module name."""
        return file.endswith(".py") and file[: -len(".py")].isidentifier()

    def importable_subpackage(self, parent, file):
        """Return the dotted importable name implied by *file* under *parent*,
        or None when no path component forms a valid identifier."""
        pkg = Path(file).parent
        parts = list(itertools.takewhile(str.isidentifier, pkg.parts))
        if parts:
            return ".".join([parent, *parts])
        return None

    def warn(self, importable):
        """Emit the deprecation warning once per importable name."""
        if importable not in self._already_warned:
            self._Warning.emit(importable=importable)
            self._already_warned.add(importable)
|
|
@ -0,0 +1,188 @@
|
|||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
import os
|
||||
import glob
|
||||
import io
|
||||
|
||||
from setuptools.command.easy_install import easy_install
|
||||
from setuptools import _path
|
||||
from setuptools import namespaces
|
||||
import setuptools
|
||||
|
||||
|
||||
class develop(namespaces.DevelopInstaller, easy_install):
    """Set up package for development.

    Installs an ``.egg-link`` file in the installation directory pointing back
    at the project's source tree, so the package is importable in-place, and
    wires up scripts and (deprecated) namespace packages.
    """

    description = "install package in 'development mode'"

    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]

    boolean_options = easy_install.boolean_options + ['uninstall']

    command_consumes_arguments = False  # override base

    def run(self):
        # ``--uninstall`` reverses a previous development install; otherwise
        # perform (or refresh) the development install.
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
            self.uninstall_namespaces()
        else:
            self.install_for_development()
        self.warn_deprecated_options()

    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.'  # always copy eggs installed in curdir

    def finalize_options(self):
        import pkg_resources

        ei = self.get_finalized_command("egg_info")
        self.args = [ei.egg_name]

        easy_install.finalize_options(self)
        self.expand_basedirs()
        self.expand_dirs()
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))

        egg_link_fn = ei.egg_name + '.egg-link'
        self.egg_link = os.path.join(self.install_dir, egg_link_fn)
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)

        # The resolved egg path must point back at the project's egg base,
        # otherwise the .egg-link would reference the wrong location.
        target = _path.normpath(self.egg_base)
        egg_path = _path.normpath(os.path.join(self.install_dir, self.egg_path))
        if egg_path != target:
            raise DistutilsOptionError(
                "--egg-path must be a relative path from the install"
                " directory to " + target
            )

        # Make a distribution for the package's source
        self.dist = pkg_resources.Distribution(
            target,
            pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
            project_name=ei.egg_name,
        )

        self.setup_path = self._resolve_setup_path(
            self.egg_base,
            self.install_dir,
            self.egg_path,
        )

    @staticmethod
    def _resolve_setup_path(egg_base, install_dir, egg_path):
        """
        Generate a path from egg_base back to '.' where the
        setup script resides and ensure that path points to the
        setup path from $install_dir/$egg_path.
        """
        path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
        if path_to_setup != os.curdir:
            # One "../" per path component walks from egg_base back to '.'
            path_to_setup = '../' * (path_to_setup.count('/') + 1)
        resolved = _path.normpath(os.path.join(install_dir, egg_path, path_to_setup))
        curdir = _path.normpath(os.curdir)
        if resolved != curdir:
            raise DistutilsOptionError(
                "Can't get a consistent path to setup script from"
                " installation directory",
                resolved,
                curdir,
            )
        return path_to_setup

    def install_for_development(self):
        """Build in-place and create the .egg-link / .pth entries."""
        self.run_command('egg_info')

        # Build extensions in-place
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')

        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None

        self.install_namespaces()

        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            with open(self.egg_link, "w") as f:
                f.write(self.egg_path + "\n" + self.setup_path)
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)

    def uninstall_link(self):
        """Remove the .egg-link file and the .pth entry pointing at it."""
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            # FIX: use a context manager so the file handle is released even
            # if reading fails (previously the handle leaked on error).
            with open(self.egg_link) as egg_link_file:
                contents = [line.rstrip() for line in egg_link_file]
            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist)  # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")

    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self, dist)

        # create wrapper scripts in the script dir, pointing to dist.scripts

        # new-style...
        self.install_wrapper_scripts(dist)

        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            with io.open(script_path) as strm:
                script_text = strm.read()
            self.install_script(dist, script_name, script_text, script_path)

    def install_wrapper_scripts(self, dist):
        # Strip the version pin so wrapper scripts keep working across versions.
        dist = VersionlessRequirement(dist)
        return easy_install.install_wrapper_scripts(self, dist)
|
||||
|
||||
|
||||
class VersionlessRequirement:
    """
    Wrap a ``pkg_resources.Distribution`` so that ``as_requirement()``
    yields only the project name, with no version pin, allowing generated
    scripts to keep working across multiple installed versions.

    >>> from pkg_resources import Distribution
    >>> dist = Distribution(project_name='foo', version='1.0')
    >>> str(dist.as_requirement())
    'foo==1.0'
    >>> adapted_dist = VersionlessRequirement(dist)
    >>> str(adapted_dist.as_requirement())
    'foo'
    """

    def __init__(self, dist):
        # Private reference to the wrapped distribution.
        self.__dist = dist

    def __getattr__(self, name):
        # Delegate every attribute not found on this wrapper to the
        # wrapped distribution.
        wrapped = self.__dist
        return getattr(wrapped, name)

    def as_requirement(self):
        # ``project_name`` resolves through ``__getattr__`` above.
        return getattr(self, "project_name")
|
|
@ -0,0 +1,127 @@
|
|||
"""
|
||||
Create a dist_info directory
|
||||
As defined in the wheel specification
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from distutils import log
|
||||
from distutils.core import Command
|
||||
from pathlib import Path
|
||||
|
||||
from .. import _normalization
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
|
||||
class dist_info(Command):
    """
    This command is private and reserved for internal use of setuptools,
    users should rely on ``setuptools.build_meta`` APIs.

    It creates a ``.dist-info`` directory (as defined by the wheel
    specification) from the project's ``.egg-info`` metadata.
    """

    description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create .dist-info directory"

    user_options = [
        (
            'egg-base=',
            'e',
            "directory containing .egg-info directories"
            " (default: top of the source tree)"
            " DEPRECATED: use --output-dir.",
        ),
        (
            'output-dir=',
            'o',
            # FIX: the adjacent literals previously rendered as "will becreated"
            # in ``--help`` output; a separating space was missing.
            "directory inside of which the .dist-info will be"
            " created (default: top of the source tree)",
        ),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
        ('keep-egg-info', None, "*TRANSITIONAL* will be removed in the future"),
    ]

    boolean_options = ['tag-date', 'keep-egg-info']
    negative_opt = {'no-date': 'tag-date'}

    def initialize_options(self):
        """Set default (unset) values for every option."""
        self.egg_base = None
        self.output_dir = None
        self.name = None
        self.dist_info_dir = None
        self.tag_date = None
        self.tag_build = None
        self.keep_egg_info = False

    def finalize_options(self):
        """Resolve the output directory and final ``.dist-info`` dir name,
        propagating version tags to/from the ``egg_info`` sub-command."""
        if self.egg_base:
            msg = "--egg-base is deprecated for dist_info command. Use --output-dir."
            SetuptoolsDeprecationWarning.emit(msg, due_date=(2023, 9, 26))
            # This command is internal to setuptools, therefore it should be safe
            # to remove the deprecated support soon.
            self.output_dir = self.egg_base or self.output_dir

        dist = self.distribution
        project_dir = dist.src_root or os.curdir
        self.output_dir = Path(self.output_dir or project_dir)

        egg_info = self.reinitialize_command("egg_info")
        egg_info.egg_base = str(self.output_dir)

        # Tags flow both ways: explicit options override egg_info; otherwise
        # adopt whatever egg_info already computed.
        if self.tag_date:
            egg_info.tag_date = self.tag_date
        else:
            self.tag_date = egg_info.tag_date

        if self.tag_build:
            egg_info.tag_build = self.tag_build
        else:
            self.tag_build = egg_info.tag_build

        egg_info.finalize_options()
        self.egg_info = egg_info

        name = _normalization.safer_name(dist.get_name())
        version = _normalization.safer_best_effort_version(dist.get_version())
        self.name = f"{name}-{version}"
        self.dist_info_dir = os.path.join(self.output_dir, f"{self.name}.dist-info")

    @contextmanager
    def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool):
        """Context manager: when *requires_bkp*, snapshot *dir_path* and
        restore it after the block runs (even on error); otherwise no-op."""
        if requires_bkp:
            bkp_name = f"{dir_path}.__bkp__"
            _rm(bkp_name, ignore_errors=True)
            _copy(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True)
            try:
                yield
            finally:
                _rm(dir_path, ignore_errors=True)
                shutil.move(bkp_name, dir_path)
        else:
            yield

    def run(self):
        """Generate ``.egg-info`` metadata, then convert it into ``.dist-info``."""
        self.output_dir.mkdir(parents=True, exist_ok=True)
        self.egg_info.run()
        egg_info_dir = self.egg_info.egg_info
        assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created"

        log.info("creating '{}'".format(os.path.abspath(self.dist_info_dir)))
        bdist_wheel = self.get_finalized_command('bdist_wheel')

        # TODO: if bdist_wheel is merged into setuptools, just add "keep_egg_info" there
        with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info):
            bdist_wheel.egg2dist(egg_info_dir, self.dist_info_dir)
|
||||
|
||||
|
||||
def _rm(dir_name, **opts):
|
||||
if os.path.isdir(dir_name):
|
||||
shutil.rmtree(dir_name, **opts)
|
||||
|
||||
|
||||
def _copy(src, dst, **opts):
|
||||
if sys.version_info < (3, 8):
|
||||
opts.pop("dirs_exist_ok", None)
|
||||
shutil.copytree(src, dst, **opts)
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,882 @@
|
|||
"""
|
||||
Create a wheel that, when installed, will make the source package 'editable'
|
||||
(add it to the interpreter's path, including metadata) per PEP 660. Replaces
|
||||
'setup.py develop'.
|
||||
|
||||
.. note::
|
||||
One of the mechanisms briefly mentioned in PEP 660 to implement editable installs is
|
||||
to create a separated directory inside ``build`` and use a .pth file to point to that
|
||||
directory. In the context of this file such directory is referred as
|
||||
*auxiliary build directory* or ``auxiliary_dir``.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import traceback
|
||||
from contextlib import suppress
|
||||
from enum import Enum
|
||||
from inspect import cleandoc
|
||||
from itertools import chain
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
from .. import (
|
||||
Command,
|
||||
_normalization,
|
||||
_path,
|
||||
errors,
|
||||
namespaces,
|
||||
)
|
||||
from ..discovery import find_package_path
|
||||
from ..dist import Distribution
|
||||
from ..warnings import (
|
||||
InformationOnly,
|
||||
SetuptoolsDeprecationWarning,
|
||||
SetuptoolsWarning,
|
||||
)
|
||||
from .build_py import build_py as build_py_cls
|
||||
|
||||
# ``wheel`` is only needed for type checking; imported lazily at runtime.
if TYPE_CHECKING:
    from wheel.wheelfile import WheelFile  # noqa

# ``typing.Protocol`` exists from 3.8 on; fall back to ``typing_extensions``
# for static analysis, or a plain ABC at runtime on older interpreters.
if sys.version_info >= (3, 8):
    from typing import Protocol
elif TYPE_CHECKING:
    from typing_extensions import Protocol
else:
    from abc import ABC as Protocol

# Alias for values accepted wherever a filesystem path is expected.
_Path = Union[str, Path]
_P = TypeVar("_P", bound=_Path)
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _EditableMode(Enum):
    """
    Possible editable installation modes:
    `lenient` (new files automatically added to the package - DEFAULT);
    `strict` (requires a new installation when files are added/removed); or
    `compat` (attempts to emulate `python setup.py develop` - DEPRECATED).
    """

    STRICT = "strict"
    LENIENT = "lenient"
    COMPAT = "compat"  # TODO: Remove `compat` after Dec/2022.

    @classmethod
    def convert(cls, mode: Optional[str]) -> "_EditableMode":
        # Translate the user-provided ``--mode`` string (case-insensitive)
        # into an enum member; an unset/empty mode defaults to LENIENT.
        if not mode:
            return _EditableMode.LENIENT  # default

        _mode = mode.upper()
        if _mode not in _EditableMode.__members__:
            raise errors.OptionError(f"Invalid editable mode: {mode!r}. Try: 'strict'.")

        # 'compat' still works but is deprecated; warn before returning it.
        if _mode == "COMPAT":
            SetuptoolsDeprecationWarning.emit(
                "Compat editable installs",
                """
                The 'compat' editable mode is transitional and will be removed
                in future versions of `setuptools`.
                Please adapt your code accordingly to use either the 'strict' or the
                'lenient' modes.
                """,
                see_docs="userguide/development_mode.html",
                # TODO: define due_date
                # There is a series of shortcomings with the available editable install
                # methods, and they are very controversial. This is something that still
                # needs work.
            )

        return _EditableMode[_mode]
|
||||
|
||||
|
||||
# Caveat appended to log messages when the "strict" (link-tree) strategy runs.
_STRICT_WARNING = """
New or renamed files may not be automatically picked up without a new installation.
"""

# Caveat appended to log messages when a lenient strategy (.pth file or
# meta path finder) runs.
_LENIENT_WARNING = """
Options like `package-data`, `include/exclude-package-data` or
`packages.find.exclude/include` may have no effect.
"""
|
||||
|
||||
|
||||
class editable_wheel(Command):
    """Build 'editable' wheel for development.
    This command is private and reserved for internal use of setuptools,
    users should rely on ``setuptools.build_meta`` APIs.
    """

    description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create PEP 660 editable wheel"

    user_options = [
        ("dist-dir=", "d", "directory to put final built distributions in"),
        ("dist-info-dir=", "I", "path to a pre-build .dist-info directory"),
        ("mode=", None, cleandoc(_EditableMode.__doc__ or "")),
    ]

    def initialize_options(self):
        # All options start unset; ``finalize_options`` derives defaults.
        self.dist_dir = None
        self.dist_info_dir = None
        self.project_dir = None
        self.mode = None

    def finalize_options(self):
        # Derive project/package dirs and the output ``dist`` directory.
        dist = self.distribution
        self.project_dir = dist.src_root or os.curdir
        self.package_dir = dist.package_dir or {}
        self.dist_dir = Path(self.dist_dir or os.path.join(self.project_dir, "dist"))

    def run(self):
        # Build the PEP 660 wheel; on any failure, print debugging tips
        # before re-raising so users can diagnose editable-install problems.
        try:
            self.dist_dir.mkdir(exist_ok=True)
            self._ensure_dist_info()

            # Add missing dist_info files
            self.reinitialize_command("bdist_wheel")
            bdist_wheel = self.get_finalized_command("bdist_wheel")
            bdist_wheel.write_wheelfile(self.dist_info_dir)

            self._create_wheel_file(bdist_wheel)
        except Exception:
            traceback.print_exc()
            project = self.distribution.name or self.distribution.get_name()
            _DebuggingTips.emit(project=project)
            raise

    def _ensure_dist_info(self):
        # Generate the .dist-info dir when the caller did not provide one;
        # otherwise sanity-check the pre-built directory.
        if self.dist_info_dir is None:
            dist_info = self.reinitialize_command("dist_info")
            dist_info.output_dir = self.dist_dir
            dist_info.ensure_finalized()
            dist_info.run()
            self.dist_info_dir = dist_info.dist_info_dir
        else:
            assert str(self.dist_info_dir).endswith(".dist-info")
            assert Path(self.dist_info_dir, "METADATA").exists()

    def _install_namespaces(self, installation_dir, pth_prefix):
        # XXX: Only required to support the deprecated namespace practice
        dist = self.distribution
        if not dist.namespace_packages:
            return

        src_root = Path(self.project_dir, self.package_dir.get("", ".")).resolve()
        installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root)
        installer.install_namespaces()

    def _find_egg_info_dir(self) -> Optional[str]:
        # Return a sibling ``*.egg-info`` directory of the dist-info dir,
        # or None when there is none.
        parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path()
        candidates = map(str, parent_dir.glob("*.egg-info"))
        return next(candidates, None)

    def _configure_build(
        self, name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path
    ):
        """Configure commands to behave in the following ways:

        - Build commands can write to ``build_lib`` if they really want to...
          (but this folder is expected to be ignored and modules are expected to live
          in the project directory...)
        - Binary extensions should be built in-place (editable_mode = True)
        - Data/header/script files are not part of the "editable" specification
          so they are written directly to the unpacked_wheel directory.
        """
        # Non-editable files (data, headers, scripts) are written directly to the
        # unpacked_wheel

        dist = self.distribution
        wheel = str(unpacked_wheel)
        build_lib = str(build_lib)
        data = str(Path(unpacked_wheel, f"{name}.data", "data"))
        headers = str(Path(unpacked_wheel, f"{name}.data", "headers"))
        scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts"))

        # egg-info may be generated again to create a manifest (used for package data)
        egg_info = dist.reinitialize_command("egg_info", reinit_subcommands=True)
        egg_info.egg_base = str(tmp_dir)
        egg_info.ignore_egg_info_in_manifest = True

        build = dist.reinitialize_command("build", reinit_subcommands=True)
        install = dist.reinitialize_command("install", reinit_subcommands=True)

        # Redirect every install target into the unpacked wheel layout.
        build.build_platlib = build.build_purelib = build.build_lib = build_lib
        install.install_purelib = install.install_platlib = install.install_lib = wheel
        install.install_scripts = build.build_scripts = scripts
        install.install_headers = headers
        install.install_data = data

        # Entry-point scripts are handled via the wheel metadata instead.
        install_scripts = dist.get_command_obj("install_scripts")
        install_scripts.no_ep = True

        build.build_temp = str(tmp_dir)

        build_py = dist.get_command_obj("build_py")
        build_py.compile = False
        build_py.existing_egg_info_dir = self._find_egg_info_dir()

        self._set_editable_mode()

        build.ensure_finalized()
        install.ensure_finalized()

    def _set_editable_mode(self):
        """Set the ``editable_mode`` flag in the build sub-commands"""
        dist = self.distribution
        build = dist.get_command_obj("build")
        for cmd_name in build.get_sub_commands():
            cmd = dist.get_command_obj(cmd_name)
            if hasattr(cmd, "editable_mode"):
                cmd.editable_mode = True
            elif hasattr(cmd, "inplace"):
                cmd.inplace = True  # backward compatibility with distutils

    def _collect_build_outputs(self) -> Tuple[List[str], Dict[str, str]]:
        # Gather the produced files and the output->source mapping from every
        # build sub-command that can report them.
        files: List[str] = []
        mapping: Dict[str, str] = {}
        build = self.get_finalized_command("build")

        for cmd_name in build.get_sub_commands():
            cmd = self.get_finalized_command(cmd_name)
            if hasattr(cmd, "get_outputs"):
                files.extend(cmd.get_outputs() or [])
            if hasattr(cmd, "get_output_mapping"):
                mapping.update(cmd.get_output_mapping() or {})

        return files, mapping

    def _run_build_commands(
        self, dist_name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path
    ) -> Tuple[List[str], Dict[str, str]]:
        # Configure, run the build, then install the non-editable categories
        # (headers/scripts/data) straight into the unpacked wheel.
        self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir)
        self._run_build_subcommands()
        files, mapping = self._collect_build_outputs()
        self._run_install("headers")
        self._run_install("scripts")
        self._run_install("data")
        return files, mapping

    def _run_build_subcommands(self):
        """
        Issue #3501 indicates that some plugins/customizations might rely on:

        1. ``build_py`` not running
        2. ``build_py`` always copying files to ``build_lib``

        However both these assumptions may be false in editable_wheel.
        This method implements a temporary workaround to support the ecosystem
        while the implementations catch up.
        """
        # TODO: Once plugins/customisations had the chance to catch up, replace
        #       `self._run_build_subcommands()` with `self.run_command("build")`.
        #       Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023.
        build: Command = self.get_finalized_command("build")
        for name in build.get_sub_commands():
            cmd = self.get_finalized_command(name)
            if name == "build_py" and type(cmd) != build_py_cls:
                self._safely_run(name)
            else:
                self.run_command(name)

    def _safely_run(self, cmd_name: str):
        # Run a (possibly customized) command, downgrading any failure to a
        # deprecation warning instead of aborting the editable install.
        try:
            return self.run_command(cmd_name)
        except Exception:
            SetuptoolsDeprecationWarning.emit(
                "Customization incompatible with editable install",
                f"""
                {traceback.format_exc()}

                If you are seeing this warning it is very likely that a setuptools
                plugin or customization overrides the `{cmd_name}` command, without
                taking into consideration how editable installs run build steps
                starting from setuptools v64.0.0.

                Plugin authors and developers relying on custom build steps are
                encouraged to update their `{cmd_name}` implementation considering the
                information about editable installs in
                https://setuptools.pypa.io/en/latest/userguide/extension.html.

                For the time being `setuptools` will silence this error and ignore
                the faulty command, but this behaviour will change in future versions.
                """,
                # TODO: define due_date
                # There is a series of shortcomings with the available editable install
                # methods, and they are very controversial. This is something that still
                # needs work.
            )

    def _create_wheel_file(self, bdist_wheel):
        # Assemble the final ``.whl``: copy dist-info, run the build in
        # temporary dirs, then let the selected strategy write its files.
        from wheel.wheelfile import WheelFile

        dist_info = self.get_finalized_command("dist_info")
        dist_name = dist_info.name
        tag = "-".join(bdist_wheel.get_tag())
        build_tag = "0.editable"  # According to PEP 427 needs to start with digit
        archive_name = f"{dist_name}-{build_tag}-{tag}.whl"
        wheel_path = Path(self.dist_dir, archive_name)
        if wheel_path.exists():
            wheel_path.unlink()

        unpacked_wheel = TemporaryDirectory(suffix=archive_name)
        build_lib = TemporaryDirectory(suffix=".build-lib")
        build_tmp = TemporaryDirectory(suffix=".build-temp")

        with unpacked_wheel as unpacked, build_lib as lib, build_tmp as tmp:
            unpacked_dist_info = Path(unpacked, Path(self.dist_info_dir).name)
            shutil.copytree(self.dist_info_dir, unpacked_dist_info)
            self._install_namespaces(unpacked, dist_info.name)
            files, mapping = self._run_build_commands(dist_name, unpacked, lib, tmp)
            strategy = self._select_strategy(dist_name, tag, lib)
            with strategy, WheelFile(wheel_path, "w") as wheel_obj:
                strategy(wheel_obj, files, mapping)
                wheel_obj.write_files(unpacked)

        return wheel_path

    def _run_install(self, category: str):
        # Run ``install_<category>`` only when the distribution declares
        # files of that category (headers/scripts/data).
        has_category = getattr(self.distribution, f"has_{category}", None)
        if has_category and has_category():
            _logger.info(f"Installing {category} as non editable")
            self.run_command(f"install_{category}")

    def _select_strategy(
        self,
        name: str,
        tag: str,
        build_lib: _Path,
    ) -> "EditableStrategy":
        """Decides which strategy to use to implement an editable installation."""
        build_name = f"__editable__.{name}-{tag}"
        project_dir = Path(self.project_dir)
        mode = _EditableMode.convert(self.mode)

        if mode is _EditableMode.STRICT:
            auxiliary_dir = _empty_dir(Path(self.project_dir, "build", build_name))
            return _LinkTree(self.distribution, name, auxiliary_dir, build_lib)

        packages = _find_packages(self.distribution)
        has_simple_layout = _simple_layout(packages, self.package_dir, project_dir)
        is_compat_mode = mode is _EditableMode.COMPAT
        if set(self.package_dir) == {""} and has_simple_layout or is_compat_mode:
            # src-layout(ish) is relatively safe for a simple pth file
            src_dir = self.package_dir.get("", ".")
            return _StaticPth(self.distribution, name, [Path(project_dir, src_dir)])

        # Use a MetaPathFinder to avoid adding accidental top-level packages/modules
        return _TopLevelFinder(self.distribution, name)
|
||||
|
||||
|
||||
class EditableStrategy(Protocol):
    # Structural interface shared by the strategies below (_StaticPth,
    # _LinkTree, _TopLevelFinder): a callable that writes the editable-install
    # files into the wheel, usable as a context manager for logging.
    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        ...

    def __enter__(self):
        ...

    def __exit__(self, _exc_type, _exc_value, _traceback):
        ...
|
||||
|
||||
|
||||
class _StaticPth:
    """Editable strategy: write a plain ``.pth`` file that adds the given
    path entries to ``sys.path``."""

    def __init__(self, dist: Distribution, name: str, path_entries: List[Path]):
        self.dist = dist
        self.name = name
        self.path_entries = path_entries

    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        # One absolute path per line, as the .pth format requires.
        entries = "\n".join((str(p.resolve()) for p in self.path_entries))
        contents = _encode_pth(f"{entries}\n")
        wheel.writestr(f"__editable__.{self.name}.pth", contents)

    def __enter__(self):
        msg = f"""
        Editable install will be performed using .pth file to extend `sys.path` with:
        {list(map(os.fspath, self.path_entries))!r}
        """
        _logger.warning(msg + _LENIENT_WARNING)
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        ...
|
||||
|
||||
|
||||
class _LinkTree(_StaticPth):
    """
    Creates a ``.pth`` file that points to a link tree in the ``auxiliary_dir``.

    This strategy will only link files (not dirs), so it can be implemented in
    any OS, even if that means using hardlinks instead of symlinks.

    By collocating ``auxiliary_dir`` and the original source code, limitations
    with hardlinks should be avoided.
    """

    def __init__(
        self,
        dist: Distribution,
        name: str,
        auxiliary_dir: _Path,
        build_lib: _Path,
    ):
        self.auxiliary_dir = Path(auxiliary_dir)
        self.build_lib = Path(build_lib).resolve()
        # Reuse build_py's ``copy_file`` so link/copy semantics stay consistent
        # with the rest of the build.
        self._file = dist.get_command_obj("build_py").copy_file
        super().__init__(dist, name, [self.auxiliary_dir])

    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        self._create_links(files, mapping)
        super().__call__(wheel, files, mapping)

    def _normalize_output(self, file: str) -> Optional[str]:
        # Files relative to build_lib will be normalized to None
        with suppress(ValueError):
            path = Path(file).resolve().relative_to(self.build_lib)
            return str(path).replace(os.sep, '/')
        return None

    def _create_file(self, relative_output: str, src_file: str, link=None):
        # Materialize one entry of the link tree, creating parent dirs lazily.
        dest = self.auxiliary_dir / relative_output
        if not dest.parent.is_dir():
            dest.parent.mkdir(parents=True)
        self._file(src_file, dest, link=link)

    def _create_links(self, outputs, output_mapping):
        self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
        # Prefer symlinks; fall back to hardlinks when the FS lacks them.
        link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard"
        mappings = {self._normalize_output(k): v for k, v in output_mapping.items()}
        mappings.pop(None, None)  # remove files that are not relative to build_lib

        # Outputs without a source mapping are copied; mapped ones are linked.
        for output in outputs:
            relative = self._normalize_output(output)
            if relative and relative not in mappings:
                self._create_file(relative, output)

        for relative, src in mappings.items():
            self._create_file(relative, src, link=link_type)

    def __enter__(self):
        msg = "Strict editable install will be performed using a link tree.\n"
        _logger.warning(msg + _STRICT_WARNING)
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        msg = f"""\n
        Strict editable installation performed using the auxiliary directory:
            {self.auxiliary_dir}

        Please be careful to not remove this directory, otherwise you might not be able
        to import/use your package.
        """
        InformationOnly.emit("Editable installation.", msg)
|
||||
|
||||
|
||||
class _TopLevelFinder:
    """Editable strategy: install a custom meta path finder (activated via a
    ``.pth`` hook) that maps top-level packages/modules to their sources."""

    def __init__(self, dist: Distribution, name: str):
        self.dist = dist
        self.name = name

    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
        # Discover the top-level packages/modules and their on-disk roots.
        src_root = self.dist.src_root or os.curdir
        top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
        package_dir = self.dist.package_dir or {}
        roots = _find_package_roots(top_level, package_dir, src_root)

        # Collect real and "virtual" (path-less) namespace packages.
        namespaces_: Dict[str, List[str]] = dict(
            chain(
                _find_namespaces(self.dist.packages or [], roots),
                ((ns, []) for ns in _find_virtual_namespaces(roots)),
            )
        )

        # Write the generated finder module plus the .pth file that installs
        # it at interpreter start-up.
        name = f"__editable__.{self.name}.finder"
        finder = _normalization.safe_identifier(name)
        content = bytes(_finder_template(name, roots, namespaces_), "utf-8")
        wheel.writestr(f"{finder}.py", content)

        content = _encode_pth(f"import {finder}; {finder}.install()")
        wheel.writestr(f"__editable__.{self.name}.pth", content)

    def __enter__(self):
        msg = "Editable install will be performed using a meta path finder.\n"
        _logger.warning(msg + _LENIENT_WARNING)
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        msg = """\n
        Please be careful with folders in your working directory with the same
        name as your package as they may take precedence during imports.
        """
        InformationOnly.emit("Editable installation.", msg)
|
||||
|
||||
|
||||
def _encode_pth(content: str) -> bytes:
|
||||
""".pth files are always read with 'locale' encoding, the recommendation
|
||||
from the cpython core developers is to write them as ``open(path, "w")``
|
||||
and ignore warnings (see python/cpython#77102, pypa/setuptools#3937).
|
||||
This function tries to simulate this behaviour without having to create an
|
||||
actual file, in a way that supports a range of active Python versions.
|
||||
(There seems to be some variety in the way different version of Python handle
|
||||
``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``).
|
||||
"""
|
||||
encoding = "locale" if sys.version_info >= (3, 10) else None
|
||||
with io.BytesIO() as buffer:
|
||||
wrapper = io.TextIOWrapper(buffer, encoding)
|
||||
wrapper.write(content)
|
||||
wrapper.flush()
|
||||
buffer.seek(0)
|
||||
return buffer.read()
|
||||
|
||||
|
||||
def _can_symlink_files(base_dir: Path) -> bool:
|
||||
with TemporaryDirectory(dir=str(base_dir.resolve())) as tmp:
|
||||
path1, path2 = Path(tmp, "file1.txt"), Path(tmp, "file2.txt")
|
||||
path1.write_text("file1", encoding="utf-8")
|
||||
with suppress(AttributeError, NotImplementedError, OSError):
|
||||
os.symlink(path1, path2)
|
||||
if path2.is_symlink() and path2.read_text(encoding="utf-8") == "file1":
|
||||
return True
|
||||
|
||||
try:
|
||||
os.link(path1, path2) # Ensure hard links can be created
|
||||
except Exception as ex:
|
||||
msg = (
|
||||
"File system does not seem to support either symlinks or hard links. "
|
||||
"Strict editable installs require one of them to be supported."
|
||||
)
|
||||
raise LinksNotSupported(msg) from ex
|
||||
return False
|
||||
|
||||
|
||||
def _simple_layout(
    packages: Iterable[str], package_dir: Dict[str, str], project_dir: Path
) -> bool:
    """Return ``True`` if:
    - all packages are contained by the same parent directory, **and**
    - all packages become importable if the parent directory is added to ``sys.path``.

    >>> _simple_layout(['a'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.b'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.b'], {}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "a", "b": "b"}, ".")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a", "b": "_b"}, ".")
    False
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a"}, "/tmp/myproj")
    False
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a.a1.a2": "_a2"}, ".")
    False
    >>> _simple_layout(['a', 'a.b'], {"": "src", "a.b": "_ab"}, "/tmp/myproj")
    False
    >>> # Special cases, no packages yet:
    >>> _simple_layout([], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout([], {"a": "_a", "": "src"}, "/tmp/myproj")
    False
    """
    pkg_paths = {
        pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages
    }
    if not pkg_paths:
        # No packages yet: simple only when package_dir is empty or just "".
        return set(package_dir) in ({}, {""})
    common = os.path.commonpath(
        [_parent_path(name, path) for name, path in pkg_paths.items()]
    )
    return all(
        _path.same_path(Path(common, *name.split('.')), path)
        for name, path in pkg_paths.items()
    )
|
||||
|
||||
|
||||
def _parent_path(pkg, pkg_path):
|
||||
"""Infer the parent path containing a package, that if added to ``sys.path`` would
|
||||
allow importing that package.
|
||||
When ``pkg`` is directly mapped into a directory with a different name, return its
|
||||
own path.
|
||||
>>> _parent_path("a", "src/a")
|
||||
'src'
|
||||
>>> _parent_path("b", "src/c")
|
||||
'src/c'
|
||||
"""
|
||||
parent = pkg_path[: -len(pkg)] if pkg_path.endswith(pkg) else pkg_path
|
||||
return parent.rstrip("/" + os.sep)
|
||||
|
||||
|
||||
def _find_packages(dist: Distribution) -> Iterator[str]:
|
||||
yield from iter(dist.packages or [])
|
||||
|
||||
py_modules = dist.py_modules or []
|
||||
nested_modules = [mod for mod in py_modules if "." in mod]
|
||||
if dist.ext_package:
|
||||
yield dist.ext_package
|
||||
else:
|
||||
ext_modules = dist.ext_modules or []
|
||||
nested_modules += [x.name for x in ext_modules if "." in x.name]
|
||||
|
||||
for module in nested_modules:
|
||||
package, _, _ = module.rpartition(".")
|
||||
yield package
|
||||
|
||||
|
||||
def _find_top_level_modules(dist: Distribution) -> Iterator[str]:
|
||||
py_modules = dist.py_modules or []
|
||||
yield from (mod for mod in py_modules if "." not in mod)
|
||||
|
||||
if not dist.ext_package:
|
||||
ext_modules = dist.ext_modules or []
|
||||
yield from (x.name for x in ext_modules if "." not in x.name)
|
||||
|
||||
|
||||
def _find_package_roots(
    packages: Iterable[str],
    package_dir: Mapping[str, str],
    src_root: _Path,
) -> Dict[str, str]:
    """Map each package to its absolute source directory, pruning entries that
    are already nested inside another root."""
    pkg_roots: Dict[str, str] = {}
    for pkg in sorted(packages):
        pkg_roots[pkg] = _absolute_root(find_package_path(pkg, package_dir, src_root))

    return _remove_nested(pkg_roots)
|
||||
|
||||
|
||||
def _absolute_root(path: _Path) -> str:
|
||||
"""Works for packages and top-level modules"""
|
||||
path_ = Path(path)
|
||||
parent = path_.parent
|
||||
|
||||
if path_.exists():
|
||||
return str(path_.resolve())
|
||||
else:
|
||||
return str(parent.resolve() / path_.name)
|
||||
|
||||
|
||||
def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]:
    """By carefully designing ``package_dir``, it is possible to implement the logical
    structure of PEP 420 in a package without the corresponding directories.

    Moreover a parent package can be purposefully/accidentally skipped in the discovery
    phase (e.g. ``find_packages(include=["mypkg.*"])``, when ``mypkg.foo`` is included
    by ``mypkg`` itself is not).
    We consider this case to also be a virtual namespace (ignoring the original
    directory) to emulate a non-editable installation.

    This function will try to find these kinds of namespaces.
    """
    for pkg in pkg_roots:
        if "." not in pkg:
            continue
        parts = pkg.split(".")
        # Walk every ancestor of ``pkg``, from the deepest up to the top level.
        for i in range(len(parts) - 1, 0, -1):
            ancestor = ".".join(parts[:i])
            dir_missing = not Path(find_package_path(ancestor, pkg_roots, "")).exists()
            # ancestor not in pkg_roots ==> purposefully/accidentally skipped
            if dir_missing or ancestor not in pkg_roots:
                yield ancestor
|
||||
|
||||
|
||||
def _find_namespaces(
    packages: List[str], pkg_roots: Dict[str, str]
) -> Iterator[Tuple[str, List[str]]]:
    """Yield ``(package, [path])`` for directories that exist on disk but have
    no ``__init__.py`` (i.e. PEP 420 namespace packages)."""
    for pkg in packages:
        path = find_package_path(pkg, pkg_roots, "")
        pkg_dir = Path(path)
        if pkg_dir.exists() and not (pkg_dir / "__init__.py").exists():
            yield (pkg, [path])
|
||||
|
||||
|
||||
def _remove_nested(pkg_roots: Dict[str, str]) -> Dict[str, str]:
    """Return a copy of ``pkg_roots`` without entries nested inside another root.

    An entry is dropped when ``_is_nested`` says it is contained in some other
    entry both logically (dotted name) and physically (file system path).
    """
    # ``dict(pkg_roots)`` already produces a shallow copy; the original
    # ``dict(pkg_roots.copy())`` copied the mapping twice for no benefit.
    output = dict(pkg_roots)

    # Iterate in reverse so deeper (later-sorted) packages are examined first.
    for pkg, path in reversed(list(pkg_roots.items())):
        if any(
            pkg != other and _is_nested(pkg, path, other, other_path)
            for other, other_path in pkg_roots.items()
        ):
            output.pop(pkg)

    return output
|
||||
|
||||
|
||||
def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool:
    """
    Return ``True`` if ``pkg`` is nested inside ``parent`` both logically and in the
    file system.
    >>> _is_nested("a.b", "path/a/b", "a", "path/a")
    True
    >>> _is_nested("a.b", "path/a/b", "a", "otherpath/a")
    False
    >>> _is_nested("a.b", "path/a/b", "c", "path/c")
    False
    >>> _is_nested("a.a", "path/a/a", "a", "path/a")
    True
    >>> _is_nested("b.a", "path/b/a", "a", "path/a")
    False
    """
    if not pkg.startswith(parent):
        return False
    # The sub-package components below ``parent`` must map onto subdirectories
    # of ``parent_path`` for the nesting to hold physically as well.
    remainder = pkg.replace(parent, "", 1).strip(".").split(".")
    expected = _path.normpath(Path(parent_path, *remainder))
    return _path.normpath(pkg_path) == expected
|
||||
|
||||
|
||||
def _empty_dir(dir_: _P) -> _P:
|
||||
"""Create a directory ensured to be empty. Existing files may be removed."""
|
||||
shutil.rmtree(dir_, ignore_errors=True)
|
||||
os.makedirs(dir_)
|
||||
return dir_
|
||||
|
||||
|
||||
class _NamespaceInstaller(namespaces.Installer):
    """Adapter feeding ``namespaces.Installer`` from an editable-install context."""

    def __init__(self, distribution, installation_dir, editable_name, src_root):
        self.distribution = distribution
        self.installation_dir = installation_dir
        self.editable_name = editable_name
        self.src_root = src_root
        self.outputs = []
        # The editable hook always writes its stubs; never a dry run.
        self.dry_run = False

    def _get_target(self):
        """Installation target."""
        return os.path.join(self.installation_dir, self.editable_name)

    def _get_root(self):
        """Where the modules/packages should be loaded from."""
        return repr(str(self.src_root))
|
||||
|
||||
|
||||
_FINDER_TEMPLATE = """\
|
||||
import sys
|
||||
from importlib.machinery import ModuleSpec, PathFinder
|
||||
from importlib.machinery import all_suffixes as module_suffixes
|
||||
from importlib.util import spec_from_file_location
|
||||
from itertools import chain
|
||||
from pathlib import Path
|
||||
|
||||
MAPPING = {mapping!r}
|
||||
NAMESPACES = {namespaces!r}
|
||||
PATH_PLACEHOLDER = {name!r} + ".__path_hook__"
|
||||
|
||||
|
||||
class _EditableFinder: # MetaPathFinder
|
||||
@classmethod
|
||||
def find_spec(cls, fullname, path=None, target=None):
|
||||
# Top-level packages and modules (we know these exist in the FS)
|
||||
if fullname in MAPPING:
|
||||
pkg_path = MAPPING[fullname]
|
||||
return cls._find_spec(fullname, Path(pkg_path))
|
||||
|
||||
# Handle immediate children modules (required for namespaces to work)
|
||||
# To avoid problems with case sensitivity in the file system we delegate
|
||||
# to the importlib.machinery implementation.
|
||||
parent, _, child = fullname.rpartition(".")
|
||||
if parent and parent in MAPPING:
|
||||
return PathFinder.find_spec(fullname, path=[MAPPING[parent]])
|
||||
|
||||
# Other levels of nesting should be handled automatically by importlib
|
||||
# using the parent path.
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _find_spec(cls, fullname, candidate_path):
|
||||
init = candidate_path / "__init__.py"
|
||||
candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
|
||||
for candidate in chain([init], candidates):
|
||||
if candidate.exists():
|
||||
return spec_from_file_location(fullname, candidate)
|
||||
|
||||
|
||||
class _EditableNamespaceFinder: # PathEntryFinder
|
||||
@classmethod
|
||||
def _path_hook(cls, path):
|
||||
if path == PATH_PLACEHOLDER:
|
||||
return cls
|
||||
raise ImportError
|
||||
|
||||
@classmethod
|
||||
def _paths(cls, fullname):
|
||||
# Ensure __path__ is not empty for the spec to be considered a namespace.
|
||||
return NAMESPACES[fullname] or MAPPING.get(fullname) or [PATH_PLACEHOLDER]
|
||||
|
||||
@classmethod
|
||||
def find_spec(cls, fullname, target=None):
|
||||
if fullname in NAMESPACES:
|
||||
spec = ModuleSpec(fullname, None, is_package=True)
|
||||
spec.submodule_search_locations = cls._paths(fullname)
|
||||
return spec
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def find_module(cls, fullname):
|
||||
return None
|
||||
|
||||
|
||||
def install():
|
||||
if not any(finder == _EditableFinder for finder in sys.meta_path):
|
||||
sys.meta_path.append(_EditableFinder)
|
||||
|
||||
if not NAMESPACES:
|
||||
return
|
||||
|
||||
if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks):
|
||||
# PathEntryFinder is needed to create NamespaceSpec without private APIS
|
||||
sys.path_hooks.append(_EditableNamespaceFinder._path_hook)
|
||||
if PATH_PLACEHOLDER not in sys.path:
|
||||
sys.path.append(PATH_PLACEHOLDER) # Used just to trigger the path hook
|
||||
"""
|
||||
|
||||
|
||||
def _finder_template(
    name: str, mapping: Mapping[str, str], namespaces: Dict[str, List[str]]
) -> str:
    """Create a string containing the code for the``MetaPathFinder`` and
    ``PathEntryFinder``.
    """
    # Sort by key so the generated module is deterministic across builds.
    ordered_mapping = {key: mapping[key] for key in sorted(mapping)}
    return _FINDER_TEMPLATE.format(
        name=name, mapping=ordered_mapping, namespaces=namespaces
    )
|
||||
|
||||
|
||||
class LinksNotSupported(errors.FileError):
    """File system does not seem to support either symlinks or hard links."""

    # Raised by ``_can_symlink_files`` when neither linking strategy works.
|
||||
|
||||
|
||||
class _DebuggingTips(SetuptoolsWarning):
    """Warning with debugging guidance shown when an editable install fails."""

    # ``{project}`` is interpolated by the SetuptoolsWarning machinery.
    _SUMMARY = "Problem in editable installation."
    _DETAILS = """
    An error happened while installing `{project}` in editable mode.

    The following steps are recommended to help debug this problem:

    - Try to install the project normally, without using the editable mode.
      Does the error still persist?
      (If it does, try fixing the problem before attempting the editable mode).
    - If you are using binary extensions, make sure you have all OS-level
      dependencies installed (e.g. compilers, toolchains, binary libraries, ...).
    - Try the latest version of setuptools (maybe the error was already fixed).
    - If you (or your project dependencies) are using any setuptools extension
      or customization, make sure they support the editable mode.

    After following the steps above, if the problem still persists and
    you think this is related to how setuptools handles editable installations,
    please submit a reproducible example
    (see https://stackoverflow.com/help/minimal-reproducible-example) to:

        https://github.com/pypa/setuptools/issues
    """
    _SEE_DOCS = "userguide/development_mode.html"
|
|
@ -0,0 +1,758 @@
|
|||
"""setuptools.command.egg_info
|
||||
|
||||
Create a distribution's .egg-info directory and contents"""
|
||||
|
||||
from distutils.filelist import FileList as _FileList
|
||||
from distutils.errors import DistutilsInternalError
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
import distutils.errors
|
||||
import distutils.filelist
|
||||
import functools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import io
|
||||
import time
|
||||
import collections
|
||||
|
||||
from .._importlib import metadata
|
||||
from .. import _entry_points, _normalization
|
||||
|
||||
from setuptools import Command
|
||||
from setuptools.command.sdist import sdist
|
||||
from setuptools.command.sdist import walk_revctrl
|
||||
from setuptools.command.setopt import edit_config
|
||||
from setuptools.command import bdist_egg
|
||||
import setuptools.unicode_utils as unicode_utils
|
||||
from setuptools.glob import glob
|
||||
|
||||
from setuptools.extern import packaging
|
||||
from setuptools.extern.jaraco.text import yield_lines
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
|
||||
# "major.minor" of the running interpreter (e.g. "3.11"); used as the default
# python-version tag when computing egg basenames.
PY_MAJOR = '{}.{}'.format(*sys.version_info)
|
||||
|
||||
|
||||
def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
    """
    Translate a file path glob like '*.txt' in to a regular expression.
    This differs from fnmatch.translate which allows wildcards to match
    directory separators. It also knows about '**/' which matches any number of
    directories.

    Returns a compiled pattern anchored at the start and terminated with
    ``\\Z``, compiled with ``re.MULTILINE | re.DOTALL``.

    NOTE(review): the contents of a ``[...]`` class are passed through
    ``re.escape``, which escapes ``-``; a glob class like ``[a-z]`` therefore
    appears to match the literal characters ``a``, ``-``, ``z`` rather than a
    range — confirm whether range support is intended here.
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    valid_char = '[^%s]' % (sep,)  # any single character except the separator

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1 : inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
|
||||
|
||||
|
||||
class InfoCommon:
    """Mixin with name/version tagging helpers shared by egg-info style commands."""

    # Optional tag configuration; normally overwritten from setup.cfg options.
    tag_build = None
    tag_date = None

    @property
    def name(self):
        """Distribution name, normalized for safe use in filenames/metadata."""
        return _normalization.safe_name(self.distribution.get_name())

    def tagged_version(self):
        """Distribution version with build/date tags applied and normalized."""
        return _normalization.best_effort_version(
            self._maybe_tag(self.distribution.get_version())
        )

    def _maybe_tag(self, version):
        """
        egg_info may be called more than once for a distribution,
        in which case the version string already contains all tags.
        """
        if self.vtags and self._already_tagged(version):
            return version
        return version + self.vtags

    def _already_tagged(self, version: str) -> bool:
        # Depending on their format, tags may change with version normalization.
        # So in addition to the regular tags, we have to search for the
        # normalized ones.
        return version.endswith(self.vtags) or version.endswith(self._safe_tags())

    def _safe_tags(self) -> str:
        # To implement this we can rely on `safe_version` pretending to be
        # version 0 followed by tags. Then we simply discard the starting 0
        # (fake version number).
        return _normalization.best_effort_version(f"0{self.vtags}")[1:]

    def tags(self) -> str:
        """Build the tag suffix from ``tag_build`` and (optionally) today's date."""
        parts = []
        if self.tag_build:
            parts.append(self.tag_build)
        if self.tag_date:
            parts.append(time.strftime("%Y%m%d"))
        return ''.join(parts)

    vtags = property(tags)
|
||||
|
||||
|
||||
class egg_info(InfoCommon, Command):
    """Create a distribution's .egg-info directory and contents."""

    description = "create a distribution's .egg-info directory"

    user_options = [
        (
            'egg-base=',
            'e',
            "directory containing .egg-info directories"
            " (default: top of the source tree)",
        ),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        """Reset all options to their pre-finalization defaults."""
        self.egg_base = None
        self.egg_name = None
        self.egg_info = None
        self.egg_version = None
        self.ignore_egg_info_in_manifest = False

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass

    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        """Validate name/version and compute the .egg-info location."""
        # Note: we need to capture the current value returned
        # by `self.tagged_version()`, so we can later update
        # `self.distribution.metadata.version` without
        # repercussions.
        self.egg_name = self.name
        self.egg_version = self.tagged_version()

        try:
            # Fix: parse the version *inside* the try block, so that an
            # invalid version (packaging.version.InvalidVersion, a ValueError
            # subclass) is reported as a friendly DistutilsOptionError instead
            # of escaping as a raw parsing exception.
            parsed_version = packaging.version.Version(self.egg_version)
            # is_version appears to always be True with modern `packaging`;
            # the `===` branch is kept for backward compatibility with the
            # LegacyVersion era.
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = "%s==%s" if is_version else "%s===%s"
            packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
        except ValueError as e:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s"
                % (self.egg_name, self.egg_version)
            ) from e

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        key = getattr(pd, "key", None) or getattr(pd, "name", None)
        if pd is not None and key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = packaging.version.Version(self.egg_version)
            self.distribution._patched_dist = None

    def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
        """Compute filename of the output egg. Private API."""
        return _egg_basename(self.egg_name, self.egg_version, py_version, platform)

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)`. If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn("%s not set in setup(), but %s exists", what, filename)
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        data = data.encode("utf-8")
        if not self.dry_run:
            # Fix: use a context manager so the file handle is closed even if
            # the write raises (previously open/write/close could leak).
            with open(filename, 'wb') as f:
                f.write(data)

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def run(self):
        """Create/refresh the .egg-info directory and run all metadata writers."""
        self.mkpath(self.egg_info)
        try:
            os.utime(self.egg_info, None)
        except OSError as e:
            msg = f"Cannot update time stamp of directory '{self.egg_info}'"
            raise distutils.errors.DistutilsFileError(msg) from e
        for ep in metadata.entry_points(group='egg_info.writers'):
            writer = ep.load()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist
|
||||
|
||||
|
||||
class FileList(_FileList):
    """distutils ``FileList`` with setuptools' MANIFEST.in command semantics."""

    # Implementations of the various MANIFEST.in commands

    def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False):
        super().__init__(warn, debug_print)
        # When True, paths containing ".egg-info" are filtered out (_safe_path).
        self.ignore_egg_info_dir = ignore_egg_info_dir

    def process_template_line(self, line):
        """Dispatch one MANIFEST.in line to the matching include/exclude handler."""
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        action_map = {
            'include': self.include,
            'exclude': self.exclude,
            'global-include': self.global_include,
            'global-exclude': self.global_exclude,
            'recursive-include': functools.partial(
                self.recursive_include,
                dir,
            ),
            'recursive-exclude': functools.partial(
                self.recursive_exclude,
                dir,
            ),
            'graft': self.graft,
            'prune': self.prune,
        }
        # Per-action warning template used when a handler reports no matches.
        log_map = {
            'include': "warning: no files found matching '%s'",
            'exclude': ("warning: no previously-included files found " "matching '%s'"),
            'global-include': (
                "warning: no files found matching '%s' " "anywhere in distribution"
            ),
            'global-exclude': (
                "warning: no previously-included files matching "
                "'%s' found anywhere in distribution"
            ),
            'recursive-include': (
                "warning: no files found matching '%s' " "under directory '%s'"
            ),
            'recursive-exclude': (
                "warning: no previously-included files matching "
                "'%s' found under directory '%s'"
            ),
            'graft': "warning: no directories found matching '%s'",
            'prune': "no previously-included directories found matching '%s'",
        }

        try:
            process_action = action_map[action]
        except KeyError:
            # _parse_template_line already validated the action, so this is
            # defensive only.
            raise DistutilsInternalError(
                "this cannot happen: invalid action '{action!s}'".format(action=action),
            )

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.

        action_is_recursive = action.startswith('recursive-')
        if action in {'graft', 'prune'}:
            patterns = [dir_pattern]
        extra_log_args = (dir,) if action_is_recursive else ()
        log_tmpl = log_map[action]

        self.debug_print(
            ' '.join(
                [action] + ([dir] if action_is_recursive else []) + patterns,
            )
        )
        for pattern in patterns:
            if not process_action(pattern):
                log.warn(log_tmpl, pattern, *extra_log_args)

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        # Iterate backwards so deletions do not shift pending indices.
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that match the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        """Append a single path after newline/Windows cleanup and safety checks."""
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        """Append every path in *paths* that passes ``_safe_path``."""
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        """Return True for an existing, encodable path; falsy otherwise.

        Note: falls through (implicitly returning ``None``, which is falsy)
        when the path does not exist on disk.
        """
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental trans-codings errors, first to unicode
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # ignore egg-info paths
            is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path
            if self.ignore_egg_info_dir and is_egg_info:
                return False
            # accept is either way checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
            # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())
|
||||
|
||||
|
||||
class manifest_maker(sdist):
|
||||
template = "MANIFEST.in"
|
||||
|
||||
def initialize_options(self):
    """Configure the parent sdist machinery to only (re)build the manifest."""
    # All four distutils flags are switched on: regenerate from defaults,
    # prune, stop after the manifest, and always force-write it.
    self.use_defaults = self.prune = self.manifest_only = self.force_manifest = 1
    self.ignore_egg_info_dir = False
|
||||
|
||||
def finalize_options(self):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
|
||||
if not os.path.exists(self.manifest):
|
||||
self.write_manifest() # it must exist so it'll get in the list
|
||||
self.add_defaults()
|
||||
if os.path.exists(self.template):
|
||||
self.read_template()
|
||||
self.add_license_files()
|
||||
self._add_referenced_files()
|
||||
self.prune_file_list()
|
||||
self.filelist.sort()
|
||||
self.filelist.remove_duplicates()
|
||||
self.write_manifest()
|
||||
|
||||
def _manifest_normalize(self, path):
|
||||
path = unicode_utils.filesys_decode(path)
|
||||
return path.replace(os.sep, '/')
|
||||
|
||||
def write_manifest(self):
|
||||
"""
|
||||
Write the file list in 'self.filelist' to the manifest file
|
||||
named by 'self.manifest'.
|
||||
"""
|
||||
self.filelist._repair()
|
||||
|
||||
# Now _repairs should encodability, but not unicode
|
||||
files = [self._manifest_normalize(f) for f in self.filelist.files]
|
||||
msg = "writing manifest file '%s'" % self.manifest
|
||||
self.execute(write_file, (self.manifest, files), msg)
|
||||
|
||||
def warn(self, msg):
|
||||
if not self._should_suppress_warning(msg):
|
||||
sdist.warn(self, msg)
|
||||
|
||||
@staticmethod
|
||||
def _should_suppress_warning(msg):
|
||||
"""
|
||||
suppress missing-file warnings from sdist
|
||||
"""
|
||||
return re.match(r"standard file .*not found", msg)
|
||||
|
||||
def add_defaults(self):
|
||||
sdist.add_defaults(self)
|
||||
self.filelist.append(self.template)
|
||||
self.filelist.append(self.manifest)
|
||||
rcfiles = list(walk_revctrl())
|
||||
if rcfiles:
|
||||
self.filelist.extend(rcfiles)
|
||||
elif os.path.exists(self.manifest):
|
||||
self.read_manifest()
|
||||
|
||||
if os.path.exists("setup.py"):
|
||||
# setup.py should be included by default, even if it's not
|
||||
# the script called to create the sdist
|
||||
self.filelist.append("setup.py")
|
||||
|
||||
ei_cmd = self.get_finalized_command('egg_info')
|
||||
self.filelist.graft(ei_cmd.egg_info)
|
||||
|
||||
def add_license_files(self):
|
||||
license_files = self.distribution.metadata.license_files or []
|
||||
for lf in license_files:
|
||||
log.info("adding license file '%s'", lf)
|
||||
self.filelist.extend(license_files)
|
||||
|
||||
def _add_referenced_files(self):
|
||||
"""Add files referenced by the config (e.g. `file:` directive) to filelist"""
|
||||
referenced = getattr(self.distribution, '_referenced_files', [])
|
||||
# ^-- fallback if dist comes from distutils or is a custom class
|
||||
for rf in referenced:
|
||||
log.debug("adding file referenced by config '%s'", rf)
|
||||
self.filelist.extend(referenced)
|
||||
|
||||
def prune_file_list(self):
|
||||
build = self.get_finalized_command('build')
|
||||
base_dir = self.distribution.get_fullname()
|
||||
self.filelist.prune(build.build_base)
|
||||
self.filelist.prune(base_dir)
|
||||
sep = re.escape(os.sep)
|
||||
self.filelist.exclude_pattern(
|
||||
r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1
|
||||
)
|
||||
|
||||
def _safe_data_files(self, build_py):
|
||||
"""
|
||||
The parent class implementation of this method
|
||||
(``sdist``) will try to include data files, which
|
||||
might cause recursion problems when
|
||||
``include_package_data=True``.
|
||||
|
||||
Therefore, avoid triggering any attempt of
|
||||
analyzing/building the manifest again.
|
||||
"""
|
||||
if hasattr(build_py, 'get_data_files_without_manifest'):
|
||||
return build_py.get_data_files_without_manifest()
|
||||
|
||||
SetuptoolsDeprecationWarning.emit(
|
||||
"`build_py` command does not inherit from setuptools' `build_py`.",
|
||||
"""
|
||||
Custom 'build_py' does not implement 'get_data_files_without_manifest'.
|
||||
Please extend command classes from setuptools instead of distutils.
|
||||
""",
|
||||
see_url="https://peps.python.org/pep-0632/",
|
||||
# due_date not defined yet, old projects might still do it?
|
||||
)
|
||||
return build_py.get_data_files()
|
||||
|
||||
|
||||
def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.

    The file is written in binary mode so the manifest always uses
    POSIX-style ('\\n') line endings, and is encoded as UTF-8.
    """
    payload = "\n".join(contents).encode("utf-8")

    with open(filename, "wb") as stream:  # always write POSIX-style manifest
        stream.write(payload)
|
||||
|
||||
|
||||
def write_pkg_info(cmd, basename, filename):
    """egg_info writer: emit PKG-INFO plus the zip-safety flag file.

    Skipped entirely under --dry-run.
    """
    log.info("writing %s", filename)
    if not cmd.dry_run:
        metadata = cmd.distribution.metadata
        # Temporarily swap in the egg-normalized name/version so the
        # written PKG-INFO matches the egg filename; restored below.
        metadata.version, oldver = cmd.egg_version, metadata.version
        metadata.name, oldname = cmd.egg_name, metadata.name

        try:
            # write unescaped data to PKG-INFO, so older pkg_resources
            # can still parse it
            metadata.write_pkg_info(cmd.egg_info)
        finally:
            # Always restore the caller-visible metadata, even on error.
            metadata.name, metadata.version = oldname, oldver

        # None means "unknown"; write_safety_flag records True/False/None.
        safe = getattr(cmd.distribution, 'zip_safe', None)

        bdist_egg.write_safety_flag(cmd.egg_info, safe)
|
||||
|
||||
|
||||
def warn_depends_obsolete(cmd, basename, filename):
    """
    Unused: left to avoid errors when updating (from source) from <= 67.8.
    Old installations have a .dist-info directory with the entry-point
    ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``.
    This may trigger errors when running the first egg_info in build_meta.
    TODO: Remove this function in a version sufficiently > 68.
    """
    # Intentionally empty: kept only so the stale entry point resolves.
|
||||
|
||||
|
||||
def _write_requirements(stream, reqs):
    """Write *reqs* to *stream*, sorted, one requirement per line."""
    entries = sorted(yield_lines(reqs or ()))
    stream.writelines(entry + '\n' for entry in entries)
|
||||
|
||||
|
||||
def write_requirements(cmd, basename, filename):
    """egg_info writer: emit requires.txt.

    install_requires first, then one ``[extra]`` section per extra,
    extras sorted by name.
    """
    dist = cmd.distribution
    buffer = io.StringIO()
    _write_requirements(buffer, dist.install_requires)

    extras = dist.extras_require or {}
    for extra in sorted(extras):
        buffer.write(f'\n[{extra}]\n')
        _write_requirements(buffer, extras[extra])

    cmd.write_or_delete_file("requirements", filename, buffer.getvalue())
|
||||
|
||||
|
||||
def write_setup_requirements(cmd, basename, filename):
    """egg_info writer: emit the setup_requires list."""
    buffer = io.StringIO()
    _write_requirements(buffer, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, buffer.getvalue())
|
||||
|
||||
|
||||
def write_toplevel_names(cmd, basename, filename):
    """egg_info writer: emit the sorted set of top-level package names."""
    top_level = {
        name.split('.', 1)[0]
        for name in cmd.distribution.iter_distribution_names()
    }
    cmd.write_file("top-level names", filename, '\n'.join(sorted(top_level)) + '\n')
|
||||
|
||||
|
||||
def overwrite_arg(cmd, basename, filename):
    """egg_info writer: like write_arg, but always overwrite (force=True)."""
    write_arg(cmd, basename, filename, True)
|
||||
|
||||
|
||||
def write_arg(cmd, basename, filename, force=False):
    """egg_info writer: mirror a distribution attribute into a metadata file.

    The attribute name is *basename* without its extension; a missing
    attribute (None) deletes the file instead of writing it.
    """
    argname = os.path.splitext(basename)[0]
    lines = getattr(cmd.distribution, argname, None)
    value = None if lines is None else '\n'.join(lines) + '\n'
    cmd.write_or_delete_file(argname, filename, value, force)
|
||||
|
||||
|
||||
def write_entries(cmd, basename, filename):
    """egg_info writer: render entry points to entry_points.txt.

    Always forces the write (last argument True) so stale files are replaced.
    """
    eps = _entry_points.load(cmd.distribution.entry_points)
    defn = _entry_points.render(eps)
    cmd.write_or_delete_file('entry points', filename, defn, True)
|
||||
|
||||
|
||||
def _egg_basename(egg_name, egg_version, py_version=None, platform=None):
    """Compute filename of the output egg. Private API.

    Produces ``name-version-pyX.Y[-platform]`` with name/version passed
    through filename-safe normalization; ``py_version`` defaults to the
    running interpreter's PY_MAJOR tag.
    """
    name = _normalization.filename_component(egg_name)
    version = _normalization.filename_component(egg_version)
    egg = f"{name}-{version}-py{py_version or PY_MAJOR}"
    if platform:
        egg += f"-{platform}"
    return egg
|
||||
|
||||
|
||||
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Deprecated behavior warning for EggInfo, bypassing suppression."""
|
|
@ -0,0 +1,147 @@
|
|||
from distutils.errors import DistutilsArgError
|
||||
import inspect
|
||||
import glob
|
||||
import platform
|
||||
import distutils.command.install as orig
|
||||
|
||||
import setuptools
|
||||
from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
|
||||
|
||||
# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
# now. See https://github.com/pypa/setuptools/issues/199/
# Backward-compatibility alias only; do not remove while old NumPy matters.
_install = orig.install
|
||||
|
||||
|
||||
class install(orig.install):
    """Use easy_install to install the package, w/dependencies"""

    user_options = orig.install.user_options + [
        ('old-and-unmanageable', None, "Try not to use this!"),
        (
            'single-version-externally-managed',
            None,
            "used by system package builders to create 'flat' eggs",
        ),
    ]
    boolean_options = orig.install.boolean_options + [
        'old-and-unmanageable',
        'single-version-externally-managed',
    ]
    # Sub-commands setuptools substitutes for the distutils ones.
    new_commands = [
        ('install_egg_info', lambda self: True),
        ('install_scripts', lambda self: True),
    ]
    _nc = dict(new_commands)  # name -> predicate, used to filter below

    def initialize_options(self):
        # Emit the deprecation notice up front, before any option handling.
        SetuptoolsDeprecationWarning.emit(
            "setup.py install is deprecated.",
            """
            Please avoid running ``setup.py`` directly.
            Instead, use pypa/build, pypa/installer or other
            standards-based tools.
            """,
            see_url="https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html",
            # TODO: Document how to bootstrap setuptools without install
            # (e.g. by unziping the wheel file)
            # and then add a due_date to this warning.
        )

        orig.install.initialize_options(self)
        self.old_and_unmanageable = None
        self.single_version_externally_managed = None

    def finalize_options(self):
        orig.install.finalize_options(self)
        if self.root:
            # --root implies a system-package ("flat") install.
            self.single_version_externally_managed = True
        elif self.single_version_externally_managed:
            if not self.root and not self.record:
                raise DistutilsArgError(
                    "You must specify --record or --root when building system"
                    " packages"
                )

    def handle_extra_path(self):
        if self.root or self.single_version_externally_managed:
            # explicit backward-compatibility mode, allow extra_path to work
            return orig.install.handle_extra_path(self)

        # Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed).
        self.path_file = None
        self.extra_dirs = ''

    def run(self):
        # Explicit request for old-style install?  Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return orig.install.run(self)

        if not self._called_from_setup(inspect.currentframe()):
            # Run in backward-compatibility mode to support bdist_* commands.
            orig.install.run(self)
        else:
            self.do_egg_install()

    @staticmethod
    def _called_from_setup(run_frame):
        """
        Attempt to detect whether run() was called from setup() or by another
        command.  If called by setup(), the parent caller will be the
        'run_command' method in 'distutils.dist', and *its* caller will be
        the 'run_commands' method.  If called any other way, the
        immediate caller *might* be 'run_command', but it won't have been
        called by 'run_commands'.  Return True in that case or if a call stack
        is unavailable.  Return False otherwise.
        """
        if run_frame is None:
            msg = "Call stack not available. bdist_* commands may fail."
            SetuptoolsWarning.emit(msg)
            if platform.python_implementation() == 'IronPython':
                msg = "For best results, pass -X:Frames to enable call stack."
                SetuptoolsWarning.emit(msg)
            return True

        frames = inspect.getouterframes(run_frame)
        for frame in frames[2:4]:
            (caller,) = frame[:1]
            info = inspect.getframeinfo(caller)
            caller_module = caller.f_globals.get('__name__', '')

            if caller_module == "setuptools.dist" and info.function == "run_command":
                # Starting from v61.0.0 setuptools overwrites dist.run_command
                continue

            return caller_module == 'distutils.dist' and info.function == 'run_commands'
        # NOTE: implicit None (falsy) return if neither frame matches.

    def do_egg_install(self):
        """Install via an internally-constructed easy_install command."""
        easy_install = self.distribution.get_command_class('easy_install')

        cmd = easy_install(
            self.distribution,
            args="x",
            root=self.root,
            record=self.record,
        )
        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

        # pick up setup-dir .egg files only: no .egg-info
        cmd.package_index.scan(glob.glob('*.egg'))

        self.run_command('bdist_egg')
        args = [self.distribution.get_command_obj('bdist_egg').egg_output]

        if setuptools.bootstrap_install_from:
            # Bootstrap self-installation of setuptools
            args.insert(0, setuptools.bootstrap_install_from)

        cmd.args = args
        cmd.run(show_deprecation=False)
        setuptools.bootstrap_install_from = None
|
||||
|
||||
|
||||
# XXX Python 3.1 doesn't see _nc if this is inside the class
# Replace the distutils sub-commands listed in install._nc with the
# setuptools versions declared in install.new_commands.
install.sub_commands = [
    cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc
] + install.new_commands
|
|
@ -0,0 +1,77 @@
|
|||
from distutils import log, dir_util
|
||||
import os, sys
|
||||
|
||||
from setuptools import Command
|
||||
from setuptools import namespaces
|
||||
from setuptools.archive_util import unpack_archive
|
||||
from .._path import ensure_directory
|
||||
|
||||
|
||||
class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package."""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None
        self.install_layout = None
        self.prefix_option = None

    def finalize_options(self):
        """Resolve the install directory and compute source/target paths."""
        # Deferred import: DistutilsOptionError is not imported at module
        # level in this file, so the "unknown value" branch below used to
        # raise NameError instead of the intended option error.
        from distutils.errors import DistutilsOptionError

        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
        self.set_undefined_options('install', ('install_layout', 'install_layout'))
        if sys.hexversion > 0x2060000:  # always true on any supported Python
            self.set_undefined_options('install', ('prefix_option', 'prefix_option'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = f"{ei_cmd._get_egg_basename()}.egg-info"

        # The '-pyX.Y' tag to strip.  The previous code used
        # ``sys.version[:4]`` which is wrong for single-digit minor
        # versions (e.g. '3.9.' kept the trailing dot, so nothing matched).
        py_tag = '-py%d.%d' % sys.version_info[:2]

        if self.install_layout:
            # Only the Debian layout is recognized.
            if not self.install_layout.lower() in ['deb']:
                raise DistutilsOptionError("unknown value for --install-layout")
            self.install_layout = self.install_layout.lower()
            basename = basename.replace(py_tag, '')
        elif self.prefix_option or 'real_prefix' in sys.__dict__:
            # don't modify for virtualenv
            pass
        else:
            basename = basename.replace(py_tag, '')

        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = []

    def run(self):
        """Regenerate egg-info, clear any stale target, then copy it in."""
        self.run_command('egg_info')
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            ensure_directory(self.target)
        self.execute(self.copytree, (), "Copying %s to %s" % (self.source, self.target))
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self):
        """Copy the .egg-info tree to site-packages, recording outputs."""

        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform.  'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            if self.install_layout and self.install_layout in ['deb'] and src.startswith('SOURCES.txt'):
                # Debian layout omits SOURCES.txt.
                log.info("Skipping SOURCES.txt")
                return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)
|
|
@ -0,0 +1,151 @@
|
|||
import os
|
||||
import sys
|
||||
from itertools import product, starmap
|
||||
import distutils.command.install_lib as orig
|
||||
|
||||
|
||||
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def initialize_options(self):
        orig.install_lib.initialize_options(self)
        # Debian multiarch tuple (e.g. 'x86_64-linux-gnu') when applicable.
        self.multiarch = None
        self.install_layout = None

    def finalize_options(self):
        orig.install_lib.finalize_options(self)
        self.set_undefined_options('install', ('install_layout', 'install_layout'))
        if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3):
            import sysconfig
            self.multiarch = sysconfig.get_config_var('MULTIARCH')

    def run(self):
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        Yield the package and each of its ancestors.

        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        if not hasattr(sys, 'implementation'):
            return

        base = os.path.join('__pycache__', '__init__.' + sys.implementation.cache_tag)
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
        self,
        infile,
        outfile,
        preserve_mode=1,
        preserve_times=1,
        preserve_symlinks=0,
        level=1,
    ):
        # Only the default distutils combination of flags is supported.
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            import distutils.dir_util
            # Pass the multiarch tuple to the (patched) dir_util machinery.
            distutils.dir_util._multiarch = self.multiarch
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        if self.multiarch:
            import sysconfig
            ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
            # If EXT_SUFFIX already embeds the multiarch tuple, no rename
            # is needed; otherwise build the multiarch-qualified suffix.
            if ext_suffix.endswith(self.multiarch + ext_suffix[-3:]):
                new_suffix = None
            else:
                new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:])

        def pf(src, dst):
            # Per-file filter passed to unpack_directory: False skips the
            # file, a path (possibly rewritten) installs it there.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)", dst)
                return False

            if self.multiarch and new_suffix and dst.endswith(ext_suffix) and not dst.endswith(new_suffix):
                dst = dst.replace(ext_suffix, new_suffix)
                log.info("renaming extension to %s", os.path.basename(dst))

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
|
|
@ -0,0 +1,66 @@
|
|||
from distutils import log
|
||||
import distutils.command.install_scripts as orig
|
||||
import os
|
||||
import sys
|
||||
|
||||
from .._path import ensure_directory
|
||||
|
||||
|
||||
class install_scripts(orig.install_scripts):
    """Do normal script install, plus any egg_info wrapper scripts"""

    def initialize_options(self):
        orig.install_scripts.initialize_options(self)
        # When True (e.g. set by bdist_egg), skip entry-point scripts.
        self.no_ep = False

    def run(self):
        self.run_command("egg_info")
        if self.distribution.scripts:
            orig.install_scripts.run(self)  # run first to set up self.outfiles
        else:
            self.outfiles = []
        if self.no_ep:
            # don't install entry point scripts into .egg file!
            return
        self._install_ep_scripts()

    def _install_ep_scripts(self):
        """Generate and install console/GUI scripts from entry points."""
        # Delay import side-effects
        from pkg_resources import Distribution, PathMetadata

        from . import easy_install as ei

        ei_cmd = self.get_finalized_command("egg_info")
        dist = Distribution(
            ei_cmd.egg_base,
            PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name,
            ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        exec_param = getattr(bs_cmd, 'executable', None)
        writer = ei.ScriptWriter
        if exec_param == sys.executable:
            # In case the path to the Python executable contains a space, wrap
            # it so it's not split up.
            exec_param = [exec_param]
        # resolve the writer to the environment
        writer = writer.best()
        cmd = writer.command_spec_class.best().from_param(exec_param)
        for args in writer.get_args(dist, cmd.as_header()):
            self.write_script(*args)
|
||||
|
||||
def write_script(self, script_name, contents, mode="t", *ignored):
    """Write an executable file to the scripts directory.

    *mode* is appended to ``"w"`` when opening the file ("t" for text,
    "b" for binary).  The target path is always recorded in
    ``self.outfiles``; the file itself is only written (and chmod'ed
    world-executable minus the umask) outside --dry-run.
    """
    from setuptools.command.easy_install import chmod, current_umask

    log.info("Installing %s script to %s", script_name, self.install_dir)
    target = os.path.join(self.install_dir, script_name)
    self.outfiles.append(target)

    mask = current_umask()
    if not self.dry_run:
        ensure_directory(target)
        # Context manager so the handle is closed even if the write
        # fails; the previous open/close pair leaked it on error.
        with open(target, "w" + mode) as f:
            f.write(contents)
        chmod(target, 0o777 - mask)
|
|
@ -0,0 +1,15 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<assemblyIdentity version="1.0.0.0"
|
||||
processorArchitecture="X86"
|
||||
name="%(name)s"
|
||||
type="win32"/>
|
||||
<!-- Identify the application security requirements. -->
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
</assembly>
|
|
@ -0,0 +1,18 @@
|
|||
from distutils import log
|
||||
import distutils.command.register as orig
|
||||
|
||||
from setuptools.errors import RemovedCommandError
|
||||
|
||||
|
||||
class register(orig.register):
    """Formerly used to register packages on PyPI; now always errors out."""

    def run(self):
        """Announce the removal and abort with RemovedCommandError."""
        msg = (
            "The register command has been removed, use twine to upload "
            "instead (https://pypi.org/p/twine)"
        )
        self.announce("ERROR: " + msg, log.ERROR)
        raise RemovedCommandError(msg)
|
|
@ -0,0 +1,62 @@
|
|||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
|
||||
class rotate(Command):
    """Delete older distributions"""

    description = "delete older distributions, keeping N newest files"
    user_options = [
        ('match=', 'm', "patterns to match (required)"),
        ('dist-dir=', 'd', "directory where the distributions are"),
        ('keep=', 'k', "number of matching distributions to keep"),
    ]

    boolean_options = []

    def initialize_options(self):
        self.match = None
        self.dist_dir = None
        self.keep = None

    def finalize_options(self):
        """Validate --match/--keep and default --dist-dir from bdist."""
        if self.match is None:
            raise DistutilsOptionError(
                "Must specify one or more (comma-separated) match patterns "
                "(e.g. '.zip' or '.egg')"
            )
        if self.keep is None:
            raise DistutilsOptionError("Must specify number of files to keep")
        try:
            self.keep = int(self.keep)
        except ValueError as e:
            raise DistutilsOptionError("--keep must be an integer") from e
        if isinstance(self.match, str):
            # Split the comma-separated pattern list given on the command line.
            self.match = [convert_path(p.strip()) for p in self.match.split(',')]
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

    def run(self):
        """Delete all but the newest ``keep`` files for each pattern."""
        self.run_command("egg_info")
        from glob import glob

        for pattern in self.match:
            # Patterns are anchored to this distribution's name.
            pattern = self.distribution.get_name() + '*' + pattern
            files = glob(os.path.join(self.dist_dir, pattern))
            files = [(os.path.getmtime(f), f) for f in files]
            # Newest first, so the slice below keeps the most recent ones.
            files.sort()
            files.reverse()

            log.info("%d file(s) matching %s", len(files), pattern)
            files = files[self.keep :]
            for t, f in files:
                log.info("Deleting %s", f)
                if not self.dry_run:
                    if os.path.isdir(f):
                        shutil.rmtree(f)
                    else:
                        os.unlink(f)
|
|
@ -0,0 +1,21 @@
|
|||
from setuptools.command.setopt import edit_config, option_base
|
||||
|
||||
|
||||
class saveopts(option_base):
    """Persist command-line options into a configuration file."""

    description = "save supplied options to setup.cfg or other config file"

    def run(self):
        """Collect every option supplied on the command line and save it."""
        distribution = self.distribution
        collected = {}

        for command in distribution.command_options:
            if command == 'saveopts':
                continue  # don't save our own options!

            option_dict = distribution.get_option_dict(command)
            for name, (source, value) in option_dict.items():
                if source == "command line":
                    collected.setdefault(command, {})[name] = value

        edit_config(self.filename, collected, self.dry_run)
|
|
@ -0,0 +1,215 @@
|
|||
from distutils import log
|
||||
import distutils.command.sdist as orig
|
||||
import os
|
||||
import sys
|
||||
import io
|
||||
import contextlib
|
||||
from itertools import chain
|
||||
|
||||
from .._importlib import metadata
|
||||
from .build import _ORIGINAL_SUBCOMMANDS
|
||||
|
||||
# Fallback file finder: yields nothing (no revision control detected).
_default_revctrl = list


def walk_revctrl(dirname=''):
    """Find all files under revision control"""
    # Each 'setuptools.file_finders' entry point is a callable taking a
    # directory and yielding file paths under version control.
    for ep in metadata.entry_points(group='setuptools.file_finders'):
        for item in ep.load()(dirname):
            yield item
|
||||
|
||||
|
||||
class sdist(orig.sdist):
    """Smart sdist that finds anything supported by revision control"""

    user_options = [
        ('formats=', None, "formats for source distribution (comma-separated list)"),
        (
            'keep-temp',
            'k',
            "keep the distribution tree around after creating " + "archive file(s)",
        ),
        (
            'dist-dir=',
            'd',
            "directory to put the source distribution archive(s) in " "[default: dist]",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    # Drop distutils' negative options (e.g. --no-defaults).
    negative_opt = {}

    # README spellings accepted by check_readme(), in preference order.
    README_EXTENSIONS = ['', '.rst', '.txt', '.md']
    READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
|
||||
|
||||
def run(self):
    """Build the source distribution: egg-info first, then the archives."""
    self.run_command('egg_info')
    ei_cmd = self.get_finalized_command('egg_info')
    # Reuse the file list egg_info already computed.
    self.filelist = ei_cmd.filelist
    self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
    self.check_readme()

    # Run sub commands
    for cmd_name in self.get_sub_commands():
        self.run_command(cmd_name)

    self.make_distribution()

    # Record the produced archives so later commands see them.
    dist_files = getattr(self.distribution, 'dist_files', [])
    for file in self.archive_files:
        data = ('sdist', '', file)
        if data not in dist_files:
            dist_files.append(data)
|
||||
|
||||
def initialize_options(self):
    orig.sdist.initialize_options(self)

    # Force gztar on ancient Pythons where it was not already the default.
    self._default_to_gztar()
|
||||
|
||||
def _default_to_gztar(self):
    # only needed on Python prior to 3.6.
    if sys.version_info >= (3, 6, 0, 'beta', 1):
        return
    self.formats = ['gztar']
|
||||
|
||||
def make_distribution(self):
    """
    Workaround for #516: hide os.link so distutils copies files
    instead of hard-linking them into the release tree.
    """
    with self._remove_os_link():
        orig.sdist.make_distribution(self)
|
||||
|
||||
@staticmethod
@contextlib.contextmanager
def _remove_os_link():
    """
    In a context, remove and restore os.link if it exists
    """
    sentinel = object()
    saved = getattr(os, 'link', sentinel)
    try:
        del os.link
    except Exception:
        pass
    try:
        yield
    finally:
        if saved is not sentinel:
            setattr(os, 'link', saved)
|
||||
|
||||
def add_defaults(self):
    """Extend distutils' defaults with files from custom build sub-commands."""
    super().add_defaults()
    self._add_defaults_build_sub_commands()
|
||||
|
||||
def _add_defaults_optional(self):
    super()._add_defaults_optional()
    # pyproject.toml belongs in every sdist that has one.
    if os.path.isfile('pyproject.toml'):
        self.filelist.append('pyproject.toml')
|
||||
|
||||
def _add_defaults_python(self):
    """getting python files"""
    if self.distribution.has_pure_modules():
        build_py = self.get_finalized_command('build_py')
        self.filelist.extend(build_py.get_source_files())
        # Use the recursion-safe data-file accessor (see _safe_data_files).
        self._add_data_files(self._safe_data_files(build_py))
|
||||
|
||||
def _add_defaults_build_sub_commands(self):
    """Include sources tracked by custom build sub-commands.

    Any build sub-command not in the original built-in set that exposes
    ``get_source_files`` contributes its files to the manifest.
    """
    build = self.get_finalized_command("build")
    missing_cmds = set(build.get_sub_commands()) - _ORIGINAL_SUBCOMMANDS
    # ^-- the original built-in sub-commands are already handled by default.
    cmds = (self.get_finalized_command(c) for c in missing_cmds)
    files = (c.get_source_files() for c in cmds if hasattr(c, "get_source_files"))
    self.filelist.extend(chain.from_iterable(files))
|
||||
|
||||
def _safe_data_files(self, build_py):
|
||||
"""
|
||||
Since the ``sdist`` class is also used to compute the MANIFEST
|
||||
(via :obj:`setuptools.command.egg_info.manifest_maker`),
|
||||
there might be recursion problems when trying to obtain the list of
|
||||
data_files and ``include_package_data=True`` (which in turn depends on
|
||||
the files included in the MANIFEST).
|
||||
|
||||
To avoid that, ``manifest_maker`` should be able to overwrite this
|
||||
method and avoid recursive attempts to build/analyze the MANIFEST.
|
||||
"""
|
||||
return build_py.data_files
|
||||
|
||||
def _add_data_files(self, data_files):
|
||||
"""
|
||||
Add data files as found in build_py.data_files.
|
||||
"""
|
||||
self.filelist.extend(
|
||||
os.path.join(src_dir, name)
|
||||
for _, src_dir, _, filenames in data_files
|
||||
for name in filenames
|
||||
)
|
||||
|
||||
def _add_defaults_data_files(self):
    """Add default data files, tolerating malformed ``data_files`` entries.

    Some projects declare ``data_files`` entries the parent implementation
    cannot unpack; warn instead of crashing the whole sdist.
    """
    try:
        super()._add_defaults_data_files()
    except TypeError:
        log.warn("data_files contains unexpected objects")
|
||||
|
||||
def check_readme(self):
    """Warn when none of the recognized README files is present."""
    if not any(os.path.exists(f) for f in self.READMES):
        self.warn(
            "standard file not found: should have one of " + ', '.join(self.READMES)
        )
|
||||
|
||||
def make_release_tree(self, base_dir, files):
    """Build the release tree, then record egg_info options in its setup.cfg."""
    orig.sdist.make_release_tree(self, base_dir, files)

    # Save any egg_info command line options used to create this sdist
    dest = os.path.join(base_dir, 'setup.cfg')
    if hasattr(os, 'link') and os.path.exists(dest):
        # unlink and re-copy, since it might be hard-linked, and
        # we don't want to change the source version
        os.unlink(dest)
        self.copy_file('setup.cfg', dest)

    self.get_finalized_command('egg_info').save_version_info(dest)
|
||||
|
||||
def _manifest_is_not_generated(self):
|
||||
# check for special comment used in 2.7.1 and higher
|
||||
if not os.path.isfile(self.manifest):
|
||||
return False
|
||||
|
||||
with io.open(self.manifest, 'rb') as fp:
|
||||
first_line = fp.readline()
|
||||
return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
|
||||
|
||||
def read_manifest(self):
    """Read the manifest file (named by 'self.manifest') and use it to
    fill in 'self.filelist', the list of files to include in the source
    distribution.
    """
    log.info("reading manifest file '%s'", self.manifest)
    # Use a context manager so the handle is closed even if an exception
    # escapes the loop (the original open()/close() pair leaked it).
    with open(self.manifest, 'rb') as manifest:
        for line in manifest:
            # The manifest must contain UTF-8. See #303.
            try:
                line = line.decode('UTF-8')
            except UnicodeDecodeError:
                log.warn("%r not UTF-8 decodable -- skipping" % line)
                continue
            # ignore comments and blank lines
            line = line.strip()
            if line.startswith('#') or not line:
                continue
            self.filelist.append(line)
|
|
@ -0,0 +1,138 @@
|
|||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
import distutils
|
||||
import os
|
||||
import configparser
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
|
||||
|
||||
|
||||
def config_file(kind="local"):
    """Get the filename of the distutils, local, global, or per-user config

    `kind` must be one of "local", "global", or "user"
    """
    if kind == 'local':
        return 'setup.cfg'
    if kind == 'global':
        return os.path.join(os.path.dirname(distutils.__file__), 'distutils.cfg')
    if kind == 'user':
        # POSIX convention hides the file with a leading dot.
        dot = '.' if os.name == 'posix' else ''
        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
    raise ValueError("config_file() type must be 'local', 'global', or 'user'", kind)
|
||||
|
||||
|
||||
def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
    command/section name. A ``None`` value means to delete the entire section,
    while a dictionary lists settings to be changed or deleted in that section.
    A setting of ``None`` means to delete that setting.
    """
    log.debug("Reading configuration from %s", filename)
    opts = configparser.RawConfigParser()
    # Preserve option-name case; the default optionxform lowercases keys.
    opts.optionxform = lambda x: x
    opts.read([filename])

    for section, options in settings.items():
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
            continue
        if not opts.has_section(section):
            log.debug("Adding new section [%s] to %s", section, filename)
            opts.add_section(section)
        for option, value in options.items():
            if value is not None:
                log.debug(
                    "Setting %s.%s to %r in %s", section, option, value, filename
                )
                opts.set(section, option, value)
            else:
                log.debug("Deleting %s.%s from %s", section, option, filename)
                opts.remove_option(section, option)
                if not opts.options(section):
                    log.info(
                        "Deleting empty [%s] section from %s", section, filename
                    )
                    opts.remove_section(section)

    log.info("Writing %s", filename)
    if not dry_run:
        with open(filename, 'w') as f:
            opts.write(f)
|
||||
|
||||
|
||||
class option_base(Command):
    """Abstract base class for commands that mess with config files"""

    user_options = [
        ('global-config', 'g', "save options to the site-wide distutils.cfg file"),
        ('user-config', 'u', "save options to the current user's pydistutils.cfg file"),
        ('filename=', 'f', "configuration file to use (default=setup.cfg)"),
    ]

    boolean_options = [
        'global-config',
        'user-config',
    ]

    def initialize_options(self):
        # Exactly one of these may be selected; finalize_options resolves
        # them into a single target in self.filename.
        self.global_config = None
        self.user_config = None
        self.filename = None

    def finalize_options(self):
        """Resolve the mutually exclusive target options to one filename.

        Raises DistutilsOptionError when more than one target was given;
        defaults to the local setup.cfg when none was.
        """
        filenames = []
        if self.global_config:
            filenames.append(config_file('global'))
        if self.user_config:
            filenames.append(config_file('user'))
        if self.filename is not None:
            filenames.append(self.filename)
        if not filenames:
            filenames.append(config_file('local'))
        if len(filenames) > 1:
            raise DistutilsOptionError(
                "Must specify only one configuration file option", filenames
            )
        (self.filename,) = filenames
|
||||
|
||||
|
||||
class setopt(option_base):
    """Save command-line options to a file"""

    description = "set an option in setup.cfg or another config file"

    user_options = [
        ('command=', 'c', 'command to set an option for'),
        ('option=', 'o', 'option to set'),
        ('set-value=', 's', 'value of the option'),
        ('remove', 'r', 'remove (unset) the value'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.command = None
        self.option = None
        self.set_value = None
        self.remove = None

    def finalize_options(self):
        """Validate that a command/option pair and an action were given."""
        option_base.finalize_options(self)
        if self.command is None or self.option is None:
            raise DistutilsOptionError("Must specify --command *and* --option")
        if self.set_value is None and not self.remove:
            raise DistutilsOptionError("Must specify --set-value or --remove")

    def run(self):
        # A None value (from --remove) tells edit_config to delete the
        # option; dashes become underscores to match config-file keys.
        edit_config(
            self.filename,
            {self.command: {self.option.replace('-', '_'): self.set_value}},
            self.dry_run,
        )
|
250
elitebot/lib/python3.11/site-packages/setuptools/command/test.py
Normal file
250
elitebot/lib/python3.11/site-packages/setuptools/command/test.py
Normal file
|
@ -0,0 +1,250 @@
|
|||
import os
|
||||
import operator
|
||||
import sys
|
||||
import contextlib
|
||||
import itertools
|
||||
import unittest
|
||||
from distutils.errors import DistutilsError, DistutilsOptionError
|
||||
from distutils import log
|
||||
from unittest import TestLoader
|
||||
|
||||
from pkg_resources import (
|
||||
resource_listdir,
|
||||
resource_exists,
|
||||
normalize_path,
|
||||
working_set,
|
||||
evaluate_marker,
|
||||
add_activation_listener,
|
||||
require,
|
||||
)
|
||||
from .._importlib import metadata
|
||||
from setuptools import Command
|
||||
from setuptools.extern.more_itertools import unique_everseen
|
||||
from setuptools.extern.jaraco.functools import pass_none
|
||||
|
||||
|
||||
class ScanningLoader(TestLoader):
    """TestLoader that recursively scans packages for test modules."""

    def __init__(self):
        TestLoader.__init__(self)
        # Guards against loading the same module twice (and against
        # infinite recursion through circular package references).
        self._visited = set()

    def loadTestsFromModule(self, module, pattern=None):
        """Return a suite of all tests cases contained in the given module

        If the module is a package, load tests from all the modules in it.
        If the module has an ``additional_tests`` function, call it and add
        the return value to the tests.
        """
        if module in self._visited:
            return None
        self._visited.add(module)

        tests = []
        tests.append(TestLoader.loadTestsFromModule(self, module))

        if hasattr(module, "additional_tests"):
            tests.append(module.additional_tests())

        # Packages (anything with __path__) are scanned one entry at a time:
        # *.py files become submodules; subdirectories count only when they
        # contain an __init__.py.
        if hasattr(module, '__path__'):
            for file in resource_listdir(module.__name__, ''):
                if file.endswith('.py') and file != '__init__.py':
                    submodule = module.__name__ + '.' + file[:-3]
                else:
                    if resource_exists(module.__name__, file + '/__init__.py'):
                        submodule = module.__name__ + '.' + file
                    else:
                        continue
                tests.append(self.loadTestsFromName(submodule))

        if len(tests) != 1:
            return self.suiteClass(tests)
        else:
            return tests[0]  # don't create a nested suite for only one return
|
||||
|
||||
|
||||
# adapted from jaraco.classes.properties:NonDataProperty
|
||||
class NonDataProperty:
    """A read-only non-data descriptor.

    Class-level access returns the descriptor itself; instance access
    invokes the wrapped function. Because it defines no ``__set__``, an
    instance attribute of the same name shadows it (unlike ``property``).
    Adapted from jaraco.classes.properties:NonDataProperty.
    """

    def __init__(self, fget):
        self.fget = fget

    def __get__(self, obj, objtype=None):
        return self if obj is None else self.fget(obj)
|
||||
|
||||
|
||||
class test(Command):
    """Command to run unit tests after in-place build"""

    description = "run unit tests after in-place build (deprecated)"

    user_options = [
        ('test-module=', 'm', "Run 'test_suite' in specified module"),
        (
            'test-suite=',
            's',
            "Run single test, case or suite (e.g. 'module.test_suite')",
        ),
        ('test-runner=', 'r', "Test runner to use"),
    ]

    def initialize_options(self):
        self.test_suite = None
        self.test_module = None
        self.test_loader = None
        self.test_runner = None

    def finalize_options(self):
        """Resolve suite/loader/runner, falling back to distribution settings."""
        if self.test_suite and self.test_module:
            msg = "You may specify a module or a suite, but not both"
            raise DistutilsOptionError(msg)

        if self.test_suite is None:
            if self.test_module is None:
                self.test_suite = self.distribution.test_suite
            else:
                self.test_suite = self.test_module + ".test_suite"

        if self.test_loader is None:
            self.test_loader = getattr(self.distribution, 'test_loader', None)
        if self.test_loader is None:
            self.test_loader = "setuptools.command.test:ScanningLoader"
        if self.test_runner is None:
            self.test_runner = getattr(self.distribution, 'test_runner', None)

    # NonDataProperty: computed on instance access, but overridable by
    # simply assigning an instance attribute of the same name.
    @NonDataProperty
    def test_args(self):
        return list(self._test_args())

    def _test_args(self):
        # Yields the argv tail passed to unittest.main: 'discover' when no
        # suite was configured, the suite name when one was.
        if not self.test_suite:
            yield 'discover'
        if self.verbose:
            yield '--verbose'
        if self.test_suite:
            yield self.test_suite

    def with_project_on_sys_path(self, func):
        """
        Backward compatibility for project_on_sys_path context.
        """
        with self.project_on_sys_path():
            func()

    # NOTE(review): mutable default argument; include_dists is never read
    # in this body — appears kept only for API compatibility. TODO confirm.
    @contextlib.contextmanager
    def project_on_sys_path(self, include_dists=[]):
        """Build the project in-place and make it importable for the
        duration of the context, restoring sys.path/sys.modules after."""
        self.run_command('egg_info')

        # Build extensions in-place
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')

        ei_cmd = self.get_finalized_command("egg_info")

        # Snapshot interpreter state so the context can restore it exactly.
        old_path = sys.path[:]
        old_modules = sys.modules.copy()

        try:
            project_path = normalize_path(ei_cmd.egg_base)
            sys.path.insert(0, project_path)
            working_set.__init__()
            add_activation_listener(lambda dist: dist.activate())
            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
            with self.paths_on_pythonpath([project_path]):
                yield
        finally:
            sys.path[:] = old_path
            sys.modules.clear()
            sys.modules.update(old_modules)
            working_set.__init__()

    @staticmethod
    @contextlib.contextmanager
    def paths_on_pythonpath(paths):
        """
        Add the indicated paths to the head of the PYTHONPATH environment
        variable so that subprocesses will also see the packages at
        these paths.

        Do this in a context that restores the value on exit.
        """
        # Sentinel distinguishes "PYTHONPATH unset" from "set but empty".
        nothing = object()
        orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
        current_pythonpath = os.environ.get('PYTHONPATH', '')
        try:
            prefix = os.pathsep.join(unique_everseen(paths))
            to_join = filter(None, [prefix, current_pythonpath])
            new_path = os.pathsep.join(to_join)
            if new_path:
                os.environ['PYTHONPATH'] = new_path
            yield
        finally:
            if orig_pythonpath is nothing:
                os.environ.pop('PYTHONPATH', None)
            else:
                os.environ['PYTHONPATH'] = orig_pythonpath

    @staticmethod
    def install_dists(dist):
        """
        Install the requirements indicated by self.distribution and
        return an iterable of the dists that were built.
        """
        ir_d = dist.fetch_build_eggs(dist.install_requires)
        tr_d = dist.fetch_build_eggs(dist.tests_require or [])
        # Extras whose name is an environment marker (':marker') are
        # installed when the marker evaluates true for this interpreter.
        er_d = dist.fetch_build_eggs(
            v
            for k, v in dist.extras_require.items()
            if k.startswith(':') and evaluate_marker(k[1:])
        )
        return itertools.chain(ir_d, tr_d, er_d)

    def run(self):
        """Install dependencies, build in place, then run the test suite."""
        self.announce(
            "WARNING: Testing via this command is deprecated and will be "
            "removed in a future version. Users looking for a generic test "
            "entry point independent of test runner are encouraged to use "
            "tox.",
            log.WARN,
        )

        installed_dists = self.install_dists(self.distribution)

        cmd = ' '.join(self._argv)
        if self.dry_run:
            self.announce('skipping "%s" (dry run)' % cmd)
            return

        self.announce('running "%s"' % cmd)

        paths = map(operator.attrgetter('location'), installed_dists)
        with self.paths_on_pythonpath(paths):
            with self.project_on_sys_path():
                self.run_tests()

    def run_tests(self):
        """Invoke unittest.main with the configured loader/runner and
        convert a failed result into a DistutilsError."""
        test = unittest.main(
            None,
            None,
            self._argv,
            testLoader=self._resolve_as_ep(self.test_loader),
            testRunner=self._resolve_as_ep(self.test_runner),
            exit=False,
        )
        if not test.result.wasSuccessful():
            msg = 'Test failed: %s' % test.result
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)

    @property
    def _argv(self):
        # argv for unittest.main: program name plus the computed test args.
        return ['unittest'] + self.test_args

    # pass_none: returns None unchanged when val is None.
    @staticmethod
    @pass_none
    def _resolve_as_ep(val):
        """
        Load the indicated attribute value, called, as if it were
        specified as an entry point.
        """
        return metadata.EntryPoint(value=val, name=None, group=None).load()()
|
|
@ -0,0 +1,17 @@
|
|||
from distutils import log
|
||||
from distutils.command import upload as orig
|
||||
|
||||
from setuptools.errors import RemovedCommandError
|
||||
|
||||
|
||||
class upload(orig.upload):
    """Formerly used to upload packages to PyPI."""

    def run(self):
        """Always fail: the upload functionality was removed from setuptools.

        Raises RemovedCommandError pointing users at twine.
        """
        msg = (
            "The upload command has been removed, use twine to upload "
            + "instead (https://pypi.org/p/twine)"
        )

        self.announce("ERROR: " + msg, log.ERROR)
        raise RemovedCommandError(msg)
|
|
@ -0,0 +1,222 @@
|
|||
"""upload_docs
|
||||
|
||||
Implements a Distutils 'upload_docs' subcommand (upload documentation to
|
||||
sites other than PyPi such as devpi).
|
||||
"""
|
||||
|
||||
from base64 import standard_b64encode
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
import os
|
||||
import socket
|
||||
import zipfile
|
||||
import tempfile
|
||||
import shutil
|
||||
import itertools
|
||||
import functools
|
||||
import http.client
|
||||
import urllib.parse
|
||||
|
||||
from .._importlib import metadata
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
|
||||
from .upload import upload
|
||||
|
||||
|
||||
def _encode(s):
|
||||
return s.encode('utf-8', 'surrogateescape')
|
||||
|
||||
|
||||
class upload_docs(upload):
    """Deprecated command that zips built docs and POSTs them to a server."""

    # override the default repository as upload_docs isn't
    # supported by Warehouse (and won't be).
    DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'

    description = 'Upload documentation to sites other than PyPi such as devpi'

    user_options = [
        (
            'repository=',
            'r',
            "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY,
        ),
        ('show-response', None, 'display full response text from server'),
        ('upload-dir=', None, 'directory to upload'),
    ]
    boolean_options = upload.boolean_options

    def has_sphinx(self):
        # True only when no explicit upload dir was given AND a
        # 'build_sphinx' command is registered via entry points.
        return bool(
            self.upload_dir is None
            and metadata.entry_points(group='distutils.commands', name='build_sphinx')
        )

    sub_commands = [('build_sphinx', has_sphinx)]

    def initialize_options(self):
        upload.initialize_options(self)
        self.upload_dir = None
        self.target_dir = None

    def finalize_options(self):
        """Pick the directory to upload: explicit dir, Sphinx output, or
        <build_base>/docs, in that order of preference."""
        log.warn(
            "Upload_docs command is deprecated. Use Read the Docs "
            "(https://readthedocs.org) instead."
        )
        upload.finalize_options(self)
        if self.upload_dir is None:
            if self.has_sphinx():
                build_sphinx = self.get_finalized_command('build_sphinx')
                self.target_dir = dict(build_sphinx.builder_target_dirs)['html']
            else:
                build = self.get_finalized_command('build')
                self.target_dir = os.path.join(build.build_base, 'docs')
        else:
            self.ensure_dirname('upload_dir')
            self.target_dir = self.upload_dir
        self.announce('Using upload directory %s' % self.target_dir)

    def create_zipfile(self, filename):
        """Zip the contents of target_dir into *filename*, with archive
        paths relative to target_dir.

        Raises DistutilsOptionError when the top-level dir has no files.
        """
        zip_file = zipfile.ZipFile(filename, "w")
        try:
            self.mkpath(self.target_dir)  # just in case
            for root, dirs, files in os.walk(self.target_dir):
                if root == self.target_dir and not files:
                    tmpl = "no files found in upload directory '%s'"
                    raise DistutilsOptionError(tmpl % self.target_dir)
                for name in files:
                    full = os.path.join(root, name)
                    # Strip the target_dir prefix so archive paths are relative.
                    relative = root[len(self.target_dir) :].lstrip(os.path.sep)
                    dest = os.path.join(relative, name)
                    zip_file.write(full, dest)
        finally:
            zip_file.close()

    def run(self):
        """Build docs (via sub-commands), zip them in a temp dir, upload."""
        SetuptoolsDeprecationWarning.emit(
            "Deprecated command",
            """
            upload_docs is deprecated and will be removed in a future version.
            Instead, use tools like devpi and Read the Docs; or lower level tools like
            httpie and curl to interact directly with your hosting service API.
            """,
            due_date=(2023, 9, 26),  # warning introduced in 27 Jul 2022
        )

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        tmp_dir = tempfile.mkdtemp()
        name = self.distribution.metadata.get_name()
        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
        try:
            self.create_zipfile(zip_file)
            self.upload_file(zip_file)
        finally:
            # Always remove the temp dir, even if the upload failed.
            shutil.rmtree(tmp_dir)

    @staticmethod
    def _build_part(item, sep_boundary):
        """Yield the byte chunks of one multipart/form-data part."""
        key, values = item
        title = '\nContent-Disposition: form-data; name="%s"' % key
        # handle multiple entries for the same name
        if not isinstance(values, list):
            values = [values]
        for value in values:
            if isinstance(value, tuple):
                # (filename, raw bytes) pair — file upload field.
                title += '; filename="%s"' % value[0]
                value = value[1]
            else:
                value = _encode(value)
            yield sep_boundary
            yield _encode(title)
            yield b"\n\n"
            yield value
            if value and value[-1:] == b'\r':
                yield b'\n'  # write an extra newline (lurve Macs)

    @classmethod
    def _build_multipart(cls, data):
        """
        Build up the MIME payload for the POST data
        """
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--'
        end_items = (
            end_boundary,
            b"\n",
        )
        builder = functools.partial(
            cls._build_part,
            sep_boundary=sep_boundary,
        )
        part_groups = map(builder, data.items())
        parts = itertools.chain.from_iterable(part_groups)
        body_items = itertools.chain(parts, end_items)
        content_type = 'multipart/form-data; boundary=%s' % boundary
        return b''.join(body_items), content_type

    def upload_file(self, filename):
        """POST *filename* to the configured repository with Basic auth.

        Announces (rather than raises) on socket errors and non-2xx/301
        responses.
        """
        with open(filename, 'rb') as f:
            content = f.read()
        meta = self.distribution.metadata
        data = {
            ':action': 'doc_upload',
            'name': meta.get_name(),
            'content': (os.path.basename(filename), content),
        }
        # set up the authentication
        credentials = _encode(self.username + ':' + self.password)
        credentials = standard_b64encode(credentials).decode('ascii')
        auth = "Basic " + credentials

        body, ct = self._build_multipart(data)

        msg = "Submitting documentation to %s" % (self.repository)
        self.announce(msg, log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = urllib.parse.urlparse(
            self.repository
        )
        assert not params and not query and not fragments
        if schema == 'http':
            conn = http.client.HTTPConnection(netloc)
        elif schema == 'https':
            conn = http.client.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported schema " + schema)

        data = ''
        try:
            conn.connect()
            conn.putrequest("POST", url)
            content_type = ct
            conn.putheader('Content-type', content_type)
            conn.putheader('Content-length', str(len(body)))
            conn.putheader('Authorization', auth)
            conn.endheaders()
            conn.send(body)
        except socket.error as e:
            self.announce(str(e), log.ERROR)
            return

        r = conn.getresponse()
        if r.status == 200:
            msg = 'Server response (%s): %s' % (r.status, r.reason)
            self.announce(msg, log.INFO)
        elif r.status == 301:
            # Legacy pythonhosted redirect — treat as success.
            location = r.getheader('Location')
            if location is None:
                location = 'https://pythonhosted.org/%s/' % meta.get_name()
            msg = 'Upload successful. Visit %s' % location
            self.announce(msg, log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (r.status, r.reason)
            self.announce(msg, log.ERROR)
        if self.show_response:
            print('-' * 75, r.read(), '-' * 75)
|
Loading…
Add table
Add a link
Reference in a new issue