Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- .venv/Lib/site-packages/scipy/special/cython_special.cp39-win_amd64.pyd +3 -0
- .venv/Lib/site-packages/scipy/special/tests/data/boost.npz +3 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/__init__.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_sampling.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_sensitivity_analysis.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_stats_mstats_common.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_stats_py.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_survival.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_variation.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/__pycache__/_wilcoxon.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/scipy/stats/_boost/beta_ufunc.cp39-win_amd64.pyd +3 -0
- .venv/Lib/site-packages/scipy/stats/tests/data/levy_stable/stable-Z1-cdf-sample-data.npy +3 -0
- .venv/Lib/site-packages/scipy/stats/tests/data/levy_stable/stable-Z1-pdf-sample-data.npy +3 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-39.pyc +0 -0
- .venv/Lib/site-packages/setuptools/_distutils/archive_util.py +264 -0
- .venv/Lib/site-packages/setuptools/_distutils/ccompiler.py +1256 -0
- .venv/Lib/site-packages/setuptools/_distutils/cmd.py +439 -0
- .venv/Lib/site-packages/setuptools/_distutils/core.py +286 -0
- .venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py +339 -0
- .venv/Lib/site-packages/setuptools/_distutils/debug.py +5 -0
- .venv/Lib/site-packages/setuptools/_distutils/dep_util.py +14 -0
- .venv/Lib/site-packages/setuptools/_distutils/dir_util.py +244 -0
- .venv/Lib/site-packages/setuptools/_distutils/dist.py +1288 -0
- .venv/Lib/site-packages/setuptools/_distutils/errors.py +124 -0
- .venv/Lib/site-packages/setuptools/_distutils/extension.py +247 -0
- .venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py +469 -0
- .venv/Lib/site-packages/setuptools/_distutils/file_util.py +236 -0
- .venv/Lib/site-packages/setuptools/_distutils/filelist.py +369 -0
- .venv/Lib/site-packages/setuptools/_distutils/log.py +56 -0
- .venv/Lib/site-packages/setuptools/_distutils/tests/test_install_data.py +74 -0
- .venv/Lib/site-packages/setuptools/_distutils/tests/test_install_headers.py +33 -0
- .venv/Lib/site-packages/setuptools/_distutils/tests/test_install_lib.py +110 -0
- .venv/Lib/site-packages/setuptools/compat/__init__.py +0 -0
- .venv/Lib/site-packages/setuptools/compat/py312.py +13 -0
- .venv/Lib/site-packages/setuptools/compat/py39.py +9 -0
- .venv/Lib/site-packages/setuptools/config/distutils.schema.json +26 -0
- .venv/Lib/site-packages/setuptools/config/expand.py +449 -0
- .venv/Lib/site-packages/setuptools/tests/config/downloads/__init__.py +59 -0
.gitattributes
CHANGED
@@ -101,3 +101,5 @@ reference_sample_wavs/syuukovoice_200918_3_01.wav filter=lfs diff=lfs merge=lfs
|
|
101 |
.venv/Lib/site-packages/scipy/spatial/_qhull.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
102 |
.venv/Lib/site-packages/scipy/special/_ufuncs_cxx.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
103 |
.venv/Lib/site-packages/scipy/stats/_stats_pythran.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
101 |
.venv/Lib/site-packages/scipy/spatial/_qhull.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
102 |
.venv/Lib/site-packages/scipy/special/_ufuncs_cxx.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
103 |
.venv/Lib/site-packages/scipy/stats/_stats_pythran.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
104 |
+
.venv/Lib/site-packages/scipy/special/cython_special.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
105 |
+
.venv/Lib/site-packages/scipy/stats/_boost/beta_ufunc.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text
|
.venv/Lib/site-packages/scipy/special/cython_special.cp39-win_amd64.pyd
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:36da971afb728bfe904cea6e2afeb4b8839e1d3a0c4a7d141615fa002bda05bf
|
3 |
+
size 2892288
|
.venv/Lib/site-packages/scipy/special/tests/data/boost.npz
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b6f221c1a34fcd2af426c6564a2714ab671ea6584cf2a169d01dd93625fc4902
|
3 |
+
size 1270643
|
.venv/Lib/site-packages/scipy/stats/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (18.6 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_sampling.cpython-39.pyc
ADDED
Binary file (43.1 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_sensitivity_analysis.cpython-39.pyc
ADDED
Binary file (23.6 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_mstats_common.cpython-39.pyc
ADDED
Binary file (16.3 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_stats_py.cpython-39.pyc
ADDED
Binary file (373 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_survival.cpython-39.pyc
ADDED
Binary file (22.8 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-39.pyc
ADDED
Binary file (3.11 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_variation.cpython-39.pyc
ADDED
Binary file (4.42 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-39.pyc
ADDED
Binary file (1.9 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/__pycache__/_wilcoxon.cpython-39.pyc
ADDED
Binary file (7.12 kB). View file
|
|
.venv/Lib/site-packages/scipy/stats/_boost/beta_ufunc.cp39-win_amd64.pyd
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ef4088a6145708570c814a31824e46cfabeb0993ca42211cf25bfc57b8feeba3
|
3 |
+
size 1060352
|
.venv/Lib/site-packages/scipy/stats/tests/data/levy_stable/stable-Z1-cdf-sample-data.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:cf18c1f2d65a232bf2c7121282df31bf2a8be827afafc4ed810ed37457ee898a
|
3 |
+
size 183728
|
.venv/Lib/site-packages/scipy/stats/tests/data/levy_stable/stable-Z1-pdf-sample-data.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:fee99512bab4ccc6569b47b924e4b034e1cdbab5624fafc7e120648bd5f7a128
|
3 |
+
size 183688
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-39.pyc
ADDED
Binary file (7.14 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-39.pyc
ADDED
Binary file (256 Bytes). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-39.pyc
ADDED
Binary file (7.24 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-39.pyc
ADDED
Binary file (36.1 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-39.pyc
ADDED
Binary file (5.05 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-39.pyc
ADDED
Binary file (7.09 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-39.pyc
ADDED
Binary file (10.8 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-39.pyc
ADDED
Binary file (6.1 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-39.pyc
ADDED
Binary file (10.8 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-39.pyc
ADDED
Binary file (1.69 kB). View file
|
|
.venv/Lib/site-packages/setuptools/_distutils/archive_util.py
ADDED
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""distutils.archive_util
|
2 |
+
|
3 |
+
Utility functions for creating archive files (tarballs, zip files,
|
4 |
+
that sort of thing)."""
|
5 |
+
|
6 |
+
import os
|
7 |
+
|
8 |
+
try:
|
9 |
+
import zipfile
|
10 |
+
except ImportError:
|
11 |
+
zipfile = None
|
12 |
+
|
13 |
+
|
14 |
+
from ._log import log
|
15 |
+
from .dir_util import mkpath
|
16 |
+
from .errors import DistutilsExecError
|
17 |
+
from .spawn import spawn
|
18 |
+
|
19 |
+
try:
|
20 |
+
from pwd import getpwnam
|
21 |
+
except ImportError:
|
22 |
+
getpwnam = None
|
23 |
+
|
24 |
+
try:
|
25 |
+
from grp import getgrnam
|
26 |
+
except ImportError:
|
27 |
+
getgrnam = None
|
28 |
+
|
29 |
+
|
30 |
+
def _get_gid(name):
|
31 |
+
"""Returns a gid, given a group name."""
|
32 |
+
if getgrnam is None or name is None:
|
33 |
+
return None
|
34 |
+
try:
|
35 |
+
result = getgrnam(name)
|
36 |
+
except KeyError:
|
37 |
+
result = None
|
38 |
+
if result is not None:
|
39 |
+
return result[2]
|
40 |
+
return None
|
41 |
+
|
42 |
+
|
43 |
+
def _get_uid(name):
|
44 |
+
"""Returns an uid, given a user name."""
|
45 |
+
if getpwnam is None or name is None:
|
46 |
+
return None
|
47 |
+
try:
|
48 |
+
result = getpwnam(name)
|
49 |
+
except KeyError:
|
50 |
+
result = None
|
51 |
+
if result is not None:
|
52 |
+
return result[2]
|
53 |
+
return None
|
54 |
+
|
55 |
+
|
56 |
+
def make_tarball(
|
57 |
+
base_name,
|
58 |
+
base_dir,
|
59 |
+
compress="gzip",
|
60 |
+
verbose=False,
|
61 |
+
dry_run=False,
|
62 |
+
owner=None,
|
63 |
+
group=None,
|
64 |
+
):
|
65 |
+
"""Create a (possibly compressed) tar file from all the files under
|
66 |
+
'base_dir'.
|
67 |
+
|
68 |
+
'compress' must be "gzip" (the default), "bzip2", "xz", or None.
|
69 |
+
|
70 |
+
'owner' and 'group' can be used to define an owner and a group for the
|
71 |
+
archive that is being built. If not provided, the current owner and group
|
72 |
+
will be used.
|
73 |
+
|
74 |
+
The output tar file will be named 'base_dir' + ".tar", possibly plus
|
75 |
+
the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z").
|
76 |
+
|
77 |
+
Returns the output filename.
|
78 |
+
"""
|
79 |
+
tar_compression = {
|
80 |
+
'gzip': 'gz',
|
81 |
+
'bzip2': 'bz2',
|
82 |
+
'xz': 'xz',
|
83 |
+
None: '',
|
84 |
+
}
|
85 |
+
compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz'}
|
86 |
+
|
87 |
+
# flags for compression program, each element of list will be an argument
|
88 |
+
if compress is not None and compress not in compress_ext.keys():
|
89 |
+
raise ValueError(
|
90 |
+
"bad value for 'compress': must be None, 'gzip', 'bzip2', 'xz'"
|
91 |
+
)
|
92 |
+
|
93 |
+
archive_name = base_name + '.tar'
|
94 |
+
archive_name += compress_ext.get(compress, '')
|
95 |
+
|
96 |
+
mkpath(os.path.dirname(archive_name), dry_run=dry_run)
|
97 |
+
|
98 |
+
# creating the tarball
|
99 |
+
import tarfile # late import so Python build itself doesn't break
|
100 |
+
|
101 |
+
log.info('Creating tar archive')
|
102 |
+
|
103 |
+
uid = _get_uid(owner)
|
104 |
+
gid = _get_gid(group)
|
105 |
+
|
106 |
+
def _set_uid_gid(tarinfo):
|
107 |
+
if gid is not None:
|
108 |
+
tarinfo.gid = gid
|
109 |
+
tarinfo.gname = group
|
110 |
+
if uid is not None:
|
111 |
+
tarinfo.uid = uid
|
112 |
+
tarinfo.uname = owner
|
113 |
+
return tarinfo
|
114 |
+
|
115 |
+
if not dry_run:
|
116 |
+
tar = tarfile.open(archive_name, f'w|{tar_compression[compress]}')
|
117 |
+
try:
|
118 |
+
tar.add(base_dir, filter=_set_uid_gid)
|
119 |
+
finally:
|
120 |
+
tar.close()
|
121 |
+
|
122 |
+
return archive_name
|
123 |
+
|
124 |
+
|
125 |
+
def make_zipfile(base_name, base_dir, verbose=False, dry_run=False): # noqa: C901
|
126 |
+
"""Create a zip file from all the files under 'base_dir'.
|
127 |
+
|
128 |
+
The output zip file will be named 'base_name' + ".zip". Uses either the
|
129 |
+
"zipfile" Python module (if available) or the InfoZIP "zip" utility
|
130 |
+
(if installed and found on the default search path). If neither tool is
|
131 |
+
available, raises DistutilsExecError. Returns the name of the output zip
|
132 |
+
file.
|
133 |
+
"""
|
134 |
+
zip_filename = base_name + ".zip"
|
135 |
+
mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
|
136 |
+
|
137 |
+
# If zipfile module is not available, try spawning an external
|
138 |
+
# 'zip' command.
|
139 |
+
if zipfile is None:
|
140 |
+
if verbose:
|
141 |
+
zipoptions = "-r"
|
142 |
+
else:
|
143 |
+
zipoptions = "-rq"
|
144 |
+
|
145 |
+
try:
|
146 |
+
spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
|
147 |
+
except DistutilsExecError:
|
148 |
+
# XXX really should distinguish between "couldn't find
|
149 |
+
# external 'zip' command" and "zip failed".
|
150 |
+
raise DistutilsExecError(
|
151 |
+
f"unable to create zip file '{zip_filename}': "
|
152 |
+
"could neither import the 'zipfile' module nor "
|
153 |
+
"find a standalone zip utility"
|
154 |
+
)
|
155 |
+
|
156 |
+
else:
|
157 |
+
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
|
158 |
+
|
159 |
+
if not dry_run:
|
160 |
+
try:
|
161 |
+
zip = zipfile.ZipFile(
|
162 |
+
zip_filename, "w", compression=zipfile.ZIP_DEFLATED
|
163 |
+
)
|
164 |
+
except RuntimeError:
|
165 |
+
zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED)
|
166 |
+
|
167 |
+
with zip:
|
168 |
+
if base_dir != os.curdir:
|
169 |
+
path = os.path.normpath(os.path.join(base_dir, ''))
|
170 |
+
zip.write(path, path)
|
171 |
+
log.info("adding '%s'", path)
|
172 |
+
for dirpath, dirnames, filenames in os.walk(base_dir):
|
173 |
+
for name in dirnames:
|
174 |
+
path = os.path.normpath(os.path.join(dirpath, name, ''))
|
175 |
+
zip.write(path, path)
|
176 |
+
log.info("adding '%s'", path)
|
177 |
+
for name in filenames:
|
178 |
+
path = os.path.normpath(os.path.join(dirpath, name))
|
179 |
+
if os.path.isfile(path):
|
180 |
+
zip.write(path, path)
|
181 |
+
log.info("adding '%s'", path)
|
182 |
+
|
183 |
+
return zip_filename
|
184 |
+
|
185 |
+
|
186 |
+
ARCHIVE_FORMATS = {
|
187 |
+
'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
|
188 |
+
'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
|
189 |
+
'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
|
190 |
+
'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"),
|
191 |
+
'tar': (make_tarball, [('compress', None)], "uncompressed tar file"),
|
192 |
+
'zip': (make_zipfile, [], "ZIP file"),
|
193 |
+
}
|
194 |
+
|
195 |
+
|
196 |
+
def check_archive_formats(formats):
|
197 |
+
"""Returns the first format from the 'format' list that is unknown.
|
198 |
+
|
199 |
+
If all formats are known, returns None
|
200 |
+
"""
|
201 |
+
for format in formats:
|
202 |
+
if format not in ARCHIVE_FORMATS:
|
203 |
+
return format
|
204 |
+
return None
|
205 |
+
|
206 |
+
|
207 |
+
def make_archive(
|
208 |
+
base_name,
|
209 |
+
format,
|
210 |
+
root_dir=None,
|
211 |
+
base_dir=None,
|
212 |
+
verbose=False,
|
213 |
+
dry_run=False,
|
214 |
+
owner=None,
|
215 |
+
group=None,
|
216 |
+
):
|
217 |
+
"""Create an archive file (eg. zip or tar).
|
218 |
+
|
219 |
+
'base_name' is the name of the file to create, minus any format-specific
|
220 |
+
extension; 'format' is the archive format: one of "zip", "tar", "gztar",
|
221 |
+
"bztar", "xztar", or "ztar".
|
222 |
+
|
223 |
+
'root_dir' is a directory that will be the root directory of the
|
224 |
+
archive; ie. we typically chdir into 'root_dir' before creating the
|
225 |
+
archive. 'base_dir' is the directory where we start archiving from;
|
226 |
+
ie. 'base_dir' will be the common prefix of all files and
|
227 |
+
directories in the archive. 'root_dir' and 'base_dir' both default
|
228 |
+
to the current directory. Returns the name of the archive file.
|
229 |
+
|
230 |
+
'owner' and 'group' are used when creating a tar archive. By default,
|
231 |
+
uses the current owner and group.
|
232 |
+
"""
|
233 |
+
save_cwd = os.getcwd()
|
234 |
+
if root_dir is not None:
|
235 |
+
log.debug("changing into '%s'", root_dir)
|
236 |
+
base_name = os.path.abspath(base_name)
|
237 |
+
if not dry_run:
|
238 |
+
os.chdir(root_dir)
|
239 |
+
|
240 |
+
if base_dir is None:
|
241 |
+
base_dir = os.curdir
|
242 |
+
|
243 |
+
kwargs = {'dry_run': dry_run}
|
244 |
+
|
245 |
+
try:
|
246 |
+
format_info = ARCHIVE_FORMATS[format]
|
247 |
+
except KeyError:
|
248 |
+
raise ValueError(f"unknown archive format '{format}'")
|
249 |
+
|
250 |
+
func = format_info[0]
|
251 |
+
kwargs.update(format_info[1])
|
252 |
+
|
253 |
+
if format != 'zip':
|
254 |
+
kwargs['owner'] = owner
|
255 |
+
kwargs['group'] = group
|
256 |
+
|
257 |
+
try:
|
258 |
+
filename = func(base_name, base_dir, **kwargs)
|
259 |
+
finally:
|
260 |
+
if root_dir is not None:
|
261 |
+
log.debug("changing back to '%s'", save_cwd)
|
262 |
+
os.chdir(save_cwd)
|
263 |
+
|
264 |
+
return filename
|
.venv/Lib/site-packages/setuptools/_distutils/ccompiler.py
ADDED
@@ -0,0 +1,1256 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""distutils.ccompiler
|
2 |
+
|
3 |
+
Contains CCompiler, an abstract base class that defines the interface
|
4 |
+
for the Distutils compiler abstraction model."""
|
5 |
+
|
6 |
+
import os
|
7 |
+
import re
|
8 |
+
import sys
|
9 |
+
import types
|
10 |
+
import warnings
|
11 |
+
|
12 |
+
from more_itertools import always_iterable
|
13 |
+
|
14 |
+
from ._log import log
|
15 |
+
from ._modified import newer_group
|
16 |
+
from .dir_util import mkpath
|
17 |
+
from .errors import (
|
18 |
+
CompileError,
|
19 |
+
DistutilsModuleError,
|
20 |
+
DistutilsPlatformError,
|
21 |
+
LinkError,
|
22 |
+
UnknownFileError,
|
23 |
+
)
|
24 |
+
from .file_util import move_file
|
25 |
+
from .spawn import spawn
|
26 |
+
from .util import execute, is_mingw, split_quoted
|
27 |
+
|
28 |
+
|
29 |
+
class CCompiler:
|
30 |
+
"""Abstract base class to define the interface that must be implemented
|
31 |
+
by real compiler classes. Also has some utility methods used by
|
32 |
+
several compiler classes.
|
33 |
+
|
34 |
+
The basic idea behind a compiler abstraction class is that each
|
35 |
+
instance can be used for all the compile/link steps in building a
|
36 |
+
single project. Thus, attributes common to all of those compile and
|
37 |
+
link steps -- include directories, macros to define, libraries to link
|
38 |
+
against, etc. -- are attributes of the compiler instance. To allow for
|
39 |
+
variability in how individual files are treated, most of those
|
40 |
+
attributes may be varied on a per-compilation or per-link basis.
|
41 |
+
"""
|
42 |
+
|
43 |
+
# 'compiler_type' is a class attribute that identifies this class. It
|
44 |
+
# keeps code that wants to know what kind of compiler it's dealing with
|
45 |
+
# from having to import all possible compiler classes just to do an
|
46 |
+
# 'isinstance'. In concrete CCompiler subclasses, 'compiler_type'
|
47 |
+
# should really, really be one of the keys of the 'compiler_class'
|
48 |
+
# dictionary (see below -- used by the 'new_compiler()' factory
|
49 |
+
# function) -- authors of new compiler interface classes are
|
50 |
+
# responsible for updating 'compiler_class'!
|
51 |
+
compiler_type = None
|
52 |
+
|
53 |
+
# XXX things not handled by this compiler abstraction model:
|
54 |
+
# * client can't provide additional options for a compiler,
|
55 |
+
# e.g. warning, optimization, debugging flags. Perhaps this
|
56 |
+
# should be the domain of concrete compiler abstraction classes
|
57 |
+
# (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
|
58 |
+
# class should have methods for the common ones.
|
59 |
+
# * can't completely override the include or library searchg
|
60 |
+
# path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
|
61 |
+
# I'm not sure how widely supported this is even by Unix
|
62 |
+
# compilers, much less on other platforms. And I'm even less
|
63 |
+
# sure how useful it is; maybe for cross-compiling, but
|
64 |
+
# support for that is a ways off. (And anyways, cross
|
65 |
+
# compilers probably have a dedicated binary with the
|
66 |
+
# right paths compiled in. I hope.)
|
67 |
+
# * can't do really freaky things with the library list/library
|
68 |
+
# dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
|
69 |
+
# different versions of libfoo.a in different locations. I
|
70 |
+
# think this is useless without the ability to null out the
|
71 |
+
# library search path anyways.
|
72 |
+
|
73 |
+
# Subclasses that rely on the standard filename generation methods
|
74 |
+
# implemented below should override these; see the comment near
|
75 |
+
# those methods ('object_filenames()' et. al.) for details:
|
76 |
+
src_extensions = None # list of strings
|
77 |
+
obj_extension = None # string
|
78 |
+
static_lib_extension = None
|
79 |
+
shared_lib_extension = None # string
|
80 |
+
static_lib_format = None # format string
|
81 |
+
shared_lib_format = None # prob. same as static_lib_format
|
82 |
+
exe_extension = None # string
|
83 |
+
|
84 |
+
# Default language settings. language_map is used to detect a source
|
85 |
+
# file or Extension target language, checking source filenames.
|
86 |
+
# language_order is used to detect the language precedence, when deciding
|
87 |
+
# what language to use when mixing source types. For example, if some
|
88 |
+
# extension has two files with ".c" extension, and one with ".cpp", it
|
89 |
+
# is still linked as c++.
|
90 |
+
language_map = {
|
91 |
+
".c": "c",
|
92 |
+
".cc": "c++",
|
93 |
+
".cpp": "c++",
|
94 |
+
".cxx": "c++",
|
95 |
+
".m": "objc",
|
96 |
+
}
|
97 |
+
language_order = ["c++", "objc", "c"]
|
98 |
+
|
99 |
+
include_dirs = []
|
100 |
+
"""
|
101 |
+
include dirs specific to this compiler class
|
102 |
+
"""
|
103 |
+
|
104 |
+
library_dirs = []
|
105 |
+
"""
|
106 |
+
library dirs specific to this compiler class
|
107 |
+
"""
|
108 |
+
|
109 |
+
def __init__(self, verbose=False, dry_run=False, force=False):
|
110 |
+
self.dry_run = dry_run
|
111 |
+
self.force = force
|
112 |
+
self.verbose = verbose
|
113 |
+
|
114 |
+
# 'output_dir': a common output directory for object, library,
|
115 |
+
# shared object, and shared library files
|
116 |
+
self.output_dir = None
|
117 |
+
|
118 |
+
# 'macros': a list of macro definitions (or undefinitions). A
|
119 |
+
# macro definition is a 2-tuple (name, value), where the value is
|
120 |
+
# either a string or None (no explicit value). A macro
|
121 |
+
# undefinition is a 1-tuple (name,).
|
122 |
+
self.macros = []
|
123 |
+
|
124 |
+
# 'include_dirs': a list of directories to search for include files
|
125 |
+
self.include_dirs = []
|
126 |
+
|
127 |
+
# 'libraries': a list of libraries to include in any link
|
128 |
+
# (library names, not filenames: eg. "foo" not "libfoo.a")
|
129 |
+
self.libraries = []
|
130 |
+
|
131 |
+
# 'library_dirs': a list of directories to search for libraries
|
132 |
+
self.library_dirs = []
|
133 |
+
|
134 |
+
# 'runtime_library_dirs': a list of directories to search for
|
135 |
+
# shared libraries/objects at runtime
|
136 |
+
self.runtime_library_dirs = []
|
137 |
+
|
138 |
+
# 'objects': a list of object files (or similar, such as explicitly
|
139 |
+
# named library files) to include on any link
|
140 |
+
self.objects = []
|
141 |
+
|
142 |
+
for key in self.executables.keys():
|
143 |
+
self.set_executable(key, self.executables[key])
|
144 |
+
|
145 |
+
def set_executables(self, **kwargs):
|
146 |
+
"""Define the executables (and options for them) that will be run
|
147 |
+
to perform the various stages of compilation. The exact set of
|
148 |
+
executables that may be specified here depends on the compiler
|
149 |
+
class (via the 'executables' class attribute), but most will have:
|
150 |
+
compiler the C/C++ compiler
|
151 |
+
linker_so linker used to create shared objects and libraries
|
152 |
+
linker_exe linker used to create binary executables
|
153 |
+
archiver static library creator
|
154 |
+
|
155 |
+
On platforms with a command-line (Unix, DOS/Windows), each of these
|
156 |
+
is a string that will be split into executable name and (optional)
|
157 |
+
list of arguments. (Splitting the string is done similarly to how
|
158 |
+
Unix shells operate: words are delimited by spaces, but quotes and
|
159 |
+
backslashes can override this. See
|
160 |
+
'distutils.util.split_quoted()'.)
|
161 |
+
"""
|
162 |
+
|
163 |
+
# Note that some CCompiler implementation classes will define class
|
164 |
+
# attributes 'cpp', 'cc', etc. with hard-coded executable names;
|
165 |
+
# this is appropriate when a compiler class is for exactly one
|
166 |
+
# compiler/OS combination (eg. MSVCCompiler). Other compiler
|
167 |
+
# classes (UnixCCompiler, in particular) are driven by information
|
168 |
+
# discovered at run-time, since there are many different ways to do
|
169 |
+
# basically the same things with Unix C compilers.
|
170 |
+
|
171 |
+
for key in kwargs:
|
172 |
+
if key not in self.executables:
|
173 |
+
raise ValueError(
|
174 |
+
f"unknown executable '{key}' for class {self.__class__.__name__}"
|
175 |
+
)
|
176 |
+
self.set_executable(key, kwargs[key])
|
177 |
+
|
178 |
+
def set_executable(self, key, value):
|
179 |
+
if isinstance(value, str):
|
180 |
+
setattr(self, key, split_quoted(value))
|
181 |
+
else:
|
182 |
+
setattr(self, key, value)
|
183 |
+
|
184 |
+
def _find_macro(self, name):
|
185 |
+
i = 0
|
186 |
+
for defn in self.macros:
|
187 |
+
if defn[0] == name:
|
188 |
+
return i
|
189 |
+
i += 1
|
190 |
+
return None
|
191 |
+
|
192 |
+
def _check_macro_definitions(self, definitions):
|
193 |
+
"""Ensure that every element of 'definitions' is valid."""
|
194 |
+
for defn in definitions:
|
195 |
+
self._check_macro_definition(*defn)
|
196 |
+
|
197 |
+
def _check_macro_definition(self, defn):
|
198 |
+
"""
|
199 |
+
Raise a TypeError if defn is not valid.
|
200 |
+
|
201 |
+
A valid definition is either a (name, value) 2-tuple or a (name,) tuple.
|
202 |
+
"""
|
203 |
+
if not isinstance(defn, tuple) or not self._is_valid_macro(*defn):
|
204 |
+
raise TypeError(
|
205 |
+
f"invalid macro definition '{defn}': "
|
206 |
+
"must be tuple (string,), (string, string), or (string, None)"
|
207 |
+
)
|
208 |
+
|
209 |
+
@staticmethod
|
210 |
+
def _is_valid_macro(name, value=None):
|
211 |
+
"""
|
212 |
+
A valid macro is a ``name : str`` and a ``value : str | None``.
|
213 |
+
"""
|
214 |
+
return isinstance(name, str) and isinstance(value, (str, types.NoneType))
|
215 |
+
|
216 |
+
# -- Bookkeeping methods -------------------------------------------
|
217 |
+
|
218 |
+
def define_macro(self, name, value=None):
|
219 |
+
"""Define a preprocessor macro for all compilations driven by this
|
220 |
+
compiler object. The optional parameter 'value' should be a
|
221 |
+
string; if it is not supplied, then the macro will be defined
|
222 |
+
without an explicit value and the exact outcome depends on the
|
223 |
+
compiler used (XXX true? does ANSI say anything about this?)
|
224 |
+
"""
|
225 |
+
# Delete from the list of macro definitions/undefinitions if
|
226 |
+
# already there (so that this one will take precedence).
|
227 |
+
i = self._find_macro(name)
|
228 |
+
if i is not None:
|
229 |
+
del self.macros[i]
|
230 |
+
|
231 |
+
self.macros.append((name, value))
|
232 |
+
|
233 |
+
def undefine_macro(self, name):
|
234 |
+
"""Undefine a preprocessor macro for all compilations driven by
|
235 |
+
this compiler object. If the same macro is defined by
|
236 |
+
'define_macro()' and undefined by 'undefine_macro()' the last call
|
237 |
+
takes precedence (including multiple redefinitions or
|
238 |
+
undefinitions). If the macro is redefined/undefined on a
|
239 |
+
per-compilation basis (ie. in the call to 'compile()'), then that
|
240 |
+
takes precedence.
|
241 |
+
"""
|
242 |
+
# Delete from the list of macro definitions/undefinitions if
|
243 |
+
# already there (so that this one will take precedence).
|
244 |
+
i = self._find_macro(name)
|
245 |
+
if i is not None:
|
246 |
+
del self.macros[i]
|
247 |
+
|
248 |
+
undefn = (name,)
|
249 |
+
self.macros.append(undefn)
|
250 |
+
|
251 |
+
def add_include_dir(self, dir):
|
252 |
+
"""Add 'dir' to the list of directories that will be searched for
|
253 |
+
header files. The compiler is instructed to search directories in
|
254 |
+
the order in which they are supplied by successive calls to
|
255 |
+
'add_include_dir()'.
|
256 |
+
"""
|
257 |
+
self.include_dirs.append(dir)
|
258 |
+
|
259 |
+
def set_include_dirs(self, dirs):
|
260 |
+
"""Set the list of directories that will be searched to 'dirs' (a
|
261 |
+
list of strings). Overrides any preceding calls to
|
262 |
+
'add_include_dir()'; subsequence calls to 'add_include_dir()' add
|
263 |
+
to the list passed to 'set_include_dirs()'. This does not affect
|
264 |
+
any list of standard include directories that the compiler may
|
265 |
+
search by default.
|
266 |
+
"""
|
267 |
+
self.include_dirs = dirs[:]
|
268 |
+
|
269 |
+
def add_library(self, libname):
|
270 |
+
"""Add 'libname' to the list of libraries that will be included in
|
271 |
+
all links driven by this compiler object. Note that 'libname'
|
272 |
+
should *not* be the name of a file containing a library, but the
|
273 |
+
name of the library itself: the actual filename will be inferred by
|
274 |
+
the linker, the compiler, or the compiler class (depending on the
|
275 |
+
platform).
|
276 |
+
|
277 |
+
The linker will be instructed to link against libraries in the
|
278 |
+
order they were supplied to 'add_library()' and/or
|
279 |
+
'set_libraries()'. It is perfectly valid to duplicate library
|
280 |
+
names; the linker will be instructed to link against libraries as
|
281 |
+
many times as they are mentioned.
|
282 |
+
"""
|
283 |
+
self.libraries.append(libname)
|
284 |
+
|
285 |
+
def set_libraries(self, libnames):
|
286 |
+
"""Set the list of libraries to be included in all links driven by
|
287 |
+
this compiler object to 'libnames' (a list of strings). This does
|
288 |
+
not affect any standard system libraries that the linker may
|
289 |
+
include by default.
|
290 |
+
"""
|
291 |
+
self.libraries = libnames[:]
|
292 |
+
|
293 |
+
def add_library_dir(self, dir):
|
294 |
+
"""Add 'dir' to the list of directories that will be searched for
|
295 |
+
libraries specified to 'add_library()' and 'set_libraries()'. The
|
296 |
+
linker will be instructed to search for libraries in the order they
|
297 |
+
are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
|
298 |
+
"""
|
299 |
+
self.library_dirs.append(dir)
|
300 |
+
|
301 |
+
def set_library_dirs(self, dirs):
|
302 |
+
"""Set the list of library search directories to 'dirs' (a list of
|
303 |
+
strings). This does not affect any standard library search path
|
304 |
+
that the linker may search by default.
|
305 |
+
"""
|
306 |
+
self.library_dirs = dirs[:]
|
307 |
+
|
308 |
+
def add_runtime_library_dir(self, dir):
|
309 |
+
"""Add 'dir' to the list of directories that will be searched for
|
310 |
+
shared libraries at runtime.
|
311 |
+
"""
|
312 |
+
self.runtime_library_dirs.append(dir)
|
313 |
+
|
314 |
+
def set_runtime_library_dirs(self, dirs):
|
315 |
+
"""Set the list of directories to search for shared libraries at
|
316 |
+
runtime to 'dirs' (a list of strings). This does not affect any
|
317 |
+
standard search path that the runtime linker may search by
|
318 |
+
default.
|
319 |
+
"""
|
320 |
+
self.runtime_library_dirs = dirs[:]
|
321 |
+
|
322 |
+
def add_link_object(self, object):
|
323 |
+
"""Add 'object' to the list of object files (or analogues, such as
|
324 |
+
explicitly named library files or the output of "resource
|
325 |
+
compilers") to be included in every link driven by this compiler
|
326 |
+
object.
|
327 |
+
"""
|
328 |
+
self.objects.append(object)
|
329 |
+
|
330 |
+
def set_link_objects(self, objects):
|
331 |
+
"""Set the list of object files (or analogues) to be included in
|
332 |
+
every link to 'objects'. This does not affect any standard object
|
333 |
+
files that the linker may include by default (such as system
|
334 |
+
libraries).
|
335 |
+
"""
|
336 |
+
self.objects = objects[:]
|
337 |
+
|
338 |
+
# -- Private utility methods --------------------------------------
|
339 |
+
# (here for the convenience of subclasses)
|
340 |
+
|
341 |
+
# Helper method to prep compiler in subclass compile() methods
|
342 |
+
|
343 |
+
def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
|
344 |
+
"""Process arguments and decide which source files to compile."""
|
345 |
+
outdir, macros, incdirs = self._fix_compile_args(outdir, macros, incdirs)
|
346 |
+
|
347 |
+
if extra is None:
|
348 |
+
extra = []
|
349 |
+
|
350 |
+
# Get the list of expected output (object) files
|
351 |
+
objects = self.object_filenames(sources, strip_dir=False, output_dir=outdir)
|
352 |
+
assert len(objects) == len(sources)
|
353 |
+
|
354 |
+
pp_opts = gen_preprocess_options(macros, incdirs)
|
355 |
+
|
356 |
+
build = {}
|
357 |
+
for i in range(len(sources)):
|
358 |
+
src = sources[i]
|
359 |
+
obj = objects[i]
|
360 |
+
ext = os.path.splitext(src)[1]
|
361 |
+
self.mkpath(os.path.dirname(obj))
|
362 |
+
build[obj] = (src, ext)
|
363 |
+
|
364 |
+
return macros, objects, extra, pp_opts, build
|
365 |
+
|
366 |
+
def _get_cc_args(self, pp_opts, debug, before):
|
367 |
+
# works for unixccompiler, cygwinccompiler
|
368 |
+
cc_args = pp_opts + ['-c']
|
369 |
+
if debug:
|
370 |
+
cc_args[:0] = ['-g']
|
371 |
+
if before:
|
372 |
+
cc_args[:0] = before
|
373 |
+
return cc_args
|
374 |
+
|
375 |
+
def _fix_compile_args(self, output_dir, macros, include_dirs):
|
376 |
+
"""Typecheck and fix-up some of the arguments to the 'compile()'
|
377 |
+
method, and return fixed-up values. Specifically: if 'output_dir'
|
378 |
+
is None, replaces it with 'self.output_dir'; ensures that 'macros'
|
379 |
+
is a list, and augments it with 'self.macros'; ensures that
|
380 |
+
'include_dirs' is a list, and augments it with 'self.include_dirs'.
|
381 |
+
Guarantees that the returned values are of the correct type,
|
382 |
+
i.e. for 'output_dir' either string or None, and for 'macros' and
|
383 |
+
'include_dirs' either list or None.
|
384 |
+
"""
|
385 |
+
if output_dir is None:
|
386 |
+
output_dir = self.output_dir
|
387 |
+
elif not isinstance(output_dir, str):
|
388 |
+
raise TypeError("'output_dir' must be a string or None")
|
389 |
+
|
390 |
+
if macros is None:
|
391 |
+
macros = list(self.macros)
|
392 |
+
elif isinstance(macros, list):
|
393 |
+
macros = macros + (self.macros or [])
|
394 |
+
else:
|
395 |
+
raise TypeError("'macros' (if supplied) must be a list of tuples")
|
396 |
+
|
397 |
+
if include_dirs is None:
|
398 |
+
include_dirs = list(self.include_dirs)
|
399 |
+
elif isinstance(include_dirs, (list, tuple)):
|
400 |
+
include_dirs = list(include_dirs) + (self.include_dirs or [])
|
401 |
+
else:
|
402 |
+
raise TypeError("'include_dirs' (if supplied) must be a list of strings")
|
403 |
+
|
404 |
+
# add include dirs for class
|
405 |
+
include_dirs += self.__class__.include_dirs
|
406 |
+
|
407 |
+
return output_dir, macros, include_dirs
|
408 |
+
|
409 |
+
def _prep_compile(self, sources, output_dir, depends=None):
|
410 |
+
"""Decide which source files must be recompiled.
|
411 |
+
|
412 |
+
Determine the list of object files corresponding to 'sources',
|
413 |
+
and figure out which ones really need to be recompiled.
|
414 |
+
Return a list of all object files and a dictionary telling
|
415 |
+
which source files can be skipped.
|
416 |
+
"""
|
417 |
+
# Get the list of expected output (object) files
|
418 |
+
objects = self.object_filenames(sources, output_dir=output_dir)
|
419 |
+
assert len(objects) == len(sources)
|
420 |
+
|
421 |
+
# Return an empty dict for the "which source files can be skipped"
|
422 |
+
# return value to preserve API compatibility.
|
423 |
+
return objects, {}
|
424 |
+
|
425 |
+
def _fix_object_args(self, objects, output_dir):
|
426 |
+
"""Typecheck and fix up some arguments supplied to various methods.
|
427 |
+
Specifically: ensure that 'objects' is a list; if output_dir is
|
428 |
+
None, replace with self.output_dir. Return fixed versions of
|
429 |
+
'objects' and 'output_dir'.
|
430 |
+
"""
|
431 |
+
if not isinstance(objects, (list, tuple)):
|
432 |
+
raise TypeError("'objects' must be a list or tuple of strings")
|
433 |
+
objects = list(objects)
|
434 |
+
|
435 |
+
if output_dir is None:
|
436 |
+
output_dir = self.output_dir
|
437 |
+
elif not isinstance(output_dir, str):
|
438 |
+
raise TypeError("'output_dir' must be a string or None")
|
439 |
+
|
440 |
+
return (objects, output_dir)
|
441 |
+
|
442 |
+
def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
|
443 |
+
"""Typecheck and fix up some of the arguments supplied to the
|
444 |
+
'link_*' methods. Specifically: ensure that all arguments are
|
445 |
+
lists, and augment them with their permanent versions
|
446 |
+
(eg. 'self.libraries' augments 'libraries'). Return a tuple with
|
447 |
+
fixed versions of all arguments.
|
448 |
+
"""
|
449 |
+
if libraries is None:
|
450 |
+
libraries = list(self.libraries)
|
451 |
+
elif isinstance(libraries, (list, tuple)):
|
452 |
+
libraries = list(libraries) + (self.libraries or [])
|
453 |
+
else:
|
454 |
+
raise TypeError("'libraries' (if supplied) must be a list of strings")
|
455 |
+
|
456 |
+
if library_dirs is None:
|
457 |
+
library_dirs = list(self.library_dirs)
|
458 |
+
elif isinstance(library_dirs, (list, tuple)):
|
459 |
+
library_dirs = list(library_dirs) + (self.library_dirs or [])
|
460 |
+
else:
|
461 |
+
raise TypeError("'library_dirs' (if supplied) must be a list of strings")
|
462 |
+
|
463 |
+
# add library dirs for class
|
464 |
+
library_dirs += self.__class__.library_dirs
|
465 |
+
|
466 |
+
if runtime_library_dirs is None:
|
467 |
+
runtime_library_dirs = list(self.runtime_library_dirs)
|
468 |
+
elif isinstance(runtime_library_dirs, (list, tuple)):
|
469 |
+
runtime_library_dirs = list(runtime_library_dirs) + (
|
470 |
+
self.runtime_library_dirs or []
|
471 |
+
)
|
472 |
+
else:
|
473 |
+
raise TypeError(
|
474 |
+
"'runtime_library_dirs' (if supplied) must be a list of strings"
|
475 |
+
)
|
476 |
+
|
477 |
+
return (libraries, library_dirs, runtime_library_dirs)
|
478 |
+
|
479 |
+
def _need_link(self, objects, output_file):
|
480 |
+
"""Return true if we need to relink the files listed in 'objects'
|
481 |
+
to recreate 'output_file'.
|
482 |
+
"""
|
483 |
+
if self.force:
|
484 |
+
return True
|
485 |
+
else:
|
486 |
+
if self.dry_run:
|
487 |
+
newer = newer_group(objects, output_file, missing='newer')
|
488 |
+
else:
|
489 |
+
newer = newer_group(objects, output_file)
|
490 |
+
return newer
|
491 |
+
|
492 |
+
def detect_language(self, sources):
|
493 |
+
"""Detect the language of a given file, or list of files. Uses
|
494 |
+
language_map, and language_order to do the job.
|
495 |
+
"""
|
496 |
+
if not isinstance(sources, list):
|
497 |
+
sources = [sources]
|
498 |
+
lang = None
|
499 |
+
index = len(self.language_order)
|
500 |
+
for source in sources:
|
501 |
+
base, ext = os.path.splitext(source)
|
502 |
+
extlang = self.language_map.get(ext)
|
503 |
+
try:
|
504 |
+
extindex = self.language_order.index(extlang)
|
505 |
+
if extindex < index:
|
506 |
+
lang = extlang
|
507 |
+
index = extindex
|
508 |
+
except ValueError:
|
509 |
+
pass
|
510 |
+
return lang
|
511 |
+
|
512 |
+
# -- Worker methods ------------------------------------------------
|
513 |
+
# (must be implemented by subclasses)
|
514 |
+
|
515 |
+
def preprocess(
|
516 |
+
self,
|
517 |
+
source,
|
518 |
+
output_file=None,
|
519 |
+
macros=None,
|
520 |
+
include_dirs=None,
|
521 |
+
extra_preargs=None,
|
522 |
+
extra_postargs=None,
|
523 |
+
):
|
524 |
+
"""Preprocess a single C/C++ source file, named in 'source'.
|
525 |
+
Output will be written to file named 'output_file', or stdout if
|
526 |
+
'output_file' not supplied. 'macros' is a list of macro
|
527 |
+
definitions as for 'compile()', which will augment the macros set
|
528 |
+
with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a
|
529 |
+
list of directory names that will be added to the default list.
|
530 |
+
|
531 |
+
Raises PreprocessError on failure.
|
532 |
+
"""
|
533 |
+
pass
|
534 |
+
|
535 |
+
def compile(
|
536 |
+
self,
|
537 |
+
sources,
|
538 |
+
output_dir=None,
|
539 |
+
macros=None,
|
540 |
+
include_dirs=None,
|
541 |
+
debug=False,
|
542 |
+
extra_preargs=None,
|
543 |
+
extra_postargs=None,
|
544 |
+
depends=None,
|
545 |
+
):
|
546 |
+
"""Compile one or more source files.
|
547 |
+
|
548 |
+
'sources' must be a list of filenames, most likely C/C++
|
549 |
+
files, but in reality anything that can be handled by a
|
550 |
+
particular compiler and compiler class (eg. MSVCCompiler can
|
551 |
+
handle resource files in 'sources'). Return a list of object
|
552 |
+
filenames, one per source filename in 'sources'. Depending on
|
553 |
+
the implementation, not all source files will necessarily be
|
554 |
+
compiled, but all corresponding object filenames will be
|
555 |
+
returned.
|
556 |
+
|
557 |
+
If 'output_dir' is given, object files will be put under it, while
|
558 |
+
retaining their original path component. That is, "foo/bar.c"
|
559 |
+
normally compiles to "foo/bar.o" (for a Unix implementation); if
|
560 |
+
'output_dir' is "build", then it would compile to
|
561 |
+
"build/foo/bar.o".
|
562 |
+
|
563 |
+
'macros', if given, must be a list of macro definitions. A macro
|
564 |
+
definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
|
565 |
+
The former defines a macro; if the value is None, the macro is
|
566 |
+
defined without an explicit value. The 1-tuple case undefines a
|
567 |
+
macro. Later definitions/redefinitions/ undefinitions take
|
568 |
+
precedence.
|
569 |
+
|
570 |
+
'include_dirs', if given, must be a list of strings, the
|
571 |
+
directories to add to the default include file search path for this
|
572 |
+
compilation only.
|
573 |
+
|
574 |
+
'debug' is a boolean; if true, the compiler will be instructed to
|
575 |
+
output debug symbols in (or alongside) the object file(s).
|
576 |
+
|
577 |
+
'extra_preargs' and 'extra_postargs' are implementation- dependent.
|
578 |
+
On platforms that have the notion of a command-line (e.g. Unix,
|
579 |
+
DOS/Windows), they are most likely lists of strings: extra
|
580 |
+
command-line arguments to prepend/append to the compiler command
|
581 |
+
line. On other platforms, consult the implementation class
|
582 |
+
documentation. In any event, they are intended as an escape hatch
|
583 |
+
for those occasions when the abstract compiler framework doesn't
|
584 |
+
cut the mustard.
|
585 |
+
|
586 |
+
'depends', if given, is a list of filenames that all targets
|
587 |
+
depend on. If a source file is older than any file in
|
588 |
+
depends, then the source file will be recompiled. This
|
589 |
+
supports dependency tracking, but only at a coarse
|
590 |
+
granularity.
|
591 |
+
|
592 |
+
Raises CompileError on failure.
|
593 |
+
"""
|
594 |
+
# A concrete compiler class can either override this method
|
595 |
+
# entirely or implement _compile().
|
596 |
+
macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
|
597 |
+
output_dir, macros, include_dirs, sources, depends, extra_postargs
|
598 |
+
)
|
599 |
+
cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
|
600 |
+
|
601 |
+
for obj in objects:
|
602 |
+
try:
|
603 |
+
src, ext = build[obj]
|
604 |
+
except KeyError:
|
605 |
+
continue
|
606 |
+
self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
|
607 |
+
|
608 |
+
# Return *all* object filenames, not just the ones we just built.
|
609 |
+
return objects
|
610 |
+
|
611 |
+
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
|
612 |
+
"""Compile 'src' to product 'obj'."""
|
613 |
+
# A concrete compiler class that does not override compile()
|
614 |
+
# should implement _compile().
|
615 |
+
pass
|
616 |
+
|
617 |
+
def create_static_lib(
|
618 |
+
self, objects, output_libname, output_dir=None, debug=False, target_lang=None
|
619 |
+
):
|
620 |
+
"""Link a bunch of stuff together to create a static library file.
|
621 |
+
The "bunch of stuff" consists of the list of object files supplied
|
622 |
+
as 'objects', the extra object files supplied to
|
623 |
+
'add_link_object()' and/or 'set_link_objects()', the libraries
|
624 |
+
supplied to 'add_library()' and/or 'set_libraries()', and the
|
625 |
+
libraries supplied as 'libraries' (if any).
|
626 |
+
|
627 |
+
'output_libname' should be a library name, not a filename; the
|
628 |
+
filename will be inferred from the library name. 'output_dir' is
|
629 |
+
the directory where the library file will be put.
|
630 |
+
|
631 |
+
'debug' is a boolean; if true, debugging information will be
|
632 |
+
included in the library (note that on most platforms, it is the
|
633 |
+
compile step where this matters: the 'debug' flag is included here
|
634 |
+
just for consistency).
|
635 |
+
|
636 |
+
'target_lang' is the target language for which the given objects
|
637 |
+
are being compiled. This allows specific linkage time treatment of
|
638 |
+
certain languages.
|
639 |
+
|
640 |
+
Raises LibError on failure.
|
641 |
+
"""
|
642 |
+
pass
|
643 |
+
|
644 |
+
# values for target_desc parameter in link()
|
645 |
+
SHARED_OBJECT = "shared_object"
|
646 |
+
SHARED_LIBRARY = "shared_library"
|
647 |
+
EXECUTABLE = "executable"
|
648 |
+
|
649 |
+
def link(
|
650 |
+
self,
|
651 |
+
target_desc,
|
652 |
+
objects,
|
653 |
+
output_filename,
|
654 |
+
output_dir=None,
|
655 |
+
libraries=None,
|
656 |
+
library_dirs=None,
|
657 |
+
runtime_library_dirs=None,
|
658 |
+
export_symbols=None,
|
659 |
+
debug=False,
|
660 |
+
extra_preargs=None,
|
661 |
+
extra_postargs=None,
|
662 |
+
build_temp=None,
|
663 |
+
target_lang=None,
|
664 |
+
):
|
665 |
+
"""Link a bunch of stuff together to create an executable or
|
666 |
+
shared library file.
|
667 |
+
|
668 |
+
The "bunch of stuff" consists of the list of object files supplied
|
669 |
+
as 'objects'. 'output_filename' should be a filename. If
|
670 |
+
'output_dir' is supplied, 'output_filename' is relative to it
|
671 |
+
(i.e. 'output_filename' can provide directory components if
|
672 |
+
needed).
|
673 |
+
|
674 |
+
'libraries' is a list of libraries to link against. These are
|
675 |
+
library names, not filenames, since they're translated into
|
676 |
+
filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
|
677 |
+
on Unix and "foo.lib" on DOS/Windows). However, they can include a
|
678 |
+
directory component, which means the linker will look in that
|
679 |
+
specific directory rather than searching all the normal locations.
|
680 |
+
|
681 |
+
'library_dirs', if supplied, should be a list of directories to
|
682 |
+
search for libraries that were specified as bare library names
|
683 |
+
(ie. no directory component). These are on top of the system
|
684 |
+
default and those supplied to 'add_library_dir()' and/or
|
685 |
+
'set_library_dirs()'. 'runtime_library_dirs' is a list of
|
686 |
+
directories that will be embedded into the shared library and used
|
687 |
+
to search for other shared libraries that *it* depends on at
|
688 |
+
run-time. (This may only be relevant on Unix.)
|
689 |
+
|
690 |
+
'export_symbols' is a list of symbols that the shared library will
|
691 |
+
export. (This appears to be relevant only on Windows.)
|
692 |
+
|
693 |
+
'debug' is as for 'compile()' and 'create_static_lib()', with the
|
694 |
+
slight distinction that it actually matters on most platforms (as
|
695 |
+
opposed to 'create_static_lib()', which includes a 'debug' flag
|
696 |
+
mostly for form's sake).
|
697 |
+
|
698 |
+
'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
|
699 |
+
of course that they supply command-line arguments for the
|
700 |
+
particular linker being used).
|
701 |
+
|
702 |
+
'target_lang' is the target language for which the given objects
|
703 |
+
are being compiled. This allows specific linkage time treatment of
|
704 |
+
certain languages.
|
705 |
+
|
706 |
+
Raises LinkError on failure.
|
707 |
+
"""
|
708 |
+
raise NotImplementedError
|
709 |
+
|
710 |
+
# Old 'link_*()' methods, rewritten to use the new 'link()' method.
|
711 |
+
|
712 |
+
def link_shared_lib(
|
713 |
+
self,
|
714 |
+
objects,
|
715 |
+
output_libname,
|
716 |
+
output_dir=None,
|
717 |
+
libraries=None,
|
718 |
+
library_dirs=None,
|
719 |
+
runtime_library_dirs=None,
|
720 |
+
export_symbols=None,
|
721 |
+
debug=False,
|
722 |
+
extra_preargs=None,
|
723 |
+
extra_postargs=None,
|
724 |
+
build_temp=None,
|
725 |
+
target_lang=None,
|
726 |
+
):
|
727 |
+
self.link(
|
728 |
+
CCompiler.SHARED_LIBRARY,
|
729 |
+
objects,
|
730 |
+
self.library_filename(output_libname, lib_type='shared'),
|
731 |
+
output_dir,
|
732 |
+
libraries,
|
733 |
+
library_dirs,
|
734 |
+
runtime_library_dirs,
|
735 |
+
export_symbols,
|
736 |
+
debug,
|
737 |
+
extra_preargs,
|
738 |
+
extra_postargs,
|
739 |
+
build_temp,
|
740 |
+
target_lang,
|
741 |
+
)
|
742 |
+
|
743 |
+
def link_shared_object(
|
744 |
+
self,
|
745 |
+
objects,
|
746 |
+
output_filename,
|
747 |
+
output_dir=None,
|
748 |
+
libraries=None,
|
749 |
+
library_dirs=None,
|
750 |
+
runtime_library_dirs=None,
|
751 |
+
export_symbols=None,
|
752 |
+
debug=False,
|
753 |
+
extra_preargs=None,
|
754 |
+
extra_postargs=None,
|
755 |
+
build_temp=None,
|
756 |
+
target_lang=None,
|
757 |
+
):
|
758 |
+
self.link(
|
759 |
+
CCompiler.SHARED_OBJECT,
|
760 |
+
objects,
|
761 |
+
output_filename,
|
762 |
+
output_dir,
|
763 |
+
libraries,
|
764 |
+
library_dirs,
|
765 |
+
runtime_library_dirs,
|
766 |
+
export_symbols,
|
767 |
+
debug,
|
768 |
+
extra_preargs,
|
769 |
+
extra_postargs,
|
770 |
+
build_temp,
|
771 |
+
target_lang,
|
772 |
+
)
|
773 |
+
|
774 |
+
def link_executable(
|
775 |
+
self,
|
776 |
+
objects,
|
777 |
+
output_progname,
|
778 |
+
output_dir=None,
|
779 |
+
libraries=None,
|
780 |
+
library_dirs=None,
|
781 |
+
runtime_library_dirs=None,
|
782 |
+
debug=False,
|
783 |
+
extra_preargs=None,
|
784 |
+
extra_postargs=None,
|
785 |
+
target_lang=None,
|
786 |
+
):
|
787 |
+
self.link(
|
788 |
+
CCompiler.EXECUTABLE,
|
789 |
+
objects,
|
790 |
+
self.executable_filename(output_progname),
|
791 |
+
output_dir,
|
792 |
+
libraries,
|
793 |
+
library_dirs,
|
794 |
+
runtime_library_dirs,
|
795 |
+
None,
|
796 |
+
debug,
|
797 |
+
extra_preargs,
|
798 |
+
extra_postargs,
|
799 |
+
None,
|
800 |
+
target_lang,
|
801 |
+
)
|
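The three wrappers above simply forward to 'link()' with the appropriate target-type constant. As a rough, hedged sketch of how a concrete compiler instance would be driven (the source file, output names and the 'm' library are illustrative, assuming a Unix-style toolchain):

    from distutils.ccompiler import new_compiler

    cc = new_compiler()  # picks the platform's default CCompiler subclass
    objs = cc.compile(['hello.c'], output_dir='build')  # hypothetical source file
    # 'm' is a bare library name; the compiler class translates it to
    # -lm on Unix or m.lib on Windows, as described in link()'s docstring.
    cc.link_executable(objs, 'hello', output_dir='build', libraries=['m'])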
802 |
+
|
803 |
+
# -- Miscellaneous methods -----------------------------------------
|
804 |
+
# These are all used by the 'gen_lib_options()' function; there is
|
805 |
+
# no appropriate default implementation so subclasses should
|
806 |
+
# implement all of these.
|
807 |
+
|
808 |
+
def library_dir_option(self, dir):
|
809 |
+
"""Return the compiler option to add 'dir' to the list of
|
810 |
+
directories searched for libraries.
|
811 |
+
"""
|
812 |
+
raise NotImplementedError
|
813 |
+
|
814 |
+
def runtime_library_dir_option(self, dir):
|
815 |
+
"""Return the compiler option to add 'dir' to the list of
|
816 |
+
directories searched for runtime libraries.
|
817 |
+
"""
|
818 |
+
raise NotImplementedError
|
819 |
+
|
820 |
+
def library_option(self, lib):
|
821 |
+
"""Return the compiler option to add 'lib' to the list of libraries
|
822 |
+
linked into the shared library or executable.
|
823 |
+
"""
|
824 |
+
raise NotImplementedError
|
825 |
+
|
826 |
+
def has_function( # noqa: C901
|
827 |
+
self,
|
828 |
+
funcname,
|
829 |
+
includes=None,
|
830 |
+
include_dirs=None,
|
831 |
+
libraries=None,
|
832 |
+
library_dirs=None,
|
833 |
+
):
|
834 |
+
"""Return a boolean indicating whether funcname is provided as
|
835 |
+
a symbol on the current platform. The optional arguments can
|
836 |
+
be used to augment the compilation environment.
|
837 |
+
|
838 |
+
The libraries argument is a list of flags to be passed to the
|
839 |
+
linker to make additional symbol definitions available for
|
840 |
+
linking.
|
841 |
+
|
842 |
+
The includes and include_dirs arguments are deprecated.
|
843 |
+
Usually, supplying include files with function declarations
|
844 |
+
will cause function detection to fail even in cases where the
|
845 |
+
symbol is available for linking.
|
846 |
+
|
847 |
+
"""
|
848 |
+
# this can't be included at module scope because it tries to
|
849 |
+
# import math which might not be available at that point - maybe
|
850 |
+
# the necessary logic should just be inlined?
|
851 |
+
import tempfile
|
852 |
+
|
853 |
+
if includes is None:
|
854 |
+
includes = []
|
855 |
+
else:
|
856 |
+
warnings.warn("includes is deprecated", DeprecationWarning)
|
857 |
+
if include_dirs is None:
|
858 |
+
include_dirs = []
|
859 |
+
else:
|
860 |
+
warnings.warn("include_dirs is deprecated", DeprecationWarning)
|
861 |
+
if libraries is None:
|
862 |
+
libraries = []
|
863 |
+
if library_dirs is None:
|
864 |
+
library_dirs = []
|
865 |
+
fd, fname = tempfile.mkstemp(".c", funcname, text=True)
|
866 |
+
with os.fdopen(fd, "w", encoding='utf-8') as f:
|
867 |
+
for incl in includes:
|
868 |
+
f.write(f"""#include "{incl}"\n""")
|
869 |
+
if not includes:
|
870 |
+
# Use "char func(void);" as the prototype to follow
|
871 |
+
# what autoconf does. This prototype does not match
|
872 |
+
# any well-known function the compiler might recognize
|
873 |
+
# as a builtin, so this ends up as a true link test.
|
874 |
+
# Without a fake prototype, the test would need to
|
875 |
+
# know the exact argument types, and the has_function
|
876 |
+
# interface does not provide that level of information.
|
877 |
+
f.write(
|
878 |
+
f"""\
|
879 |
+
#ifdef __cplusplus
|
880 |
+
extern "C"
|
881 |
+
#endif
|
882 |
+
char {funcname}(void);
|
883 |
+
"""
|
884 |
+
)
|
885 |
+
f.write(
|
886 |
+
f"""\
|
887 |
+
int main (int argc, char **argv) {{
|
888 |
+
{funcname}();
|
889 |
+
return 0;
|
890 |
+
}}
|
891 |
+
"""
|
892 |
+
)
|
893 |
+
|
894 |
+
try:
|
895 |
+
objects = self.compile([fname], include_dirs=include_dirs)
|
896 |
+
except CompileError:
|
897 |
+
return False
|
898 |
+
finally:
|
899 |
+
os.remove(fname)
|
900 |
+
|
901 |
+
try:
|
902 |
+
self.link_executable(
|
903 |
+
objects, "a.out", libraries=libraries, library_dirs=library_dirs
|
904 |
+
)
|
905 |
+
except (LinkError, TypeError):
|
906 |
+
return False
|
907 |
+
else:
|
908 |
+
os.remove(
|
909 |
+
self.executable_filename("a.out", output_dir=self.output_dir or '')
|
910 |
+
)
|
911 |
+
finally:
|
912 |
+
for fn in objects:
|
913 |
+
os.remove(fn)
|
914 |
+
return True
|
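A hedged usage sketch of the probe above: because it compiles and links a tiny stub, has_function() answers "can this symbol be linked?" rather than "is it declared in a header?" (the library names below are illustrative).

    from distutils.ccompiler import new_compiler

    cc = new_compiler()
    # Usually True on Unix: sin() lives in libm.
    print(cc.has_function('sin', libraries=['m']))
    # Expected False: the symbol should not exist anywhere.
    print(cc.has_function('definitely_not_a_real_symbol_xyz'))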
915 |
+
|
916 |
+
def find_library_file(self, dirs, lib, debug=False):
|
917 |
+
"""Search the specified list of directories for a static or shared
|
918 |
+
library file 'lib' and return the full path to that file. If
|
919 |
+
'debug' true, look for a debugging version (if that makes sense on
|
920 |
+
the current platform). Return None if 'lib' wasn't found in any of
|
921 |
+
the specified directories.
|
922 |
+
"""
|
923 |
+
raise NotImplementedError
|
924 |
+
|
925 |
+
# -- Filename generation methods -----------------------------------
|
926 |
+
|
927 |
+
# The default implementation of the filename generating methods are
|
928 |
+
# prejudiced towards the Unix/DOS/Windows view of the world:
|
929 |
+
# * object files are named by replacing the source file extension
|
930 |
+
# (eg. .c/.cpp -> .o/.obj)
|
931 |
+
# * library files (shared or static) are named by plugging the
|
932 |
+
# library name and extension into a format string, eg.
|
933 |
+
# "lib%s.%s" % (lib_name, ".a") for Unix static libraries
|
934 |
+
# * executables are named by appending an extension (possibly
|
935 |
+
# empty) to the program name: eg. progname + ".exe" for
|
936 |
+
# Windows
|
937 |
+
#
|
938 |
+
# To reduce redundant code, these methods expect to find
|
939 |
+
# several attributes in the current object (presumably defined
|
940 |
+
# as class attributes):
|
941 |
+
# * src_extensions -
|
942 |
+
# list of C/C++ source file extensions, eg. ['.c', '.cpp']
|
943 |
+
# * obj_extension -
|
944 |
+
# object file extension, eg. '.o' or '.obj'
|
945 |
+
# * static_lib_extension -
|
946 |
+
# extension for static library files, eg. '.a' or '.lib'
|
947 |
+
# * shared_lib_extension -
|
948 |
+
# extension for shared library/object files, eg. '.so', '.dll'
|
949 |
+
# * static_lib_format -
|
950 |
+
# format string for generating static library filenames,
|
951 |
+
# eg. 'lib%s.%s' or '%s.%s'
|
952 |
+
# * shared_lib_format
|
953 |
+
# format string for generating shared library filenames
|
954 |
+
# (probably same as static_lib_format, since the extension
|
955 |
+
# is one of the intended parameters to the format string)
|
956 |
+
# * exe_extension -
|
957 |
+
# extension for executable files, eg. '' or '.exe'
|
958 |
+
|
959 |
+
def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
|
960 |
+
if output_dir is None:
|
961 |
+
output_dir = ''
|
962 |
+
return list(
|
963 |
+
self._make_out_path(output_dir, strip_dir, src_name)
|
964 |
+
for src_name in source_filenames
|
965 |
+
)
|
966 |
+
|
967 |
+
@property
|
968 |
+
def out_extensions(self):
|
969 |
+
return dict.fromkeys(self.src_extensions, self.obj_extension)
|
970 |
+
|
971 |
+
def _make_out_path(self, output_dir, strip_dir, src_name):
|
972 |
+
base, ext = os.path.splitext(src_name)
|
973 |
+
base = self._make_relative(base)
|
974 |
+
try:
|
975 |
+
new_ext = self.out_extensions[ext]
|
976 |
+
except LookupError:
|
977 |
+
raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
|
978 |
+
if strip_dir:
|
979 |
+
base = os.path.basename(base)
|
980 |
+
return os.path.join(output_dir, base + new_ext)
|
981 |
+
|
982 |
+
@staticmethod
|
983 |
+
def _make_relative(base):
|
984 |
+
"""
|
985 |
+
In order to ensure that a filename always honors the
|
986 |
+
indicated output_dir, make sure it's relative.
|
987 |
+
Ref python/cpython#37775.
|
988 |
+
"""
|
989 |
+
# Chop off the drive
|
990 |
+
no_drive = os.path.splitdrive(base)[1]
|
991 |
+
# If abs, chop off leading /
|
992 |
+
return no_drive[os.path.isabs(no_drive) :]
|
993 |
+
|
994 |
+
def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
|
995 |
+
assert output_dir is not None
|
996 |
+
if strip_dir:
|
997 |
+
basename = os.path.basename(basename)
|
998 |
+
return os.path.join(output_dir, basename + self.shared_lib_extension)
|
999 |
+
|
1000 |
+
def executable_filename(self, basename, strip_dir=False, output_dir=''):
|
1001 |
+
assert output_dir is not None
|
1002 |
+
if strip_dir:
|
1003 |
+
basename = os.path.basename(basename)
|
1004 |
+
return os.path.join(output_dir, basename + (self.exe_extension or ''))
|
1005 |
+
|
1006 |
+
def library_filename(
|
1007 |
+
self,
|
1008 |
+
libname,
|
1009 |
+
lib_type='static',
|
1010 |
+
strip_dir=False,
|
1011 |
+
output_dir='', # or 'shared'
|
1012 |
+
):
|
1013 |
+
assert output_dir is not None
|
1014 |
+
expected = '"static", "shared", "dylib", "xcode_stub"'
|
1015 |
+
if lib_type not in eval(expected):
|
1016 |
+
raise ValueError(f"'lib_type' must be {expected}")
|
1017 |
+
fmt = getattr(self, lib_type + "_lib_format")
|
1018 |
+
ext = getattr(self, lib_type + "_lib_extension")
|
1019 |
+
|
1020 |
+
dir, base = os.path.split(libname)
|
1021 |
+
filename = fmt % (base, ext)
|
1022 |
+
if strip_dir:
|
1023 |
+
dir = ''
|
1024 |
+
|
1025 |
+
return os.path.join(output_dir, dir, filename)
|
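To make the conventions above concrete, here is roughly what the filename helpers return for a Unix-style compiler; the values in the comments assume UnixCCompiler's default class attributes (Linux-style '.so', empty exe_extension) and will differ under MSVC.

    from distutils.unixccompiler import UnixCCompiler

    cc = UnixCCompiler()
    print(cc.object_filenames(['src/foo.c']))             # ['src/foo.o']
    print(cc.library_filename('foo'))                      # 'libfoo.a'
    print(cc.library_filename('foo', lib_type='shared'))   # 'libfoo.so'
    print(cc.executable_filename('prog'))                  # 'prog'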
1026 |
+
|
1027 |
+
# -- Utility methods -----------------------------------------------
|
1028 |
+
|
1029 |
+
def announce(self, msg, level=1):
|
1030 |
+
log.debug(msg)
|
1031 |
+
|
1032 |
+
def debug_print(self, msg):
|
1033 |
+
from distutils.debug import DEBUG
|
1034 |
+
|
1035 |
+
if DEBUG:
|
1036 |
+
print(msg)
|
1037 |
+
|
1038 |
+
def warn(self, msg):
|
1039 |
+
sys.stderr.write(f"warning: {msg}\n")
|
1040 |
+
|
1041 |
+
def execute(self, func, args, msg=None, level=1):
|
1042 |
+
execute(func, args, msg, self.dry_run)
|
1043 |
+
|
1044 |
+
def spawn(self, cmd, **kwargs):
|
1045 |
+
spawn(cmd, dry_run=self.dry_run, **kwargs)
|
1046 |
+
|
1047 |
+
def move_file(self, src, dst):
|
1048 |
+
return move_file(src, dst, dry_run=self.dry_run)
|
1049 |
+
|
1050 |
+
def mkpath(self, name, mode=0o777):
|
1051 |
+
mkpath(name, mode, dry_run=self.dry_run)
|
1052 |
+
|
1053 |
+
|
1054 |
+
# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
|
1055 |
+
# type for that platform. Keys are interpreted as re match
|
1056 |
+
# patterns. Order is important; platform mappings are preferred over
|
1057 |
+
# OS names.
|
1058 |
+
_default_compilers = (
|
1059 |
+
# Platform string mappings
|
1060 |
+
# on a cygwin built python we can use gcc like an ordinary UNIXish
|
1061 |
+
# compiler
|
1062 |
+
('cygwin.*', 'unix'),
|
1063 |
+
('zos', 'zos'),
|
1064 |
+
# OS name mappings
|
1065 |
+
('posix', 'unix'),
|
1066 |
+
('nt', 'msvc'),
|
1067 |
+
)
|
1068 |
+
|
1069 |
+
|
1070 |
+
def get_default_compiler(osname=None, platform=None):
|
1071 |
+
"""Determine the default compiler to use for the given platform.
|
1072 |
+
|
1073 |
+
osname should be one of the standard Python OS names (i.e. the
|
1074 |
+
ones returned by os.name) and platform the common value
|
1075 |
+
returned by sys.platform for the platform in question.
|
1076 |
+
|
1077 |
+
The default values are os.name and sys.platform in case the
|
1078 |
+
parameters are not given.
|
1079 |
+
"""
|
1080 |
+
if osname is None:
|
1081 |
+
osname = os.name
|
1082 |
+
if platform is None:
|
1083 |
+
platform = sys.platform
|
1084 |
+
# Mingw is a special case where sys.platform is 'win32' but we
|
1085 |
+
# want to use the 'mingw32' compiler, so check it first
|
1086 |
+
if is_mingw():
|
1087 |
+
return 'mingw32'
|
1088 |
+
for pattern, compiler in _default_compilers:
|
1089 |
+
if (
|
1090 |
+
re.match(pattern, platform) is not None
|
1091 |
+
or re.match(pattern, osname) is not None
|
1092 |
+
):
|
1093 |
+
return compiler
|
1094 |
+
# Default to Unix compiler
|
1095 |
+
return 'unix'
|
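Illustrative calls matching the mapping table above (on a non-MinGW interpreter, since is_mingw() is checked first):

    from distutils.ccompiler import get_default_compiler

    print(get_default_compiler('posix', 'linux'))   # 'unix'
    print(get_default_compiler('nt', 'win32'))      # 'msvc'
    print(get_default_compiler('posix', 'cygwin'))  # 'unix' (via the 'cygwin.*' pattern)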
1096 |
+
|
1097 |
+
|
1098 |
+
# Map compiler types to (module_name, class_name) pairs -- ie. where to
|
1099 |
+
# find the code that implements an interface to this compiler. (The module
|
1100 |
+
# is assumed to be in the 'distutils' package.)
|
1101 |
+
compiler_class = {
|
1102 |
+
'unix': ('unixccompiler', 'UnixCCompiler', "standard UNIX-style compiler"),
|
1103 |
+
'msvc': ('_msvccompiler', 'MSVCCompiler', "Microsoft Visual C++"),
|
1104 |
+
'cygwin': (
|
1105 |
+
'cygwinccompiler',
|
1106 |
+
'CygwinCCompiler',
|
1107 |
+
"Cygwin port of GNU C Compiler for Win32",
|
1108 |
+
),
|
1109 |
+
'mingw32': (
|
1110 |
+
'cygwinccompiler',
|
1111 |
+
'Mingw32CCompiler',
|
1112 |
+
"Mingw32 port of GNU C Compiler for Win32",
|
1113 |
+
),
|
1114 |
+
'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"),
|
1115 |
+
'zos': ('zosccompiler', 'zOSCCompiler', 'IBM XL C/C++ Compilers'),
|
1116 |
+
}
|
1117 |
+
|
1118 |
+
|
1119 |
+
def show_compilers():
|
1120 |
+
"""Print list of available compilers (used by the "--help-compiler"
|
1121 |
+
options to "build", "build_ext", "build_clib").
|
1122 |
+
"""
|
1123 |
+
# XXX this "knows" that the compiler option it's describing is
|
1124 |
+
# "--compiler", which just happens to be the case for the three
|
1125 |
+
# commands that use it.
|
1126 |
+
from distutils.fancy_getopt import FancyGetopt
|
1127 |
+
|
1128 |
+
compilers = sorted(
|
1129 |
+
("compiler=" + compiler, None, compiler_class[compiler][2])
|
1130 |
+
for compiler in compiler_class.keys()
|
1131 |
+
)
|
1132 |
+
pretty_printer = FancyGetopt(compilers)
|
1133 |
+
pretty_printer.print_help("List of available compilers:")
|
1134 |
+
|
1135 |
+
|
1136 |
+
def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False):
|
1137 |
+
"""Generate an instance of some CCompiler subclass for the supplied
|
1138 |
+
platform/compiler combination. 'plat' defaults to 'os.name'
|
1139 |
+
(eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
|
1140 |
+
for that platform. Currently only 'posix' and 'nt' are supported, and
|
1141 |
+
the default compilers are "traditional Unix interface" (UnixCCompiler
|
1142 |
+
class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
|
1143 |
+
possible to ask for a Unix compiler object under Windows, and a
|
1144 |
+
Microsoft compiler object under Unix -- if you supply a value for
|
1145 |
+
'compiler', 'plat' is ignored.
|
1146 |
+
"""
|
1147 |
+
if plat is None:
|
1148 |
+
plat = os.name
|
1149 |
+
|
1150 |
+
try:
|
1151 |
+
if compiler is None:
|
1152 |
+
compiler = get_default_compiler(plat)
|
1153 |
+
|
1154 |
+
(module_name, class_name, long_description) = compiler_class[compiler]
|
1155 |
+
except KeyError:
|
1156 |
+
msg = f"don't know how to compile C/C++ code on platform '{plat}'"
|
1157 |
+
if compiler is not None:
|
1158 |
+
msg = msg + f" with '{compiler}' compiler"
|
1159 |
+
raise DistutilsPlatformError(msg)
|
1160 |
+
|
1161 |
+
try:
|
1162 |
+
module_name = "distutils." + module_name
|
1163 |
+
__import__(module_name)
|
1164 |
+
module = sys.modules[module_name]
|
1165 |
+
klass = vars(module)[class_name]
|
1166 |
+
except ImportError:
|
1167 |
+
raise DistutilsModuleError(
|
1168 |
+
f"can't compile C/C++ code: unable to load module '{module_name}'"
|
1169 |
+
)
|
1170 |
+
except KeyError:
|
1171 |
+
raise DistutilsModuleError(
|
1172 |
+
f"can't compile C/C++ code: unable to find class '{class_name}' "
|
1173 |
+
f"in module '{module_name}'"
|
1174 |
+
)
|
1175 |
+
|
1176 |
+
# XXX The None is necessary to preserve backwards compatibility
|
1177 |
+
# with classes that expect verbose to be the first positional
|
1178 |
+
# argument.
|
1179 |
+
return klass(None, dry_run, force)
|
1180 |
+
|
1181 |
+
|
1182 |
+
def gen_preprocess_options(macros, include_dirs):
|
1183 |
+
"""Generate C pre-processor options (-D, -U, -I) as used by at least
|
1184 |
+
two types of compilers: the typical Unix compiler and Visual C++.
|
1185 |
+
'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
|
1186 |
+
means undefine (-U) macro 'name', and (name,value) means define (-D)
|
1187 |
+
macro 'name' to 'value'. 'include_dirs' is just a list of directory
|
1188 |
+
names to be added to the header file search path (-I). Returns a list
|
1189 |
+
of command-line options suitable for either Unix compilers or Visual
|
1190 |
+
C++.
|
1191 |
+
"""
|
1192 |
+
# XXX it would be nice (mainly aesthetic, and so we don't generate
|
1193 |
+
# stupid-looking command lines) to go over 'macros' and eliminate
|
1194 |
+
# redundant definitions/undefinitions (ie. ensure that only the
|
1195 |
+
# latest mention of a particular macro winds up on the command
|
1196 |
+
# line). I don't think it's essential, though, since most (all?)
|
1197 |
+
# Unix C compilers only pay attention to the latest -D or -U
|
1198 |
+
# mention of a macro on their command line. Similar situation for
|
1199 |
+
# 'include_dirs'. I'm punting on both for now. Anyways, weeding out
|
1200 |
+
# redundancies like this should probably be the province of
|
1201 |
+
# CCompiler, since the data structures used are inherited from it
|
1202 |
+
# and therefore common to all CCompiler classes.
|
1203 |
+
pp_opts = []
|
1204 |
+
for macro in macros:
|
1205 |
+
if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
|
1206 |
+
raise TypeError(
|
1207 |
+
f"bad macro definition '{macro}': "
|
1208 |
+
"each element of 'macros' list must be a 1- or 2-tuple"
|
1209 |
+
)
|
1210 |
+
|
1211 |
+
if len(macro) == 1: # undefine this macro
|
1212 |
+
pp_opts.append(f"-U{macro[0]}")
|
1213 |
+
elif len(macro) == 2:
|
1214 |
+
if macro[1] is None: # define with no explicit value
|
1215 |
+
pp_opts.append(f"-D{macro[0]}")
|
1216 |
+
else:
|
1217 |
+
# XXX *don't* need to be clever about quoting the
|
1218 |
+
# macro value here, because we're going to avoid the
|
1219 |
+
# shell at all costs when we spawn the command!
|
1220 |
+
pp_opts.append("-D{}={}".format(*macro))
|
1221 |
+
|
1222 |
+
pp_opts.extend(f"-I{dir}" for dir in include_dirs)
|
1223 |
+
return pp_opts
|
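A concrete call following the macro conventions from the docstring: a 1-tuple undefines, (name, None) defines with no value, and (name, value) defines with a value.

    from distutils.ccompiler import gen_preprocess_options

    opts = gen_preprocess_options(
        macros=[('NDEBUG', None), ('VERSION', '"1.2"'), ('OLD_FLAG',)],
        include_dirs=['include', '/usr/local/include'],
    )
    print(opts)
    # ['-DNDEBUG', '-DVERSION="1.2"', '-UOLD_FLAG', '-Iinclude', '-I/usr/local/include']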
1224 |
+
|
1225 |
+
|
1226 |
+
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
|
1227 |
+
"""Generate linker options for searching library directories and
|
1228 |
+
linking with specific libraries. 'libraries' and 'library_dirs' are,
|
1229 |
+
respectively, lists of library names (not filenames!) and search
|
1230 |
+
directories. Returns a list of command-line options suitable for use
|
1231 |
+
with some compiler (depending on the two format strings passed in).
|
1232 |
+
"""
|
1233 |
+
lib_opts = [compiler.library_dir_option(dir) for dir in library_dirs]
|
1234 |
+
|
1235 |
+
for dir in runtime_library_dirs:
|
1236 |
+
lib_opts.extend(always_iterable(compiler.runtime_library_dir_option(dir)))
|
1237 |
+
|
1238 |
+
# XXX it's important that we *not* remove redundant library mentions!
|
1239 |
+
# sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
|
1240 |
+
# resolve all symbols. I just hope we never have to say "-lfoo obj.o
|
1241 |
+
# -lbar" to get things to work -- that's certainly a possibility, but a
|
1242 |
+
# pretty nasty way to arrange your C code.
|
1243 |
+
|
1244 |
+
for lib in libraries:
|
1245 |
+
(lib_dir, lib_name) = os.path.split(lib)
|
1246 |
+
if lib_dir:
|
1247 |
+
lib_file = compiler.find_library_file([lib_dir], lib_name)
|
1248 |
+
if lib_file:
|
1249 |
+
lib_opts.append(lib_file)
|
1250 |
+
else:
|
1251 |
+
compiler.warn(
|
1252 |
+
f"no library file corresponding to '{lib}' found (skipping)"
|
1253 |
+
)
|
1254 |
+
else:
|
1255 |
+
lib_opts.append(compiler.library_option(lib))
|
1256 |
+
return lib_opts
|
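An illustrative call with a Unix compiler; the exact flags come from the compiler's library_dir_option()/library_option() hooks, so they vary per subclass.

    from distutils.ccompiler import gen_lib_options
    from distutils.unixccompiler import UnixCCompiler

    cc = UnixCCompiler()
    opts = gen_lib_options(cc, ['/opt/foo/lib'], [], ['foo', 'bar'])
    print(opts)  # ['-L/opt/foo/lib', '-lfoo', '-lbar']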
.venv/Lib/site-packages/setuptools/_distutils/cmd.py
ADDED
@@ -0,0 +1,439 @@
1 |
+
"""distutils.cmd
|
2 |
+
|
3 |
+
Provides the Command class, the base class for the command classes
|
4 |
+
in the distutils.command package.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import logging
|
8 |
+
import os
|
9 |
+
import re
|
10 |
+
import sys
|
11 |
+
|
12 |
+
from . import _modified, archive_util, dir_util, file_util, util
|
13 |
+
from ._log import log
|
14 |
+
from .errors import DistutilsOptionError
|
15 |
+
|
16 |
+
|
17 |
+
class Command:
|
18 |
+
"""Abstract base class for defining command classes, the "worker bees"
|
19 |
+
of the Distutils. A useful analogy for command classes is to think of
|
20 |
+
them as subroutines with local variables called "options". The options
|
21 |
+
are "declared" in 'initialize_options()' and "defined" (given their
|
22 |
+
final values, aka "finalized") in 'finalize_options()', both of which
|
23 |
+
must be defined by every command class. The distinction between the
|
24 |
+
two is necessary because option values might come from the outside
|
25 |
+
world (command line, config file, ...), and any options dependent on
|
26 |
+
other options must be computed *after* these outside influences have
|
27 |
+
been processed -- hence 'finalize_options()'. The "body" of the
|
28 |
+
subroutine, where it does all its work based on the values of its
|
29 |
+
options, is the 'run()' method, which must also be implemented by every
|
30 |
+
command class.
|
31 |
+
"""
|
32 |
+
|
33 |
+
# 'sub_commands' formalizes the notion of a "family" of commands,
|
34 |
+
# eg. "install" as the parent with sub-commands "install_lib",
|
35 |
+
# "install_headers", etc. The parent of a family of commands
|
36 |
+
# defines 'sub_commands' as a class attribute; it's a list of
|
37 |
+
# (command_name : string, predicate : unbound_method | string | None)
|
38 |
+
# tuples, where 'predicate' is a method of the parent command that
|
39 |
+
# determines whether the corresponding command is applicable in the
|
40 |
+
# current situation. (Eg. "install_headers" is only applicable if
|
41 |
+
# we have any C header files to install.) If 'predicate' is None,
|
42 |
+
# that command is always applicable.
|
43 |
+
#
|
44 |
+
# 'sub_commands' is usually defined at the *end* of a class, because
|
45 |
+
# predicates can be unbound methods, so they must already have been
|
46 |
+
# defined. The canonical example is the "install" command.
|
47 |
+
sub_commands = []
|
48 |
+
|
49 |
+
# -- Creation/initialization methods -------------------------------
|
50 |
+
|
51 |
+
def __init__(self, dist):
|
52 |
+
"""Create and initialize a new Command object. Most importantly,
|
53 |
+
invokes the 'initialize_options()' method, which is the real
|
54 |
+
initializer and depends on the actual command being
|
55 |
+
instantiated.
|
56 |
+
"""
|
57 |
+
# late import because of mutual dependence between these classes
|
58 |
+
from distutils.dist import Distribution
|
59 |
+
|
60 |
+
if not isinstance(dist, Distribution):
|
61 |
+
raise TypeError("dist must be a Distribution instance")
|
62 |
+
if self.__class__ is Command:
|
63 |
+
raise RuntimeError("Command is an abstract class")
|
64 |
+
|
65 |
+
self.distribution = dist
|
66 |
+
self.initialize_options()
|
67 |
+
|
68 |
+
# Per-command versions of the global flags, so that the user can
|
69 |
+
# customize Distutils' behaviour command-by-command and let some
|
70 |
+
# commands fall back on the Distribution's behaviour. None means
|
71 |
+
# "not defined, check self.distribution's copy", while 0 or 1 mean
|
72 |
+
# false and true (duh). Note that this means figuring out the real
|
73 |
+
# value of each flag is a touch complicated -- hence "self._dry_run"
|
74 |
+
# will be handled by __getattr__, below.
|
75 |
+
# XXX This needs to be fixed.
|
76 |
+
self._dry_run = None
|
77 |
+
|
78 |
+
# verbose is largely ignored, but needs to be set for
|
79 |
+
# backwards compatibility (I think)?
|
80 |
+
self.verbose = dist.verbose
|
81 |
+
|
82 |
+
# Some commands define a 'self.force' option to ignore file
|
83 |
+
# timestamps, but methods defined *here* assume that
|
84 |
+
# 'self.force' exists for all commands. So define it here
|
85 |
+
# just to be safe.
|
86 |
+
self.force = None
|
87 |
+
|
88 |
+
# The 'help' flag is just used for command-line parsing, so
|
89 |
+
# none of that complicated bureaucracy is needed.
|
90 |
+
self.help = False
|
91 |
+
|
92 |
+
# 'finalized' records whether or not 'finalize_options()' has been
|
93 |
+
# called. 'finalize_options()' itself should not pay attention to
|
94 |
+
# this flag: it is the business of 'ensure_finalized()', which
|
95 |
+
# always calls 'finalize_options()', to respect/update it.
|
96 |
+
self.finalized = False
|
97 |
+
|
98 |
+
# XXX A more explicit way to customize dry_run would be better.
|
99 |
+
def __getattr__(self, attr):
|
100 |
+
if attr == 'dry_run':
|
101 |
+
myval = getattr(self, "_" + attr)
|
102 |
+
if myval is None:
|
103 |
+
return getattr(self.distribution, attr)
|
104 |
+
else:
|
105 |
+
return myval
|
106 |
+
else:
|
107 |
+
raise AttributeError(attr)
|
108 |
+
|
109 |
+
def ensure_finalized(self):
|
110 |
+
if not self.finalized:
|
111 |
+
self.finalize_options()
|
112 |
+
self.finalized = True
|
113 |
+
|
114 |
+
# Subclasses must define:
|
115 |
+
# initialize_options()
|
116 |
+
# provide default values for all options; may be customized by
|
117 |
+
# setup script, by options from config file(s), or by command-line
|
118 |
+
# options
|
119 |
+
# finalize_options()
|
120 |
+
# decide on the final values for all options; this is called
|
121 |
+
# after all possible intervention from the outside world
|
122 |
+
# (command-line, option file, etc.) has been processed
|
123 |
+
# run()
|
124 |
+
# run the command: do whatever it is we're here to do,
|
125 |
+
# controlled by the command's various option values
|
126 |
+
|
127 |
+
def initialize_options(self):
|
128 |
+
"""Set default values for all the options that this command
|
129 |
+
supports. Note that these defaults may be overridden by other
|
130 |
+
commands, by the setup script, by config files, or by the
|
131 |
+
command-line. Thus, this is not the place to code dependencies
|
132 |
+
between options; generally, 'initialize_options()' implementations
|
133 |
+
are just a bunch of "self.foo = None" assignments.
|
134 |
+
|
135 |
+
This method must be implemented by all command classes.
|
136 |
+
"""
|
137 |
+
raise RuntimeError(
|
138 |
+
f"abstract method -- subclass {self.__class__} must override"
|
139 |
+
)
|
140 |
+
|
141 |
+
def finalize_options(self):
|
142 |
+
"""Set final values for all the options that this command supports.
|
143 |
+
This is always called as late as possible, ie. after any option
|
144 |
+
assignments from the command-line or from other commands have been
|
145 |
+
done. Thus, this is the place to code option dependencies: if
|
146 |
+
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
|
147 |
+
long as 'foo' still has the same value it was assigned in
|
148 |
+
'initialize_options()'.
|
149 |
+
|
150 |
+
This method must be implemented by all command classes.
|
151 |
+
"""
|
152 |
+
raise RuntimeError(
|
153 |
+
f"abstract method -- subclass {self.__class__} must override"
|
154 |
+
)
|
155 |
+
|
156 |
+
def dump_options(self, header=None, indent=""):
|
157 |
+
from distutils.fancy_getopt import longopt_xlate
|
158 |
+
|
159 |
+
if header is None:
|
160 |
+
header = f"command options for '{self.get_command_name()}':"
|
161 |
+
self.announce(indent + header, level=logging.INFO)
|
162 |
+
indent = indent + " "
|
163 |
+
for option, _, _ in self.user_options:
|
164 |
+
option = option.translate(longopt_xlate)
|
165 |
+
if option[-1] == "=":
|
166 |
+
option = option[:-1]
|
167 |
+
value = getattr(self, option)
|
168 |
+
self.announce(indent + f"{option} = {value}", level=logging.INFO)
|
169 |
+
|
170 |
+
def run(self):
|
171 |
+
"""A command's raison d'etre: carry out the action it exists to
|
172 |
+
perform, controlled by the options initialized in
|
173 |
+
'initialize_options()', customized by other commands, the setup
|
174 |
+
script, the command-line, and config files, and finalized in
|
175 |
+
'finalize_options()'. All terminal output and filesystem
|
176 |
+
interaction should be done by 'run()'.
|
177 |
+
|
178 |
+
This method must be implemented by all command classes.
|
179 |
+
"""
|
180 |
+
raise RuntimeError(
|
181 |
+
f"abstract method -- subclass {self.__class__} must override"
|
182 |
+
)
|
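A minimal, hypothetical command following the three-method contract above (the option name and message are made up for illustration); it would be registered via the 'cmdclass' argument to setup() and invoked as 'python setup.py greet --who=team'.

    import logging
    from distutils.cmd import Command

    class greet(Command):
        description = "print a greeting (illustrative example)"
        user_options = [('who=', None, "name to greet")]

        def initialize_options(self):
            # declare every option with a placeholder value
            self.who = None

        def finalize_options(self):
            # resolve defaults after config files / command line were applied
            if self.who is None:
                self.who = 'world'

        def run(self):
            self.announce(f"hello, {self.who}", level=logging.INFO)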
183 |
+
|
184 |
+
def announce(self, msg, level=logging.DEBUG):
|
185 |
+
log.log(level, msg)
|
186 |
+
|
187 |
+
def debug_print(self, msg):
|
188 |
+
"""Print 'msg' to stdout if the global DEBUG (taken from the
|
189 |
+
DISTUTILS_DEBUG environment variable) flag is true.
|
190 |
+
"""
|
191 |
+
from distutils.debug import DEBUG
|
192 |
+
|
193 |
+
if DEBUG:
|
194 |
+
print(msg)
|
195 |
+
sys.stdout.flush()
|
196 |
+
|
197 |
+
# -- Option validation methods -------------------------------------
|
198 |
+
# (these are very handy in writing the 'finalize_options()' method)
|
199 |
+
#
|
200 |
+
# NB. the general philosophy here is to ensure that a particular option
|
201 |
+
# value meets certain type and value constraints. If not, we try to
|
202 |
+
# force it into conformance (eg. if we expect a list but have a string,
|
203 |
+
# split the string on comma and/or whitespace). If we can't force the
|
204 |
+
# option into conformance, raise DistutilsOptionError. Thus, command
|
205 |
+
# classes need do nothing more than (eg.)
|
206 |
+
# self.ensure_string_list('foo')
|
207 |
+
# and they can be guaranteed that thereafter, self.foo will be
|
208 |
+
# a list of strings.
|
209 |
+
|
210 |
+
def _ensure_stringlike(self, option, what, default=None):
|
211 |
+
val = getattr(self, option)
|
212 |
+
if val is None:
|
213 |
+
setattr(self, option, default)
|
214 |
+
return default
|
215 |
+
elif not isinstance(val, str):
|
216 |
+
raise DistutilsOptionError(f"'{option}' must be a {what} (got `{val}`)")
|
217 |
+
return val
|
218 |
+
|
219 |
+
def ensure_string(self, option, default=None):
|
220 |
+
"""Ensure that 'option' is a string; if not defined, set it to
|
221 |
+
'default'.
|
222 |
+
"""
|
223 |
+
self._ensure_stringlike(option, "string", default)
|
224 |
+
|
225 |
+
def ensure_string_list(self, option):
|
226 |
+
r"""Ensure that 'option' is a list of strings. If 'option' is
|
227 |
+
currently a string, we split it either on /,\s*/ or /\s+/, so
|
228 |
+
"foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
|
229 |
+
["foo", "bar", "baz"].
|
230 |
+
"""
|
231 |
+
val = getattr(self, option)
|
232 |
+
if val is None:
|
233 |
+
return
|
234 |
+
elif isinstance(val, str):
|
235 |
+
setattr(self, option, re.split(r',\s*|\s+', val))
|
236 |
+
else:
|
237 |
+
if isinstance(val, list):
|
238 |
+
ok = all(isinstance(v, str) for v in val)
|
239 |
+
else:
|
240 |
+
ok = False
|
241 |
+
if not ok:
|
242 |
+
raise DistutilsOptionError(
|
243 |
+
f"'{option}' must be a list of strings (got {val!r})"
|
244 |
+
)
|
245 |
+
|
246 |
+
def _ensure_tested_string(self, option, tester, what, error_fmt, default=None):
|
247 |
+
val = self._ensure_stringlike(option, what, default)
|
248 |
+
if val is not None and not tester(val):
|
249 |
+
raise DistutilsOptionError(
|
250 |
+
("error in '%s' option: " + error_fmt) % (option, val)
|
251 |
+
)
|
252 |
+
|
253 |
+
def ensure_filename(self, option):
|
254 |
+
"""Ensure that 'option' is the name of an existing file."""
|
255 |
+
self._ensure_tested_string(
|
256 |
+
option, os.path.isfile, "filename", "'%s' does not exist or is not a file"
|
257 |
+
)
|
258 |
+
|
259 |
+
def ensure_dirname(self, option):
|
260 |
+
self._ensure_tested_string(
|
261 |
+
option,
|
262 |
+
os.path.isdir,
|
263 |
+
"directory name",
|
264 |
+
"'%s' does not exist or is not a directory",
|
265 |
+
)
|
266 |
+
|
267 |
+
# -- Convenience methods for commands ------------------------------
|
268 |
+
|
269 |
+
def get_command_name(self):
|
270 |
+
if hasattr(self, 'command_name'):
|
271 |
+
return self.command_name
|
272 |
+
else:
|
273 |
+
return self.__class__.__name__
|
274 |
+
|
275 |
+
def set_undefined_options(self, src_cmd, *option_pairs):
|
276 |
+
"""Set the values of any "undefined" options from corresponding
|
277 |
+
option values in some other command object. "Undefined" here means
|
278 |
+
"is None", which is the convention used to indicate that an option
|
279 |
+
has not been changed between 'initialize_options()' and
|
280 |
+
'finalize_options()'. Usually called from 'finalize_options()' for
|
281 |
+
options that depend on some other command rather than another
|
282 |
+
option of the same command. 'src_cmd' is the other command from
|
283 |
+
which option values will be taken (a command object will be created
|
284 |
+
for it if necessary); the remaining arguments are
|
285 |
+
'(src_option,dst_option)' tuples which mean "take the value of
|
286 |
+
'src_option' in the 'src_cmd' command object, and copy it to
|
287 |
+
'dst_option' in the current command object".
|
288 |
+
"""
|
289 |
+
# Option_pairs: list of (src_option, dst_option) tuples
|
290 |
+
src_cmd_obj = self.distribution.get_command_obj(src_cmd)
|
291 |
+
src_cmd_obj.ensure_finalized()
|
292 |
+
for src_option, dst_option in option_pairs:
|
293 |
+
if getattr(self, dst_option) is None:
|
294 |
+
setattr(self, dst_option, getattr(src_cmd_obj, src_option))
|
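The canonical use is from a subcommand's finalize_options(); a fragment mirroring what the built-in install_* commands do (option names follow those commands):

    def finalize_options(self):
        # take 'install_lib' from the parent 'install' command and store it
        # as this command's 'install_dir', unless it was set here explicitly
        self.set_undefined_options('install', ('install_lib', 'install_dir'))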
295 |
+
|
296 |
+
def get_finalized_command(self, command, create=True):
|
297 |
+
"""Wrapper around Distribution's 'get_command_obj()' method: find
|
298 |
+
(create if necessary and 'create' is true) the command object for
|
299 |
+
'command', call its 'ensure_finalized()' method, and return the
|
300 |
+
finalized command object.
|
301 |
+
"""
|
302 |
+
cmd_obj = self.distribution.get_command_obj(command, create)
|
303 |
+
cmd_obj.ensure_finalized()
|
304 |
+
return cmd_obj
|
305 |
+
|
306 |
+
# XXX rename to 'get_reinitialized_command()'? (should do the
|
307 |
+
# same in dist.py, if so)
|
308 |
+
def reinitialize_command(self, command, reinit_subcommands=False):
|
309 |
+
return self.distribution.reinitialize_command(command, reinit_subcommands)
|
310 |
+
|
311 |
+
def run_command(self, command):
|
312 |
+
"""Run some other command: uses the 'run_command()' method of
|
313 |
+
Distribution, which creates and finalizes the command object if
|
314 |
+
necessary and then invokes its 'run()' method.
|
315 |
+
"""
|
316 |
+
self.distribution.run_command(command)
|
317 |
+
|
318 |
+
def get_sub_commands(self):
|
319 |
+
"""Determine the sub-commands that are relevant in the current
|
320 |
+
distribution (ie., that need to be run). This is based on the
|
321 |
+
'sub_commands' class attribute: each tuple in that list may include
|
322 |
+
a method that we call to determine if the subcommand needs to be
|
323 |
+
run for the current distribution. Return a list of command names.
|
324 |
+
"""
|
325 |
+
commands = []
|
326 |
+
for cmd_name, method in self.sub_commands:
|
327 |
+
if method is None or method(self):
|
328 |
+
commands.append(cmd_name)
|
329 |
+
return commands
|
330 |
+
|
331 |
+
# -- External world manipulation -----------------------------------
|
332 |
+
|
333 |
+
def warn(self, msg):
|
334 |
+
log.warning("warning: %s: %s\n", self.get_command_name(), msg)
|
335 |
+
|
336 |
+
def execute(self, func, args, msg=None, level=1):
|
337 |
+
util.execute(func, args, msg, dry_run=self.dry_run)
|
338 |
+
|
339 |
+
def mkpath(self, name, mode=0o777):
|
340 |
+
dir_util.mkpath(name, mode, dry_run=self.dry_run)
|
341 |
+
|
342 |
+
def copy_file(
|
343 |
+
self,
|
344 |
+
infile,
|
345 |
+
outfile,
|
346 |
+
preserve_mode=True,
|
347 |
+
preserve_times=True,
|
348 |
+
link=None,
|
349 |
+
level=1,
|
350 |
+
):
|
351 |
+
"""Copy a file respecting verbose, dry-run and force flags. (The
|
352 |
+
former two default to whatever is in the Distribution object, and
|
353 |
+
the latter defaults to false for commands that don't define it.)"""
|
354 |
+
return file_util.copy_file(
|
355 |
+
infile,
|
356 |
+
outfile,
|
357 |
+
preserve_mode,
|
358 |
+
preserve_times,
|
359 |
+
not self.force,
|
360 |
+
link,
|
361 |
+
dry_run=self.dry_run,
|
362 |
+
)
|
363 |
+
|
364 |
+
def copy_tree(
|
365 |
+
self,
|
366 |
+
infile,
|
367 |
+
outfile,
|
368 |
+
preserve_mode=True,
|
369 |
+
preserve_times=True,
|
370 |
+
preserve_symlinks=False,
|
371 |
+
level=1,
|
372 |
+
):
|
373 |
+
"""Copy an entire directory tree respecting verbose, dry-run,
|
374 |
+
and force flags.
|
375 |
+
"""
|
376 |
+
return dir_util.copy_tree(
|
377 |
+
infile,
|
378 |
+
outfile,
|
379 |
+
preserve_mode,
|
380 |
+
preserve_times,
|
381 |
+
preserve_symlinks,
|
382 |
+
not self.force,
|
383 |
+
dry_run=self.dry_run,
|
384 |
+
)
|
385 |
+
|
386 |
+
def move_file(self, src, dst, level=1):
|
387 |
+
"""Move a file respecting dry-run flag."""
|
388 |
+
return file_util.move_file(src, dst, dry_run=self.dry_run)
|
389 |
+
|
390 |
+
def spawn(self, cmd, search_path=True, level=1):
|
391 |
+
"""Spawn an external command respecting dry-run flag."""
|
392 |
+
from distutils.spawn import spawn
|
393 |
+
|
394 |
+
spawn(cmd, search_path, dry_run=self.dry_run)
|
395 |
+
|
396 |
+
def make_archive(
|
397 |
+
self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None
|
398 |
+
):
|
399 |
+
return archive_util.make_archive(
|
400 |
+
base_name,
|
401 |
+
format,
|
402 |
+
root_dir,
|
403 |
+
base_dir,
|
404 |
+
dry_run=self.dry_run,
|
405 |
+
owner=owner,
|
406 |
+
group=group,
|
407 |
+
)
|
408 |
+
|
409 |
+
def make_file(
|
410 |
+
self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1
|
411 |
+
):
|
412 |
+
"""Special case of 'execute()' for operations that process one or
|
413 |
+
more input files and generate one output file. Works just like
|
414 |
+
'execute()', except the operation is skipped and a different
|
415 |
+
message printed if 'outfile' already exists and is newer than all
|
416 |
+
files listed in 'infiles'. If the command defined 'self.force',
|
417 |
+
and it is true, then the command is unconditionally run -- does no
|
418 |
+
timestamp checks.
|
419 |
+
"""
|
420 |
+
if skip_msg is None:
|
421 |
+
skip_msg = f"skipping {outfile} (inputs unchanged)"
|
422 |
+
|
423 |
+
# Allow 'infiles' to be a single string
|
424 |
+
if isinstance(infiles, str):
|
425 |
+
infiles = (infiles,)
|
426 |
+
elif not isinstance(infiles, (list, tuple)):
|
427 |
+
raise TypeError("'infiles' must be a string, or a list or tuple of strings")
|
428 |
+
|
429 |
+
if exec_msg is None:
|
430 |
+
exec_msg = "generating {} from {}".format(outfile, ', '.join(infiles))
|
431 |
+
|
432 |
+
# If 'outfile' must be regenerated (either because it doesn't
|
433 |
+
# exist, is out-of-date, or the 'force' flag is true) then
|
434 |
+
# perform the action that presumably regenerates it
|
435 |
+
if self.force or _modified.newer_group(infiles, outfile):
|
436 |
+
self.execute(func, args, exec_msg, level)
|
437 |
+
# Otherwise, print the "skip" message
|
438 |
+
else:
|
439 |
+
log.debug(skip_msg)
|
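Inside a command's run(), make_file() is the usual way to regenerate an output only when its inputs changed; a hedged fragment (the file names and the generator function are hypothetical):

    def run(self):
        def write_version(outfile, version):
            with open(outfile, 'w', encoding='utf-8') as f:
                f.write(f"VERSION = {version!r}\n")

        self.make_file(
            infiles=['setup.cfg'],            # regenerate when this changes
            outfile='build/_version.py',
            func=write_version,
            args=('build/_version.py', '1.0'),
            exec_msg="writing build/_version.py",
        )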
.venv/Lib/site-packages/setuptools/_distutils/core.py
ADDED
@@ -0,0 +1,286 @@
1 |
+
"""distutils.core
|
2 |
+
|
3 |
+
The only module that needs to be imported to use the Distutils; provides
|
4 |
+
the 'setup' function (which is to be called from the setup script). Also
|
5 |
+
indirectly provides the Distribution and Command classes, although they are
|
6 |
+
really defined in distutils.dist and distutils.cmd.
|
7 |
+
"""
|
8 |
+
|
9 |
+
import os
|
10 |
+
import sys
|
11 |
+
import tokenize
|
12 |
+
|
13 |
+
from .cmd import Command
|
14 |
+
from .debug import DEBUG
|
15 |
+
|
16 |
+
# Mainly import these so setup scripts can "from distutils.core import" them.
|
17 |
+
from .dist import Distribution
|
18 |
+
from .errors import (
|
19 |
+
CCompilerError,
|
20 |
+
DistutilsArgError,
|
21 |
+
DistutilsError,
|
22 |
+
DistutilsSetupError,
|
23 |
+
)
|
24 |
+
from .extension import Extension
|
25 |
+
|
26 |
+
__all__ = ['Distribution', 'Command', 'Extension', 'setup']
|
27 |
+
|
28 |
+
# This is a barebones help message generated and displayed when the user
|
29 |
+
# runs the setup script with no arguments at all. More useful help
|
30 |
+
# is generated with various --help options: global help, list commands,
|
31 |
+
# and per-command help.
|
32 |
+
USAGE = """\
|
33 |
+
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
|
34 |
+
or: %(script)s --help [cmd1 cmd2 ...]
|
35 |
+
or: %(script)s --help-commands
|
36 |
+
or: %(script)s cmd --help
|
37 |
+
"""
|
38 |
+
|
39 |
+
|
40 |
+
def gen_usage(script_name):
|
41 |
+
script = os.path.basename(script_name)
|
42 |
+
return USAGE % locals()
|
43 |
+
|
44 |
+
|
45 |
+
# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
|
46 |
+
_setup_stop_after = None
|
47 |
+
_setup_distribution = None
|
48 |
+
|
49 |
+
# Legal keyword arguments for the setup() function
|
50 |
+
setup_keywords = (
|
51 |
+
'distclass',
|
52 |
+
'script_name',
|
53 |
+
'script_args',
|
54 |
+
'options',
|
55 |
+
'name',
|
56 |
+
'version',
|
57 |
+
'author',
|
58 |
+
'author_email',
|
59 |
+
'maintainer',
|
60 |
+
'maintainer_email',
|
61 |
+
'url',
|
62 |
+
'license',
|
63 |
+
'description',
|
64 |
+
'long_description',
|
65 |
+
'keywords',
|
66 |
+
'platforms',
|
67 |
+
'classifiers',
|
68 |
+
'download_url',
|
69 |
+
'requires',
|
70 |
+
'provides',
|
71 |
+
'obsoletes',
|
72 |
+
)
|
73 |
+
|
74 |
+
# Legal keyword arguments for the Extension constructor
|
75 |
+
extension_keywords = (
|
76 |
+
'name',
|
77 |
+
'sources',
|
78 |
+
'include_dirs',
|
79 |
+
'define_macros',
|
80 |
+
'undef_macros',
|
81 |
+
'library_dirs',
|
82 |
+
'libraries',
|
83 |
+
'runtime_library_dirs',
|
84 |
+
'extra_objects',
|
85 |
+
'extra_compile_args',
|
86 |
+
'extra_link_args',
|
87 |
+
'swig_opts',
|
88 |
+
'export_symbols',
|
89 |
+
'depends',
|
90 |
+
'language',
|
91 |
+
)
|
92 |
+
|
93 |
+
|
94 |
+
def setup(**attrs): # noqa: C901
|
95 |
+
"""The gateway to the Distutils: do everything your setup script needs
|
96 |
+
to do, in a highly flexible and user-driven way. Briefly: create a
|
97 |
+
Distribution instance; find and parse config files; parse the command
|
98 |
+
line; run each Distutils command found there, customized by the options
|
99 |
+
supplied to 'setup()' (as keyword arguments), in config files, and on
|
100 |
+
the command line.
|
101 |
+
|
102 |
+
The Distribution instance might be an instance of a class supplied via
|
103 |
+
the 'distclass' keyword argument to 'setup'; if no such class is
|
104 |
+
supplied, then the Distribution class (in dist.py) is instantiated.
|
105 |
+
All other arguments to 'setup' (except for 'cmdclass') are used to set
|
106 |
+
attributes of the Distribution instance.
|
107 |
+
|
108 |
+
The 'cmdclass' argument, if supplied, is a dictionary mapping command
|
109 |
+
names to command classes. Each command encountered on the command line
|
110 |
+
will be turned into a command class, which is in turn instantiated; any
|
111 |
+
class found in 'cmdclass' is used in place of the default, which is
|
112 |
+
(for command 'foo_bar') class 'foo_bar' in module
|
113 |
+
'distutils.command.foo_bar'. The command class must provide a
|
114 |
+
'user_options' attribute which is a list of option specifiers for
|
115 |
+
'distutils.fancy_getopt'. Any command-line options between the current
|
116 |
+
and the next command are used to set attributes of the current command
|
117 |
+
object.
|
118 |
+
|
119 |
+
When the entire command-line has been successfully parsed, calls the
|
120 |
+
'run()' method on each command object in turn. This method will be
|
121 |
+
driven entirely by the Distribution object (which each command object
|
122 |
+
has a reference to, thanks to its constructor), and the
|
123 |
+
command-specific options that became attributes of each command
|
124 |
+
object.
|
125 |
+
"""
|
126 |
+
|
127 |
+
global _setup_stop_after, _setup_distribution
|
128 |
+
|
129 |
+
# Determine the distribution class -- either caller-supplied or
|
130 |
+
# our Distribution (see below).
|
131 |
+
klass = attrs.get('distclass')
|
132 |
+
if klass:
|
133 |
+
attrs.pop('distclass')
|
134 |
+
else:
|
135 |
+
klass = Distribution
|
136 |
+
|
137 |
+
if 'script_name' not in attrs:
|
138 |
+
attrs['script_name'] = os.path.basename(sys.argv[0])
|
139 |
+
if 'script_args' not in attrs:
|
140 |
+
attrs['script_args'] = sys.argv[1:]
|
141 |
+
|
142 |
+
# Create the Distribution instance, using the remaining arguments
|
143 |
+
# (ie. everything except distclass) to initialize it
|
144 |
+
try:
|
145 |
+
_setup_distribution = dist = klass(attrs)
|
146 |
+
except DistutilsSetupError as msg:
|
147 |
+
if 'name' not in attrs:
|
148 |
+
raise SystemExit(f"error in setup command: {msg}")
|
149 |
+
else:
|
150 |
+
raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg))
|
151 |
+
|
152 |
+
if _setup_stop_after == "init":
|
153 |
+
return dist
|
154 |
+
|
155 |
+
# Find and parse the config file(s): they will override options from
|
156 |
+
# the setup script, but be overridden by the command line.
|
157 |
+
dist.parse_config_files()
|
158 |
+
|
159 |
+
if DEBUG:
|
160 |
+
print("options (after parsing config files):")
|
161 |
+
dist.dump_option_dicts()
|
162 |
+
|
163 |
+
if _setup_stop_after == "config":
|
164 |
+
return dist
|
165 |
+
|
166 |
+
# Parse the command line and override config files; any
|
167 |
+
# command-line errors are the end user's fault, so turn them into
|
168 |
+
# SystemExit to suppress tracebacks.
|
169 |
+
try:
|
170 |
+
ok = dist.parse_command_line()
|
171 |
+
except DistutilsArgError as msg:
|
172 |
+
raise SystemExit(gen_usage(dist.script_name) + f"\nerror: {msg}")
|
173 |
+
|
174 |
+
if DEBUG:
|
175 |
+
print("options (after parsing command line):")
|
176 |
+
dist.dump_option_dicts()
|
177 |
+
|
178 |
+
if _setup_stop_after == "commandline":
|
179 |
+
return dist
|
180 |
+
|
181 |
+
# And finally, run all the commands found on the command line.
|
182 |
+
if ok:
|
183 |
+
return run_commands(dist)
|
184 |
+
|
185 |
+
return dist
|
186 |
+
|
187 |
+
|
188 |
+
# setup ()
|
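A minimal setup script driving the function above (the project name, module and extension are illustrative):

    # setup.py -- hypothetical project
    from distutils.core import setup, Extension

    setup(
        name='example',
        version='0.1',
        description='illustrative distutils example',
        py_modules=['example'],
        ext_modules=[Extension('_fast', sources=['src/_fast.c'])],
    )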
189 |
+
|
190 |
+
|
191 |
+
def run_commands(dist):
|
192 |
+
"""Given a Distribution object run all the commands,
|
193 |
+
raising ``SystemExit`` errors in the case of failure.
|
194 |
+
|
195 |
+
This function assumes that either ``sys.argv`` or ``dist.script_args``
|
196 |
+
is already set accordingly.
|
197 |
+
"""
|
198 |
+
try:
|
199 |
+
dist.run_commands()
|
200 |
+
except KeyboardInterrupt:
|
201 |
+
raise SystemExit("interrupted")
|
202 |
+
except OSError as exc:
|
203 |
+
if DEBUG:
|
204 |
+
sys.stderr.write(f"error: {exc}\n")
|
205 |
+
raise
|
206 |
+
else:
|
207 |
+
raise SystemExit(f"error: {exc}")
|
208 |
+
|
209 |
+
except (DistutilsError, CCompilerError) as msg:
|
210 |
+
if DEBUG:
|
211 |
+
raise
|
212 |
+
else:
|
213 |
+
raise SystemExit("error: " + str(msg))
|
214 |
+
|
215 |
+
return dist
|
216 |
+
|
217 |
+
|
218 |
+
def run_setup(script_name, script_args=None, stop_after="run"):
|
219 |
+
"""Run a setup script in a somewhat controlled environment, and
|
220 |
+
return the Distribution instance that drives things. This is useful
|
221 |
+
if you need to find out the distribution meta-data (passed as
|
222 |
+
keyword args from 'script' to 'setup()'), or the contents of the
|
223 |
+
config files or command-line.
|
224 |
+
|
225 |
+
'script_name' is a file that will be read and run with 'exec()';
|
226 |
+
'sys.argv[0]' will be replaced with 'script' for the duration of the
|
227 |
+
call. 'script_args' is a list of strings; if supplied,
|
228 |
+
'sys.argv[1:]' will be replaced by 'script_args' for the duration of
|
229 |
+
the call.
|
230 |
+
|
231 |
+
'stop_after' tells 'setup()' when to stop processing; possible
|
232 |
+
values:
|
233 |
+
init
|
234 |
+
stop after the Distribution instance has been created and
|
235 |
+
populated with the keyword arguments to 'setup()'
|
236 |
+
config
|
237 |
+
stop after config files have been parsed (and their data
|
238 |
+
stored in the Distribution instance)
|
239 |
+
commandline
|
240 |
+
stop after the command-line ('sys.argv[1:]' or 'script_args')
|
241 |
+
have been parsed (and the data stored in the Distribution)
|
242 |
+
run [default]
|
243 |
+
stop after all commands have been run (the same as if 'setup()'
|
244 |
+
had been called in the usual way)
|
245 |
+
|
246 |
+
Returns the Distribution instance, which provides all information
|
247 |
+
used to drive the Distutils.
|
248 |
+
"""
|
249 |
+
if stop_after not in ('init', 'config', 'commandline', 'run'):
|
250 |
+
raise ValueError(f"invalid value for 'stop_after': {stop_after!r}")
|
251 |
+
|
252 |
+
global _setup_stop_after, _setup_distribution
|
253 |
+
_setup_stop_after = stop_after
|
254 |
+
|
255 |
+
save_argv = sys.argv.copy()
|
256 |
+
g = {'__file__': script_name, '__name__': '__main__'}
|
257 |
+
try:
|
258 |
+
try:
|
259 |
+
sys.argv[0] = script_name
|
260 |
+
if script_args is not None:
|
261 |
+
sys.argv[1:] = script_args
|
262 |
+
# tokenize.open supports automatic encoding detection
|
263 |
+
with tokenize.open(script_name) as f:
|
264 |
+
code = f.read().replace(r'\r\n', r'\n')
|
265 |
+
exec(code, g)
|
266 |
+
finally:
|
267 |
+
sys.argv = save_argv
|
268 |
+
_setup_stop_after = None
|
269 |
+
except SystemExit:
|
270 |
+
# Hmm, should we do something if exiting with a non-zero code
|
271 |
+
# (ie. error)?
|
272 |
+
pass
|
273 |
+
|
274 |
+
if _setup_distribution is None:
|
275 |
+
raise RuntimeError(
|
276 |
+
"'distutils.core.setup()' was never called -- "
|
277 |
+
f"perhaps '{script_name}' is not a Distutils setup script?"
|
278 |
+
)
|
279 |
+
|
280 |
+
# I wonder if the setup script's namespace -- g and l -- would be of
|
281 |
+
# any interest to callers?
|
282 |
+
# print "_setup_distribution:", _setup_distribution
|
283 |
+
return _setup_distribution
|
284 |
+
|
285 |
+
|
286 |
+
# run_setup ()
|
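run_setup() is handy for inspecting another project's metadata without running its commands; a hedged example (the path is hypothetical):

    from distutils.core import run_setup

    # Parse the script and stop before any command is executed.
    dist = run_setup('path/to/setup.py', script_args=[], stop_after='init')
    print(dist.get_name(), dist.get_version())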
.venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py
ADDED
@@ -0,0 +1,339 @@
1 |
+
"""distutils.cygwinccompiler
|
2 |
+
|
3 |
+
Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
|
4 |
+
handles the Cygwin port of the GNU C compiler to Windows. It also contains
|
5 |
+
the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
|
6 |
+
cygwin in no-cygwin mode).
|
7 |
+
"""
|
8 |
+
|
9 |
+
import copy
|
10 |
+
import os
|
11 |
+
import pathlib
|
12 |
+
import shlex
|
13 |
+
import sys
|
14 |
+
import warnings
|
15 |
+
from subprocess import check_output
|
16 |
+
|
17 |
+
from .errors import (
|
18 |
+
CCompilerError,
|
19 |
+
CompileError,
|
20 |
+
DistutilsExecError,
|
21 |
+
DistutilsPlatformError,
|
22 |
+
)
|
23 |
+
from .file_util import write_file
|
24 |
+
from .sysconfig import get_config_vars
|
25 |
+
from .unixccompiler import UnixCCompiler
|
26 |
+
from .version import LooseVersion, suppress_known_deprecation
|
27 |
+
|
28 |
+
|
29 |
+
def get_msvcr():
|
30 |
+
"""No longer needed, but kept for backward compatibility."""
|
31 |
+
return []
|
32 |
+
|
33 |
+
|
34 |
+
_runtime_library_dirs_msg = (
|
35 |
+
"Unable to set runtime library search path on Windows, "
|
36 |
+
"usually indicated by `runtime_library_dirs` parameter to Extension"
|
37 |
+
)
|
38 |
+
|
39 |
+
|
40 |
+
class CygwinCCompiler(UnixCCompiler):
|
41 |
+
"""Handles the Cygwin port of the GNU C compiler to Windows."""
|
42 |
+
|
43 |
+
compiler_type = 'cygwin'
|
44 |
+
obj_extension = ".o"
|
45 |
+
static_lib_extension = ".a"
|
46 |
+
shared_lib_extension = ".dll.a"
|
47 |
+
dylib_lib_extension = ".dll"
|
48 |
+
static_lib_format = "lib%s%s"
|
49 |
+
shared_lib_format = "lib%s%s"
|
50 |
+
dylib_lib_format = "cyg%s%s"
|
51 |
+
exe_extension = ".exe"
|
52 |
+
|
53 |
+
def __init__(self, verbose=False, dry_run=False, force=False):
|
54 |
+
super().__init__(verbose, dry_run, force)
|
55 |
+
|
56 |
+
status, details = check_config_h()
|
57 |
+
self.debug_print(f"Python's GCC status: {status} (details: {details})")
|
58 |
+
if status is not CONFIG_H_OK:
|
59 |
+
self.warn(
|
60 |
+
"Python's pyconfig.h doesn't seem to support your compiler. "
|
61 |
+
f"Reason: {details}. "
|
62 |
+
"Compiling may fail because of undefined preprocessor macros."
|
63 |
+
)
|
64 |
+
|
65 |
+
self.cc, self.cxx = get_config_vars('CC', 'CXX')
|
66 |
+
|
67 |
+
# Override 'CC' and 'CXX' environment variables for
|
68 |
+
# building using MINGW compiler for MSVC python.
|
69 |
+
self.cc = os.environ.get('CC', self.cc or 'gcc')
|
70 |
+
self.cxx = os.environ.get('CXX', self.cxx or 'g++')
|
71 |
+
|
72 |
+
self.linker_dll = self.cc
|
73 |
+
self.linker_dll_cxx = self.cxx
|
74 |
+
shared_option = "-shared"
|
75 |
+
|
76 |
+
self.set_executables(
|
77 |
+
compiler=f'{self.cc} -mcygwin -O -Wall',
|
78 |
+
compiler_so=f'{self.cc} -mcygwin -mdll -O -Wall',
|
79 |
+
compiler_cxx=f'{self.cxx} -mcygwin -O -Wall',
|
80 |
+
compiler_so_cxx=f'{self.cxx} -mcygwin -mdll -O -Wall',
|
81 |
+
linker_exe=f'{self.cc} -mcygwin',
|
82 |
+
linker_so=f'{self.linker_dll} -mcygwin {shared_option}',
|
83 |
+
linker_exe_cxx=f'{self.cxx} -mcygwin',
|
84 |
+
linker_so_cxx=f'{self.linker_dll_cxx} -mcygwin {shared_option}',
|
85 |
+
)
|
86 |
+
|
87 |
+
self.dll_libraries = get_msvcr()
|
88 |
+
|
89 |
+
@property
|
90 |
+
def gcc_version(self):
|
91 |
+
# Older numpy depended on this existing to check for ancient
|
92 |
+
# gcc versions. This doesn't make much sense with clang etc so
|
93 |
+
# just hardcode to something recent.
|
94 |
+
# https://github.com/numpy/numpy/pull/20333
|
95 |
+
warnings.warn(
|
96 |
+
"gcc_version attribute of CygwinCCompiler is deprecated. "
|
97 |
+
"Instead of returning actual gcc version a fixed value 11.2.0 is returned.",
|
98 |
+
DeprecationWarning,
|
99 |
+
stacklevel=2,
|
100 |
+
)
|
101 |
+
with suppress_known_deprecation():
|
102 |
+
return LooseVersion("11.2.0")
|
103 |
+
|
104 |
+
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
|
105 |
+
"""Compiles the source by spawning GCC and windres if needed."""
|
106 |
+
if ext in ('.rc', '.res'):
|
107 |
+
# gcc needs '.res' and '.rc' compiled to object files !!!
|
108 |
+
try:
|
109 |
+
self.spawn(["windres", "-i", src, "-o", obj])
|
110 |
+
except DistutilsExecError as msg:
|
111 |
+
raise CompileError(msg)
|
112 |
+
else: # for other files use the C-compiler
|
113 |
+
try:
|
114 |
+
if self.detect_language(src) == 'c++':
|
115 |
+
self.spawn(
|
116 |
+
self.compiler_so_cxx
|
117 |
+
+ cc_args
|
118 |
+
+ [src, '-o', obj]
|
119 |
+
+ extra_postargs
|
120 |
+
)
|
121 |
+
else:
|
122 |
+
self.spawn(
|
123 |
+
self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs
|
124 |
+
)
|
125 |
+
except DistutilsExecError as msg:
|
126 |
+
raise CompileError(msg)
|
127 |
+
|
128 |
+
def link(
|
129 |
+
self,
|
130 |
+
target_desc,
|
131 |
+
objects,
|
132 |
+
output_filename,
|
133 |
+
output_dir=None,
|
134 |
+
libraries=None,
|
135 |
+
library_dirs=None,
|
136 |
+
runtime_library_dirs=None,
|
137 |
+
export_symbols=None,
|
138 |
+
debug=False,
|
139 |
+
extra_preargs=None,
|
140 |
+
extra_postargs=None,
|
141 |
+
build_temp=None,
|
142 |
+
target_lang=None,
|
143 |
+
):
|
144 |
+
"""Link the objects."""
|
145 |
+
# use separate copies, so we can modify the lists
|
146 |
+
extra_preargs = copy.copy(extra_preargs or [])
|
147 |
+
libraries = copy.copy(libraries or [])
|
148 |
+
objects = copy.copy(objects or [])
|
149 |
+
|
150 |
+
if runtime_library_dirs:
|
151 |
+
self.warn(_runtime_library_dirs_msg)
|
152 |
+
|
153 |
+
# Additional libraries
|
154 |
+
libraries.extend(self.dll_libraries)
|
155 |
+
|
156 |
+
# handle export symbols by creating a def-file
|
157 |
+
# with executables this only works with gcc/ld as linker
|
158 |
+
if (export_symbols is not None) and (
|
159 |
+
target_desc != self.EXECUTABLE or self.linker_dll == "gcc"
|
160 |
+
):
|
161 |
+
# (The linker doesn't do anything if output is up-to-date.
|
162 |
+
# So it would probably better to check if we really need this,
|
163 |
+
# but for this we had to insert some unchanged parts of
|
164 |
+
# UnixCCompiler, and this is not what we want.)
|
165 |
+
|
166 |
+
# we want to put some files in the same directory as the
|
167 |
+
# object files are, build_temp doesn't help much
|
168 |
+
# where are the object files
|
169 |
+
temp_dir = os.path.dirname(objects[0])
|
170 |
+
# name of dll to give the helper files the same base name
|
171 |
+
(dll_name, dll_extension) = os.path.splitext(
|
172 |
+
os.path.basename(output_filename)
|
173 |
+
)
|
174 |
+
|
175 |
+
# generate the filenames for these files
|
176 |
+
def_file = os.path.join(temp_dir, dll_name + ".def")
|
177 |
+
|
178 |
+
# Generate .def file
|
179 |
+
contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"]
|
180 |
+
contents.extend(export_symbols)
|
181 |
+
self.execute(write_file, (def_file, contents), f"writing {def_file}")
|
182 |
+
|
183 |
+
# next add options for def-file
|
184 |
+
|
185 |
+
# for gcc/ld the def-file is specified as any object files
|
186 |
+
objects.append(def_file)
|
187 |
+
|
188 |
+
# end: if ((export_symbols is not None) and
|
189 |
+
# (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
|
190 |
+
|
191 |
+
# who wants symbols and a many times larger output file
|
192 |
+
# should explicitly switch the debug mode on
|
193 |
+
# otherwise we let ld strip the output file
|
194 |
+
# (On my machine: 10KiB < stripped_file < ??100KiB
|
195 |
+
# unstripped_file = stripped_file + XXX KiB
|
196 |
+
# ( XXX=254 for a typical python extension))
|
197 |
+
if not debug:
|
198 |
+
extra_preargs.append("-s")
|
199 |
+
|
200 |
+
UnixCCompiler.link(
|
201 |
+
self,
|
202 |
+
target_desc,
|
203 |
+
objects,
|
204 |
+
output_filename,
|
205 |
+
output_dir,
|
206 |
+
libraries,
|
207 |
+
library_dirs,
|
208 |
+
runtime_library_dirs,
|
209 |
+
None, # export_symbols, we do this in our def-file
|
210 |
+
debug,
|
211 |
+
extra_preargs,
|
212 |
+
extra_postargs,
|
213 |
+
build_temp,
|
214 |
+
target_lang,
|
215 |
+
)
|
216 |
+
|
217 |
+
def runtime_library_dir_option(self, dir):
|
218 |
+
# cygwin doesn't support rpath. While in theory we could error
|
219 |
+
# out like MSVC does, code might expect it to work like on Unix, so
|
220 |
+
# just warn and hope for the best.
|
221 |
+
self.warn(_runtime_library_dirs_msg)
|
222 |
+
return []
|
223 |
+
|
224 |
+
# -- Miscellaneous methods -----------------------------------------
|
225 |
+
|
226 |
+
def _make_out_path(self, output_dir, strip_dir, src_name):
|
227 |
+
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
|
228 |
+
norm_src_name = os.path.normcase(src_name)
|
229 |
+
return super()._make_out_path(output_dir, strip_dir, norm_src_name)
|
230 |
+
|
231 |
+
@property
|
232 |
+
def out_extensions(self):
|
233 |
+
"""
|
234 |
+
Add support for rc and res files.
|
235 |
+
"""
|
236 |
+
return {
|
237 |
+
**super().out_extensions,
|
238 |
+
**{ext: ext + self.obj_extension for ext in ('.res', '.rc')},
|
239 |
+
}
|
240 |
+
|
241 |
+
|
242 |
+
# the same as cygwin plus some additional parameters
|
243 |
+
class Mingw32CCompiler(CygwinCCompiler):
|
244 |
+
"""Handles the Mingw32 port of the GNU C compiler to Windows."""
|
245 |
+
|
246 |
+
compiler_type = 'mingw32'
|
247 |
+
|
248 |
+
def __init__(self, verbose=False, dry_run=False, force=False):
|
249 |
+
super().__init__(verbose, dry_run, force)
|
250 |
+
|
251 |
+
shared_option = "-shared"
|
252 |
+
|
253 |
+
if is_cygwincc(self.cc):
|
254 |
+
raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')
|
255 |
+
|
256 |
+
self.set_executables(
|
257 |
+
compiler=f'{self.cc} -O -Wall',
|
258 |
+
compiler_so=f'{self.cc} -shared -O -Wall',
|
259 |
+
compiler_so_cxx=f'{self.cxx} -shared -O -Wall',
|
260 |
+
compiler_cxx=f'{self.cxx} -O -Wall',
|
261 |
+
linker_exe=f'{self.cc}',
|
262 |
+
linker_so=f'{self.linker_dll} {shared_option}',
|
263 |
+
linker_exe_cxx=f'{self.cxx}',
|
264 |
+
linker_so_cxx=f'{self.linker_dll_cxx} {shared_option}',
|
265 |
+
)
|
266 |
+
|
267 |
+
def runtime_library_dir_option(self, dir):
|
268 |
+
raise DistutilsPlatformError(_runtime_library_dirs_msg)
|
269 |
+
|
270 |
+
|
271 |
+
# Because these compilers aren't configured in Python's pyconfig.h file by
|
272 |
+
# default, we should at least warn the user if he is using an unmodified
|
273 |
+
# version.
|
274 |
+
|
275 |
+
CONFIG_H_OK = "ok"
|
276 |
+
CONFIG_H_NOTOK = "not ok"
|
277 |
+
CONFIG_H_UNCERTAIN = "uncertain"
|
278 |
+
|
279 |
+
|
280 |
+
def check_config_h():
|
281 |
+
"""Check if the current Python installation appears amenable to building
|
282 |
+
extensions with GCC.
|
283 |
+
|
284 |
+
Returns a tuple (status, details), where 'status' is one of the following
|
285 |
+
constants:
|
286 |
+
|
287 |
+
- CONFIG_H_OK: all is well, go ahead and compile
|
288 |
+
- CONFIG_H_NOTOK: doesn't look good
|
289 |
+
- CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h
|
290 |
+
|
291 |
+
'details' is a human-readable string explaining the situation.
|
292 |
+
|
293 |
+
Note there are two ways to conclude "OK": either 'sys.version' contains
|
294 |
+
the string "GCC" (implying that this Python was built with GCC), or the
|
295 |
+
installed "pyconfig.h" contains the string "__GNUC__".
|
296 |
+
"""
|
297 |
+
|
298 |
+
# XXX since this function also checks sys.version, it's not strictly a
|
299 |
+
# "pyconfig.h" check -- should probably be renamed...
|
300 |
+
|
301 |
+
from distutils import sysconfig
|
302 |
+
|
303 |
+
# if sys.version contains GCC then python was compiled with GCC, and the
|
304 |
+
# pyconfig.h file should be OK
|
305 |
+
if "GCC" in sys.version:
|
306 |
+
return CONFIG_H_OK, "sys.version mentions 'GCC'"
|
307 |
+
|
308 |
+
# Clang would also work
|
309 |
+
if "Clang" in sys.version:
|
310 |
+
return CONFIG_H_OK, "sys.version mentions 'Clang'"
|
311 |
+
|
312 |
+
# let's see if __GNUC__ is mentioned in python.h
|
313 |
+
fn = sysconfig.get_config_h_filename()
|
314 |
+
try:
|
315 |
+
config_h = pathlib.Path(fn).read_text(encoding='utf-8')
|
316 |
+
except OSError as exc:
|
317 |
+
return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")
|
318 |
+
else:
|
319 |
+
substring = '__GNUC__'
|
320 |
+
if substring in config_h:
|
321 |
+
code = CONFIG_H_OK
|
322 |
+
mention_inflected = 'mentions'
|
323 |
+
else:
|
324 |
+
code = CONFIG_H_NOTOK
|
325 |
+
mention_inflected = 'does not mention'
|
326 |
+
return code, f"{fn!r} {mention_inflected} {substring!r}"
|
327 |
+
|
328 |
+
|
329 |
+
def is_cygwincc(cc):
|
330 |
+
"""Try to determine if the compiler that would be used is from cygwin."""
|
331 |
+
out_string = check_output(shlex.split(cc) + ['-dumpmachine'])
|
332 |
+
return out_string.strip().endswith(b'cygwin')
|
333 |
+
|
334 |
+
|
335 |
+
get_versions = None
|
336 |
+
"""
|
337 |
+
A stand-in for the previous get_versions() function to prevent failures
|
338 |
+
when monkeypatched. See pypa/setuptools#2969.
|
339 |
+
"""
|
.venv/Lib/site-packages/setuptools/_distutils/debug.py
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
|
3 |
+
# If DISTUTILS_DEBUG is anything other than the empty string, we run in
|
4 |
+
# debug mode.
|
5 |
+
DEBUG = os.environ.get('DISTUTILS_DEBUG')
|
.venv/Lib/site-packages/setuptools/_distutils/dep_util.py
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import warnings
|
2 |
+
|
3 |
+
from . import _modified
|
4 |
+
|
5 |
+
|
6 |
+
def __getattr__(name):
|
7 |
+
if name not in ['newer', 'newer_group', 'newer_pairwise']:
|
8 |
+
raise AttributeError(name)
|
9 |
+
warnings.warn(
|
10 |
+
"dep_util is Deprecated. Use functions from setuptools instead.",
|
11 |
+
DeprecationWarning,
|
12 |
+
stacklevel=2,
|
13 |
+
)
|
14 |
+
return getattr(_modified, name)
|
.venv/Lib/site-packages/setuptools/_distutils/dir_util.py
ADDED
@@ -0,0 +1,244 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""distutils.dir_util
|
2 |
+
|
3 |
+
Utility functions for manipulating directories and directory trees."""
|
4 |
+
|
5 |
+
import functools
|
6 |
+
import itertools
|
7 |
+
import os
|
8 |
+
import pathlib
|
9 |
+
|
10 |
+
from . import file_util
|
11 |
+
from ._log import log
|
12 |
+
from .errors import DistutilsFileError, DistutilsInternalError
|
13 |
+
|
14 |
+
|
15 |
+
class SkipRepeatAbsolutePaths(set):
|
16 |
+
"""
|
17 |
+
Cache for mkpath.
|
18 |
+
|
19 |
+
In addition to cheapening redundant calls, eliminates redundant
|
20 |
+
"creating /foo/bar/baz" messages in dry-run mode.
|
21 |
+
"""
|
22 |
+
|
23 |
+
def __init__(self):
|
24 |
+
SkipRepeatAbsolutePaths.instance = self
|
25 |
+
|
26 |
+
@classmethod
|
27 |
+
def clear(cls):
|
28 |
+
super(cls, cls.instance).clear()
|
29 |
+
|
30 |
+
def wrap(self, func):
|
31 |
+
@functools.wraps(func)
|
32 |
+
def wrapper(path, *args, **kwargs):
|
33 |
+
if path.absolute() in self:
|
34 |
+
return
|
35 |
+
result = func(path, *args, **kwargs)
|
36 |
+
self.add(path.absolute())
|
37 |
+
return result
|
38 |
+
|
39 |
+
return wrapper
|
40 |
+
|
41 |
+
|
42 |
+
# Python 3.8 compatibility
|
43 |
+
wrapper = SkipRepeatAbsolutePaths().wrap
|
44 |
+
|
45 |
+
|
46 |
+
@functools.singledispatch
|
47 |
+
@wrapper
|
48 |
+
def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False) -> None:
|
49 |
+
"""Create a directory and any missing ancestor directories.
|
50 |
+
|
51 |
+
If the directory already exists (or if 'name' is the empty string, which
|
52 |
+
means the current directory, which of course exists), then do nothing.
|
53 |
+
Raise DistutilsFileError if unable to create some directory along the way
|
54 |
+
(eg. some sub-path exists, but is a file rather than a directory).
|
55 |
+
If 'verbose' is true, log the directory created.
|
56 |
+
"""
|
57 |
+
if verbose and not name.is_dir():
|
58 |
+
log.info("creating %s", name)
|
59 |
+
|
60 |
+
try:
|
61 |
+
dry_run or name.mkdir(mode=mode, parents=True, exist_ok=True)
|
62 |
+
except OSError as exc:
|
63 |
+
raise DistutilsFileError(f"could not create '{name}': {exc.args[-1]}")
|
64 |
+
|
65 |
+
|
66 |
+
@mkpath.register
|
67 |
+
def _(name: str, *args, **kwargs):
|
68 |
+
return mkpath(pathlib.Path(name), *args, **kwargs)
|
69 |
+
|
70 |
+
|
71 |
+
@mkpath.register
|
72 |
+
def _(name: None, *args, **kwargs):
|
73 |
+
"""
|
74 |
+
Detect a common bug -- name is None.
|
75 |
+
"""
|
76 |
+
raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})")
|
77 |
+
|
78 |
+
|
79 |
+
def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
|
80 |
+
"""Create all the empty directories under 'base_dir' needed to put 'files'
|
81 |
+
there.
|
82 |
+
|
83 |
+
'base_dir' is just the name of a directory which doesn't necessarily
|
84 |
+
exist yet; 'files' is a list of filenames to be interpreted relative to
|
85 |
+
'base_dir'. 'base_dir' + the directory portion of every file in 'files'
|
86 |
+
will be created if it doesn't already exist. 'mode', 'verbose' and
|
87 |
+
'dry_run' flags are as for 'mkpath()'.
|
88 |
+
"""
|
89 |
+
# First get the list of directories to create
|
90 |
+
need_dir = set(os.path.join(base_dir, os.path.dirname(file)) for file in files)
|
91 |
+
|
92 |
+
# Now create them
|
93 |
+
for dir in sorted(need_dir):
|
94 |
+
mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
|
95 |
+
|
96 |
+
|
97 |
+
def copy_tree(
|
98 |
+
src,
|
99 |
+
dst,
|
100 |
+
preserve_mode=True,
|
101 |
+
preserve_times=True,
|
102 |
+
preserve_symlinks=False,
|
103 |
+
update=False,
|
104 |
+
verbose=True,
|
105 |
+
dry_run=False,
|
106 |
+
):
|
107 |
+
"""Copy an entire directory tree 'src' to a new location 'dst'.
|
108 |
+
|
109 |
+
Both 'src' and 'dst' must be directory names. If 'src' is not a
|
110 |
+
directory, raise DistutilsFileError. If 'dst' does not exist, it is
|
111 |
+
created with 'mkpath()'. The end result of the copy is that every
|
112 |
+
file in 'src' is copied to 'dst', and directories under 'src' are
|
113 |
+
recursively copied to 'dst'. Return the list of files that were
|
114 |
+
copied or might have been copied, using their output name. The
|
115 |
+
return value is unaffected by 'update' or 'dry_run': it is simply
|
116 |
+
the list of all files under 'src', with the names changed to be
|
117 |
+
under 'dst'.
|
118 |
+
|
119 |
+
'preserve_mode' and 'preserve_times' are the same as for
|
120 |
+
'copy_file'; note that they only apply to regular files, not to
|
121 |
+
directories. If 'preserve_symlinks' is true, symlinks will be
|
122 |
+
copied as symlinks (on platforms that support them!); otherwise
|
123 |
+
(the default), the destination of the symlink will be copied.
|
124 |
+
'update' and 'verbose' are the same as for 'copy_file'.
|
125 |
+
"""
|
126 |
+
if not dry_run and not os.path.isdir(src):
|
127 |
+
raise DistutilsFileError(f"cannot copy tree '{src}': not a directory")
|
128 |
+
try:
|
129 |
+
names = os.listdir(src)
|
130 |
+
except OSError as e:
|
131 |
+
if dry_run:
|
132 |
+
names = []
|
133 |
+
else:
|
134 |
+
raise DistutilsFileError(f"error listing files in '{src}': {e.strerror}")
|
135 |
+
|
136 |
+
if not dry_run:
|
137 |
+
mkpath(dst, verbose=verbose)
|
138 |
+
|
139 |
+
copy_one = functools.partial(
|
140 |
+
_copy_one,
|
141 |
+
src=src,
|
142 |
+
dst=dst,
|
143 |
+
preserve_symlinks=preserve_symlinks,
|
144 |
+
verbose=verbose,
|
145 |
+
dry_run=dry_run,
|
146 |
+
preserve_mode=preserve_mode,
|
147 |
+
preserve_times=preserve_times,
|
148 |
+
update=update,
|
149 |
+
)
|
150 |
+
return list(itertools.chain.from_iterable(map(copy_one, names)))
|
151 |
+
|
152 |
+
|
153 |
+
def _copy_one(
|
154 |
+
name,
|
155 |
+
*,
|
156 |
+
src,
|
157 |
+
dst,
|
158 |
+
preserve_symlinks,
|
159 |
+
verbose,
|
160 |
+
dry_run,
|
161 |
+
preserve_mode,
|
162 |
+
preserve_times,
|
163 |
+
update,
|
164 |
+
):
|
165 |
+
src_name = os.path.join(src, name)
|
166 |
+
dst_name = os.path.join(dst, name)
|
167 |
+
|
168 |
+
if name.startswith('.nfs'):
|
169 |
+
# skip NFS rename files
|
170 |
+
return
|
171 |
+
|
172 |
+
if preserve_symlinks and os.path.islink(src_name):
|
173 |
+
link_dest = os.readlink(src_name)
|
174 |
+
if verbose >= 1:
|
175 |
+
log.info("linking %s -> %s", dst_name, link_dest)
|
176 |
+
if not dry_run:
|
177 |
+
os.symlink(link_dest, dst_name)
|
178 |
+
yield dst_name
|
179 |
+
|
180 |
+
elif os.path.isdir(src_name):
|
181 |
+
yield from copy_tree(
|
182 |
+
src_name,
|
183 |
+
dst_name,
|
184 |
+
preserve_mode,
|
185 |
+
preserve_times,
|
186 |
+
preserve_symlinks,
|
187 |
+
update,
|
188 |
+
verbose=verbose,
|
189 |
+
dry_run=dry_run,
|
190 |
+
)
|
191 |
+
else:
|
192 |
+
file_util.copy_file(
|
193 |
+
src_name,
|
194 |
+
dst_name,
|
195 |
+
preserve_mode,
|
196 |
+
preserve_times,
|
197 |
+
update,
|
198 |
+
verbose=verbose,
|
199 |
+
dry_run=dry_run,
|
200 |
+
)
|
201 |
+
yield dst_name
|
202 |
+
|
203 |
+
|
204 |
+
def _build_cmdtuple(path, cmdtuples):
|
205 |
+
"""Helper for remove_tree()."""
|
206 |
+
for f in os.listdir(path):
|
207 |
+
real_f = os.path.join(path, f)
|
208 |
+
if os.path.isdir(real_f) and not os.path.islink(real_f):
|
209 |
+
_build_cmdtuple(real_f, cmdtuples)
|
210 |
+
else:
|
211 |
+
cmdtuples.append((os.remove, real_f))
|
212 |
+
cmdtuples.append((os.rmdir, path))
|
213 |
+
|
214 |
+
|
215 |
+
def remove_tree(directory, verbose=True, dry_run=False):
|
216 |
+
"""Recursively remove an entire directory tree.
|
217 |
+
|
218 |
+
Any errors are ignored (apart from being reported to stdout if 'verbose'
|
219 |
+
is true).
|
220 |
+
"""
|
221 |
+
if verbose >= 1:
|
222 |
+
log.info("removing '%s' (and everything under it)", directory)
|
223 |
+
if dry_run:
|
224 |
+
return
|
225 |
+
cmdtuples = []
|
226 |
+
_build_cmdtuple(directory, cmdtuples)
|
227 |
+
for cmd in cmdtuples:
|
228 |
+
try:
|
229 |
+
cmd[0](cmd[1])
|
230 |
+
# Clear the cache
|
231 |
+
SkipRepeatAbsolutePaths.clear()
|
232 |
+
except OSError as exc:
|
233 |
+
log.warning("error removing %s: %s", directory, exc)
|
234 |
+
|
235 |
+
|
236 |
+
def ensure_relative(path):
|
237 |
+
"""Take the full path 'path', and make it a relative path.
|
238 |
+
|
239 |
+
This is useful to make 'path' the second argument to os.path.join().
|
240 |
+
"""
|
241 |
+
drive, path = os.path.splitdrive(path)
|
242 |
+
if path[0:1] == os.sep:
|
243 |
+
path = drive + path[1:]
|
244 |
+
return path
|
.venv/Lib/site-packages/setuptools/_distutils/dist.py
ADDED
@@ -0,0 +1,1288 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""distutils.dist
|
2 |
+
|
3 |
+
Provides the Distribution class, which represents the module distribution
|
4 |
+
being built/installed/distributed.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import contextlib
|
8 |
+
import logging
|
9 |
+
import os
|
10 |
+
import pathlib
|
11 |
+
import re
|
12 |
+
import sys
|
13 |
+
import warnings
|
14 |
+
from collections.abc import Iterable
|
15 |
+
from email import message_from_file
|
16 |
+
|
17 |
+
from packaging.utils import canonicalize_name, canonicalize_version
|
18 |
+
|
19 |
+
from ._log import log
|
20 |
+
from .debug import DEBUG
|
21 |
+
from .errors import (
|
22 |
+
DistutilsArgError,
|
23 |
+
DistutilsClassError,
|
24 |
+
DistutilsModuleError,
|
25 |
+
DistutilsOptionError,
|
26 |
+
)
|
27 |
+
from .fancy_getopt import FancyGetopt, translate_longopt
|
28 |
+
from .util import check_environ, rfc822_escape, strtobool
|
29 |
+
|
30 |
+
# Regex to define acceptable Distutils command names. This is not *quite*
|
31 |
+
# the same as a Python NAME -- I don't allow leading underscores. The fact
|
32 |
+
# that they're very similar is no coincidence; the default naming scheme is
|
33 |
+
# to look for a Python module named after the command.
|
34 |
+
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
|
35 |
+
|
36 |
+
|
37 |
+
def _ensure_list(value, fieldname):
|
38 |
+
if isinstance(value, str):
|
39 |
+
# a string containing comma separated values is okay. It will
|
40 |
+
# be converted to a list by Distribution.finalize_options().
|
41 |
+
pass
|
42 |
+
elif not isinstance(value, list):
|
43 |
+
# passing a tuple or an iterator perhaps, warn and convert
|
44 |
+
typename = type(value).__name__
|
45 |
+
msg = "Warning: '{fieldname}' should be a list, got type '{typename}'"
|
46 |
+
msg = msg.format(**locals())
|
47 |
+
log.warning(msg)
|
48 |
+
value = list(value)
|
49 |
+
return value
|
50 |
+
|
51 |
+
|
52 |
+
class Distribution:
|
53 |
+
"""The core of the Distutils. Most of the work hiding behind 'setup'
|
54 |
+
is really done within a Distribution instance, which farms the work out
|
55 |
+
to the Distutils commands specified on the command line.
|
56 |
+
|
57 |
+
Setup scripts will almost never instantiate Distribution directly,
|
58 |
+
unless the 'setup()' function is totally inadequate to their needs.
|
59 |
+
However, it is conceivable that a setup script might wish to subclass
|
60 |
+
Distribution for some specialized purpose, and then pass the subclass
|
61 |
+
to 'setup()' as the 'distclass' keyword argument. If so, it is
|
62 |
+
necessary to respect the expectations that 'setup' has of Distribution.
|
63 |
+
See the code for 'setup()', in core.py, for details.
|
64 |
+
"""
|
65 |
+
|
66 |
+
# 'global_options' describes the command-line options that may be
|
67 |
+
# supplied to the setup script prior to any actual commands.
|
68 |
+
# Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
|
69 |
+
# these global options. This list should be kept to a bare minimum,
|
70 |
+
# since every global option is also valid as a command option -- and we
|
71 |
+
# don't want to pollute the commands with too many options that they
|
72 |
+
# have minimal control over.
|
73 |
+
# The fourth entry for verbose means that it can be repeated.
|
74 |
+
global_options = [
|
75 |
+
('verbose', 'v', "run verbosely (default)", 1),
|
76 |
+
('quiet', 'q', "run quietly (turns verbosity off)"),
|
77 |
+
('dry-run', 'n', "don't actually do anything"),
|
78 |
+
('help', 'h', "show detailed help message"),
|
79 |
+
('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
|
80 |
+
]
|
81 |
+
|
82 |
+
# 'common_usage' is a short (2-3 line) string describing the common
|
83 |
+
# usage of the setup script.
|
84 |
+
common_usage = """\
|
85 |
+
Common commands: (see '--help-commands' for more)
|
86 |
+
|
87 |
+
setup.py build will build the package underneath 'build/'
|
88 |
+
setup.py install will install the package
|
89 |
+
"""
|
90 |
+
|
91 |
+
# options that are not propagated to the commands
|
92 |
+
display_options = [
|
93 |
+
('help-commands', None, "list all available commands"),
|
94 |
+
('name', None, "print package name"),
|
95 |
+
('version', 'V', "print package version"),
|
96 |
+
('fullname', None, "print <package name>-<version>"),
|
97 |
+
('author', None, "print the author's name"),
|
98 |
+
('author-email', None, "print the author's email address"),
|
99 |
+
('maintainer', None, "print the maintainer's name"),
|
100 |
+
('maintainer-email', None, "print the maintainer's email address"),
|
101 |
+
('contact', None, "print the maintainer's name if known, else the author's"),
|
102 |
+
(
|
103 |
+
'contact-email',
|
104 |
+
None,
|
105 |
+
"print the maintainer's email address if known, else the author's",
|
106 |
+
),
|
107 |
+
('url', None, "print the URL for this package"),
|
108 |
+
('license', None, "print the license of the package"),
|
109 |
+
('licence', None, "alias for --license"),
|
110 |
+
('description', None, "print the package description"),
|
111 |
+
('long-description', None, "print the long package description"),
|
112 |
+
('platforms', None, "print the list of platforms"),
|
113 |
+
('classifiers', None, "print the list of classifiers"),
|
114 |
+
('keywords', None, "print the list of keywords"),
|
115 |
+
('provides', None, "print the list of packages/modules provided"),
|
116 |
+
('requires', None, "print the list of packages/modules required"),
|
117 |
+
('obsoletes', None, "print the list of packages/modules made obsolete"),
|
118 |
+
]
|
119 |
+
display_option_names = [translate_longopt(x[0]) for x in display_options]
|
120 |
+
|
121 |
+
# negative options are options that exclude other options
|
122 |
+
negative_opt = {'quiet': 'verbose'}
|
123 |
+
|
124 |
+
# -- Creation/initialization methods -------------------------------
|
125 |
+
|
126 |
+
def __init__(self, attrs=None): # noqa: C901
|
127 |
+
"""Construct a new Distribution instance: initialize all the
|
128 |
+
attributes of a Distribution, and then use 'attrs' (a dictionary
|
129 |
+
mapping attribute names to values) to assign some of those
|
130 |
+
attributes their "real" values. (Any attributes not mentioned in
|
131 |
+
'attrs' will be assigned to some null value: 0, None, an empty list
|
132 |
+
or dictionary, etc.) Most importantly, initialize the
|
133 |
+
'command_obj' attribute to the empty dictionary; this will be
|
134 |
+
filled in with real command objects by 'parse_command_line()'.
|
135 |
+
"""
|
136 |
+
|
137 |
+
# Default values for our command-line options
|
138 |
+
self.verbose = True
|
139 |
+
self.dry_run = False
|
140 |
+
self.help = False
|
141 |
+
for attr in self.display_option_names:
|
142 |
+
setattr(self, attr, 0)
|
143 |
+
|
144 |
+
# Store the distribution meta-data (name, version, author, and so
|
145 |
+
# forth) in a separate object -- we're getting to have enough
|
146 |
+
# information here (and enough command-line options) that it's
|
147 |
+
# worth it. Also delegate 'get_XXX()' methods to the 'metadata'
|
148 |
+
# object in a sneaky and underhanded (but efficient!) way.
|
149 |
+
self.metadata = DistributionMetadata()
|
150 |
+
for basename in self.metadata._METHOD_BASENAMES:
|
151 |
+
method_name = "get_" + basename
|
152 |
+
setattr(self, method_name, getattr(self.metadata, method_name))
|
153 |
+
|
154 |
+
# 'cmdclass' maps command names to class objects, so we
|
155 |
+
# can 1) quickly figure out which class to instantiate when
|
156 |
+
# we need to create a new command object, and 2) have a way
|
157 |
+
# for the setup script to override command classes
|
158 |
+
self.cmdclass = {}
|
159 |
+
|
160 |
+
# 'command_packages' is a list of packages in which commands
|
161 |
+
# are searched for. The factory for command 'foo' is expected
|
162 |
+
# to be named 'foo' in the module 'foo' in one of the packages
|
163 |
+
# named here. This list is searched from the left; an error
|
164 |
+
# is raised if no named package provides the command being
|
165 |
+
# searched for. (Always access using get_command_packages().)
|
166 |
+
self.command_packages = None
|
167 |
+
|
168 |
+
# 'script_name' and 'script_args' are usually set to sys.argv[0]
|
169 |
+
# and sys.argv[1:], but they can be overridden when the caller is
|
170 |
+
# not necessarily a setup script run from the command-line.
|
171 |
+
self.script_name = None
|
172 |
+
self.script_args = None
|
173 |
+
|
174 |
+
# 'command_options' is where we store command options between
|
175 |
+
# parsing them (from config files, the command-line, etc.) and when
|
176 |
+
# they are actually needed -- ie. when the command in question is
|
177 |
+
# instantiated. It is a dictionary of dictionaries of 2-tuples:
|
178 |
+
# command_options = { command_name : { option : (source, value) } }
|
179 |
+
self.command_options = {}
|
180 |
+
|
181 |
+
# 'dist_files' is the list of (command, pyversion, file) that
|
182 |
+
# have been created by any dist commands run so far. This is
|
183 |
+
# filled regardless of whether the run is dry or not. pyversion
|
184 |
+
# gives sysconfig.get_python_version() if the dist file is
|
185 |
+
# specific to a Python version, 'any' if it is good for all
|
186 |
+
# Python versions on the target platform, and '' for a source
|
187 |
+
# file. pyversion should not be used to specify minimum or
|
188 |
+
# maximum required Python versions; use the metainfo for that
|
189 |
+
# instead.
|
190 |
+
self.dist_files = []
|
191 |
+
|
192 |
+
# These options are really the business of various commands, rather
|
193 |
+
# than of the Distribution itself. We provide aliases for them in
|
194 |
+
# Distribution as a convenience to the developer.
|
195 |
+
self.packages = None
|
196 |
+
self.package_data = {}
|
197 |
+
self.package_dir = None
|
198 |
+
self.py_modules = None
|
199 |
+
self.libraries = None
|
200 |
+
self.headers = None
|
201 |
+
self.ext_modules = None
|
202 |
+
self.ext_package = None
|
203 |
+
self.include_dirs = None
|
204 |
+
self.extra_path = None
|
205 |
+
self.scripts = None
|
206 |
+
self.data_files = None
|
207 |
+
self.password = ''
|
208 |
+
|
209 |
+
# And now initialize bookkeeping stuff that can't be supplied by
|
210 |
+
# the caller at all. 'command_obj' maps command names to
|
211 |
+
# Command instances -- that's how we enforce that every command
|
212 |
+
# class is a singleton.
|
213 |
+
self.command_obj = {}
|
214 |
+
|
215 |
+
# 'have_run' maps command names to boolean values; it keeps track
|
216 |
+
# of whether we have actually run a particular command, to make it
|
217 |
+
# cheap to "run" a command whenever we think we might need to -- if
|
218 |
+
# it's already been done, no need for expensive filesystem
|
219 |
+
# operations, we just check the 'have_run' dictionary and carry on.
|
220 |
+
# It's only safe to query 'have_run' for a command class that has
|
221 |
+
# been instantiated -- a false value will be inserted when the
|
222 |
+
# command object is created, and replaced with a true value when
|
223 |
+
# the command is successfully run. Thus it's probably best to use
|
224 |
+
# '.get()' rather than a straight lookup.
|
225 |
+
self.have_run = {}
|
226 |
+
|
227 |
+
# Now we'll use the attrs dictionary (ultimately, keyword args from
|
228 |
+
# the setup script) to possibly override any or all of these
|
229 |
+
# distribution options.
|
230 |
+
|
231 |
+
if attrs:
|
232 |
+
# Pull out the set of command options and work on them
|
233 |
+
# specifically. Note that this order guarantees that aliased
|
234 |
+
# command options will override any supplied redundantly
|
235 |
+
# through the general options dictionary.
|
236 |
+
options = attrs.get('options')
|
237 |
+
if options is not None:
|
238 |
+
del attrs['options']
|
239 |
+
for command, cmd_options in options.items():
|
240 |
+
opt_dict = self.get_option_dict(command)
|
241 |
+
for opt, val in cmd_options.items():
|
242 |
+
opt_dict[opt] = ("setup script", val)
|
243 |
+
|
244 |
+
if 'licence' in attrs:
|
245 |
+
attrs['license'] = attrs['licence']
|
246 |
+
del attrs['licence']
|
247 |
+
msg = "'licence' distribution option is deprecated; use 'license'"
|
248 |
+
warnings.warn(msg)
|
249 |
+
|
250 |
+
# Now work on the rest of the attributes. Any attribute that's
|
251 |
+
# not already defined is invalid!
|
252 |
+
for key, val in attrs.items():
|
253 |
+
if hasattr(self.metadata, "set_" + key):
|
254 |
+
getattr(self.metadata, "set_" + key)(val)
|
255 |
+
elif hasattr(self.metadata, key):
|
256 |
+
setattr(self.metadata, key, val)
|
257 |
+
elif hasattr(self, key):
|
258 |
+
setattr(self, key, val)
|
259 |
+
else:
|
260 |
+
msg = f"Unknown distribution option: {key!r}"
|
261 |
+
warnings.warn(msg)
|
262 |
+
|
263 |
+
# no-user-cfg is handled before other command line args
|
264 |
+
# because other args override the config files, and this
|
265 |
+
# one is needed before we can load the config files.
|
266 |
+
# If attrs['script_args'] wasn't passed, assume false.
|
267 |
+
#
|
268 |
+
# This also make sure we just look at the global options
|
269 |
+
self.want_user_cfg = True
|
270 |
+
|
271 |
+
if self.script_args is not None:
|
272 |
+
for arg in self.script_args:
|
273 |
+
if not arg.startswith('-'):
|
274 |
+
break
|
275 |
+
if arg == '--no-user-cfg':
|
276 |
+
self.want_user_cfg = False
|
277 |
+
break
|
278 |
+
|
279 |
+
self.finalize_options()
|
280 |
+
|
281 |
+
def get_option_dict(self, command):
|
282 |
+
"""Get the option dictionary for a given command. If that
|
283 |
+
command's option dictionary hasn't been created yet, then create it
|
284 |
+
and return the new dictionary; otherwise, return the existing
|
285 |
+
option dictionary.
|
286 |
+
"""
|
287 |
+
dict = self.command_options.get(command)
|
288 |
+
if dict is None:
|
289 |
+
dict = self.command_options[command] = {}
|
290 |
+
return dict
|
291 |
+
|
292 |
+
def dump_option_dicts(self, header=None, commands=None, indent=""):
|
293 |
+
from pprint import pformat
|
294 |
+
|
295 |
+
if commands is None: # dump all command option dicts
|
296 |
+
commands = sorted(self.command_options.keys())
|
297 |
+
|
298 |
+
if header is not None:
|
299 |
+
self.announce(indent + header)
|
300 |
+
indent = indent + " "
|
301 |
+
|
302 |
+
if not commands:
|
303 |
+
self.announce(indent + "no commands known yet")
|
304 |
+
return
|
305 |
+
|
306 |
+
for cmd_name in commands:
|
307 |
+
opt_dict = self.command_options.get(cmd_name)
|
308 |
+
if opt_dict is None:
|
309 |
+
self.announce(indent + f"no option dict for '{cmd_name}' command")
|
310 |
+
else:
|
311 |
+
self.announce(indent + f"option dict for '{cmd_name}' command:")
|
312 |
+
out = pformat(opt_dict)
|
313 |
+
for line in out.split('\n'):
|
314 |
+
self.announce(indent + " " + line)
|
315 |
+
|
316 |
+
# -- Config file finding/parsing methods ---------------------------
|
317 |
+
|
318 |
+
def find_config_files(self):
|
319 |
+
"""Find as many configuration files as should be processed for this
|
320 |
+
platform, and return a list of filenames in the order in which they
|
321 |
+
should be parsed. The filenames returned are guaranteed to exist
|
322 |
+
(modulo nasty race conditions).
|
323 |
+
|
324 |
+
There are multiple possible config files:
|
325 |
+
- distutils.cfg in the Distutils installation directory (i.e.
|
326 |
+
where the top-level Distutils __inst__.py file lives)
|
327 |
+
- a file in the user's home directory named .pydistutils.cfg
|
328 |
+
on Unix and pydistutils.cfg on Windows/Mac; may be disabled
|
329 |
+
with the ``--no-user-cfg`` option
|
330 |
+
- setup.cfg in the current directory
|
331 |
+
- a file named by an environment variable
|
332 |
+
"""
|
333 |
+
check_environ()
|
334 |
+
files = [str(path) for path in self._gen_paths() if os.path.isfile(path)]
|
335 |
+
|
336 |
+
if DEBUG:
|
337 |
+
self.announce("using config files: {}".format(', '.join(files)))
|
338 |
+
|
339 |
+
return files
|
340 |
+
|
341 |
+
def _gen_paths(self):
|
342 |
+
# The system-wide Distutils config file
|
343 |
+
sys_dir = pathlib.Path(sys.modules['distutils'].__file__).parent
|
344 |
+
yield sys_dir / "distutils.cfg"
|
345 |
+
|
346 |
+
# The per-user config file
|
347 |
+
prefix = '.' * (os.name == 'posix')
|
348 |
+
filename = prefix + 'pydistutils.cfg'
|
349 |
+
if self.want_user_cfg:
|
350 |
+
with contextlib.suppress(RuntimeError):
|
351 |
+
yield pathlib.Path('~').expanduser() / filename
|
352 |
+
|
353 |
+
# All platforms support local setup.cfg
|
354 |
+
yield pathlib.Path('setup.cfg')
|
355 |
+
|
356 |
+
# Additional config indicated in the environment
|
357 |
+
with contextlib.suppress(TypeError):
|
358 |
+
yield pathlib.Path(os.getenv("DIST_EXTRA_CONFIG"))
|
359 |
+
|
360 |
+
def parse_config_files(self, filenames=None): # noqa: C901
|
361 |
+
from configparser import ConfigParser
|
362 |
+
|
363 |
+
# Ignore install directory options if we have a venv
|
364 |
+
if sys.prefix != sys.base_prefix:
|
365 |
+
ignore_options = [
|
366 |
+
'install-base',
|
367 |
+
'install-platbase',
|
368 |
+
'install-lib',
|
369 |
+
'install-platlib',
|
370 |
+
'install-purelib',
|
371 |
+
'install-headers',
|
372 |
+
'install-scripts',
|
373 |
+
'install-data',
|
374 |
+
'prefix',
|
375 |
+
'exec-prefix',
|
376 |
+
'home',
|
377 |
+
'user',
|
378 |
+
'root',
|
379 |
+
]
|
380 |
+
else:
|
381 |
+
ignore_options = []
|
382 |
+
|
383 |
+
ignore_options = frozenset(ignore_options)
|
384 |
+
|
385 |
+
if filenames is None:
|
386 |
+
filenames = self.find_config_files()
|
387 |
+
|
388 |
+
if DEBUG:
|
389 |
+
self.announce("Distribution.parse_config_files():")
|
390 |
+
|
391 |
+
parser = ConfigParser()
|
392 |
+
for filename in filenames:
|
393 |
+
if DEBUG:
|
394 |
+
self.announce(f" reading {filename}")
|
395 |
+
parser.read(filename, encoding='utf-8')
|
396 |
+
for section in parser.sections():
|
397 |
+
options = parser.options(section)
|
398 |
+
opt_dict = self.get_option_dict(section)
|
399 |
+
|
400 |
+
for opt in options:
|
401 |
+
if opt != '__name__' and opt not in ignore_options:
|
402 |
+
val = parser.get(section, opt)
|
403 |
+
opt = opt.replace('-', '_')
|
404 |
+
opt_dict[opt] = (filename, val)
|
405 |
+
|
406 |
+
# Make the ConfigParser forget everything (so we retain
|
407 |
+
# the original filenames that options come from)
|
408 |
+
parser.__init__()
|
409 |
+
|
410 |
+
# If there was a "global" section in the config file, use it
|
411 |
+
# to set Distribution options.
|
412 |
+
|
413 |
+
if 'global' in self.command_options:
|
414 |
+
for opt, (_src, val) in self.command_options['global'].items():
|
415 |
+
alias = self.negative_opt.get(opt)
|
416 |
+
try:
|
417 |
+
if alias:
|
418 |
+
setattr(self, alias, not strtobool(val))
|
419 |
+
elif opt in ('verbose', 'dry_run'): # ugh!
|
420 |
+
setattr(self, opt, strtobool(val))
|
421 |
+
else:
|
422 |
+
setattr(self, opt, val)
|
423 |
+
except ValueError as msg:
|
424 |
+
raise DistutilsOptionError(msg)
|
425 |
+
|
426 |
+
# -- Command-line parsing methods ----------------------------------
|
427 |
+
|
428 |
+
def parse_command_line(self):
|
429 |
+
"""Parse the setup script's command line, taken from the
|
430 |
+
'script_args' instance attribute (which defaults to 'sys.argv[1:]'
|
431 |
+
-- see 'setup()' in core.py). This list is first processed for
|
432 |
+
"global options" -- options that set attributes of the Distribution
|
433 |
+
instance. Then, it is alternately scanned for Distutils commands
|
434 |
+
and options for that command. Each new command terminates the
|
435 |
+
options for the previous command. The allowed options for a
|
436 |
+
command are determined by the 'user_options' attribute of the
|
437 |
+
command class -- thus, we have to be able to load command classes
|
438 |
+
in order to parse the command line. Any error in that 'options'
|
439 |
+
attribute raises DistutilsGetoptError; any error on the
|
440 |
+
command-line raises DistutilsArgError. If no Distutils commands
|
441 |
+
were found on the command line, raises DistutilsArgError. Return
|
442 |
+
true if command-line was successfully parsed and we should carry
|
443 |
+
on with executing commands; false if no errors but we shouldn't
|
444 |
+
execute commands (currently, this only happens if user asks for
|
445 |
+
help).
|
446 |
+
"""
|
447 |
+
#
|
448 |
+
# We now have enough information to show the Macintosh dialog
|
449 |
+
# that allows the user to interactively specify the "command line".
|
450 |
+
#
|
451 |
+
toplevel_options = self._get_toplevel_options()
|
452 |
+
|
453 |
+
# We have to parse the command line a bit at a time -- global
|
454 |
+
# options, then the first command, then its options, and so on --
|
455 |
+
# because each command will be handled by a different class, and
|
456 |
+
# the options that are valid for a particular class aren't known
|
457 |
+
# until we have loaded the command class, which doesn't happen
|
458 |
+
# until we know what the command is.
|
459 |
+
|
460 |
+
self.commands = []
|
461 |
+
parser = FancyGetopt(toplevel_options + self.display_options)
|
462 |
+
parser.set_negative_aliases(self.negative_opt)
|
463 |
+
parser.set_aliases({'licence': 'license'})
|
464 |
+
args = parser.getopt(args=self.script_args, object=self)
|
465 |
+
option_order = parser.get_option_order()
|
466 |
+
logging.getLogger().setLevel(logging.WARN - 10 * self.verbose)
|
467 |
+
|
468 |
+
# for display options we return immediately
|
469 |
+
if self.handle_display_options(option_order):
|
470 |
+
return
|
471 |
+
while args:
|
472 |
+
args = self._parse_command_opts(parser, args)
|
473 |
+
if args is None: # user asked for help (and got it)
|
474 |
+
return
|
475 |
+
|
476 |
+
# Handle the cases of --help as a "global" option, ie.
|
477 |
+
# "setup.py --help" and "setup.py --help command ...". For the
|
478 |
+
# former, we show global options (--verbose, --dry-run, etc.)
|
479 |
+
# and display-only options (--name, --version, etc.); for the
|
480 |
+
# latter, we omit the display-only options and show help for
|
481 |
+
# each command listed on the command line.
|
482 |
+
if self.help:
|
483 |
+
self._show_help(
|
484 |
+
parser, display_options=len(self.commands) == 0, commands=self.commands
|
485 |
+
)
|
486 |
+
return
|
487 |
+
|
488 |
+
# Oops, no commands found -- an end-user error
|
489 |
+
if not self.commands:
|
490 |
+
raise DistutilsArgError("no commands supplied")
|
491 |
+
|
492 |
+
# All is well: return true
|
493 |
+
return True
|
494 |
+
|
495 |
+
def _get_toplevel_options(self):
|
496 |
+
"""Return the non-display options recognized at the top level.
|
497 |
+
|
498 |
+
This includes options that are recognized *only* at the top
|
499 |
+
level as well as options recognized for commands.
|
500 |
+
"""
|
501 |
+
return self.global_options + [
|
502 |
+
(
|
503 |
+
"command-packages=",
|
504 |
+
None,
|
505 |
+
"list of packages that provide distutils commands",
|
506 |
+
),
|
507 |
+
]
|
508 |
+
|
509 |
+
def _parse_command_opts(self, parser, args): # noqa: C901
|
510 |
+
"""Parse the command-line options for a single command.
|
511 |
+
'parser' must be a FancyGetopt instance; 'args' must be the list
|
512 |
+
of arguments, starting with the current command (whose options
|
513 |
+
we are about to parse). Returns a new version of 'args' with
|
514 |
+
the next command at the front of the list; will be the empty
|
515 |
+
list if there are no more commands on the command line. Returns
|
516 |
+
None if the user asked for help on this command.
|
517 |
+
"""
|
518 |
+
# late import because of mutual dependence between these modules
|
519 |
+
from distutils.cmd import Command
|
520 |
+
|
521 |
+
# Pull the current command from the head of the command line
|
522 |
+
command = args[0]
|
523 |
+
if not command_re.match(command):
|
524 |
+
raise SystemExit(f"invalid command name '{command}'")
|
525 |
+
self.commands.append(command)
|
526 |
+
|
527 |
+
# Dig up the command class that implements this command, so we
|
528 |
+
# 1) know that it's a valid command, and 2) know which options
|
529 |
+
# it takes.
|
530 |
+
try:
|
531 |
+
cmd_class = self.get_command_class(command)
|
532 |
+
except DistutilsModuleError as msg:
|
533 |
+
raise DistutilsArgError(msg)
|
534 |
+
|
535 |
+
# Require that the command class be derived from Command -- want
|
536 |
+
# to be sure that the basic "command" interface is implemented.
|
537 |
+
if not issubclass(cmd_class, Command):
|
538 |
+
raise DistutilsClassError(
|
539 |
+
f"command class {cmd_class} must subclass Command"
|
540 |
+
)
|
541 |
+
|
542 |
+
# Also make sure that the command object provides a list of its
|
543 |
+
# known options.
|
544 |
+
if not (
|
545 |
+
hasattr(cmd_class, 'user_options')
|
546 |
+
and isinstance(cmd_class.user_options, list)
|
547 |
+
):
|
548 |
+
msg = (
|
549 |
+
"command class %s must provide "
|
550 |
+
"'user_options' attribute (a list of tuples)"
|
551 |
+
)
|
552 |
+
raise DistutilsClassError(msg % cmd_class)
|
553 |
+
|
554 |
+
# If the command class has a list of negative alias options,
|
555 |
+
# merge it in with the global negative aliases.
|
556 |
+
negative_opt = self.negative_opt
|
557 |
+
if hasattr(cmd_class, 'negative_opt'):
|
558 |
+
negative_opt = negative_opt.copy()
|
559 |
+
negative_opt.update(cmd_class.negative_opt)
|
560 |
+
|
561 |
+
# Check for help_options in command class. They have a different
|
562 |
+
# format (tuple of four) so we need to preprocess them here.
|
563 |
+
if hasattr(cmd_class, 'help_options') and isinstance(
|
564 |
+
cmd_class.help_options, list
|
565 |
+
):
|
566 |
+
help_options = fix_help_options(cmd_class.help_options)
|
567 |
+
else:
|
568 |
+
help_options = []
|
569 |
+
|
570 |
+
# All commands support the global options too, just by adding
|
571 |
+
# in 'global_options'.
|
572 |
+
parser.set_option_table(
|
573 |
+
self.global_options + cmd_class.user_options + help_options
|
574 |
+
)
|
575 |
+
parser.set_negative_aliases(negative_opt)
|
576 |
+
(args, opts) = parser.getopt(args[1:])
|
577 |
+
if hasattr(opts, 'help') and opts.help:
|
578 |
+
self._show_help(parser, display_options=False, commands=[cmd_class])
|
579 |
+
return
|
580 |
+
|
581 |
+
if hasattr(cmd_class, 'help_options') and isinstance(
|
582 |
+
cmd_class.help_options, list
|
583 |
+
):
|
584 |
+
help_option_found = 0
|
585 |
+
for help_option, _short, _desc, func in cmd_class.help_options:
|
586 |
+
if hasattr(opts, parser.get_attr_name(help_option)):
|
587 |
+
help_option_found = 1
|
588 |
+
if callable(func):
|
589 |
+
func()
|
590 |
+
else:
|
591 |
+
raise DistutilsClassError(
|
592 |
+
f"invalid help function {func!r} for help option '{help_option}': "
|
593 |
+
"must be a callable object (function, etc.)"
|
594 |
+
)
|
595 |
+
|
596 |
+
if help_option_found:
|
597 |
+
return
|
598 |
+
|
599 |
+
# Put the options from the command-line into their official
|
600 |
+
# holding pen, the 'command_options' dictionary.
|
601 |
+
opt_dict = self.get_option_dict(command)
|
602 |
+
for name, value in vars(opts).items():
|
603 |
+
opt_dict[name] = ("command line", value)
|
604 |
+
|
605 |
+
return args
|
606 |
+
|
607 |
+
def finalize_options(self):
|
608 |
+
"""Set final values for all the options on the Distribution
|
609 |
+
        instance, analogous to the .finalize_options() method of Command
        objects.
        """
        for attr in ('keywords', 'platforms'):
            value = getattr(self.metadata, attr)
            if value is None:
                continue
            if isinstance(value, str):
                value = [elm.strip() for elm in value.split(',')]
                setattr(self.metadata, attr, value)

    def _show_help(
        self, parser, global_options=True, display_options=True, commands: Iterable = ()
    ):
        """Show help for the setup script command-line in the form of
        several lists of command-line options.  'parser' should be a
        FancyGetopt instance; do not expect it to be returned in the
        same state, as its option table will be reset to make it
        generate the correct help text.

        If 'global_options' is true, lists the global options:
        --verbose, --dry-run, etc.  If 'display_options' is true, lists
        the "display-only" options: --name, --version, etc.  Finally,
        lists per-command help for every command name or command class
        in 'commands'.
        """
        # late import because of mutual dependence between these modules
        from distutils.cmd import Command
        from distutils.core import gen_usage

        if global_options:
            if display_options:
                options = self._get_toplevel_options()
            else:
                options = self.global_options
            parser.set_option_table(options)
            parser.print_help(self.common_usage + "\nGlobal options:")
            print()

        if display_options:
            parser.set_option_table(self.display_options)
            parser.print_help(
                "Information display options (just display information, ignore any commands)"
            )
            print()

        for command in commands:
            if isinstance(command, type) and issubclass(command, Command):
                klass = command
            else:
                klass = self.get_command_class(command)
            if hasattr(klass, 'help_options') and isinstance(klass.help_options, list):
                parser.set_option_table(
                    klass.user_options + fix_help_options(klass.help_options)
                )
            else:
                parser.set_option_table(klass.user_options)
            parser.print_help(f"Options for '{klass.__name__}' command:")
            print()

        print(gen_usage(self.script_name))

    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        from distutils.core import gen_usage

        # User just wants a list of commands -- we'll print it out and stop
        # processing now (ie. if they ran "setup --help-commands foo bar",
        # we ignore "foo bar").
        if self.help_commands:
            self.print_commands()
            print()
            print(gen_usage(self.script_name))
            return 1

        # If user supplied any of the "display metadata" options, then
        # display that metadata in the order in which the user supplied the
        # metadata options.
        any_display_options = 0
        is_display_option = set()
        for option in self.display_options:
            is_display_option.add(option[0])

        for opt, val in option_order:
            if val and opt in is_display_option:
                opt = translate_longopt(opt)
                value = getattr(self.metadata, "get_" + opt)()
                if opt in ('keywords', 'platforms'):
                    print(','.join(value))
                elif opt in ('classifiers', 'provides', 'requires', 'obsoletes'):
                    print('\n'.join(value))
                else:
                    print(value)
                any_display_options = 1

        return any_display_options

    def print_command_list(self, commands, header, max_length):
        """Print a subset of the list of all commands -- used by
        'print_commands()'.
        """
        print(header + ":")

        for cmd in commands:
            klass = self.cmdclass.get(cmd)
            if not klass:
                klass = self.get_command_class(cmd)
            try:
                description = klass.description
            except AttributeError:
                description = "(no description available)"

            print("  %-*s  %s" % (max_length, cmd, description))

    def print_commands(self):
        """Print out a help message listing all available commands with a
        description of each.  The list is divided into "standard commands"
        (listed in distutils.command.__all__) and "extra commands"
        (mentioned in self.cmdclass, but not a standard command).  The
        descriptions come from the command class attribute
        'description'.
        """
        import distutils.command

        std_commands = distutils.command.__all__
        is_std = set(std_commands)

        extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]

        max_length = 0
        for cmd in std_commands + extra_commands:
            if len(cmd) > max_length:
                max_length = len(cmd)

        self.print_command_list(std_commands, "Standard commands", max_length)
        if extra_commands:
            print()
            self.print_command_list(extra_commands, "Extra commands", max_length)

    def get_command_list(self):
        """Get a list of (command, description) tuples.
        The list is divided into "standard commands" (listed in
        distutils.command.__all__) and "extra commands" (mentioned in
        self.cmdclass, but not a standard command).  The descriptions come
        from the command class attribute 'description'.
        """
        # Currently this is only used on Mac OS, for the Mac-only GUI
        # Distutils interface (by Jack Jansen)
        import distutils.command

        std_commands = distutils.command.__all__
        is_std = set(std_commands)

        extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]

        rv = []
        for cmd in std_commands + extra_commands:
            klass = self.cmdclass.get(cmd)
            if not klass:
                klass = self.get_command_class(cmd)
            try:
                description = klass.description
            except AttributeError:
                description = "(no description available)"
            rv.append((cmd, description))
        return rv

    # -- Command class/object methods ----------------------------------

    def get_command_packages(self):
        """Return a list of packages from which commands are loaded."""
        pkgs = self.command_packages
        if not isinstance(pkgs, list):
            if pkgs is None:
                pkgs = ''
            pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
            if "distutils.command" not in pkgs:
                pkgs.insert(0, "distutils.command")
            self.command_packages = pkgs
        return pkgs

    def get_command_class(self, command):
        """Return the class that implements the Distutils command named by
        'command'.  First we check the 'cmdclass' dictionary; if the
        command is mentioned there, we fetch the class object from the
        dictionary and return it.  Otherwise we load the command module
        ("distutils.command." + command) and fetch the command class from
        the module.  The loaded class is also stored in 'cmdclass'
        to speed future calls to 'get_command_class()'.

        Raises DistutilsModuleError if the expected module could not be
        found, or if that module does not define the expected class.
        """
        klass = self.cmdclass.get(command)
        if klass:
            return klass

        for pkgname in self.get_command_packages():
            module_name = f"{pkgname}.{command}"
            klass_name = command

            try:
                __import__(module_name)
                module = sys.modules[module_name]
            except ImportError:
                continue

            try:
                klass = getattr(module, klass_name)
            except AttributeError:
                raise DistutilsModuleError(
                    f"invalid command '{command}' (no class '{klass_name}' in module '{module_name}')"
                )

            self.cmdclass[command] = klass
            return klass

        raise DistutilsModuleError(f"invalid command '{command}'")

    def get_command_obj(self, command, create=True):
        """Return the command object for 'command'.  Normally this object
        is cached on a previous call to 'get_command_obj()'; if no command
        object for 'command' is in the cache, then we either create and
        return it (if 'create' is true) or return None.
        """
        cmd_obj = self.command_obj.get(command)
        if not cmd_obj and create:
            if DEBUG:
                self.announce(
                    "Distribution.get_command_obj(): "
                    f"creating '{command}' command object"
                )

            klass = self.get_command_class(command)
            cmd_obj = self.command_obj[command] = klass(self)
            self.have_run[command] = False

            # Set any options that were supplied in config files
            # or on the command line.  (NB. support for error
            # reporting is lame here: any errors aren't reported
            # until 'finalize_options()' is called, which means
            # we won't report the source of the error.)
            options = self.command_options.get(command)
            if options:
                self._set_command_options(cmd_obj, options)

        return cmd_obj

    def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
        """Set the options for 'command_obj' from 'option_dict'.  Basically
        this means copying elements of a dictionary ('option_dict') to
        attributes of an instance ('command').

        'command_obj' must be a Command instance.  If 'option_dict' is not
        supplied, uses the standard option dictionary for this command
        (from 'self.command_options').
        """
        command_name = command_obj.get_command_name()
        if option_dict is None:
            option_dict = self.get_option_dict(command_name)

        if DEBUG:
            self.announce(f"  setting options for '{command_name}' command:")
        for option, (source, value) in option_dict.items():
            if DEBUG:
                self.announce(f"    {option} = {value} (from {source})")
            try:
                bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
            except AttributeError:
                bool_opts = []
            try:
                neg_opt = command_obj.negative_opt
            except AttributeError:
                neg_opt = {}

            try:
                is_string = isinstance(value, str)
                if option in neg_opt and is_string:
                    setattr(command_obj, neg_opt[option], not strtobool(value))
                elif option in bool_opts and is_string:
                    setattr(command_obj, option, strtobool(value))
                elif hasattr(command_obj, option):
                    setattr(command_obj, option, value)
                else:
                    raise DistutilsOptionError(
                        f"error in {source}: command '{command_name}' has no such option '{option}'"
                    )
            except ValueError as msg:
                raise DistutilsOptionError(msg)

    def reinitialize_command(self, command, reinit_subcommands=False):
        """Reinitializes a command to the state it was in when first
        returned by 'get_command_obj()': ie., initialized but not yet
        finalized.  This provides the opportunity to sneak option
        values in programmatically, overriding or supplementing
        user-supplied values from the config files and command line.
        You'll have to re-finalize the command object (by calling
        'finalize_options()' or 'ensure_finalized()') before using it for
        real.

        'command' should be a command name (string) or command object.  If
        'reinit_subcommands' is true, also reinitializes the command's
        sub-commands, as declared by the 'sub_commands' class attribute (if
        it has one).  See the "install" command for an example.  Only
        reinitializes the sub-commands that actually matter, ie. those
        whose test predicates return true.

        Returns the reinitialized command object.
        """
        from distutils.cmd import Command

        if not isinstance(command, Command):
            command_name = command
            command = self.get_command_obj(command_name)
        else:
            command_name = command.get_command_name()

        if not command.finalized:
            return command
        command.initialize_options()
        command.finalized = False
        self.have_run[command_name] = False
        self._set_command_options(command)

        if reinit_subcommands:
            for sub in command.get_sub_commands():
                self.reinitialize_command(sub, reinit_subcommands)

        return command

    # -- Methods that operate on the Distribution ----------------------

    def announce(self, msg, level=logging.INFO):
        log.log(level, msg)

    def run_commands(self):
        """Run each command that was seen on the setup script command line.
        Uses the list of commands found and cache of command objects
        created by 'get_command_obj()'.
        """
        for cmd in self.commands:
            self.run_command(cmd)

    # -- Methods that operate on its Commands --------------------------

    def run_command(self, command):
        """Do whatever it takes to run a command (including nothing at all,
        if the command has already been run).  Specifically: if we have
        already created and run the command named by 'command', return
        silently without doing anything.  If the command named by 'command'
        doesn't even have a command object yet, create one.  Then invoke
        'run()' on that command object (or an existing one).
        """
        # Already been here, done that? then return silently.
        if self.have_run.get(command):
            return

        log.info("running %s", command)
        cmd_obj = self.get_command_obj(command)
        cmd_obj.ensure_finalized()
        cmd_obj.run()
        self.have_run[command] = True

    # -- Distribution query methods ------------------------------------

    def has_pure_modules(self):
        return len(self.packages or self.py_modules or []) > 0

    def has_ext_modules(self):
        return self.ext_modules and len(self.ext_modules) > 0

    def has_c_libraries(self):
        return self.libraries and len(self.libraries) > 0

    def has_modules(self):
        return self.has_pure_modules() or self.has_ext_modules()

    def has_headers(self):
        return self.headers and len(self.headers) > 0

    def has_scripts(self):
        return self.scripts and len(self.scripts) > 0

    def has_data_files(self):
        return self.data_files and len(self.data_files) > 0

    def is_pure(self):
        return (
            self.has_pure_modules()
            and not self.has_ext_modules()
            and not self.has_c_libraries()
        )

    # -- Metadata query methods ----------------------------------------

    # If you're looking for 'get_name()', 'get_version()', and so forth,
    # they are defined in a sneaky way: the constructor binds self.get_XXX
    # to self.metadata.get_XXX.  The actual code is in the
    # DistributionMetadata class, below.


class DistributionMetadata:
    """Dummy class to hold the distribution meta-data: name, version,
    author, and so forth.
    """

    _METHOD_BASENAMES = (
        "name",
        "version",
        "author",
        "author_email",
        "maintainer",
        "maintainer_email",
        "url",
        "license",
        "description",
        "long_description",
        "keywords",
        "platforms",
        "fullname",
        "contact",
        "contact_email",
        "classifiers",
        "download_url",
        # PEP 314
        "provides",
        "requires",
        "obsoletes",
    )

    def __init__(self, path=None):
        if path is not None:
            self.read_pkg_file(open(path))
        else:
            self.name = None
            self.version = None
            self.author = None
            self.author_email = None
            self.maintainer = None
            self.maintainer_email = None
            self.url = None
            self.license = None
            self.description = None
            self.long_description = None
            self.keywords = None
            self.platforms = None
            self.classifiers = None
            self.download_url = None
            # PEP 314
            self.provides = None
            self.requires = None
            self.obsoletes = None

    def read_pkg_file(self, file):
        """Reads the metadata values from a file object."""
        msg = message_from_file(file)

        def _read_field(name):
            value = msg[name]
            if value and value != "UNKNOWN":
                return value

        def _read_list(name):
            values = msg.get_all(name, None)
            if values == []:
                return None
            return values

        metadata_version = msg['metadata-version']
        self.name = _read_field('name')
        self.version = _read_field('version')
        self.description = _read_field('summary')
        # we are filling author only.
        self.author = _read_field('author')
        self.maintainer = None
        self.author_email = _read_field('author-email')
        self.maintainer_email = None
        self.url = _read_field('home-page')
        self.license = _read_field('license')

        if 'download-url' in msg:
            self.download_url = _read_field('download-url')
        else:
            self.download_url = None

        self.long_description = _read_field('description')
        self.description = _read_field('summary')

        if 'keywords' in msg:
            self.keywords = _read_field('keywords').split(',')

        self.platforms = _read_list('platform')
        self.classifiers = _read_list('classifier')

        # PEP 314 - these fields only exist in 1.1
        if metadata_version == '1.1':
            self.requires = _read_list('requires')
            self.provides = _read_list('provides')
            self.obsoletes = _read_list('obsoletes')
        else:
            self.requires = None
            self.provides = None
            self.obsoletes = None

    def write_pkg_info(self, base_dir):
        """Write the PKG-INFO file into the release tree."""
        with open(
            os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8'
        ) as pkg_info:
            self.write_pkg_file(pkg_info)

    def write_pkg_file(self, file):
        """Write the PKG-INFO format data to a file object."""
        version = '1.0'
        if (
            self.provides
            or self.requires
            or self.obsoletes
            or self.classifiers
            or self.download_url
        ):
            version = '1.1'

        # required fields
        file.write(f'Metadata-Version: {version}\n')
        file.write(f'Name: {self.get_name()}\n')
        file.write(f'Version: {self.get_version()}\n')

        def maybe_write(header, val):
            if val:
                file.write(f"{header}: {val}\n")

        # optional fields
        maybe_write("Summary", self.get_description())
        maybe_write("Home-page", self.get_url())
        maybe_write("Author", self.get_contact())
        maybe_write("Author-email", self.get_contact_email())
        maybe_write("License", self.get_license())
        maybe_write("Download-URL", self.download_url)
        maybe_write("Description", rfc822_escape(self.get_long_description() or ""))
        maybe_write("Keywords", ",".join(self.get_keywords()))

        self._write_list(file, 'Platform', self.get_platforms())
        self._write_list(file, 'Classifier', self.get_classifiers())

        # PEP 314
        self._write_list(file, 'Requires', self.get_requires())
        self._write_list(file, 'Provides', self.get_provides())
        self._write_list(file, 'Obsoletes', self.get_obsoletes())

    def _write_list(self, file, name, values):
        values = values or []
        for value in values:
            file.write(f'{name}: {value}\n')

    # -- Metadata query methods ----------------------------------------

    def get_name(self):
        return self.name or "UNKNOWN"

    def get_version(self):
        return self.version or "0.0.0"

    def get_fullname(self):
        return self._fullname(self.get_name(), self.get_version())

    @staticmethod
    def _fullname(name: str, version: str) -> str:
        """
        >>> DistributionMetadata._fullname('setup.tools', '1.0-2')
        'setup_tools-1.0.post2'
        >>> DistributionMetadata._fullname('setup-tools', '1.2post2')
        'setup_tools-1.2.post2'
        >>> DistributionMetadata._fullname('setup-tools', '1.0-r2')
        'setup_tools-1.0.post2'
        >>> DistributionMetadata._fullname('setup.tools', '1.0.post')
        'setup_tools-1.0.post0'
        >>> DistributionMetadata._fullname('setup.tools', '1.0+ubuntu-1')
        'setup_tools-1.0+ubuntu.1'
        """
        return "{}-{}".format(
            canonicalize_name(name).replace('-', '_'),
            canonicalize_version(version, strip_trailing_zero=False),
        )

    def get_author(self):
        return self.author

    def get_author_email(self):
        return self.author_email

    def get_maintainer(self):
        return self.maintainer

    def get_maintainer_email(self):
        return self.maintainer_email

    def get_contact(self):
        return self.maintainer or self.author

    def get_contact_email(self):
        return self.maintainer_email or self.author_email

    def get_url(self):
        return self.url

    def get_license(self):
        return self.license

    get_licence = get_license

    def get_description(self):
        return self.description

    def get_long_description(self):
        return self.long_description

    def get_keywords(self):
        return self.keywords or []

    def set_keywords(self, value):
        self.keywords = _ensure_list(value, 'keywords')

    def get_platforms(self):
        return self.platforms

    def set_platforms(self, value):
        self.platforms = _ensure_list(value, 'platforms')

    def get_classifiers(self):
        return self.classifiers or []

    def set_classifiers(self, value):
        self.classifiers = _ensure_list(value, 'classifiers')

    def get_download_url(self):
        return self.download_url

    # PEP 314
    def get_requires(self):
        return self.requires or []

    def set_requires(self, value):
        import distutils.versionpredicate

        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.requires = list(value)

    def get_provides(self):
        return self.provides or []

    def set_provides(self, value):
        value = [v.strip() for v in value]
        for v in value:
            import distutils.versionpredicate

            distutils.versionpredicate.split_provision(v)
        self.provides = value

    def get_obsoletes(self):
        return self.obsoletes or []

    def set_obsoletes(self, value):
        import distutils.versionpredicate

        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.obsoletes = list(value)


def fix_help_options(options):
    """Convert a 4-tuple 'help_options' list as found in various command
    classes to the 3-tuple form required by FancyGetopt.
    """
    return [opt[0:3] for opt in options]
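The methods above are normally driven by setup() rather than called directly. A minimal sketch (not part of the diff; the project name, version, author, and keyword string are invented) of exercising the Distribution / DistributionMetadata pair from the listing:

# Sketch only: shows finalize_options() splitting the comma-separated
# 'keywords' string and write_pkg_file() emitting PKG-INFO style metadata.
from distutils.dist import Distribution

dist = Distribution({
    "name": "example-pkg",          # hypothetical project name
    "version": "1.0",
    "author": "Jane Doe",
    "keywords": "packaging, demo",  # finalize_options() turns this into a list
})
dist.finalize_options()
print(dist.metadata.get_keywords())   # ['packaging', 'demo']
print(dist.metadata.get_fullname())   # e.g. 'example_pkg-1.0'
with open("PKG-INFO", "w", encoding="utf-8") as fh:
    dist.metadata.write_pkg_file(fh)  # Metadata-Version, Name, Version, ...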
.venv/Lib/site-packages/setuptools/_distutils/errors.py
ADDED
@@ -0,0 +1,124 @@
"""
Exceptions used by the Distutils modules.

Distutils modules may raise these or standard exceptions,
including :exc:`SystemExit`.
"""


class DistutilsError(Exception):
    """The root of all Distutils evil."""

    pass


class DistutilsModuleError(DistutilsError):
    """Unable to load an expected module, or to find an expected class
    within some module (in particular, command modules and classes)."""

    pass


class DistutilsClassError(DistutilsError):
    """Some command class (or possibly distribution class, if anyone
    feels a need to subclass Distribution) is found not to be holding
    up its end of the bargain, ie. implementing some part of the
    "command "interface."""

    pass


class DistutilsGetoptError(DistutilsError):
    """The option table provided to 'fancy_getopt()' is bogus."""

    pass


class DistutilsArgError(DistutilsError):
    """Raised by fancy_getopt in response to getopt.error -- ie. an
    error in the command line usage."""

    pass


class DistutilsFileError(DistutilsError):
    """Any problems in the filesystem: expected file not found, etc.
    Typically this is for problems that we detect before OSError
    could be raised."""

    pass


class DistutilsOptionError(DistutilsError):
    """Syntactic/semantic errors in command options, such as use of
    mutually conflicting options, or inconsistent options,
    badly-spelled values, etc.  No distinction is made between option
    values originating in the setup script, the command line, config
    files, or what-have-you -- but if we *know* something originated in
    the setup script, we'll raise DistutilsSetupError instead."""

    pass


class DistutilsSetupError(DistutilsError):
    """For errors that can be definitely blamed on the setup script,
    such as invalid keyword arguments to 'setup()'."""

    pass


class DistutilsPlatformError(DistutilsError):
    """We don't know how to do something on the current platform (but
    we do know how to do it on some platform) -- eg. trying to compile
    C files on a platform not supported by a CCompiler subclass."""

    pass


class DistutilsExecError(DistutilsError):
    """Any problems executing an external program (such as the C
    compiler, when compiling C files)."""

    pass


class DistutilsInternalError(DistutilsError):
    """Internal inconsistencies or impossibilities (obviously, this
    should never be seen if the code is working!)."""

    pass


class DistutilsTemplateError(DistutilsError):
    """Syntax error in a file list template."""


class DistutilsByteCompileError(DistutilsError):
    """Byte compile error."""


# Exception classes used by the CCompiler implementation classes
class CCompilerError(Exception):
    """Some compile/link operation failed."""


class PreprocessError(CCompilerError):
    """Failure to preprocess one or more C/C++ files."""


class CompileError(CCompilerError):
    """Failure to compile one or more C/C++ source files."""


class LibError(CCompilerError):
    """Failure to create a static library from one or more C/C++ object
    files."""


class LinkError(CCompilerError):
    """Failure to link one or more C/C++ object files into an executable
    or shared library file."""


class UnknownFileError(CCompilerError):
    """Attempt to process an unknown file type."""
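Because every Distutils-specific failure above derives from DistutilsError, a setup driver can treat them uniformly. A small illustrative sketch (the helper below is hypothetical, not part of the module):

from distutils.errors import DistutilsError, DistutilsOptionError

def set_option(command_obj, name, value):
    # Hypothetical helper: mirrors the "no such option" check made by
    # Distribution._set_command_options() in dist.py above.
    if not hasattr(command_obj, name):
        raise DistutilsOptionError(f"command has no such option '{name}'")
    setattr(command_obj, name, value)

try:
    set_option(object(), "verbose", 1)
except DistutilsError as exc:  # catches DistutilsOptionError and all siblings
    print("setup failed:", exc)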
.venv/Lib/site-packages/setuptools/_distutils/extension.py
ADDED
@@ -0,0 +1,247 @@
"""distutils.extension

Provides the Extension class, used to describe C/C++ extension
modules in setup scripts."""

import os
import warnings

# This class is really only used by the "build_ext" command, so it might
# make sense to put it in distutils.command.build_ext.  However, that
# module is already big enough, and I want to make this class a bit more
# complex to simplify some common cases ("foo" module in "foo.c") and do
# better error-checking ("foo.c" actually exists).
#
# Also, putting this in build_ext.py means every setup script would have to
# import that large-ish module (indirectly, through distutils.core) in
# order to do anything.


class Extension:
    """Just a collection of attributes that describes an extension
    module and everything needed to build it (hopefully in a portable
    way, but there are hooks that let you be as unportable as you need).

    Instance attributes:
      name : string
        the full name of the extension, including any packages -- ie.
        *not* a filename or pathname, but Python dotted name
      sources : [string | os.PathLike]
        list of source filenames, relative to the distribution root
        (where the setup script lives), in Unix form (slash-separated)
        for portability.  Source files may be C, C++, SWIG (.i),
        platform-specific resource files, or whatever else is recognized
        by the "build_ext" command as source for a Python extension.
      include_dirs : [string]
        list of directories to search for C/C++ header files (in Unix
        form for portability)
      define_macros : [(name : string, value : string|None)]
        list of macros to define; each macro is defined using a 2-tuple,
        where 'value' is either the string to define it to or None to
        define it without a particular value (equivalent of "#define
        FOO" in source or -DFOO on Unix C compiler command line)
      undef_macros : [string]
        list of macros to undefine explicitly
      library_dirs : [string]
        list of directories to search for C/C++ libraries at link time
      libraries : [string]
        list of library names (not filenames or paths) to link against
      runtime_library_dirs : [string]
        list of directories to search for C/C++ libraries at run time
        (for shared extensions, this is when the extension is loaded)
      extra_objects : [string]
        list of extra files to link with (eg. object files not implied
        by 'sources', static library that must be explicitly specified,
        binary resource files, etc.)
      extra_compile_args : [string]
        any extra platform- and compiler-specific information to use
        when compiling the source files in 'sources'.  For platforms and
        compilers where "command line" makes sense, this is typically a
        list of command-line arguments, but for other platforms it could
        be anything.
      extra_link_args : [string]
        any extra platform- and compiler-specific information to use
        when linking object files together to create the extension (or
        to create a new static Python interpreter).  Similar
        interpretation as for 'extra_compile_args'.
      export_symbols : [string]
        list of symbols to be exported from a shared extension.  Not
        used on all platforms, and not generally necessary for Python
        extensions, which typically export exactly one symbol: "init" +
        extension_name.
      swig_opts : [string]
        any extra options to pass to SWIG if a source file has the .i
        extension.
      depends : [string]
        list of files that the extension depends on
      language : string
        extension language (i.e. "c", "c++", "objc"). Will be detected
        from the source extensions if not provided.
      optional : boolean
        specifies that a build failure in the extension should not abort the
        build process, but simply not install the failing extension.
    """

    # When adding arguments to this constructor, be sure to update
    # setup_keywords in core.py.
    def __init__(
        self,
        name,
        sources,
        include_dirs=None,
        define_macros=None,
        undef_macros=None,
        library_dirs=None,
        libraries=None,
        runtime_library_dirs=None,
        extra_objects=None,
        extra_compile_args=None,
        extra_link_args=None,
        export_symbols=None,
        swig_opts=None,
        depends=None,
        language=None,
        optional=None,
        **kw,  # To catch unknown keywords
    ):
        if not isinstance(name, str):
            raise AssertionError("'name' must be a string")  # noqa: TRY004
        if not (
            isinstance(sources, list)
            and all(isinstance(v, (str, os.PathLike)) for v in sources)
        ):
            raise AssertionError(
                "'sources' must be a list of strings or PathLike objects."
            )

        self.name = name
        self.sources = list(map(os.fspath, sources))
        self.include_dirs = include_dirs or []
        self.define_macros = define_macros or []
        self.undef_macros = undef_macros or []
        self.library_dirs = library_dirs or []
        self.libraries = libraries or []
        self.runtime_library_dirs = runtime_library_dirs or []
        self.extra_objects = extra_objects or []
        self.extra_compile_args = extra_compile_args or []
        self.extra_link_args = extra_link_args or []
        self.export_symbols = export_symbols or []
        self.swig_opts = swig_opts or []
        self.depends = depends or []
        self.language = language
        self.optional = optional

        # If there are unknown keyword options, warn about them
        if len(kw) > 0:
            options = [repr(option) for option in kw]
            options = ', '.join(sorted(options))
            msg = f"Unknown Extension options: {options}"
            warnings.warn(msg)

    def __repr__(self):
        return f'<{self.__class__.__module__}.{self.__class__.__qualname__}({self.name!r}) at {id(self):#x}>'


def read_setup_file(filename):  # noqa: C901
    """Reads a Setup file and returns Extension instances."""
    from distutils.sysconfig import _variable_rx, expand_makefile_vars, parse_makefile
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(
        filename,
        strip_comments=True,
        skip_blanks=True,
        join_lines=True,
        lstrip_ws=True,
        rstrip_ws=True,
    )
    try:
        extensions = []

        while True:
            line = file.readline()
            if line is None:  # eof
                break
            if _variable_rx.match(line):  # VAR=VALUE, handled in first pass
                continue

            if line[0] == line[-1] == "*":
                file.warn(f"'{line}' lines not handled yet")
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension per
            # line, and it must be the first word of the line.  I have no idea
            # why the old syntax supported multiple extensions per line, as
            # they all wind up being the same.

            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:  # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:  # "-DFOO=blah"
                        ext.define_macros.append((value[0:equals], value[equals + 2 :]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":  # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn(f"unrecognized argument '{word}'")

            extensions.append(ext)
    finally:
        file.close()

    return extensions
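For context, a typical setup-script declaration of the Extension class above; the module name, source paths, and macro are invented for this sketch:

from distutils.core import setup
from distutils.extension import Extension

fast_ops = Extension(
    "fast_ops",                       # dotted module name, not a file name
    sources=["src/fast_ops.c"],       # relative to the setup script, Unix-style
    include_dirs=["include"],
    define_macros=[("USE_FAST_PATH", "1")],  # becomes -DUSE_FAST_PATH=1
    libraries=["m"],
    optional=True,                    # build failure will not abort the build
)

setup(name="example-pkg", version="1.0", ext_modules=[fast_ops])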
.venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py
ADDED
@@ -0,0 +1,469 @@
"""distutils.fancy_getopt

Wrapper around the standard getopt module that provides the following
additional features:
  * short and long options are tied together
  * options have help strings, so fancy_getopt could potentially
    create a complete usage summary
  * options set attributes of a passed-in object
"""

import getopt
import re
import string
import sys
from typing import Any, Sequence

from .errors import DistutilsArgError, DistutilsGetoptError

# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(rf'^{longopt_pat}$')

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")

# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object).
longopt_xlate = str.maketrans('-', '_')


class FancyGetopt:
    """Wrapper around the standard 'getopt()' module that provides some
    handy extra functionality:
      * short and long options are tied together
      * options have help strings, and help text can be assembled
        from them
      * options set attributes of a passed-in object
      * boolean options can have "negative aliases" -- eg. if
        --quiet is the "negative alias" of --verbose, then "--quiet"
        on the command line sets 'verbose' to false
    """

    def __init__(self, option_table=None):
        # The option table is (currently) a list of tuples.  The
        # tuples may have 3 or four values:
        #   (long_option, short_option, help_string [, repeatable])
        # if an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table

        # 'option_index' maps long option names to entries in the option
        # table (ie. those 3-tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()

        # 'alias' records (duh) alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}

        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}

        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command-line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}

        # And 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command-line,
        # but expands short options, converts aliases, etc.
        self.option_order = []

    def _build_index(self):
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option

    def set_option_table(self, option_table):
        self.option_table = option_table
        self._build_index()

    def add_option(self, long_option, short_option=None, help_string=None):
        if long_option in self.option_index:
            raise DistutilsGetoptError(
                f"option conflict: already an option '{long_option}'"
            )
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option

    def has_option(self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index

    def get_attr_name(self, long_option):
        """Translate long option name 'long_option' to the form it
        has as an attribute of some object: ie., translate hyphens
        to underscores."""
        return long_option.translate(longopt_xlate)

    def _check_alias_dict(self, aliases, what):
        assert isinstance(aliases, dict)
        for alias, opt in aliases.items():
            if alias not in self.option_index:
                raise DistutilsGetoptError(
                    f"invalid {what} '{alias}': option '{alias}' not defined"
                )
            if opt not in self.option_index:
                raise DistutilsGetoptError(
                    f"invalid {what} '{alias}': aliased option '{opt}' not defined"
                )

    def set_aliases(self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias

    def set_negative_aliases(self, negative_alias):
        """Set the negative aliases for this option parser.
        'negative_alias' should be a dictionary mapping option names to
        option names, both the key and value must already be defined
        in the option table."""
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias

    def _grok_option_table(self):  # noqa: C901
        """Populate the various data structures that keep tabs on the
        option table.  Called by 'getopt()' before it can do anything
        worthwhile.
        """
        self.long_opts = []
        self.short_opts = []
        self.short2long.clear()
        self.repeat = {}

        for option in self.option_table:
            if len(option) == 3:
                long, short, help = option
                repeat = 0
            elif len(option) == 4:
                long, short, help, repeat = option
            else:
                # the option table is part of the code, so simply
                # assert that it is correct
                raise ValueError(f"invalid option tuple: {option!r}")

            # Type- and value-check the option names
            if not isinstance(long, str) or len(long) < 2:
                raise DistutilsGetoptError(
                    f"invalid long option '{long}': must be a string of length >= 2"
                )

            if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
                raise DistutilsGetoptError(
                    f"invalid short option '{short}': "
                    "must a single character or None"
                )

            self.repeat[long] = repeat
            self.long_opts.append(long)

            if long[-1] == '=':  # option takes an argument?
                if short:
                    short = short + ':'
                long = long[0:-1]
                self.takes_arg[long] = True
            else:
                # Is option is a "negative alias" for some other option (eg.
                # "quiet" == "!verbose")?
                alias_to = self.negative_alias.get(long)
                if alias_to is not None:
                    if self.takes_arg[alias_to]:
                        raise DistutilsGetoptError(
                            f"invalid negative alias '{long}': "
                            f"aliased option '{alias_to}' takes a value"
                        )

                    self.long_opts[-1] = long  # XXX redundant?!
                self.takes_arg[long] = False

            # If this is an alias option, make sure its "takes arg" flag is
            # the same as the option it's aliased to.
            alias_to = self.alias.get(long)
            if alias_to is not None:
                if self.takes_arg[long] != self.takes_arg[alias_to]:
                    raise DistutilsGetoptError(
                        f"invalid alias '{long}': inconsistent with "
                        f"aliased option '{alias_to}' (one of them takes a value, "
                        "the other doesn't"
                    )

            # Now enforce some bondage on the long option name, so we can
            # later translate it to an attribute name on some object.  Have
            # to do this a bit late to make sure we've removed any trailing
            # '='.
            if not longopt_re.match(long):
                raise DistutilsGetoptError(
                    f"invalid long option name '{long}' "
                    "(must be letters, numbers, hyphens only"
                )

            self.attr_name[long] = self.get_attr_name(long)
            if short:
                self.short_opts.append(short)
                self.short2long[short[0]] = long

    def getopt(self, args=None, object=None):  # noqa: C901
        """Parse command-line options in args. Store as attributes on object.

        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = True
        else:
            created_object = False

        self._grok_option_table()

        short_opts = ' '.join(self.short_opts)
        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error as msg:
            raise DistutilsArgError(msg)

        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-':  # it's a short option
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]

            alias = self.alias.get(opt)
            if alias:
                opt = alias

            if not self.takes_arg[opt]:  # boolean option?
                assert val == '', "boolean option can't have value"
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1

            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = False.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))

        # for opts
        if created_object:
            return args, object
        else:
            return args

    def get_option_order(self):
        """Returns the list of (option, value) tuples processed by the
        previous run of 'getopt()'.  Raises RuntimeError if
        'getopt()' hasn't been called yet.
        """
        if self.option_order is None:
            raise RuntimeError("'getopt()' hasn't been called yet")
        else:
            return self.option_order

    def generate_help(self, header=None):  # noqa: C901
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            long = option[0]
            short = option[1]
            ell = len(long)
            if long[-1] == '=':
                ell = ell - 1
            if short is not None:
                ell = ell + 5  # " (-x)" where short == 'x'
            if ell > max_opt:
                max_opt = ell

        opt_width = max_opt + 2 + 2 + 2  # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # Help block for longest option looks like this:
        #   --flimflam  set the flim-flam level
        # and with wrapped text:
        #   --flimflam  set the flim-flam level (must be between
        #               0 and 100, except on Tuesdays)
        # Options with short names will have the short name shown (but
        # it doesn't contribute to max_opt):
        #   --foo (-f)  controls foonabulation
        # If adding the short option would make the left column too wide,
        # we push the explanation off to the next line
        #   --flimflam (-l)
        #               set the flim-flam level
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            long, short, help = option[:3]
            text = wrap_text(help, text_width)
            if long[-1] == '=':
                long = long[0:-1]

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
                else:
                    lines.append("  --%-*s  " % (max_opt, long))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = f"{long} (-{short})"
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, opt_names, text[0]))
                else:
                    lines.append("  --%-*s" % opt_names)

            for ell in text[1:]:
                lines.append(big_indent + ell)
        return lines

    def print_help(self, header=None, file=None):
        if file is None:
            file = sys.stdout
        for line in self.generate_help(header):
            file.write(line + "\n")


def fancy_getopt(options, negative_opt, object, args):
    parser = FancyGetopt(options)
    parser.set_negative_aliases(negative_opt)
    return parser.getopt(args, object)


WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace}


def wrap_text(text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    text = text.expandtabs()
    text = text.translate(WS_TRANS)
    chunks = re.split(r'( +|-+)', text)
    chunks = [ch for ch in chunks if ch]  # ' - ' results in empty strings
    lines = []

    while chunks:
        cur_line = []  # list of chunks (to-be-joined)
        cur_len = 0  # length of current line

        while chunks:
            ell = len(chunks[0])
            if cur_len + ell <= width:  # can squeeze (at least) this chunk in
                cur_line.append(chunks[0])
                del chunks[0]
                cur_len = cur_len + ell
            else:  # this line is full
                # drop last chunk if all space
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break

        if chunks:  # any chunks left to process?
            # if the current line is still empty, then we had a single
            # chunk that's too big too fit on a line -- so we break
            # down and break it up at the line width
            if cur_len == 0:
                cur_line.append(chunks[0][0:width])
                chunks[0] = chunks[0][width:]

            # all-whitespace chunks at the end of a line can be discarded
            # (and we know from the re.split above that if a chunk has
            # *any* whitespace, it is *all* whitespace)
            if chunks[0][0] == ' ':
                del chunks[0]

        # and store this line in the list-of-all-lines -- as a single
        # string, of course!
        lines.append(''.join(cur_line))

    return lines


def translate_longopt(opt):
    """Convert a long option name to a valid Python identifier by
    changing "-" to "_".
    """
    return opt.translate(longopt_xlate)


class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options: Sequence[Any] = []):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' will be initialized to None."""
        for opt in options:
            setattr(self, opt, None)


if __name__ == "__main__":
    text = """\
Tra-la-la, supercalifragilisticexpialidocious.
How *do* you spell that odd word, anyways?
(Someone ask Mary -- she'll know [or she'll
say, "How should I know?"].)"""

    for w in (10, 20, 30, 40):
        print("width: %d" % w)
        print("\n".join(wrap_text(text, w)))
        print()
.venv/Lib/site-packages/setuptools/_distutils/file_util.py
ADDED
@@ -0,0 +1,236 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""distutils.file_util
|
2 |
+
|
3 |
+
Utility functions for operating on single files.
|
4 |
+
"""
|
5 |
+
|
6 |
+
import os
|
7 |
+
|
8 |
+
from ._log import log
|
9 |
+
from .errors import DistutilsFileError
|
10 |
+
|
11 |
+
# for generating verbose output in 'copy_file()'
|
12 |
+
_copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'}
|
13 |
+
|
14 |
+
|
15 |
+
def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901
|
16 |
+
"""Copy the file 'src' to 'dst'; both must be filenames. Any error
|
17 |
+
opening either file, reading from 'src', or writing to 'dst', raises
|
18 |
+
DistutilsFileError. Data is read/written in chunks of 'buffer_size'
|
19 |
+
bytes (default 16k). No attempt is made to handle anything apart from
|
20 |
+
regular files.
|
21 |
+
"""
|
22 |
+
# Stolen from shutil module in the standard library, but with
|
23 |
+
# custom error-handling added.
|
24 |
+
fsrc = None
|
25 |
+
fdst = None
|
26 |
+
try:
|
27 |
+
try:
|
28 |
+
fsrc = open(src, 'rb')
|
29 |
+
except OSError as e:
|
30 |
+
raise DistutilsFileError(f"could not open '{src}': {e.strerror}")
|
31 |
+
|
32 |
+
if os.path.exists(dst):
|
33 |
+
try:
|
34 |
+
os.unlink(dst)
|
35 |
+
except OSError as e:
|
36 |
+
raise DistutilsFileError(f"could not delete '{dst}': {e.strerror}")
|
37 |
+
|
38 |
+
try:
|
39 |
+
fdst = open(dst, 'wb')
|
40 |
+
except OSError as e:
|
41 |
+
raise DistutilsFileError(f"could not create '{dst}': {e.strerror}")
|
42 |
+
|
43 |
+
while True:
|
44 |
+
try:
|
45 |
+
buf = fsrc.read(buffer_size)
|
46 |
+
except OSError as e:
|
47 |
+
raise DistutilsFileError(f"could not read from '{src}': {e.strerror}")
|
48 |
+
|
49 |
+
if not buf:
|
50 |
+
break
|
51 |
+
|
52 |
+
try:
|
53 |
+
fdst.write(buf)
|
54 |
+
except OSError as e:
|
55 |
+
raise DistutilsFileError(f"could not write to '{dst}': {e.strerror}")
|
56 |
+
finally:
|
57 |
+
if fdst:
|
58 |
+
fdst.close()
|
59 |
+
if fsrc:
|
60 |
+
fsrc.close()
|
61 |
+
|
62 |
+
|
63 |
+
def copy_file( # noqa: C901
|
64 |
+
src,
|
65 |
+
dst,
|
66 |
+
preserve_mode=True,
|
67 |
+
preserve_times=True,
|
68 |
+
update=False,
|
69 |
+
link=None,
|
70 |
+
verbose=True,
|
71 |
+
dry_run=False,
|
72 |
+
):
|
73 |
+
"""Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is
|
74 |
+
copied there with the same name; otherwise, it must be a filename. (If
|
75 |
+
the file exists, it will be ruthlessly clobbered.) If 'preserve_mode'
|
76 |
+
is true (the default), the file's mode (type and permission bits, or
|
77 |
+
whatever is analogous on the current platform) is copied. If
|
78 |
+
'preserve_times' is true (the default), the last-modified and
|
79 |
+
last-access times are copied as well. If 'update' is true, 'src' will
|
80 |
+
only be copied if 'dst' does not exist, or if 'dst' does exist but is
|
81 |
+
older than 'src'.
|
82 |
+
|
83 |
+
'link' allows you to make hard links (os.link) or symbolic links
|
84 |
+
(os.symlink) instead of copying: set it to "hard" or "sym"; if it is
|
85 |
+
None (the default), files are copied. Don't set 'link' on systems that
|
86 |
+
don't support it: 'copy_file()' doesn't check if hard or symbolic
|
87 |
+
linking is available. If hardlink fails, falls back to
|
88 |
+
_copy_file_contents().
|
89 |
+
|
90 |
+
Under Mac OS, uses the native file copy function in macostools; on
|
91 |
+
other systems, uses '_copy_file_contents()' to copy file contents.
|
92 |
+
|
93 |
+
Return a tuple (dest_name, copied): 'dest_name' is the actual name of
|
94 |
+
the output file, and 'copied' is true if the file was copied (or would
|
95 |
+
have been copied, if 'dry_run' true).
|
96 |
+
"""
|
97 |
+
# XXX if the destination file already exists, we clobber it if
|
98 |
+
# copying, but blow up if linking. Hmmm. And I don't know what
|
99 |
+
# macostools.copyfile() does. Should definitely be consistent, and
|
100 |
+
# should probably blow up if destination exists and we would be
|
101 |
+
# changing it (ie. it's not already a hard/soft link to src OR
|
102 |
+
# (not update) and (src newer than dst).
|
103 |
+
|
104 |
+
from distutils._modified import newer
|
105 |
+
from stat import S_IMODE, ST_ATIME, ST_MODE, ST_MTIME
|
106 |
+
|
107 |
+
if not os.path.isfile(src):
|
108 |
+
raise DistutilsFileError(
|
109 |
+
f"can't copy '{src}': doesn't exist or not a regular file"
|
110 |
+
)
|
111 |
+
|
112 |
+
if os.path.isdir(dst):
|
113 |
+
dir = dst
|
114 |
+
dst = os.path.join(dst, os.path.basename(src))
|
115 |
+
else:
|
116 |
+
dir = os.path.dirname(dst)
|
117 |
+
|
118 |
+
if update and not newer(src, dst):
|
119 |
+
if verbose >= 1:
|
120 |
+
log.debug("not copying %s (output up-to-date)", src)
|
121 |
+
return (dst, 0)
|
122 |
+
|
123 |
+
try:
|
124 |
+
action = _copy_action[link]
|
125 |
+
except KeyError:
|
126 |
+
raise ValueError(f"invalid value '{link}' for 'link' argument")
|
127 |
+
|
128 |
+
if verbose >= 1:
|
129 |
+
if os.path.basename(dst) == os.path.basename(src):
|
130 |
+
log.info("%s %s -> %s", action, src, dir)
|
131 |
+
else:
|
132 |
+
log.info("%s %s -> %s", action, src, dst)
|
133 |
+
|
134 |
+
if dry_run:
|
135 |
+
return (dst, 1)
|
136 |
+
|
137 |
+
# If linking (hard or symbolic), use the appropriate system call
|
138 |
+
# (Unix only, of course, but that's the caller's responsibility)
|
139 |
+
elif link == 'hard':
|
140 |
+
if not (os.path.exists(dst) and os.path.samefile(src, dst)):
|
141 |
+
try:
|
142 |
+
os.link(src, dst)
|
143 |
+
except OSError:
|
144 |
+
# If hard linking fails, fall back on copying file
|
145 |
+
# (some special filesystems don't support hard linking
|
146 |
+
# even under Unix, see issue #8876).
|
147 |
+
pass
|
148 |
+
else:
|
149 |
+
return (dst, 1)
|
150 |
+
elif link == 'sym':
|
151 |
+
if not (os.path.exists(dst) and os.path.samefile(src, dst)):
|
152 |
+
os.symlink(src, dst)
|
153 |
+
return (dst, 1)
|
154 |
+
|
155 |
+
# Otherwise (non-Mac, not linking), copy the file contents and
|
156 |
+
# (optionally) copy the times and mode.
|
157 |
+
_copy_file_contents(src, dst)
|
158 |
+
if preserve_mode or preserve_times:
|
159 |
+
st = os.stat(src)
|
160 |
+
|
161 |
+
# According to David Ascher <[email protected]>, utime() should be done
|
162 |
+
# before chmod() (at least under NT).
|
163 |
+
if preserve_times:
|
164 |
+
os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
|
165 |
+
if preserve_mode:
|
166 |
+
os.chmod(dst, S_IMODE(st[ST_MODE]))
|
167 |
+
|
168 |
+
return (dst, 1)
|
169 |
+
|
170 |
+
|
171 |
+
# XXX I suspect this is Unix-specific -- need porting help!
|
172 |
+
def move_file(src, dst, verbose=True, dry_run=False): # noqa: C901
|
173 |
+
"""Move a file 'src' to 'dst'. If 'dst' is a directory, the file will
|
174 |
+
be moved into it with the same name; otherwise, 'src' is just renamed
|
175 |
+
to 'dst'. Return the new full name of the file.
|
176 |
+
|
177 |
+
Handles cross-device moves on Unix using 'copy_file()'. What about
|
178 |
+
other systems???
|
179 |
+
"""
|
180 |
+
import errno
|
181 |
+
from os.path import basename, dirname, exists, isdir, isfile
|
182 |
+
|
183 |
+
if verbose >= 1:
|
184 |
+
log.info("moving %s -> %s", src, dst)
|
185 |
+
|
186 |
+
if dry_run:
|
187 |
+
return dst
|
188 |
+
|
189 |
+
if not isfile(src):
|
190 |
+
raise DistutilsFileError(f"can't move '{src}': not a regular file")
|
191 |
+
|
192 |
+
if isdir(dst):
|
193 |
+
dst = os.path.join(dst, basename(src))
|
194 |
+
elif exists(dst):
|
195 |
+
raise DistutilsFileError(
|
196 |
+
f"can't move '{src}': destination '{dst}' already exists"
|
197 |
+
)
|
198 |
+
|
199 |
+
if not isdir(dirname(dst)):
|
200 |
+
raise DistutilsFileError(
|
201 |
+
f"can't move '{src}': destination '{dst}' not a valid path"
|
202 |
+
)
|
203 |
+
|
204 |
+
copy_it = False
|
205 |
+
try:
|
206 |
+
os.rename(src, dst)
|
207 |
+
except OSError as e:
|
208 |
+
(num, msg) = e.args
|
209 |
+
if num == errno.EXDEV:
|
210 |
+
copy_it = True
|
211 |
+
else:
|
212 |
+
raise DistutilsFileError(f"couldn't move '{src}' to '{dst}': {msg}")
|
213 |
+
|
214 |
+
if copy_it:
|
215 |
+
copy_file(src, dst, verbose=verbose)
|
216 |
+
try:
|
217 |
+
os.unlink(src)
|
218 |
+
except OSError as e:
|
219 |
+
(num, msg) = e.args
|
220 |
+
try:
|
221 |
+
os.unlink(dst)
|
222 |
+
except OSError:
|
223 |
+
pass
|
224 |
+
raise DistutilsFileError(
|
225 |
+
f"couldn't move '{src}' to '{dst}' by copy/delete: "
|
226 |
+
f"delete '{src}' failed: {msg}"
|
227 |
+
)
|
228 |
+
return dst
|
229 |
+
|
230 |
+
|
231 |
+
def write_file(filename, contents):
|
232 |
+
"""Create a file with the specified name and write 'contents' (a
|
233 |
+
sequence of strings without line terminators) to it.
|
234 |
+
"""
|
235 |
+
with open(filename, 'w', encoding='utf-8') as f:
|
236 |
+
f.writelines(line + '\n' for line in contents)
|
.venv/Lib/site-packages/setuptools/_distutils/filelist.py
ADDED
@@ -0,0 +1,369 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""distutils.filelist
|
2 |
+
|
3 |
+
Provides the FileList class, used for poking about the filesystem
|
4 |
+
and building lists of files.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import fnmatch
|
8 |
+
import functools
|
9 |
+
import os
|
10 |
+
import re
|
11 |
+
|
12 |
+
from ._log import log
|
13 |
+
from .errors import DistutilsInternalError, DistutilsTemplateError
|
14 |
+
from .util import convert_path
|
15 |
+
|
16 |
+
|
17 |
+
class FileList:
|
18 |
+
"""A list of files built by on exploring the filesystem and filtered by
|
19 |
+
applying various patterns to what we find there.
|
20 |
+
|
21 |
+
Instance attributes:
|
22 |
+
dir
|
23 |
+
directory from which files will be taken -- only used if
|
24 |
+
'allfiles' not supplied to constructor
|
25 |
+
files
|
26 |
+
list of filenames currently being built/filtered/manipulated
|
27 |
+
allfiles
|
28 |
+
complete list of files under consideration (ie. without any
|
29 |
+
filtering applied)
|
30 |
+
"""
|
31 |
+
|
32 |
+
def __init__(self, warn=None, debug_print=None):
|
33 |
+
# ignore argument to FileList, but keep them for backwards
|
34 |
+
# compatibility
|
35 |
+
self.allfiles = None
|
36 |
+
self.files = []
|
37 |
+
|
38 |
+
def set_allfiles(self, allfiles):
|
39 |
+
self.allfiles = allfiles
|
40 |
+
|
41 |
+
def findall(self, dir=os.curdir):
|
42 |
+
self.allfiles = findall(dir)
|
43 |
+
|
44 |
+
def debug_print(self, msg):
|
45 |
+
"""Print 'msg' to stdout if the global DEBUG (taken from the
|
46 |
+
DISTUTILS_DEBUG environment variable) flag is true.
|
47 |
+
"""
|
48 |
+
from distutils.debug import DEBUG
|
49 |
+
|
50 |
+
if DEBUG:
|
51 |
+
print(msg)
|
52 |
+
|
53 |
+
# Collection methods
|
54 |
+
|
55 |
+
def append(self, item):
|
56 |
+
self.files.append(item)
|
57 |
+
|
58 |
+
def extend(self, items):
|
59 |
+
self.files.extend(items)
|
60 |
+
|
61 |
+
def sort(self):
|
62 |
+
# Not a strict lexical sort!
|
63 |
+
sortable_files = sorted(map(os.path.split, self.files))
|
64 |
+
self.files = []
|
65 |
+
for sort_tuple in sortable_files:
|
66 |
+
self.files.append(os.path.join(*sort_tuple))
|
67 |
+
|
68 |
+
# Other miscellaneous utility methods
|
69 |
+
|
70 |
+
def remove_duplicates(self):
|
71 |
+
# Assumes list has been sorted!
|
72 |
+
for i in range(len(self.files) - 1, 0, -1):
|
73 |
+
if self.files[i] == self.files[i - 1]:
|
74 |
+
del self.files[i]
|
75 |
+
|
76 |
+
# "File template" methods
|
77 |
+
|
78 |
+
def _parse_template_line(self, line):
|
79 |
+
words = line.split()
|
80 |
+
action = words[0]
|
81 |
+
|
82 |
+
patterns = dir = dir_pattern = None
|
83 |
+
|
84 |
+
if action in ('include', 'exclude', 'global-include', 'global-exclude'):
|
85 |
+
if len(words) < 2:
|
86 |
+
raise DistutilsTemplateError(
|
87 |
+
f"'{action}' expects <pattern1> <pattern2> ..."
|
88 |
+
)
|
89 |
+
patterns = [convert_path(w) for w in words[1:]]
|
90 |
+
elif action in ('recursive-include', 'recursive-exclude'):
|
91 |
+
if len(words) < 3:
|
92 |
+
raise DistutilsTemplateError(
|
93 |
+
f"'{action}' expects <dir> <pattern1> <pattern2> ..."
|
94 |
+
)
|
95 |
+
dir = convert_path(words[1])
|
96 |
+
patterns = [convert_path(w) for w in words[2:]]
|
97 |
+
elif action in ('graft', 'prune'):
|
98 |
+
if len(words) != 2:
|
99 |
+
raise DistutilsTemplateError(
|
100 |
+
f"'{action}' expects a single <dir_pattern>"
|
101 |
+
)
|
102 |
+
dir_pattern = convert_path(words[1])
|
103 |
+
else:
|
104 |
+
raise DistutilsTemplateError(f"unknown action '{action}'")
|
105 |
+
|
106 |
+
return (action, patterns, dir, dir_pattern)
|
107 |
+
|
108 |
+
def process_template_line(self, line): # noqa: C901
|
109 |
+
# Parse the line: split it up, make sure the right number of words
|
110 |
+
# is there, and return the relevant words. 'action' is always
|
111 |
+
# defined: it's the first word of the line. Which of the other
|
112 |
+
# three are defined depends on the action; it'll be either
|
113 |
+
# patterns, (dir and patterns), or (dir_pattern).
|
114 |
+
(action, patterns, dir, dir_pattern) = self._parse_template_line(line)
|
115 |
+
|
116 |
+
# OK, now we know that the action is valid and we have the
|
117 |
+
# right number of words on the line for that action -- so we
|
118 |
+
# can proceed with minimal error-checking.
|
119 |
+
if action == 'include':
|
120 |
+
self.debug_print("include " + ' '.join(patterns))
|
121 |
+
for pattern in patterns:
|
122 |
+
if not self.include_pattern(pattern, anchor=True):
|
123 |
+
log.warning("warning: no files found matching '%s'", pattern)
|
124 |
+
|
125 |
+
elif action == 'exclude':
|
126 |
+
self.debug_print("exclude " + ' '.join(patterns))
|
127 |
+
for pattern in patterns:
|
128 |
+
if not self.exclude_pattern(pattern, anchor=True):
|
129 |
+
log.warning(
|
130 |
+
(
|
131 |
+
"warning: no previously-included files "
|
132 |
+
"found matching '%s'"
|
133 |
+
),
|
134 |
+
pattern,
|
135 |
+
)
|
136 |
+
|
137 |
+
elif action == 'global-include':
|
138 |
+
self.debug_print("global-include " + ' '.join(patterns))
|
139 |
+
for pattern in patterns:
|
140 |
+
if not self.include_pattern(pattern, anchor=False):
|
141 |
+
log.warning(
|
142 |
+
(
|
143 |
+
"warning: no files found matching '%s' "
|
144 |
+
"anywhere in distribution"
|
145 |
+
),
|
146 |
+
pattern,
|
147 |
+
)
|
148 |
+
|
149 |
+
elif action == 'global-exclude':
|
150 |
+
self.debug_print("global-exclude " + ' '.join(patterns))
|
151 |
+
for pattern in patterns:
|
152 |
+
if not self.exclude_pattern(pattern, anchor=False):
|
153 |
+
log.warning(
|
154 |
+
(
|
155 |
+
"warning: no previously-included files matching "
|
156 |
+
"'%s' found anywhere in distribution"
|
157 |
+
),
|
158 |
+
pattern,
|
159 |
+
)
|
160 |
+
|
161 |
+
elif action == 'recursive-include':
|
162 |
+
self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns)))
|
163 |
+
for pattern in patterns:
|
164 |
+
if not self.include_pattern(pattern, prefix=dir):
|
165 |
+
msg = "warning: no files found matching '%s' under directory '%s'"
|
166 |
+
log.warning(msg, pattern, dir)
|
167 |
+
|
168 |
+
elif action == 'recursive-exclude':
|
169 |
+
self.debug_print("recursive-exclude {} {}".format(dir, ' '.join(patterns)))
|
170 |
+
for pattern in patterns:
|
171 |
+
if not self.exclude_pattern(pattern, prefix=dir):
|
172 |
+
log.warning(
|
173 |
+
(
|
174 |
+
"warning: no previously-included files matching "
|
175 |
+
"'%s' found under directory '%s'"
|
176 |
+
),
|
177 |
+
pattern,
|
178 |
+
dir,
|
179 |
+
)
|
180 |
+
|
181 |
+
elif action == 'graft':
|
182 |
+
self.debug_print("graft " + dir_pattern)
|
183 |
+
if not self.include_pattern(None, prefix=dir_pattern):
|
184 |
+
log.warning("warning: no directories found matching '%s'", dir_pattern)
|
185 |
+
|
186 |
+
elif action == 'prune':
|
187 |
+
self.debug_print("prune " + dir_pattern)
|
188 |
+
if not self.exclude_pattern(None, prefix=dir_pattern):
|
189 |
+
log.warning(
|
190 |
+
("no previously-included directories found matching '%s'"),
|
191 |
+
dir_pattern,
|
192 |
+
)
|
193 |
+
else:
|
194 |
+
raise DistutilsInternalError(
|
195 |
+
f"this cannot happen: invalid action '{action}'"
|
196 |
+
)
|
197 |
+
|
198 |
+
# Filtering/selection methods
|
199 |
+
|
200 |
+
def include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
|
201 |
+
"""Select strings (presumably filenames) from 'self.files' that
|
202 |
+
match 'pattern', a Unix-style wildcard (glob) pattern. Patterns
|
203 |
+
are not quite the same as implemented by the 'fnmatch' module: '*'
|
204 |
+
and '?' match non-special characters, where "special" is platform-
|
205 |
+
dependent: slash on Unix; colon, slash, and backslash on
|
206 |
+
DOS/Windows; and colon on Mac OS.
|
207 |
+
|
208 |
+
If 'anchor' is true (the default), then the pattern match is more
|
209 |
+
stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
|
210 |
+
'anchor' is false, both of these will match.
|
211 |
+
|
212 |
+
If 'prefix' is supplied, then only filenames starting with 'prefix'
|
213 |
+
(itself a pattern) and ending with 'pattern', with anything in between
|
214 |
+
them, will match. 'anchor' is ignored in this case.
|
215 |
+
|
216 |
+
If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
|
217 |
+
'pattern' is assumed to be either a string containing a regex or a
|
218 |
+
regex object -- no translation is done, the regex is just compiled
|
219 |
+
and used as-is.
|
220 |
+
|
221 |
+
Selected strings will be added to self.files.
|
222 |
+
|
223 |
+
Return True if files are found, False otherwise.
|
224 |
+
"""
|
225 |
+
# XXX docstring lying about what the special chars are?
|
226 |
+
files_found = False
|
227 |
+
pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
|
228 |
+
self.debug_print(f"include_pattern: applying regex r'{pattern_re.pattern}'")
|
229 |
+
|
230 |
+
# delayed loading of allfiles list
|
231 |
+
if self.allfiles is None:
|
232 |
+
self.findall()
|
233 |
+
|
234 |
+
for name in self.allfiles:
|
235 |
+
if pattern_re.search(name):
|
236 |
+
self.debug_print(" adding " + name)
|
237 |
+
self.files.append(name)
|
238 |
+
files_found = True
|
239 |
+
return files_found
|
240 |
+
|
241 |
+
def exclude_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
|
242 |
+
"""Remove strings (presumably filenames) from 'files' that match
|
243 |
+
'pattern'. Other parameters are the same as for
|
244 |
+
'include_pattern()', above.
|
245 |
+
The list 'self.files' is modified in place.
|
246 |
+
Return True if files are found, False otherwise.
|
247 |
+
"""
|
248 |
+
files_found = False
|
249 |
+
pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
|
250 |
+
self.debug_print(f"exclude_pattern: applying regex r'{pattern_re.pattern}'")
|
251 |
+
for i in range(len(self.files) - 1, -1, -1):
|
252 |
+
if pattern_re.search(self.files[i]):
|
253 |
+
self.debug_print(" removing " + self.files[i])
|
254 |
+
del self.files[i]
|
255 |
+
files_found = True
|
256 |
+
return files_found
|
257 |
+
|
258 |
+
|
259 |
+
# Utility functions
|
260 |
+
|
261 |
+
|
262 |
+
def _find_all_simple(path):
|
263 |
+
"""
|
264 |
+
Find all files under 'path'
|
265 |
+
"""
|
266 |
+
all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True))
|
267 |
+
results = (
|
268 |
+
os.path.join(base, file) for base, dirs, files in all_unique for file in files
|
269 |
+
)
|
270 |
+
return filter(os.path.isfile, results)
|
271 |
+
|
272 |
+
|
273 |
+
class _UniqueDirs(set):
|
274 |
+
"""
|
275 |
+
Exclude previously-seen dirs from walk results,
|
276 |
+
avoiding infinite recursion.
|
277 |
+
Ref https://bugs.python.org/issue44497.
|
278 |
+
"""
|
279 |
+
|
280 |
+
def __call__(self, walk_item):
|
281 |
+
"""
|
282 |
+
Given an item from an os.walk result, determine
|
283 |
+
if the item represents a unique dir for this instance
|
284 |
+
and if not, prevent further traversal.
|
285 |
+
"""
|
286 |
+
base, dirs, files = walk_item
|
287 |
+
stat = os.stat(base)
|
288 |
+
candidate = stat.st_dev, stat.st_ino
|
289 |
+
found = candidate in self
|
290 |
+
if found:
|
291 |
+
del dirs[:]
|
292 |
+
self.add(candidate)
|
293 |
+
return not found
|
294 |
+
|
295 |
+
@classmethod
|
296 |
+
def filter(cls, items):
|
297 |
+
return filter(cls(), items)
|
298 |
+
|
299 |
+
|
300 |
+
def findall(dir=os.curdir):
|
301 |
+
"""
|
302 |
+
Find all files under 'dir' and return the list of full filenames.
|
303 |
+
Unless dir is '.', return full filenames with dir prepended.
|
304 |
+
"""
|
305 |
+
files = _find_all_simple(dir)
|
306 |
+
if dir == os.curdir:
|
307 |
+
make_rel = functools.partial(os.path.relpath, start=dir)
|
308 |
+
files = map(make_rel, files)
|
309 |
+
return list(files)
|
310 |
+
|
311 |
+
|
312 |
+
def glob_to_re(pattern):
|
313 |
+
"""Translate a shell-like glob pattern to a regular expression; return
|
314 |
+
a string containing the regex. Differs from 'fnmatch.translate()' in
|
315 |
+
that '*' does not match "special characters" (which are
|
316 |
+
platform-specific).
|
317 |
+
"""
|
318 |
+
pattern_re = fnmatch.translate(pattern)
|
319 |
+
|
320 |
+
# '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
|
321 |
+
# IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
|
322 |
+
# and by extension they shouldn't match such "special characters" under
|
323 |
+
# any OS. So change all non-escaped dots in the RE to match any
|
324 |
+
# character except the special characters (currently: just os.sep).
|
325 |
+
sep = os.sep
|
326 |
+
if os.sep == '\\':
|
327 |
+
# we're using a regex to manipulate a regex, so we need
|
328 |
+
# to escape the backslash twice
|
329 |
+
sep = r'\\\\'
|
330 |
+
escaped = rf'\1[^{sep}]'
|
331 |
+
pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
|
332 |
+
return pattern_re
|
333 |
+
|
334 |
+
|
335 |
+
def translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
|
336 |
+
"""Translate a shell-like wildcard pattern to a compiled regular
|
337 |
+
expression. Return the compiled regex. If 'is_regex' true,
|
338 |
+
then 'pattern' is directly compiled to a regex (if it's a string)
|
339 |
+
or just returned as-is (assumes it's a regex object).
|
340 |
+
"""
|
341 |
+
if is_regex:
|
342 |
+
if isinstance(pattern, str):
|
343 |
+
return re.compile(pattern)
|
344 |
+
else:
|
345 |
+
return pattern
|
346 |
+
|
347 |
+
# ditch start and end characters
|
348 |
+
start, _, end = glob_to_re('_').partition('_')
|
349 |
+
|
350 |
+
if pattern:
|
351 |
+
pattern_re = glob_to_re(pattern)
|
352 |
+
assert pattern_re.startswith(start) and pattern_re.endswith(end)
|
353 |
+
else:
|
354 |
+
pattern_re = ''
|
355 |
+
|
356 |
+
if prefix is not None:
|
357 |
+
prefix_re = glob_to_re(prefix)
|
358 |
+
assert prefix_re.startswith(start) and prefix_re.endswith(end)
|
359 |
+
prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)]
|
360 |
+
sep = os.sep
|
361 |
+
if os.sep == '\\':
|
362 |
+
sep = r'\\'
|
363 |
+
pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)]
|
364 |
+
pattern_re = rf'{start}\A{prefix_re}{sep}.*{pattern_re}{end}'
|
365 |
+
else: # no prefix -- respect anchor flag
|
366 |
+
if anchor:
|
367 |
+
pattern_re = rf'{start}\A{pattern_re[len(start) :]}'
|
368 |
+
|
369 |
+
return re.compile(pattern_re)
|
.venv/Lib/site-packages/setuptools/_distutils/log.py
ADDED
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
A simple log mechanism styled after PEP 282.
|
3 |
+
|
4 |
+
Retained for compatibility and should not be used.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import logging
|
8 |
+
import warnings
|
9 |
+
|
10 |
+
from ._log import log as _global_log
|
11 |
+
|
12 |
+
DEBUG = logging.DEBUG
|
13 |
+
INFO = logging.INFO
|
14 |
+
WARN = logging.WARN
|
15 |
+
ERROR = logging.ERROR
|
16 |
+
FATAL = logging.FATAL
|
17 |
+
|
18 |
+
log = _global_log.log
|
19 |
+
debug = _global_log.debug
|
20 |
+
info = _global_log.info
|
21 |
+
warn = _global_log.warning
|
22 |
+
error = _global_log.error
|
23 |
+
fatal = _global_log.fatal
|
24 |
+
|
25 |
+
|
26 |
+
def set_threshold(level):
|
27 |
+
orig = _global_log.level
|
28 |
+
_global_log.setLevel(level)
|
29 |
+
return orig
|
30 |
+
|
31 |
+
|
32 |
+
def set_verbosity(v):
|
33 |
+
if v <= 0:
|
34 |
+
set_threshold(logging.WARN)
|
35 |
+
elif v == 1:
|
36 |
+
set_threshold(logging.INFO)
|
37 |
+
elif v >= 2:
|
38 |
+
set_threshold(logging.DEBUG)
|
39 |
+
|
40 |
+
|
41 |
+
class Log(logging.Logger):
|
42 |
+
"""distutils.log.Log is deprecated, please use an alternative from `logging`."""
|
43 |
+
|
44 |
+
def __init__(self, threshold=WARN):
|
45 |
+
warnings.warn(Log.__doc__) # avoid DeprecationWarning to ensure warn is shown
|
46 |
+
super().__init__(__name__, level=threshold)
|
47 |
+
|
48 |
+
@property
|
49 |
+
def threshold(self):
|
50 |
+
return self.level
|
51 |
+
|
52 |
+
@threshold.setter
|
53 |
+
def threshold(self, level):
|
54 |
+
self.setLevel(level)
|
55 |
+
|
56 |
+
warn = logging.Logger.warning
|
.venv/Lib/site-packages/setuptools/_distutils/tests/test_install_data.py
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Tests for distutils.command.install_data."""
|
2 |
+
|
3 |
+
import os
|
4 |
+
import pathlib
|
5 |
+
from distutils.command.install_data import install_data
|
6 |
+
from distutils.tests import support
|
7 |
+
|
8 |
+
import pytest
|
9 |
+
|
10 |
+
|
11 |
+
@pytest.mark.usefixtures('save_env')
|
12 |
+
class TestInstallData(
|
13 |
+
support.TempdirManager,
|
14 |
+
):
|
15 |
+
def test_simple_run(self):
|
16 |
+
pkg_dir, dist = self.create_dist()
|
17 |
+
cmd = install_data(dist)
|
18 |
+
cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')
|
19 |
+
|
20 |
+
# data_files can contain
|
21 |
+
# - simple files
|
22 |
+
# - a Path object
|
23 |
+
# - a tuple with a path, and a list of file
|
24 |
+
one = os.path.join(pkg_dir, 'one')
|
25 |
+
self.write_file(one, 'xxx')
|
26 |
+
inst2 = os.path.join(pkg_dir, 'inst2')
|
27 |
+
two = os.path.join(pkg_dir, 'two')
|
28 |
+
self.write_file(two, 'xxx')
|
29 |
+
three = pathlib.Path(pkg_dir) / 'three'
|
30 |
+
self.write_file(three, 'xxx')
|
31 |
+
|
32 |
+
cmd.data_files = [one, (inst2, [two]), three]
|
33 |
+
assert cmd.get_inputs() == [one, (inst2, [two]), three]
|
34 |
+
|
35 |
+
# let's run the command
|
36 |
+
cmd.ensure_finalized()
|
37 |
+
cmd.run()
|
38 |
+
|
39 |
+
# let's check the result
|
40 |
+
assert len(cmd.get_outputs()) == 3
|
41 |
+
rthree = os.path.split(one)[-1]
|
42 |
+
assert os.path.exists(os.path.join(inst, rthree))
|
43 |
+
rtwo = os.path.split(two)[-1]
|
44 |
+
assert os.path.exists(os.path.join(inst2, rtwo))
|
45 |
+
rone = os.path.split(one)[-1]
|
46 |
+
assert os.path.exists(os.path.join(inst, rone))
|
47 |
+
cmd.outfiles = []
|
48 |
+
|
49 |
+
# let's try with warn_dir one
|
50 |
+
cmd.warn_dir = True
|
51 |
+
cmd.ensure_finalized()
|
52 |
+
cmd.run()
|
53 |
+
|
54 |
+
# let's check the result
|
55 |
+
assert len(cmd.get_outputs()) == 3
|
56 |
+
assert os.path.exists(os.path.join(inst, rthree))
|
57 |
+
assert os.path.exists(os.path.join(inst2, rtwo))
|
58 |
+
assert os.path.exists(os.path.join(inst, rone))
|
59 |
+
cmd.outfiles = []
|
60 |
+
|
61 |
+
# now using root and empty dir
|
62 |
+
cmd.root = os.path.join(pkg_dir, 'root')
|
63 |
+
inst5 = os.path.join(pkg_dir, 'inst5')
|
64 |
+
four = os.path.join(cmd.install_dir, 'four')
|
65 |
+
self.write_file(four, 'xx')
|
66 |
+
cmd.data_files = [one, (inst2, [two]), three, ('inst5', [four]), (inst5, [])]
|
67 |
+
cmd.ensure_finalized()
|
68 |
+
cmd.run()
|
69 |
+
|
70 |
+
# let's check the result
|
71 |
+
assert len(cmd.get_outputs()) == 5
|
72 |
+
assert os.path.exists(os.path.join(inst, rthree))
|
73 |
+
assert os.path.exists(os.path.join(inst2, rtwo))
|
74 |
+
assert os.path.exists(os.path.join(inst, rone))
|
.venv/Lib/site-packages/setuptools/_distutils/tests/test_install_headers.py
ADDED
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Tests for distutils.command.install_headers."""
|
2 |
+
|
3 |
+
import os
|
4 |
+
from distutils.command.install_headers import install_headers
|
5 |
+
from distutils.tests import support
|
6 |
+
|
7 |
+
import pytest
|
8 |
+
|
9 |
+
|
10 |
+
@pytest.mark.usefixtures('save_env')
|
11 |
+
class TestInstallHeaders(
|
12 |
+
support.TempdirManager,
|
13 |
+
):
|
14 |
+
def test_simple_run(self):
|
15 |
+
# we have two headers
|
16 |
+
header_list = self.mkdtemp()
|
17 |
+
header1 = os.path.join(header_list, 'header1')
|
18 |
+
header2 = os.path.join(header_list, 'header2')
|
19 |
+
self.write_file(header1)
|
20 |
+
self.write_file(header2)
|
21 |
+
headers = [header1, header2]
|
22 |
+
|
23 |
+
pkg_dir, dist = self.create_dist(headers=headers)
|
24 |
+
cmd = install_headers(dist)
|
25 |
+
assert cmd.get_inputs() == headers
|
26 |
+
|
27 |
+
# let's run the command
|
28 |
+
cmd.install_dir = os.path.join(pkg_dir, 'inst')
|
29 |
+
cmd.ensure_finalized()
|
30 |
+
cmd.run()
|
31 |
+
|
32 |
+
# let's check the results
|
33 |
+
assert len(cmd.get_outputs()) == 2
|
.venv/Lib/site-packages/setuptools/_distutils/tests/test_install_lib.py
ADDED
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Tests for distutils.command.install_data."""
|
2 |
+
|
3 |
+
import importlib.util
|
4 |
+
import os
|
5 |
+
import sys
|
6 |
+
from distutils.command.install_lib import install_lib
|
7 |
+
from distutils.errors import DistutilsOptionError
|
8 |
+
from distutils.extension import Extension
|
9 |
+
from distutils.tests import support
|
10 |
+
|
11 |
+
import pytest
|
12 |
+
|
13 |
+
|
14 |
+
@support.combine_markers
|
15 |
+
@pytest.mark.usefixtures('save_env')
|
16 |
+
class TestInstallLib(
|
17 |
+
support.TempdirManager,
|
18 |
+
):
|
19 |
+
def test_finalize_options(self):
|
20 |
+
dist = self.create_dist()[1]
|
21 |
+
cmd = install_lib(dist)
|
22 |
+
|
23 |
+
cmd.finalize_options()
|
24 |
+
assert cmd.compile == 1
|
25 |
+
assert cmd.optimize == 0
|
26 |
+
|
27 |
+
# optimize must be 0, 1, or 2
|
28 |
+
cmd.optimize = 'foo'
|
29 |
+
with pytest.raises(DistutilsOptionError):
|
30 |
+
cmd.finalize_options()
|
31 |
+
cmd.optimize = '4'
|
32 |
+
with pytest.raises(DistutilsOptionError):
|
33 |
+
cmd.finalize_options()
|
34 |
+
|
35 |
+
cmd.optimize = '2'
|
36 |
+
cmd.finalize_options()
|
37 |
+
assert cmd.optimize == 2
|
38 |
+
|
39 |
+
@pytest.mark.skipif('sys.dont_write_bytecode')
|
40 |
+
def test_byte_compile(self):
|
41 |
+
project_dir, dist = self.create_dist()
|
42 |
+
os.chdir(project_dir)
|
43 |
+
cmd = install_lib(dist)
|
44 |
+
cmd.compile = cmd.optimize = 1
|
45 |
+
|
46 |
+
f = os.path.join(project_dir, 'foo.py')
|
47 |
+
self.write_file(f, '# python file')
|
48 |
+
cmd.byte_compile([f])
|
49 |
+
pyc_file = importlib.util.cache_from_source('foo.py', optimization='')
|
50 |
+
pyc_opt_file = importlib.util.cache_from_source(
|
51 |
+
'foo.py', optimization=cmd.optimize
|
52 |
+
)
|
53 |
+
assert os.path.exists(pyc_file)
|
54 |
+
assert os.path.exists(pyc_opt_file)
|
55 |
+
|
56 |
+
def test_get_outputs(self):
|
57 |
+
project_dir, dist = self.create_dist()
|
58 |
+
os.chdir(project_dir)
|
59 |
+
os.mkdir('spam')
|
60 |
+
cmd = install_lib(dist)
|
61 |
+
|
62 |
+
# setting up a dist environment
|
63 |
+
cmd.compile = cmd.optimize = 1
|
64 |
+
cmd.install_dir = self.mkdtemp()
|
65 |
+
f = os.path.join(project_dir, 'spam', '__init__.py')
|
66 |
+
self.write_file(f, '# python package')
|
67 |
+
cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
|
68 |
+
cmd.distribution.packages = ['spam']
|
69 |
+
cmd.distribution.script_name = 'setup.py'
|
70 |
+
|
71 |
+
# get_outputs should return 4 elements: spam/__init__.py and .pyc,
|
72 |
+
# foo.import-tag-abiflags.so / foo.pyd
|
73 |
+
outputs = cmd.get_outputs()
|
74 |
+
assert len(outputs) == 4, outputs
|
75 |
+
|
76 |
+
def test_get_inputs(self):
|
77 |
+
project_dir, dist = self.create_dist()
|
78 |
+
os.chdir(project_dir)
|
79 |
+
os.mkdir('spam')
|
80 |
+
cmd = install_lib(dist)
|
81 |
+
|
82 |
+
# setting up a dist environment
|
83 |
+
cmd.compile = cmd.optimize = 1
|
84 |
+
cmd.install_dir = self.mkdtemp()
|
85 |
+
f = os.path.join(project_dir, 'spam', '__init__.py')
|
86 |
+
self.write_file(f, '# python package')
|
87 |
+
cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
|
88 |
+
cmd.distribution.packages = ['spam']
|
89 |
+
cmd.distribution.script_name = 'setup.py'
|
90 |
+
|
91 |
+
# get_inputs should return 2 elements: spam/__init__.py and
|
92 |
+
# foo.import-tag-abiflags.so / foo.pyd
|
93 |
+
inputs = cmd.get_inputs()
|
94 |
+
assert len(inputs) == 2, inputs
|
95 |
+
|
96 |
+
def test_dont_write_bytecode(self, caplog):
|
97 |
+
# makes sure byte_compile is not used
|
98 |
+
dist = self.create_dist()[1]
|
99 |
+
cmd = install_lib(dist)
|
100 |
+
cmd.compile = True
|
101 |
+
cmd.optimize = 1
|
102 |
+
|
103 |
+
old_dont_write_bytecode = sys.dont_write_bytecode
|
104 |
+
sys.dont_write_bytecode = True
|
105 |
+
try:
|
106 |
+
cmd.byte_compile([])
|
107 |
+
finally:
|
108 |
+
sys.dont_write_bytecode = old_dont_write_bytecode
|
109 |
+
|
110 |
+
assert 'byte-compiling is disabled' in caplog.messages[0]
|
.venv/Lib/site-packages/setuptools/compat/__init__.py
ADDED
File without changes
|
.venv/Lib/site-packages/setuptools/compat/py312.py
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import sys
|
4 |
+
|
5 |
+
if sys.version_info >= (3, 12, 4):
|
6 |
+
# Python 3.13 should support `.pth` files encoded in UTF-8
|
7 |
+
# See discussion in https://github.com/python/cpython/issues/77102
|
8 |
+
PTH_ENCODING: str | None = "utf-8"
|
9 |
+
else:
|
10 |
+
from .py39 import LOCALE_ENCODING
|
11 |
+
|
12 |
+
# PTH_ENCODING = "locale"
|
13 |
+
PTH_ENCODING = LOCALE_ENCODING
|
.venv/Lib/site-packages/setuptools/compat/py39.py
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import sys
|
2 |
+
|
3 |
+
# Explicitly use the ``"locale"`` encoding in versions that support it,
|
4 |
+
# otherwise just rely on the implicit handling of ``encoding=None``.
|
5 |
+
# Since all platforms that support ``EncodingWarning`` also support
|
6 |
+
# ``encoding="locale"``, this can be used to suppress the warning.
|
7 |
+
# However, please try to use UTF-8 when possible
|
8 |
+
# (.pth files are the notorious exception: python/cpython#77102, pypa/setuptools#3937).
|
9 |
+
LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
|
.venv/Lib/site-packages/setuptools/config/distutils.schema.json
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"$schema": "http://json-schema.org/draft-07/schema#",
|
3 |
+
|
4 |
+
"$id": "https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html",
|
5 |
+
"title": "``tool.distutils`` table",
|
6 |
+
"$$description": [
|
7 |
+
"**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``",
|
8 |
+
"subtables to configure arguments for ``distutils`` commands.",
|
9 |
+
"Originally, ``distutils`` allowed developers to configure arguments for",
|
10 |
+
"``setup.py`` commands via `distutils configuration files",
|
11 |
+
"<https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html>`_.",
|
12 |
+
"See also `the old Python docs <https://docs.python.org/3.11/install/>_`."
|
13 |
+
],
|
14 |
+
|
15 |
+
"type": "object",
|
16 |
+
"properties": {
|
17 |
+
"global": {
|
18 |
+
"type": "object",
|
19 |
+
"description": "Global options applied to all ``distutils`` commands"
|
20 |
+
}
|
21 |
+
},
|
22 |
+
"patternProperties": {
|
23 |
+
".+": {"type": "object"}
|
24 |
+
},
|
25 |
+
"$comment": "TODO: Is there a practical way of making this schema more specific?"
|
26 |
+
}
|
.venv/Lib/site-packages/setuptools/config/expand.py
ADDED
@@ -0,0 +1,449 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Utility functions to expand configuration directives or special values
|
2 |
+
(such glob patterns).
|
3 |
+
|
4 |
+
We can split the process of interpreting configuration files into 2 steps:
|
5 |
+
|
6 |
+
1. The parsing the file contents from strings to value objects
|
7 |
+
that can be understand by Python (for example a string with a comma
|
8 |
+
separated list of keywords into an actual Python list of strings).
|
9 |
+
|
10 |
+
2. The expansion (or post-processing) of these values according to the
|
11 |
+
semantics ``setuptools`` assign to them (for example a configuration field
|
12 |
+
with the ``file:`` directive should be expanded from a list of file paths to
|
13 |
+
a single string with the contents of those files concatenated)
|
14 |
+
|
15 |
+
This module focus on the second step, and therefore allow sharing the expansion
|
16 |
+
functions among several configuration file formats.
|
17 |
+
|
18 |
+
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
19 |
+
"""
|
20 |
+
|
21 |
+
from __future__ import annotations
|
22 |
+
|
23 |
+
import ast
|
24 |
+
import importlib
|
25 |
+
import os
|
26 |
+
import pathlib
|
27 |
+
import sys
|
28 |
+
from collections.abc import Iterable, Iterator, Mapping
|
29 |
+
from configparser import ConfigParser
|
30 |
+
from glob import iglob
|
31 |
+
from importlib.machinery import ModuleSpec, all_suffixes
|
32 |
+
from itertools import chain
|
33 |
+
from pathlib import Path
|
34 |
+
from types import ModuleType, TracebackType
|
35 |
+
from typing import TYPE_CHECKING, Any, Callable, TypeVar
|
36 |
+
|
37 |
+
from .._path import StrPath, same_path as _same_path
|
38 |
+
from ..discovery import find_package_path
|
39 |
+
from ..warnings import SetuptoolsWarning
|
40 |
+
|
41 |
+
from distutils.errors import DistutilsOptionError
|
42 |
+
|
43 |
+
if TYPE_CHECKING:
|
44 |
+
from typing_extensions import Self
|
45 |
+
|
46 |
+
from setuptools.dist import Distribution
|
47 |
+
|
48 |
+
_K = TypeVar("_K")
|
49 |
+
_V_co = TypeVar("_V_co", covariant=True)
|
50 |
+
|
51 |
+
|
52 |
+
class StaticModule:
|
53 |
+
"""Proxy to a module object that avoids executing arbitrary code."""
|
54 |
+
|
55 |
+
def __init__(self, name: str, spec: ModuleSpec) -> None:
|
56 |
+
module = ast.parse(pathlib.Path(spec.origin).read_bytes()) # type: ignore[arg-type] # Let it raise an error on None
|
57 |
+
vars(self).update(locals())
|
58 |
+
del self.self
|
59 |
+
|
60 |
+
def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
|
61 |
+
for statement in self.module.body:
|
62 |
+
if isinstance(statement, ast.Assign):
|
63 |
+
yield from ((target, statement.value) for target in statement.targets)
|
64 |
+
elif isinstance(statement, ast.AnnAssign) and statement.value:
|
65 |
+
yield (statement.target, statement.value)
|
66 |
+
|
67 |
+
def __getattr__(self, attr: str):
|
68 |
+
"""Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
|
69 |
+
try:
|
70 |
+
return next(
|
71 |
+
ast.literal_eval(value)
|
72 |
+
for target, value in self._find_assignments()
|
73 |
+
if isinstance(target, ast.Name) and target.id == attr
|
74 |
+
)
|
75 |
+
except Exception as e:
|
76 |
+
raise AttributeError(f"{self.name} has no attribute {attr}") from e
|
77 |
+
|
78 |
+
|
79 |
+
def glob_relative(
|
80 |
+
patterns: Iterable[str], root_dir: StrPath | None = None
|
81 |
+
) -> list[str]:
|
82 |
+
"""Expand the list of glob patterns, but preserving relative paths.
|
83 |
+
|
84 |
+
:param list[str] patterns: List of glob patterns
|
85 |
+
:param str root_dir: Path to which globs should be relative
|
86 |
+
(current directory by default)
|
87 |
+
:rtype: list
|
88 |
+
"""
|
89 |
+
glob_characters = {'*', '?', '[', ']', '{', '}'}
|
90 |
+
expanded_values = []
|
91 |
+
root_dir = root_dir or os.getcwd()
|
92 |
+
for value in patterns:
|
93 |
+
# Has globby characters?
|
94 |
+
if any(char in value for char in glob_characters):
|
95 |
+
# then expand the glob pattern while keeping paths *relative*:
|
96 |
+
glob_path = os.path.abspath(os.path.join(root_dir, value))
|
97 |
+
expanded_values.extend(
|
98 |
+
sorted(
|
99 |
+
os.path.relpath(path, root_dir).replace(os.sep, "/")
|
100 |
+
for path in iglob(glob_path, recursive=True)
|
101 |
+
)
|
102 |
+
)
|
103 |
+
|
104 |
+
else:
|
105 |
+
# take the value as-is
|
106 |
+
path = os.path.relpath(value, root_dir).replace(os.sep, "/")
|
107 |
+
expanded_values.append(path)
|
108 |
+
|
109 |
+
return expanded_values
|
110 |
+
|
111 |
+
|
112 |
+
def read_files(
|
113 |
+
filepaths: StrPath | Iterable[StrPath], root_dir: StrPath | None = None
|
114 |
+
) -> str:
|
115 |
+
"""Return the content of the files concatenated using ``\n`` as str
|
116 |
+
|
117 |
+
This function is sandboxed and won't reach anything outside ``root_dir``
|
118 |
+
|
119 |
+
(By default ``root_dir`` is the current directory).
|
120 |
+
"""
|
121 |
+
from more_itertools import always_iterable
|
122 |
+
|
123 |
+
root_dir = os.path.abspath(root_dir or os.getcwd())
|
124 |
+
_filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
|
125 |
+
return '\n'.join(
|
126 |
+
_read_file(path)
|
127 |
+
for path in _filter_existing_files(_filepaths)
|
128 |
+
if _assert_local(path, root_dir)
|
129 |
+
)
|
130 |
+
|
131 |
+
|
132 |
+
def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]:
|
133 |
+
for path in filepaths:
|
134 |
+
if os.path.isfile(path):
|
135 |
+
yield path
|
136 |
+
else:
|
137 |
+
SetuptoolsWarning.emit(f"File {path!r} cannot be found")
|
138 |
+
|
139 |
+
|
140 |
+
def _read_file(filepath: bytes | StrPath) -> str:
|
141 |
+
with open(filepath, encoding='utf-8') as f:
|
142 |
+
return f.read()
|
143 |
+
|
144 |
+
|
145 |
+
def _assert_local(filepath: StrPath, root_dir: str):
|
146 |
+
if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents:
|
147 |
+
msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
|
148 |
+
raise DistutilsOptionError(msg)
|
149 |
+
|
150 |
+
return True
|
151 |
+
|
152 |
+
|
153 |
+
def read_attr(
|
154 |
+
attr_desc: str,
|
155 |
+
package_dir: Mapping[str, str] | None = None,
|
156 |
+
root_dir: StrPath | None = None,
|
157 |
+
) -> Any:
|
158 |
+
"""Reads the value of an attribute from a module.
|
159 |
+
|
160 |
+
This function will try to read the attributed statically first
|
161 |
+
(via :func:`ast.literal_eval`), and only evaluate the module if it fails.
|
162 |
+
|
163 |
+
Examples:
|
164 |
+
read_attr("package.attr")
|
165 |
+
read_attr("package.module.attr")
|
166 |
+
|
167 |
+
:param str attr_desc: Dot-separated string describing how to reach the
|
168 |
+
attribute (see examples above)
|
169 |
+
:param dict[str, str] package_dir: Mapping of package names to their
|
170 |
+
location in disk (represented by paths relative to ``root_dir``).
|
171 |
+
:param str root_dir: Path to directory containing all the packages in
|
172 |
+
``package_dir`` (current directory by default).
|
173 |
+
:rtype: str
|
174 |
+
"""
|
175 |
+
root_dir = root_dir or os.getcwd()
|
176 |
+
attrs_path = attr_desc.strip().split('.')
|
177 |
+
attr_name = attrs_path.pop()
|
178 |
+
module_name = '.'.join(attrs_path)
|
179 |
+
module_name = module_name or '__init__'
|
180 |
+
path = _find_module(module_name, package_dir, root_dir)
|
181 |
+
spec = _find_spec(module_name, path)
|
182 |
+
|
183 |
+
try:
|
184 |
+
return getattr(StaticModule(module_name, spec), attr_name)
|
185 |
+
except Exception:
|
186 |
+
# fallback to evaluate module
|
187 |
+
module = _load_spec(spec, module_name)
|
188 |
+
return getattr(module, attr_name)
|
189 |
+
|
190 |
+
|
191 |
+
def _find_spec(module_name: str, module_path: StrPath | None) -> ModuleSpec:
|
192 |
+
spec = importlib.util.spec_from_file_location(module_name, module_path)
|
193 |
+
spec = spec or importlib.util.find_spec(module_name)
|
194 |
+
|
195 |
+
if spec is None:
|
196 |
+
raise ModuleNotFoundError(module_name)
|
197 |
+
|
198 |
+
return spec
|
199 |
+
|
200 |
+
|
201 |
+
def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
|
202 |
+
name = getattr(spec, "__name__", module_name)
|
203 |
+
if name in sys.modules:
|
204 |
+
return sys.modules[name]
|
205 |
+
module = importlib.util.module_from_spec(spec)
|
206 |
+
sys.modules[name] = module # cache (it also ensures `==` works on loaded items)
|
207 |
+
assert spec.loader is not None
|
208 |
+
spec.loader.exec_module(module)
|
209 |
+
return module
|
210 |
+
|
211 |
+
|
212 |
+
def _find_module(
|
213 |
+
module_name: str, package_dir: Mapping[str, str] | None, root_dir: StrPath
|
214 |
+
) -> str | None:
|
215 |
+
"""Find the path to the module named ``module_name``,
|
216 |
+
considering the ``package_dir`` in the build configuration and ``root_dir``.
|
217 |
+
|
218 |
+
>>> tmp = getfixture('tmpdir')
|
219 |
+
>>> _ = tmp.ensure("a/b/c.py")
|
220 |
+
>>> _ = tmp.ensure("a/b/d/__init__.py")
|
221 |
+
>>> r = lambda x: x.replace(str(tmp), "tmp").replace(os.sep, "/")
|
222 |
+
>>> r(_find_module("a.b.c", None, tmp))
|
223 |
+
'tmp/a/b/c.py'
|
224 |
+
>>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))
|
225 |
+
'tmp/a/b/d/__init__.py'
|
226 |
+
"""
|
227 |
+
path_start = find_package_path(module_name, package_dir or {}, root_dir)
|
228 |
+
candidates = chain.from_iterable(
|
229 |
+
(f"{path_start}{ext}", os.path.join(path_start, f"__init__{ext}"))
|
230 |
+
for ext in all_suffixes()
|
231 |
+
)
|
232 |
+
return next((x for x in candidates if os.path.isfile(x)), None)
|
233 |
+
|
234 |
+
|
235 |
+
def resolve_class(
|
236 |
+
qualified_class_name: str,
|
237 |
+
package_dir: Mapping[str, str] | None = None,
|
238 |
+
root_dir: StrPath | None = None,
|
239 |
+
) -> Callable:
|
240 |
+
"""Given a qualified class name, return the associated class object"""
|
241 |
+
root_dir = root_dir or os.getcwd()
|
242 |
+
idx = qualified_class_name.rfind('.')
|
243 |
+
class_name = qualified_class_name[idx + 1 :]
|
244 |
+
pkg_name = qualified_class_name[:idx]
|
245 |
+
|
246 |
+
path = _find_module(pkg_name, package_dir, root_dir)
|
247 |
+
module = _load_spec(_find_spec(pkg_name, path), pkg_name)
|
248 |
+
return getattr(module, class_name)
|
249 |
+
|
250 |
+
|
251 |
+
def cmdclass(
|
252 |
+
values: dict[str, str],
|
253 |
+
package_dir: Mapping[str, str] | None = None,
|
254 |
+
root_dir: StrPath | None = None,
|
255 |
+
) -> dict[str, Callable]:
|
256 |
+
"""Given a dictionary mapping command names to strings for qualified class
|
257 |
+
names, apply :func:`resolve_class` to the dict values.
|
258 |
+
"""
|
259 |
+
return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()}
|
260 |
+
|
261 |
+
|
262 |
+
def find_packages(
|
263 |
+
*,
|
264 |
+
namespaces=True,
|
265 |
+
fill_package_dir: dict[str, str] | None = None,
|
266 |
+
root_dir: StrPath | None = None,
|
267 |
+
**kwargs,
|
268 |
+
) -> list[str]:
|
269 |
+
"""Works similarly to :func:`setuptools.find_packages`, but with all
|
270 |
+
arguments given as keyword arguments. Moreover, ``where`` can be given
|
271 |
+
as a list (the results will be simply concatenated).
|
272 |
+
|
273 |
+
When the additional keyword argument ``namespaces`` is ``True``, it will
|
274 |
+
behave like :func:`setuptools.find_namespace_packages`` (i.e. include
|
275 |
+
implicit namespaces as per :pep:`420`).
|
276 |
+
|
277 |
+
The ``where`` argument will be considered relative to ``root_dir`` (or the current
|
278 |
+
working directory when ``root_dir`` is not given).
|
279 |
+
|
280 |
+
If the ``fill_package_dir`` argument is passed, this function will consider it as a
|
281 |
+
similar data structure to the ``package_dir`` configuration parameter add fill-in
|
282 |
+
any missing package location.
|
283 |
+
|
284 |
+
:rtype: list
|
285 |
+
"""
|
286 |
+
from more_itertools import always_iterable, unique_everseen
|
287 |
+
|
288 |
+
from setuptools.discovery import construct_package_dir
|
289 |
+
|
290 |
+
# check "not namespaces" first due to python/mypy#6232
|
291 |
+
if not namespaces:
|
292 |
+
from setuptools.discovery import PackageFinder
|
293 |
+
else:
|
294 |
+
from setuptools.discovery import PEP420PackageFinder as PackageFinder
|
295 |
+
|
296 |
+
root_dir = root_dir or os.curdir
|
297 |
+
where = kwargs.pop('where', ['.'])
|
298 |
+
packages: list[str] = []
|
299 |
+
fill_package_dir = {} if fill_package_dir is None else fill_package_dir
|
300 |
+
search = list(unique_everseen(always_iterable(where)))
|
301 |
+
|
302 |
+
if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)):
|
303 |
+
fill_package_dir.setdefault("", search[0])
|
304 |
+
|
305 |
+
for path in search:
|
306 |
+
package_path = _nest_path(root_dir, path)
|
307 |
+
pkgs = PackageFinder.find(package_path, **kwargs)
|
308 |
+
packages.extend(pkgs)
|
309 |
+
if pkgs and not (
|
310 |
+
fill_package_dir.get("") == path or os.path.samefile(package_path, root_dir)
|
311 |
+
):
|
312 |
+
fill_package_dir.update(construct_package_dir(pkgs, path))
|
313 |
+
|
314 |
+
return packages
|
315 |
+
|
316 |
+
|
+def _nest_path(parent: StrPath, path: StrPath) -> str:
+    path = parent if path in {".", ""} else os.path.join(parent, path)
+    return os.path.normpath(path)
+
+
+def version(value: Callable | Iterable[str | int] | str) -> str:
+    """When getting the version directly from an attribute,
+    it should be normalised to string.
+    """
+    _value = value() if callable(value) else value
+
+    if isinstance(_value, str):
+        return _value
+    if hasattr(_value, '__iter__'):
+        return '.'.join(map(str, _value))
+    return '%s' % _value
+
+
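For illustration, the normalisation above covers the three accepted shapes (a string, an iterable of segments, or a zero-argument callable):

from setuptools.config.expand import version

version("1.2.3")              # -> "1.2.3"
version((1, 2, 3))            # -> "1.2.3"
version(lambda: (0, "dev1"))  # -> "0.dev1"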
+def canonic_package_data(package_data: dict) -> dict:
+    if "*" in package_data:
+        package_data[""] = package_data.pop("*")
+    return package_data
+
+
+def canonic_data_files(
+    data_files: list | dict, root_dir: StrPath | None = None
+) -> list[tuple[str, list[str]]]:
+    """For compatibility with ``setup.py``, ``data_files`` should be a list
+    of pairs instead of a dict.
+
+    This function also expands glob patterns.
+    """
+    if isinstance(data_files, list):
+        return data_files
+
+    return [
+        (dest, glob_relative(patterns, root_dir))
+        for dest, patterns in data_files.items()
+    ]
+
+
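A short sketch of the dict-to-list conversion; the destination and glob pattern below are placeholders, and the expansion itself is delegated to ``glob_relative``:

from setuptools.config.expand import canonic_data_files

canonic_data_files({"share/doc": ["docs/*.rst"]}, root_dir=".")
# -> [("share/doc", [...paths matching docs/*.rst relative to root_dir...])]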
+def entry_points(
+    text: str, text_source: str = "entry-points"
+) -> dict[str, dict[str, str]]:
+    """Given the contents of an entry-points file,
+    process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
+    The first level keys are entry-point groups, the second level keys are
+    entry-point names, and the second level values are references to objects
+    (that correspond to the entry-point value).
+    """
+    # Using undocumented behaviour, see python/typeshed#12700
+    parser = ConfigParser(default_section=None, delimiters=("=",))  # type: ignore[call-overload]
+    parser.optionxform = str  # case sensitive
+    parser.read_string(text, text_source)
+    groups = {k: dict(v.items()) for k, v in parser.items()}
+    groups.pop(parser.default_section, None)
+    return groups
+
+
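A sketch of the resulting two-level structure, assuming a hypothetical ``my_pkg.cli:main`` entry point:

from setuptools.config.expand import entry_points

text = "[console_scripts]\nmy-cli = my_pkg.cli:main\n"
groups = entry_points(text)
# groups == {"console_scripts": {"my-cli": "my_pkg.cli:main"}}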
+class EnsurePackagesDiscovered:
+    """Some expand functions require all the packages to already be discovered before
+    they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`.
+
+    Therefore in some cases we will need to run autodiscovery during the evaluation of
+    the configuration. However, it is better to postpone calling package discovery as
+    much as possible, because some parameters can influence it (e.g. ``package_dir``),
+    and those might not have been processed yet.
+    """
+
+    def __init__(self, distribution: Distribution) -> None:
+        self._dist = distribution
+        self._called = False
+
+    def __call__(self):
+        """Trigger the automatic package discovery, if it is still necessary."""
+        if not self._called:
+            self._called = True
+            self._dist.set_defaults(name=False)  # Skip name, we can still be parsing
+
+    def __enter__(self) -> Self:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ):
+        if self._called:
+            self._dist.set_defaults.analyse_name()  # Now we can set a default name
+
+    def _get_package_dir(self) -> Mapping[str, str]:
+        self()
+        pkg_dir = self._dist.package_dir
+        return {} if pkg_dir is None else pkg_dir
+
+    @property
+    def package_dir(self) -> Mapping[str, str]:
+        """Proxy to ``package_dir`` that may trigger auto-discovery when used."""
+        return LazyMappingProxy(self._get_package_dir)
+
+
+class LazyMappingProxy(Mapping[_K, _V_co]):
+    """Mapping proxy that delays resolving the target object, until really needed.
+
+    >>> def obtain_mapping():
+    ...     print("Running expensive function!")
+    ...     return {"key": "value", "other key": "other value"}
+    >>> mapping = LazyMappingProxy(obtain_mapping)
+    >>> mapping["key"]
+    Running expensive function!
+    'value'
+    >>> mapping["other key"]
+    'other value'
+    """
+
+    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V_co]]) -> None:
+        self._obtain = obtain_mapping_value
+        self._value: Mapping[_K, _V_co] | None = None
+
+    def _target(self) -> Mapping[_K, _V_co]:
+        if self._value is None:
+            self._value = self._obtain()
+        return self._value
+
+    def __getitem__(self, key: _K) -> _V_co:
+        return self._target()[key]
+
+    def __len__(self) -> int:
+        return len(self._target())
+
+    def __iter__(self) -> Iterator[_K]:
+        return iter(self._target())
.venv/Lib/site-packages/setuptools/tests/config/downloads/__init__.py
ADDED
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+import re
+import time
+from pathlib import Path
+from urllib.error import HTTPError
+from urllib.request import urlopen
+
+__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]
+
+
+NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
+DOWNLOAD_DIR = Path(__file__).parent
+
+
+# ----------------------------------------------------------------------
+# Please update ./preload.py accordingly when modifying this file
+# ----------------------------------------------------------------------
+
+
+def output_file(url: str, download_dir: Path = DOWNLOAD_DIR) -> Path:
+    file_name = url.strip()
+    for part in NAME_REMOVE:
+        file_name = file_name.replace(part, '').strip().strip('/:').strip()
+    return Path(download_dir, re.sub(r"[^\-_\.\w\d]+", "_", file_name))
+
+
+def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5) -> Path:
+    path = output_file(url, download_dir)
+    if path.exists():
+        print(f"Skipping {url} (already exists: {path})")
+    else:
+        download_dir.mkdir(exist_ok=True, parents=True)
+        print(f"Downloading {url} to {path}")
+        try:
+            download(url, path)
+        except HTTPError:
+            time.sleep(wait)  # wait a few seconds and try again.
+            download(url, path)
+    return path
+
+
+def urls_from_file(list_file: Path) -> list[str]:
+    """``list_file`` should be a text file where each line corresponds to a URL to
+    download.
+    """
+    print(f"file: {list_file}")
+    content = list_file.read_text(encoding="utf-8")
+    return [url for url in content.splitlines() if not url.startswith("#")]
+
+
+def download(url: str, dest: Path):
+    with urlopen(url) as f:
+        data = f.read()
+
+    with open(dest, "wb") as f:
+        f.write(data)
+
+    assert Path(dest).exists()