comparison pipcl.py @ 1:1d09e1dec1d9 upstream

ADD: PyMuPDF v1.26.4: the original sdist. It does not yet contain MuPDF, which will normally be downloaded when building PyMuPDF.
author Franz Glasner <fzglas.hg@dom66.de>
date Mon, 15 Sep 2025 11:37:51 +0200
parents
children 59f1bd90b2a0 a6bc019ac0b2
comparison
-1:000000000000 1:1d09e1dec1d9
1 '''
2 Python packaging operations, including PEP-517 support, for use by a `setup.py`
3 script.
4
5 The intention is to take care of as many packaging details as possible so that
6 setup.py contains only project-specific information, while also giving as much
7 flexibility as possible.
8
9 For example we provide a function `build_extension()` that can be used to build
10 a SWIG extension, but we also give access to the located compiler/linker so
11 that a `setup.py` script can take over the details itself.
12
13 Run doctests with: `python -m doctest pipcl.py`
14
15 For Graal we require that PIPCL_GRAAL_PYTHON is set to a non-graal Python (we
16 build with the non-graal Python, but using Graal Python's include paths and
17 library directory).
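For example (a sketch; the `graalpy` command and the path of the non-graal
Python are hypothetical), a Graal build could be driven with:

    import os
    import subprocess
    subprocess.run(
            'graalpy setup.py bdist_wheel',
            shell=True,
            check=True,
            env=os.environ | dict(PIPCL_GRAAL_PYTHON='/usr/bin/python3'),
            )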
18 '''
19
20 import base64
21 import codecs
22 import glob
23 import hashlib
24 import inspect
25 import io
26 import os
27 import platform
28 import re
29 import shlex
30 import shutil
31 import site
32 import subprocess
33 import sys
34 import sysconfig
35 import tarfile
36 import textwrap
37 import time
38 import zipfile
39
40 import wdev
41
42
43 class Package:
44 '''
45 Our constructor takes a definition of a Python package similar to that
46 passed to `distutils.core.setup()` or `setuptools.setup()` (name, version,
47 summary etc) plus callbacks for building, getting a list of sdist
48 filenames, and cleaning.
49
50 We provide methods that can be used to implement a Python package's
51 `setup.py` supporting PEP-517.
52
53 We also support basic command line handling for use
54 with a legacy (pre-PEP-517) pip, as implemented
55 by legacy distutils/setuptools and described in:
56 https://pip.pypa.io/en/stable/reference/build-system/setup-py/
57
58 Here is a `doctest` example of using pipcl to create a SWIG extension
59 module. Requires `swig`.
60
61 Create an empty test directory:
62
63 >>> import os
64 >>> import shutil
65 >>> shutil.rmtree('pipcl_test', ignore_errors=1)
66 >>> os.mkdir('pipcl_test')
67
68 Create a `setup.py` which uses `pipcl` to define an extension module.
69
70 >>> import textwrap
71 >>> with open('pipcl_test/setup.py', 'w') as f:
72 ... _ = f.write(textwrap.dedent("""
73 ... import sys
74 ... import pipcl
75 ...
76 ... def build():
77 ... so_leaf = pipcl.build_extension(
78 ... name = 'foo',
79 ... path_i = 'foo.i',
80 ... outdir = 'build',
81 ... )
82 ... return [
83 ... ('build/foo.py', 'foo/__init__.py'),
84 ... ('cli.py', 'foo/__main__.py'),
85 ... (f'build/{so_leaf}', f'foo/'),
86 ... ('README', '$dist-info/'),
87 ... (b'Hello world', 'foo/hw.txt'),
88 ... ]
89 ...
90 ... def sdist():
91 ... return [
92 ... 'foo.i',
93 ... 'bar.i',
94 ... 'setup.py',
95 ... 'pipcl.py',
96 ... 'wdev.py',
97 ... 'README',
98 ... (b'Hello word2', 'hw2.txt'),
99 ... ]
100 ...
101 ... p = pipcl.Package(
102 ... name = 'foo',
103 ... version = '1.2.3',
104 ... fn_build = build,
105 ... fn_sdist = sdist,
106 ... entry_points = (
107 ... { 'console_scripts': [
108 ... 'foo_cli = foo.__main__:main',
109 ... ],
110 ... }),
111 ... )
112 ...
113 ... build_wheel = p.build_wheel
114 ... build_sdist = p.build_sdist
115 ...
116 ... # Handle old-style setup.py command-line usage:
117 ... if __name__ == '__main__':
118 ... p.handle_argv(sys.argv)
119 ... """))
120
121 Create the files required by the above `setup.py` - the SWIG `.i` input
122 file, the README file, and copies of `pipcl.py` and `wdev.py`.
123
124 >>> with open('pipcl_test/foo.i', 'w') as f:
125 ... _ = f.write(textwrap.dedent("""
126 ... %include bar.i
127 ... %{
128 ... #include <stdio.h>
129 ... #include <string.h>
130 ... int bar(const char* text)
131 ... {
132 ... printf("bar(): text: %s\\\\n", text);
133 ... int len = (int) strlen(text);
134 ... printf("bar(): len=%i\\\\n", len);
135 ... fflush(stdout);
136 ... return len;
137 ... }
138 ... %}
139 ... int bar(const char* text);
140 ... """))
141
142 >>> with open('pipcl_test/bar.i', 'w') as f:
143 ... _ = f.write( '\\n')
144
145 >>> with open('pipcl_test/README', 'w') as f:
146 ... _ = f.write(textwrap.dedent("""
147 ... This is Foo.
148 ... """))
149
150 >>> with open('pipcl_test/cli.py', 'w') as f:
151 ... _ = f.write(textwrap.dedent("""
152 ... def main():
153 ... print('pipcl_test:main().')
154 ... if __name__ == '__main__':
155 ... main()
156 ... """))
157
158 >>> root = os.path.dirname(__file__)
159 >>> _ = shutil.copy2(f'{root}/pipcl.py', 'pipcl_test/pipcl.py')
160 >>> _ = shutil.copy2(f'{root}/wdev.py', 'pipcl_test/wdev.py')
161
162 Use `setup.py`'s command-line interface to build and install the extension
163 module into root `pipcl_test/install`.
164
165 >>> _ = subprocess.run(
166 ... f'cd pipcl_test && {sys.executable} setup.py --root install install',
167 ... shell=1, check=1)
168
169 The actual install directory depends on `sysconfig.get_path('platlib')`:
170
171 >>> if windows():
172 ... install_dir = 'pipcl_test/install'
173 ... else:
174 ... install_dir = f'pipcl_test/install/{sysconfig.get_path("platlib").lstrip(os.sep)}'
175 >>> assert os.path.isfile( f'{install_dir}/foo/__init__.py')
176
177 Create a test script which asserts that Python function call `foo.bar(s)`
178 returns the length of `s`, and run it with `PYTHONPATH` set to the install
179 directory:
180
181 >>> with open('pipcl_test/test.py', 'w') as f:
182 ... _ = f.write(textwrap.dedent("""
183 ... import sys
184 ... import foo
185 ... text = 'hello'
186 ... print(f'test.py: calling foo.bar() with text={text!r}')
187 ... sys.stdout.flush()
188 ... l = foo.bar(text)
189 ... print(f'test.py: foo.bar() returned: {l}')
190 ... assert l == len(text)
191 ... """))
192 >>> r = subprocess.run(
193 ... f'{sys.executable} pipcl_test/test.py',
194 ... shell=1, check=1, text=1,
195 ... stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
196 ... env=os.environ | dict(PYTHONPATH=install_dir),
197 ... )
198 >>> print(r.stdout)
199 test.py: calling foo.bar() with text='hello'
200 bar(): text: hello
201 bar(): len=5
202 test.py: foo.bar() returned: 5
203 <BLANKLINE>
204
205 Check that building sdist and wheel succeeds. For now we don't attempt to
206 check that the sdist and wheel actually work.
207
208 >>> _ = subprocess.run(
209 ... f'cd pipcl_test && {sys.executable} setup.py sdist',
210 ... shell=1, check=1)
211
212 >>> _ = subprocess.run(
213 ... f'cd pipcl_test && {sys.executable} setup.py bdist_wheel',
214 ... shell=1, check=1)
215
216 Check that rebuild does nothing.
217
218 >>> t0 = os.path.getmtime('pipcl_test/build/foo.py')
219 >>> _ = subprocess.run(
220 ... f'cd pipcl_test && {sys.executable} setup.py bdist_wheel',
221 ... shell=1, check=1)
222 >>> t = os.path.getmtime('pipcl_test/build/foo.py')
223 >>> assert t == t0
224
225 Check that touching bar.i forces rebuild.
226
227 >>> os.utime('pipcl_test/bar.i')
228 >>> _ = subprocess.run(
229 ... f'cd pipcl_test && {sys.executable} setup.py bdist_wheel',
230 ... shell=1, check=1)
231 >>> t = os.path.getmtime('pipcl_test/build/foo.py')
232 >>> assert t > t0
233
234 Check that touching foo.i.cpp does not run swig, but does recompile/link.
235
236 >>> t0 = time.time()
237 >>> os.utime('pipcl_test/build/foo.i.cpp')
238 >>> _ = subprocess.run(
239 ... f'cd pipcl_test && {sys.executable} setup.py bdist_wheel',
240 ... shell=1, check=1)
241 >>> assert os.path.getmtime('pipcl_test/build/foo.py') <= t0
242 >>> so = glob.glob('pipcl_test/build/*.so')
243 >>> assert len(so) == 1
244 >>> so = so[0]
245 >>> assert os.path.getmtime(so) > t0
246
247 Check `entry_points` causes creation of command `foo_cli` when we install
248 from our wheel using pip. [As of 2024-02-24 using pipcl's CLI interface
249 directly with `setup.py install` does not support entry points.]
250
251 >>> print('Creating venv.', file=sys.stderr)
252 >>> _ = subprocess.run(
253 ... f'cd pipcl_test && {sys.executable} -m venv pylocal',
254 ... shell=1, check=1)
255
256 >>> print('Installing from wheel into venv using pip.', file=sys.stderr)
257 >>> _ = subprocess.run(
258 ... f'. pipcl_test/pylocal/bin/activate && pip install pipcl_test/dist/*.whl',
259 ... shell=1, check=1)
260
261 >>> print('Running foo_cli.', file=sys.stderr)
262 >>> _ = subprocess.run(
263 ... f'. pipcl_test/pylocal/bin/activate && foo_cli',
264 ... shell=1, check=1)
265
266 Wheels and sdists
267
268 Wheels:
269 We generate wheels according to:
270 https://packaging.python.org/specifications/binary-distribution-format/
271
272 * `{name}-{version}.dist-info/RECORD` uses sha256 hashes.
273 * We do not generate other `RECORD*` files such as
274 `RECORD.jws` or `RECORD.p7s`.
275 * `{name}-{version}.dist-info/WHEEL` has:
276
277 * `Wheel-Version: 1.0`
278 * `Root-Is-Purelib: false`
279 * No support for signed wheels.
280
281 Sdists:
282 We generate sdists according to:
283 https://packaging.python.org/specifications/source-distribution-format/
284 '''
285 def __init__(self,
286 name,
287 version,
288 *,
289 platform = None,
290 supported_platform = None,
291 summary = None,
292 description = None,
293 description_content_type = None,
294 keywords = None,
295 home_page = None,
296 download_url = None,
297 author = None,
298 author_email = None,
299 maintainer = None,
300 maintainer_email = None,
301 license = None,
302 classifier = None,
303 requires_dist = None,
304 requires_python = None,
305 requires_external = None,
306 project_url = None,
307 provides_extra = None,
308
309 entry_points = None,
310
311 root = None,
312 fn_build = None,
313 fn_clean = None,
314 fn_sdist = None,
315 tag_python = None,
316 tag_abi = None,
317 tag_platform = None,
318 py_limited_api = None,
319
320 wheel_compression = zipfile.ZIP_DEFLATED,
321 wheel_compresslevel = None,
322 ):
323 '''
324 The initial args before `root` define the package
325 metadata and closely follow the definitions in:
326 https://packaging.python.org/specifications/core-metadata/
327
328 Args:
329
330 name:
331 A string, the name of the Python package.
332 version:
333 A string, the version of the Python package. Also see PEP-440
334 `Version Identification and Dependency Specification`.
335 platform:
336 A string or list of strings.
337 supported_platform:
338 A string or list of strings.
339 summary:
340 A string, short description of the package.
341 description:
342 A string. If it contains newlines, it is the detailed description
343 of the package. Otherwise it is the path of a file containing the
344 detailed description of the package.
345 description_content_type:
346 A string describing markup of `description` arg. For example
347 `text/markdown; variant=GFM`.
348 keywords:
349 A string containing comma-separated keywords.
350 home_page:
351 URL of home page.
352 download_url:
353 Where this version can be downloaded from.
354 author:
355 Author.
356 author_email:
357 Author email.
358 maintainer:
359 Maintainer.
360 maintainer_email:
361 Maintainer email.
362 license:
363 A string containing the license text. Written into metadata
364 file `COPYING`. Is also written into metadata itself if not
365 multi-line.
366 classifier:
367 A string or list of strings. Also see:
368
369 * https://pypi.org/pypi?%3Aaction=list_classifiers
370 * https://pypi.org/classifiers/
371
372 requires_dist:
373 A string or list of strings. None items are ignored. Also see PEP-508.
374 requires_python:
375 A string or list of strings.
376 requires_external:
377 A string or list of strings.
378 project_url:
379 A string or list of strings, each of the form: `{name}, {url}`.
380 provides_extra:
381 A string or list of strings.
382
383 entry_points:
384 String or dict specifying *.dist-info/entry_points.txt, for
385 example:
386
387 ```
388 [console_scripts]
389 foo_cli = foo.__main__:main
390 ```
391
392 or:
393
394 { 'console_scripts': [
395 'foo_cli = foo.__main__:main',
396 ],
397 }
398
399 See: https://packaging.python.org/en/latest/specifications/entry-points/
400
401 root:
402 Root of package, defaults to current directory.
403
404 fn_build:
405 A function taking no args, or a single `config_settings` dict
406 arg (as described in PEP-517), that builds the package.
407
408 Should return a list of items; each item should be a tuple
409 `(from_, to_)`, or a single string `path` which is treated as
410 the tuple `(path, path)`.
411
412 `from_` can be a string or a `bytes`. If a string it should
413 be the path to a file; a relative path is treated as relative
414 to `root`. If a `bytes` it is the contents of the file to be
415 added.
416
417 `to_` identifies what the file should be called within a wheel
418 or when installing. If `to_` ends with `/`, the leaf of `from_`
419 is appended to it (and `from_` must not be a `bytes`).
420
421 Initial `$dist-info/` in `to_` is replaced by
422 `{name}-{version}.dist-info/`; this is useful for license files
423 etc.
424
425 Initial `$data/` in `to_` is replaced by
426 `{name}-{version}.data/`. We do not enforce particular
427 subdirectories, instead it is up to `fn_build()` to specify
428 specific subdirectories such as `purelib`, `headers`,
429 `scripts`, `data` etc.
430
431 If we are building a wheel (e.g. `python setup.py bdist_wheel`,
432 or PEP-517 pip calls `self.build_wheel()`), we add file `from_`
433 to the wheel archive with name `to_`.
434
435 If we are installing (e.g. `install` command in
436 the argv passed to `self.handle_argv()`), then
437 we copy `from_` to `{sitepackages}/{to_}`, where
438 `sitepackages` is the installation directory, the
439 default being `sysconfig.get_path('platlib')` e.g.
440 `myvenv/lib/python3.9/site-packages/`.
441
442 fn_clean:
443 A function taking a single arg `all_` that cleans generated
444 files. `all_` is true iff `--all` is in argv.
445
446 For safety and convenience, it can also return a list of
447 files/directory paths to be deleted. Relative paths are
448 interpreted as relative to `root`. All paths are asserted to be
449 within `root`.
450
451 fn_sdist:
452 A function taking no args, or a single `config_settings` dict
453 arg (as described in PEP-517), that returns a list of items to
454 be copied into the sdist. The list should be in the same format
455 as returned by `fn_build`.
456
457 It can be convenient to use `pipcl.git_items()`.
458
459 The specification for sdists requires that the list contains
460 `pyproject.toml`; we enforce this with a diagnostic rather than
461 raising an exception, to allow legacy command-line usage.
462
463 tag_python:
464 First element of wheel tag defined in PEP-425. If None we use
465 `cp{version}`.
466
467 For example if code works with any Python version, one can use
468 'py3'.
469
470 tag_abi:
471 Second element of wheel tag defined in PEP-425. If None we use
472 `none`.
473
474 tag_platform:
475 Third element of wheel tag defined in PEP-425. Default
476 is `os.environ.get('AUDITWHEEL_PLAT')` if set, otherwise
477 derived from `sysconfig.get_platform()` (was
478 `setuptools.distutils.util.get_platform()`, before that
479 `distutils.util.get_platform()` as specified in the PEP), e.g.
480 `openbsd_7_0_amd64`.
481
482 For pure Python packages use: `tag_platform='any'`
483
484 py_limited_api:
485 If true we build wheels that use the Python Limited API. We use
486 the version of `sys.executable` to define `Py_LIMITED_API` when
487 compiling extensions, and use ABI tag `abi3` in the wheel name
488 if argument `tag_abi` is None.
489
490 wheel_compression:
491 Used as `zipfile.ZipFile()`'s `compression` parameter when
492 creating wheels.
493
494 wheel_compresslevel:
495 Used as `zipfile.ZipFile()`'s `compresslevel` parameter when
496 creating wheels.
497
498 Occurrences of `None` in lists are ignored.
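For example, a minimal `fn_build()` might return something like this (a
sketch; the file names are hypothetical):

    def build():
        return [
                ('build/foo.py', 'foo/__init__.py'),    # Copy file, renaming it.
                ('build/_foo.so', 'foo/'),              # `to_` ends with '/' so the leaf of `from_` is appended.
                ('LICENSE', '$dist-info/'),             # Placed in {name}-{version}.dist-info/.
                ('scripts/foo.sh', '$data/scripts/'),   # Placed in {name}-{version}.data/scripts/.
                (b'Generated text', 'foo/generated.txt'),   # A bytes item is written as the file contents.
                ]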
499 '''
500 assert name
501 assert version
502
503 def assert_str( v):
504 if v is not None:
505 assert isinstance( v, str), f'Not a string: {v!r}'
506 def assert_str_or_multi( v):
507 if v is not None:
508 assert isinstance( v, (str, tuple, list)), f'Not a string, tuple or list: {v!r}'
509
510 assert_str( name)
511 assert_str( version)
512 assert_str_or_multi( platform)
513 assert_str_or_multi( supported_platform)
514 assert_str( summary)
515 assert_str( description)
516 assert_str( description_content_type)
517 assert_str( keywords)
518 assert_str( home_page)
519 assert_str( download_url)
520 assert_str( author)
521 assert_str( author_email)
522 assert_str( maintainer)
523 assert_str( maintainer_email)
524 assert_str( license)
525 assert_str_or_multi( classifier)
526 assert_str_or_multi( requires_dist)
527 assert_str( requires_python)
528 assert_str_or_multi( requires_external)
529 assert_str_or_multi( project_url)
530 assert_str_or_multi( provides_extra)
531
532 # https://packaging.python.org/en/latest/specifications/core-metadata/.
533 assert re.match('([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$', name, re.IGNORECASE), \
534 f'Bad name: {name!r}'
535
536 _assert_version_pep_440(version)
537
538 # https://packaging.python.org/en/latest/specifications/binary-distribution-format/
539 if tag_python:
540 assert '-' not in tag_python
541 if tag_abi:
542 assert '-' not in tag_abi
543 if tag_platform:
544 assert '-' not in tag_platform
545
546 self.name = name
547 self.version = version
548 self.platform = platform
549 self.supported_platform = supported_platform
550 self.summary = summary
551 self.description = description
552 self.description_content_type = description_content_type
553 self.keywords = keywords
554 self.home_page = home_page
555 self.download_url = download_url
556 self.author = author
557 self.author_email = author_email
558 self.maintainer = maintainer
559 self.maintainer_email = maintainer_email
560 self.license = license
561 self.classifier = classifier
562 self.requires_dist = requires_dist
563 self.requires_python = requires_python
564 self.requires_external = requires_external
565 self.project_url = project_url
566 self.provides_extra = provides_extra
567 self.entry_points = entry_points
568
569 self.root = os.path.abspath(root if root else os.getcwd())
570 self.fn_build = fn_build
571 self.fn_clean = fn_clean
572 self.fn_sdist = fn_sdist
573 self.tag_python_ = tag_python
574 self.tag_abi_ = tag_abi
575 self.tag_platform_ = tag_platform
576 self.py_limited_api = py_limited_api
577
578 self.wheel_compression = wheel_compression
579 self.wheel_compresslevel = wheel_compresslevel
580
581 # If true and we are building for graal, we set PIPCL_PYTHON_CONFIG to
582 # a command that will print includes/libs from graal_py's sysconfig.
583 #
584 self.graal_legacy_python_config = True
585
586
587 def build_wheel(self,
588 wheel_directory,
589 config_settings=None,
590 metadata_directory=None,
591 ):
592 '''
593 A PEP-517 `build_wheel()` function.
594
595 Also called by `handle_argv()` to handle the `bdist_wheel` command.
596
597 Returns leafname of generated wheel within `wheel_directory`.
598 '''
599 log2(
600 f' wheel_directory={wheel_directory!r}'
601 f' config_settings={config_settings!r}'
602 f' metadata_directory={metadata_directory!r}'
603 )
604
605 if sys.implementation.name == 'graalpy':
606 # We build for Graal by building a native Python wheel with Graal
607 # Python's include paths and library directory. We then rename the
608 # wheel to contain graal's tag etc.
609 #
610 log0(f'### Graal build: deferring to cpython.')
611 python_native = os.environ.get('PIPCL_GRAAL_PYTHON')
612 assert python_native, f'Graal build requires that PIPCL_GRAAL_PYTHON is set.'
613 env_extra = dict(
614 PIPCL_SYSCONFIG_PATH_include = sysconfig.get_path('include'),
615 PIPCL_SYSCONFIG_PATH_platinclude = sysconfig.get_path('platinclude'),
616 PIPCL_SYSCONFIG_CONFIG_VAR_LIBDIR = sysconfig.get_config_var('LIBDIR'),
617 )
618 # Tell native build to run pipcl.py itself to get python-config
619 # information about include paths etc.
620 if self.graal_legacy_python_config:
621 env_extra['PIPCL_PYTHON_CONFIG'] = f'{python_native} {os.path.abspath(__file__)} --graal-legacy-python-config'
622
623 # Create venv.
624 venv_name = os.environ.get('PIPCL_GRAAL_NATIVE_VENV')
625 if venv_name:
626 log1(f'Graal using pre-existing {venv_name=}')
627 else:
628 venv_name = 'venv-pipcl-graal-native'
629 run(f'{shlex.quote(python_native)} -m venv {venv_name}')
630 log1(f'Graal using {venv_name=}')
631
632 newfiles = NewFiles(f'{wheel_directory}/*.whl')
633 run(
634 f'. {venv_name}/bin/activate && python setup.py --dist-dir {shlex.quote(wheel_directory)} bdist_wheel',
635 env_extra = env_extra,
636 prefix = f'pipcl.py graal {python_native}: ',
637 )
638 wheel = newfiles.get_one()
639 wheel_leaf = os.path.basename(wheel)
640 python_major_minor = run(f'{shlex.quote(python_native)} -c "import platform; import sys; sys.stdout.write(str().join(platform.python_version_tuple()[:2]))"', capture=1)
641 cpabi = f'cp{python_major_minor}-abi3'
642 assert cpabi in wheel_leaf, f'Expected wheel to be for {cpabi=}, but {wheel=}.'
643 graalpy_ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
644 log1(f'{graalpy_ext_suffix=}')
645 m = re.match(r'\.graalpy(\d+[^\-]*)-(\d+)', graalpy_ext_suffix)
646 gpver = m[1]
647 cpver = m[2]
648 graalpy_wheel_tag = f'graalpy{cpver}-graalpy{gpver}_{cpver}_native'
649 name = wheel_leaf.replace(cpabi, graalpy_wheel_tag)
650 destination = f'{wheel_directory}/{name}'
651 log0(f'### Graal build: renaming {wheel=} to {destination=}')
652 # Copying results in two wheels which appears to confuse pip, showing:
653 # Found multiple .whl files; unspecified behaviour. Will call build_wheel.
654 os.rename(wheel, destination)
655 log1(f'Returning {name=}.')
656 return name
657
658 wheel_name = self.wheel_name()
659 path = f'{wheel_directory}/{wheel_name}'
660
661 # Do a build and get list of files to copy into the wheel.
662 #
663 items = list()
664 if self.fn_build:
665 items = self._call_fn_build(config_settings)
666
667 log2(f'Creating wheel: {path}')
668 os.makedirs(wheel_directory, exist_ok=True)
669 record = _Record()
670 with zipfile.ZipFile(path, 'w', self.wheel_compression, self.wheel_compresslevel) as z:
671
672 def add(from_, to_):
673 if isinstance(from_, str):
674 z.write(from_, to_)
675 record.add_file(from_, to_)
676 elif isinstance(from_, bytes):
677 z.writestr(to_, from_)
678 record.add_content(from_, to_)
679 else:
680 assert 0
681
682 def add_str(content, to_):
683 add(content.encode('utf8'), to_)
684
685 dist_info_dir = self._dist_info_dir()
686
687 # Add the files returned by fn_build().
688 #
689 for item in items:
690 from_, (to_abs, to_rel) = self._fromto(item)
691 add(from_, to_rel)
692
693 # Add <name>-<version>.dist-info/WHEEL.
694 #
695 add_str(
696 f'Wheel-Version: 1.0\n'
697 f'Generator: pipcl\n'
698 f'Root-Is-Purelib: false\n'
699 f'Tag: {self.wheel_tag_string()}\n'
700 ,
701 f'{dist_info_dir}/WHEEL',
702 )
703 # Add <name>-<version>.dist-info/METADATA.
704 #
705 add_str(self._metainfo(), f'{dist_info_dir}/METADATA')
706
707 # Add <name>-<version>.dist-info/COPYING.
708 if self.license:
709 add_str(self.license, f'{dist_info_dir}/COPYING')
710
711 # Add <name>-<version>.dist-info/entry_points.txt.
712 entry_points_text = self._entry_points_text()
713 if entry_points_text:
714 add_str(entry_points_text, f'{dist_info_dir}/entry_points.txt')
715
716 # Update <name>-<version>.dist-info/RECORD. This must be last.
717 #
718 z.writestr(f'{dist_info_dir}/RECORD', record.get(f'{dist_info_dir}/RECORD'))
719
720 st = os.stat(path)
721 log1( f'Have created wheel size={st.st_size:,}: {path}')
722 if g_verbose >= 2:
723 with zipfile.ZipFile(path, compression=self.wheel_compression) as z:
724 log2(f'Contents are:')
725 for zi in sorted(z.infolist(), key=lambda z: z.filename):
726 log2(f' {zi.file_size: 10,d} {zi.filename}')
727
728 return os.path.basename(path)
729
730
731 def build_sdist(self,
732 sdist_directory,
733 formats,
734 config_settings=None,
735 ):
736 '''
737 A PEP-517 `build_sdist()` function.
738
739 Also called by `handle_argv()` to handle the `sdist` command.
740
741 Returns leafname of generated archive within `sdist_directory`.
742 '''
743 log2(
744 f' sdist_directory={sdist_directory!r}'
745 f' formats={formats!r}'
746 f' config_settings={config_settings!r}'
747 )
748 if formats and formats != 'gztar':
749 raise Exception( f'Unsupported: formats={formats}')
750 items = list()
751 if self.fn_sdist:
752 if inspect.signature(self.fn_sdist).parameters:
753 items = self.fn_sdist(config_settings)
754 else:
755 items = self.fn_sdist()
756
757 prefix = f'{_normalise(self.name)}-{self.version}'
758 os.makedirs(sdist_directory, exist_ok=True)
759 tarpath = f'{sdist_directory}/{prefix}.tar.gz'
760 log2(f'Creating sdist: {tarpath}')
761
762 with tarfile.open(tarpath, 'w:gz') as tar:
763
764 names_in_tar = list()
765 def check_name(name):
766 if name in names_in_tar:
767 raise Exception(f'Name specified twice: {name}')
768 names_in_tar.append(name)
769
770 def add(from_, name):
771 check_name(name)
772 if isinstance(from_, str):
773 log2( f'Adding file: {os.path.relpath(from_)} => {name}')
774 tar.add( from_, f'{prefix}/{name}', recursive=False)
775 elif isinstance(from_, bytes):
776 log2( f'Adding: {name}')
777 ti = tarfile.TarInfo(f'{prefix}/{name}')
778 ti.size = len(from_)
779 ti.mtime = time.time()
780 tar.addfile(ti, io.BytesIO(from_))
781 else:
782 assert 0
783
784 def add_string(text, name):
785 textb = text.encode('utf8')
786 return add(textb, name)
787
788 found_pyproject_toml = False
789 for item in items:
790 from_, (to_abs, to_rel) = self._fromto(item)
791 if isinstance(from_, bytes):
792 add(from_, to_rel)
793 else:
794 if from_.startswith(f'{os.path.abspath(sdist_directory)}/'):
795 # Source files should not be inside <sdist_directory>.
796 assert 0, f'Path is inside sdist_directory={sdist_directory}: {from_!r}'
797 assert os.path.exists(from_), f'Path does not exist: {from_!r}'
798 assert os.path.isfile(from_), f'Path is not a file: {from_!r}'
799 if to_rel == 'pyproject.toml':
800 found_pyproject_toml = True
801 add(from_, to_rel)
802
803 if not found_pyproject_toml:
804 log0(f'Warning: no pyproject.toml specified.')
805
806 # Always add a PKG-INFO file.
807 add_string(self._metainfo(), 'PKG-INFO')
808
809 if self.license:
810 if 'COPYING' in names_in_tar:
811 log2(f'Not writing .license because file already in sdist: COPYING')
812 else:
813 add_string(self.license, 'COPYING')
814
815 log1( f'Have created sdist: {tarpath}')
816 return os.path.basename(tarpath)
817
818 def wheel_tag_string(self):
819 '''
820 Returns <tag_python>-<tag_abi>-<tag_platform>.
821 '''
822 return f'{self.tag_python()}-{self.tag_abi()}-{self.tag_platform()}'
823
824 def tag_python(self):
825 '''
826 Get Python version tag, e.g. 'cp38' for python-3.8.6.
827 '''
828 if self.tag_python_:
829 return self.tag_python_
830 else:
831 return 'cp' + ''.join(platform.python_version().split('.')[:2])
832
833 def tag_abi(self):
834 '''
835 ABI tag.
836 '''
837 if self.tag_abi_:
838 return self.tag_abi_
839 elif self.py_limited_api:
840 return 'abi3'
841 else:
842 return 'none'
843
844 def tag_platform(self):
845 '''
846 Find platform tag used in wheel filename.
847 '''
848 ret = self.tag_platform_
849 log0(f'From self.tag_platform_: {ret=}.')
850
851 if not ret:
852 # Prefer this to PEP-425. Appears to be undocumented,
853 # but set in manylinux docker images and appears
854 # to be used by cibuildwheel and auditwheel, e.g.
855 # https://github.com/rapidsai/shared-action-workflows/issues/80
856 ret = os.environ.get( 'AUDITWHEEL_PLAT')
857 log0(f'From AUDITWHEEL_PLAT: {ret=}.')
858
859 if not ret:
860 # Notes:
861 #
862 # PEP-425. On Linux gives `linux_x86_64` which is rejected by
863 # pypi.org.
864 #
865 # On local MacOS/arm64 mac-mini have seen sysconfig.get_platform()
866 # unhelpfully return `macosx-10.9-universal2` if `python3` is the
867 # system Python /usr/bin/python3; this happens if we source `.
868 # /etc/profile`.
869 #
870 ret = sysconfig.get_platform()
871 ret = ret.replace('-', '_').replace('.', '_').lower()
872 log0(f'From sysconfig.get_platform(): {ret=}.')
873
874 # We need to patch things on MacOS.
875 #
876 # E.g. `foo-1.2.3-cp311-none-macosx_13_x86_64.whl`
877 # causes `pip` to fail with: `not a supported wheel on this
878 # platform`. We seem to need to add `_0` to the OS version.
879 #
880 m = re.match( '^(macosx_[0-9]+)(_[^0-9].+)$', ret)
881 if m:
882 ret2 = f'{m.group(1)}_0{m.group(2)}'
883 log0(f'After macos patch, changing from {ret!r} to {ret2!r}.')
884 ret = ret2
885
886 log0( f'tag_platform(): returning {ret=}.')
887 return ret
888
889 def wheel_name(self):
890 return f'{_normalise(self.name)}-{self.version}-{self.tag_python()}-{self.tag_abi()}-{self.tag_platform()}.whl'
891
892 def wheel_name_match(self, wheel):
893 '''
894 Returns true if `wheel` matches our wheel. We basically require the
895 name to be the same, except that we accept platform tags that contain
896 extra items (see PEP-600), for example we return true with:
897
898 self: foo-cp38-none-manylinux2014_x86_64.whl
899 wheel: foo-cp38-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl
900 '''
901 log2(f'{wheel=}')
902 assert wheel.endswith('.whl')
903 wheel2 = wheel[:-len('.whl')]
904 name, version, tag_python, tag_abi, tag_platform = wheel2.split('-')
905
906 py_limited_api_compatible = False
907 if self.py_limited_api and tag_abi == 'abi3':
908 # Allow lower tag_python number.
909 m = re.match('cp([0-9]+)', tag_python)
910 tag_python_int = int(m.group(1))
911 m = re.match('cp([0-9]+)', self.tag_python())
912 tag_python_int_self = int(m.group(1))
913 if tag_python_int <= tag_python_int_self:
914 # This wheel uses Python stable ABI same or older than ours, so
915 # we can use it.
916 log2(f'py_limited_api; {tag_python=} compatible with {self.tag_python()=}.')
917 py_limited_api_compatible = True
918
919 log2(f'{_normalise(self.name) == name=}')
920 log2(f'{self.version == version=}')
921 log2(f'{self.tag_python() == tag_python=} {self.tag_python()=} {tag_python=}')
922 log2(f'{py_limited_api_compatible=}')
923 log2(f'{self.tag_abi() == tag_abi=}')
924 log2(f'{self.tag_platform() in tag_platform.split(".")=}')
925 log2(f'{self.tag_platform()=}')
926 log2(f'{tag_platform.split(".")=}')
927 ret = (1
928 and _normalise(self.name) == name
929 and self.version == version
930 and (self.tag_python() == tag_python or py_limited_api_compatible)
931 and self.tag_abi() == tag_abi
932 and self.tag_platform() in tag_platform.split('.')
933 )
934 log2(f'Returning {ret=}.')
935 return ret
936
937 def _entry_points_text(self):
938 if self.entry_points:
939 if isinstance(self.entry_points, str):
940 return self.entry_points
941 ret = ''
942 for key, values in self.entry_points.items():
943 ret += f'[{key}]\n'
944 for value in values:
945 ret += f'{value}\n'
946 return ret
947
948 def _call_fn_build( self, config_settings=None):
949 assert self.fn_build
950 log2(f'calling self.fn_build={self.fn_build}')
951 if inspect.signature(self.fn_build).parameters:
952 ret = self.fn_build(config_settings)
953 else:
954 ret = self.fn_build()
955 assert isinstance( ret, (list, tuple)), \
956 f'Expected list/tuple from {self.fn_build} but got: {ret!r}'
957 return ret
958
959
960 def _argv_clean(self, all_):
961 '''
962 Called by `handle_argv()`.
963 '''
964 if not self.fn_clean:
965 return
966 paths = self.fn_clean(all_)
967 if paths:
968 if isinstance(paths, str):
969 paths = paths,
970 for path in paths:
971 if not os.path.isabs(path):
972 path = os.path.join(self.root, path)
973 path = os.path.abspath(path)
974 assert path.startswith(self.root+os.sep), \
975 f'path={path!r} does not start with root={self.root+os.sep!r}'
976 log2(f'Removing: {path}')
977 shutil.rmtree(path, ignore_errors=True)
978
979
980 def install(self, record_path=None, root=None):
981 '''
982 Called by `handle_argv()` to handle the `install` command.
983 '''
984 log2( f'{record_path=} {root=}')
985
986 # Do a build and get list of files to install.
987 #
988 items = list()
989 if self.fn_build:
990 items = self._call_fn_build( dict())
991
992 root2 = install_dir(root)
993 log2( f'{root2=}')
994
995 log1( f'Installing into: {root2!r}')
996 dist_info_dir = self._dist_info_dir()
997
998 if not record_path:
999 record_path = f'{root2}/{dist_info_dir}/RECORD'
1000 record = _Record()
1001
1002 def add_file(from_, to_abs, to_rel):
1003 os.makedirs( os.path.dirname( to_abs), exist_ok=True)
1004 if isinstance(from_, bytes):
1005 log2(f'Copying content into {to_abs}.')
1006 with open(to_abs, 'wb') as f:
1007 f.write(from_)
1008 record.add_content(from_, to_rel)
1009 else:
1010 log0(f'{from_=}')
1011 log2(f'Copying from {os.path.relpath(from_, self.root)} to {to_abs}')
1012 shutil.copy2( from_, to_abs)
1013 record.add_file(from_, to_rel)
1014
1015 def add_str(content, to_abs, to_rel):
1016 log2( f'Writing to: {to_abs}')
1017 os.makedirs( os.path.dirname( to_abs), exist_ok=True)
1018 with open( to_abs, 'w') as f:
1019 f.write( content)
1020 record.add_content(content, to_rel)
1021
1022 for item in items:
1023 from_, (to_abs, to_rel) = self._fromto(item)
1024 log0(f'{from_=} {to_abs=} {to_rel=}')
1025 to_abs2 = f'{root2}/{to_rel}'
1026 add_file( from_, to_abs2, to_rel)
1027
1028 add_str( self._metainfo(), f'{root2}/{dist_info_dir}/METADATA', f'{dist_info_dir}/METADATA')
1029
1030 if self.license:
1031 add_str( self.license, f'{root2}/{dist_info_dir}/COPYING', f'{dist_info_dir}/COPYING')
1032
1033 entry_points_text = self._entry_points_text()
1034 if entry_points_text:
1035 add_str(
1036 entry_points_text,
1037 f'{root2}/{dist_info_dir}/entry_points.txt',
1038 f'{dist_info_dir}/entry_points.txt',
1039 )
1040
1041 log2( f'Writing to: {record_path}')
1042 with open(record_path, 'w') as f:
1043 f.write(record.get())
1044
1045 log2(f'Finished.')
1046
1047
1048 def _argv_dist_info(self, root):
1049 '''
1050 Called by `handle_argv()`. There doesn't seem to be any documentation
1051 for `setup.py dist_info`, but it appears to be like `egg_info` except
1052 it writes to a slightly different directory.
1053 '''
1054 if root is None:
1055 root = f'{self.name}-{self.version}.dist-info'
1056 self._write_info(f'{root}/METADATA')
1057 if self.license:
1058 with open( f'{root}/COPYING', 'w') as f:
1059 f.write( self.license)
1060
1061
1062 def _argv_egg_info(self, egg_base):
1063 '''
1064 Called by `handle_argv()`.
1065 '''
1066 if egg_base is None:
1067 egg_base = '.'
1068 self._write_info(f'{egg_base}/.egg-info')
1069
1070
1071 def _write_info(self, dirpath=None):
1072 '''
1073 Writes egg/dist info to files in directory `dirpath` or `self.root` if
1074 `None`.
1075 '''
1076 if dirpath is None:
1077 dirpath = self.root
1078 log2(f'Creating files in directory {dirpath}')
1079 os.makedirs(dirpath, exist_ok=True)
1080 with open(os.path.join(dirpath, 'PKG-INFO'), 'w') as f:
1081 f.write(self._metainfo())
1082
1083 # These don't seem to be required?
1084 #
1085 #with open(os.path.join(dirpath, 'SOURCES.txt', 'w') as f:
1086 # pass
1087 #with open(os.path.join(dirpath, 'dependency_links.txt', 'w') as f:
1088 # pass
1089 #with open(os.path.join(dirpath, 'top_level.txt', 'w') as f:
1090 # f.write(f'{self.name}\n')
1091 #with open(os.path.join(dirpath, 'METADATA', 'w') as f:
1092 # f.write(self._metainfo())
1093
1094
1095 def handle_argv(self, argv):
1096 '''
1097 Attempt to handle an old-style (pre-PEP-517) command line passed by
1098 old releases of pip to a `setup.py` script, and manual running of
1099 `setup.py`.
1100
1101 This is partial support at best.
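For example (a sketch), building a wheel into directory `dist` corresponds
to an argv such as:

    ['setup.py', '--dist-dir', 'dist', 'bdist_wheel']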
1102 '''
1103 global g_verbose
1104 #log2(f'argv: {argv}')
1105
1106 class ArgsRaise:
1107 pass
1108
1109 class Args:
1110 '''
1111 Iterates over argv items.
1112 '''
1113 def __init__( self, argv):
1114 self.items = iter( argv)
1115 def next( self, eof=ArgsRaise):
1116 '''
1117 Returns next arg. If no more args, we return <eof> or raise an
1118 exception if <eof> is ArgsRaise.
1119 '''
1120 try:
1121 return next( self.items)
1122 except StopIteration:
1123 if eof is ArgsRaise:
1124 raise Exception('Not enough args')
1125 return eof
1126
1127 command = None
1128 opt_all = None
1129 opt_dist_dir = 'dist'
1130 opt_egg_base = None
1131 opt_formats = None
1132 opt_install_headers = None
1133 opt_record = None
1134 opt_root = None
1135
1136 args = Args(argv[1:])
1137
1138 while 1:
1139 arg = args.next(None)
1140 if arg is None:
1141 break
1142
1143 elif arg in ('-h', '--help', '--help-commands'):
1144 log0(textwrap.dedent('''
1145 Usage:
1146 [<options>...] <command> [<options>...]
1147 Commands:
1148 bdist_wheel
1149 Creates a wheel called
1150 <dist-dir>/<name>-<version>-<details>.whl, where
1151 <dist-dir> is "dist" or as specified by --dist-dir,
1152 and <details> encodes ABI and platform etc.
1153 clean
1154 Cleans build files.
1155 dist_info
1156 Creates files in <name>-<version>.dist-info/ or
1157 directory specified by --egg-base.
1158 egg_info
1159 Creates files in .egg-info/ or directory
1160 specified by --egg-base.
1161 install
1162 Builds and installs. Writes installation
1163 information to <record> if --record was
1164 specified.
1165 sdist
1166 Make a source distribution:
1167 <dist-dir>/<name>-<version>.tar.gz
1168 Options:
1169 --all
1170 Used by "clean".
1171 --compile
1172 Ignored.
1173 --dist-dir | -d <dist-dir>
1174 Default is "dist".
1175 --egg-base <egg-base>
1176 Used by "egg_info".
1177 --formats <formats>
1178 Used by "sdist".
1179 --install-headers <directory>
1180 Ignored.
1181 --python-tag <python-tag>
1182 Ignored.
1183 --record <record>
1184 Used by "install".
1185 --root <path>
1186 Used by "install".
1187 --single-version-externally-managed
1188 Ignored.
1189 --verbose -v
1190 Extra diagnostics.
1191 Other:
1192 windows-vs [-y <year>] [-v <version>] [-g <grade>] [--verbose]
1193 Windows only; looks for matching Visual Studio.
1194 windows-python [-v <version>] [--verbose]
1195 Windows only; looks for matching Python.
1196 '''))
1197 return
1198
1199 elif arg in ('bdist_wheel', 'clean', 'dist_info', 'egg_info', 'install', 'sdist'):
1200 assert command is None, f'Two commands specified: {command} and {arg}.'
1201 command = arg
1202
1203 elif arg in ('windows-vs', 'windows-python', 'show-sysconfig'):
1204 assert command is None, f'Two commands specified: {command} and {arg}.'
1205 command = arg
1206
1207 elif arg == '--all': opt_all = True
1208 elif arg == '--compile': pass
1209 elif arg == '--dist-dir' or arg == '-d': opt_dist_dir = args.next()
1210 elif arg == '--egg-base': opt_egg_base = args.next()
1211 elif arg == '--formats': opt_formats = args.next()
1212 elif arg == '--install-headers': opt_install_headers = args.next()
1213 elif arg == '--python-tag': pass
1214 elif arg == '--record': opt_record = args.next()
1215 elif arg == '--root': opt_root = args.next()
1216 elif arg == '--single-version-externally-managed': pass
1217 elif arg == '--verbose' or arg == '-v': g_verbose += 1
1218
1219 else:
1220 raise Exception(f'Unrecognised arg: {arg}')
1221
1222 assert command, 'No command specified'
1223
1224 log1(f'Handling command={command}')
1225 if 0: pass
1226 elif command == 'bdist_wheel': self.build_wheel(opt_dist_dir)
1227 elif command == 'clean': self._argv_clean(opt_all)
1228 elif command == 'dist_info': self._argv_dist_info(opt_egg_base)
1229 elif command == 'egg_info': self._argv_egg_info(opt_egg_base)
1230 elif command == 'install': self.install(opt_record, opt_root)
1231 elif command == 'sdist': self.build_sdist(opt_dist_dir, opt_formats)
1232
1233 elif command == 'windows-python':
1234 version = None
1235 while 1:
1236 arg = args.next(None)
1237 if arg is None:
1238 break
1239 elif arg == '-v':
1240 version = args.next()
1241 elif arg == '--verbose':
1242 g_verbose += 1
1243 else:
1244 assert 0, f'Unrecognised {arg=}'
1245 python = wdev.WindowsPython(version=version)
1246 print(f'Python is:\n{python.description_ml(" ")}')
1247
1248 elif command == 'windows-vs':
1249 grade = None
1250 version = None
1251 year = None
1252 while 1:
1253 arg = args.next(None)
1254 if arg is None:
1255 break
1256 elif arg == '-g':
1257 grade = args.next()
1258 elif arg == '-v':
1259 version = args.next()
1260 elif arg == '-y':
1261 year = args.next()
1262 elif arg == '--verbose':
1263 g_verbose += 1
1264 else:
1265 assert 0, f'Unrecognised {arg=}'
1266 vs = wdev.WindowsVS(year=year, grade=grade, version=version)
1267 print(f'Visual Studio is:\n{vs.description_ml(" ")}')
1268
1269 elif command == 'show-sysconfig':
1270 show_sysconfig()
1271 for mod in platform, sys:
1272 log0(f'{mod.__name__}:')
1273 for n in dir(mod):
1274 if n.startswith('_'):
1275 continue
1276 log0(f'{mod.__name__}.{n}')
1277 if mod is platform and n == 'uname':
1278 continue
1279 if mod is platform and n == 'pdb':
1280 continue
1281 if mod is sys and n in ('breakpointhook', 'exit'):
1282 # We don't want to call these.
1283 continue
1284 v = getattr(mod, n)
1285 if callable(v):
1286 try:
1287 v = v()
1288 except Exception:
1289 pass
1290 else:
1291 #print(f'{n=}', flush=1)
1292 try:
1293 print(f' {mod.__name__}.{n}()={v!r}')
1294 except Exception:
1295 print(f' Failed to print value of {mod.__name__}.{n}().')
1296 else:
1297 try:
1298 print(f' {mod.__name__}.{n}={v!r}')
1299 except Exception:
1300 print(f' Failed to print value of {mod.__name__}.{n}.')
1301
1302 else:
1303 assert 0, f'Unrecognised command: {command}'
1304
1305 log2(f'Finished handling command: {command}')
1306
1307
1308 def __str__(self):
1309 return ('{'
1310 f'name={self.name!r}'
1311 f' version={self.version!r}'
1312 f' platform={self.platform!r}'
1313 f' supported_platform={self.supported_platform!r}'
1314 f' summary={self.summary!r}'
1315 f' description={self.description!r}'
1316 f' description_content_type={self.description_content_type!r}'
1317 f' keywords={self.keywords!r}'
1318 f' home_page={self.home_page!r}'
1319 f' download_url={self.download_url!r}'
1320 f' author={self.author!r}'
1321 f' author_email={self.author_email!r}'
1322 f' maintainer={self.maintainer!r}'
1323 f' maintainer_email={self.maintainer_email!r}'
1324 f' license={self.license!r}'
1325 f' classifier={self.classifier!r}'
1326 f' requires_dist={self.requires_dist!r}'
1327 f' requires_python={self.requires_python!r}'
1328 f' requires_external={self.requires_external!r}'
1329 f' project_url={self.project_url!r}'
1330 f' provides_extra={self.provides_extra!r}'
1331
1332 f' root={self.root!r}'
1333 f' fn_build={self.fn_build!r}'
1334 f' fn_sdist={self.fn_sdist!r}'
1335 f' fn_clean={self.fn_clean!r}'
1336 f' tag_python={self.tag_python_!r}'
1337 f' tag_abi={self.tag_abi_!r}'
1338 f' tag_platform={self.tag_platform_!r}'
1339 '}'
1340 )
1341
1342 def _dist_info_dir( self):
1343 return f'{_normalise(self.name)}-{self.version}.dist-info'
1344
1345 def _metainfo(self):
1346 '''
1347 Returns text for `.egg-info/PKG-INFO` file, or `PKG-INFO` in an sdist
1348 `.tar.gz` file, or `...dist-info/METADATA` in a wheel.
1349 '''
1350 # 2021-04-30: Have been unable to get multiline content working on
1351 # test.pypi.org so we currently put the description as the body after
1352 # all the other headers.
1353 #
1354 ret = ['']
1355 def add(key, value):
1356 if value is None:
1357 return
1358 if isinstance( value, (tuple, list)):
1359 for v in value:
1360 if v is not None:
1361 add( key, v)
1362 return
1363 if key == 'License' and '\n' in value:
1364 # This is ok because we write `self.license` into
1365 # *.dist-info/COPYING.
1366 #
1367 log1( f'Omitting license because contains newline(s).')
1368 return
1369 assert '\n' not in value, f'key={key} value contains newline: {value!r}'
1370 if key == 'Project-URL':
1371 assert value.count(',') == 1, f'For {key=}, should have one comma in {value!r}.'
1372 ret[0] += f'{key}: {value}\n'
1373 #add('Description', self.description)
1374 add('Metadata-Version', '2.1')
1375
1376 # These names are from:
1377 # https://packaging.python.org/specifications/core-metadata/
1378 #
1379 for name in (
1380 'Name',
1381 'Version',
1382 'Platform',
1383 'Supported-Platform',
1384 'Summary',
1385 'Description-Content-Type',
1386 'Keywords',
1387 'Home-page',
1388 'Download-URL',
1389 'Author',
1390 'Author-email',
1391 'Maintainer',
1392 'Maintainer-email',
1393 'License',
1394 'Classifier',
1395 'Requires-Dist',
1396 'Requires-Python',
1397 'Requires-External',
1398 'Project-URL',
1399 'Provides-Extra',
1400 ):
1401 identifier = name.lower().replace( '-', '_')
1402 add( name, getattr( self, identifier))
1403
1404 ret = ret[0]
1405
1406 # Append description as the body
1407 if self.description:
1408 if '\n' in self.description:
1409 description_text = self.description.strip()
1410 else:
1411 with open(self.description) as f:
1412 description_text = f.read()
1413 ret += '\n' # Empty line separates headers from body.
1414 ret += description_text
1415 ret += '\n'
1416 return ret
1417
1418 def _path_relative_to_root(self, path, assert_within_root=True):
1419 '''
1420 Returns `(path_abs, path_rel)`, where `path_abs` is absolute path and
1421 `path_rel` is relative to `self.root`.
1422
1423 Interprets `path` as relative to `self.root` if not absolute.
1424
1425 We use `os.path.realpath()` to resolve any links.
1426
1427 if `assert_within_root` is true, assert-fails if `path` is not within
1428 `self.root`.
1429 '''
1430 if os.path.isabs(path):
1431 p = path
1432 else:
1433 p = os.path.join(self.root, path)
1434 p = os.path.realpath(os.path.abspath(p))
1435 if assert_within_root:
1436 assert p.startswith(self.root+os.sep) or p == self.root, \
1437 f'Path not within root={self.root+os.sep!r}: {path=} {p=}'
1438 p_rel = os.path.relpath(p, self.root)
1439 return p, p_rel
1440
1441 def _fromto(self, p):
1442 '''
1443 Returns `(from_, (to_abs, to_rel))`.
1444
1445 If `p` is a string we convert to `(p, p)`. Otherwise we assert that
1446 `p` is a tuple `(from_, to_)` where `from_` is str/bytes and `to_` is
1447 str. If `from_` is a bytes it is contents of file to add, otherwise the
1448 path of an existing file; non-absolute paths are assumed to be relative
1449 to `self.root`. If `to_` is empty or ends with `/`, we append the leaf
1450 of `from_` (which must be a str).
1451
1452 If `to_` starts with `$dist-info/`, we replace this with
1453 `self._dist_info_dir()`.
1454
1455 If `to_` starts with `$data/`, we replace this with
1456 `{self.name}-{self.version}.data/`.
1457
1458 We assert that `to_abs` is within `self.root`.
1459
1460 `to_rel` is derived from `to_abs` and is relative to `self.root`.
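For example, with `self.root` `/home/me/foo`, name 'foo' and version
'1.2.3' (a sketch; paths are hypothetical):

    ('build/_foo.so', 'foo/')
        => ('/home/me/foo/build/_foo.so',
            ('/home/me/foo/foo/_foo.so', 'foo/_foo.so'))
    ('LICENSE', '$dist-info/')
        => ('/home/me/foo/LICENSE',
            ('/home/me/foo/foo-1.2.3.dist-info/LICENSE',
             'foo-1.2.3.dist-info/LICENSE'))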
1461 '''
1462 ret = None
1463 if isinstance(p, str):
1464 p = p, p
1465 assert isinstance(p, tuple) and len(p) == 2
1466
1467 from_, to_ = p
1468 assert isinstance(from_, (str, bytes))
1469 assert isinstance(to_, str)
1470 if to_.endswith('/') or to_=='':
1471 to_ += os.path.basename(from_)
1472 prefix = '$dist-info/'
1473 if to_.startswith( prefix):
1474 to_ = f'{self._dist_info_dir()}/{to_[ len(prefix):]}'
1475 prefix = '$data/'
1476 if to_.startswith( prefix):
1477 to_ = f'{self.name}-{self.version}.data/{to_[ len(prefix):]}'
1478 if isinstance(from_, str):
1479 from_, _ = self._path_relative_to_root( from_, assert_within_root=False)
1480 to_ = self._path_relative_to_root(to_)
1481 assert isinstance(from_, (str, bytes))
1482 log2(f'returning {from_=} {to_=}')
1483 return from_, to_
1484
1485
1486 def build_extension(
1487 name,
1488 path_i,
1489 outdir,
1490 builddir=None,
1491 includes=None,
1492 defines=None,
1493 libpaths=None,
1494 libs=None,
1495 optimise=True,
1496 debug=False,
1497 compiler_extra='',
1498 linker_extra='',
1499 swig=None,
1500 cpp=True,
1501 prerequisites_swig=None,
1502 prerequisites_compile=None,
1503 prerequisites_link=None,
1504 infer_swig_includes=True,
1505 py_limited_api=False,
1506 ):
1507 '''
1508 Builds a Python extension module using SWIG. Works on Windows, Linux, MacOS
1509 and OpenBSD.
1510
1511 On Unix, sets rpath when linking shared libraries.
1512
1513 Args:
1514 name:
1515 Name of generated extension module.
1516 path_i:
1517 Path of input SWIG `.i` file. Internally we use swig to generate a
1518 corresponding `.c` or `.cpp` file.
1519 outdir:
1520 Output directory for generated files:
1521
1522 * `{outdir}/{name}.py`
1523 * `{outdir}/_{name}.so` # Unix
1524 * `{outdir}/_{name}.*.pyd` # Windows
1525 We return the leafname of the `.so` or `.pyd` file.
1526 builddir:
1527 Where to put intermediate files, for example the .cpp file
1528 generated by swig and `.d` dependency files. Default is `outdir`.
1529 includes:
1530 A string, or a sequence of extra include directories to be prefixed
1531 with `-I`.
1532 defines:
1533 A string, or a sequence of extra preprocessor defines to be
1534 prefixed with `-D`.
1535 libpaths:
1536 A string, or a sequence of library paths to be prefixed with
1537 `/LIBPATH:` on Windows or `-L` on Unix.
1538 libs:
1539 A string, or a sequence of library names. Each item is prefixed
1540 with `-l` on non-Windows.
1541 optimise:
1542 Whether to use compiler optimisations.
1543 debug:
1544 Whether to build with debug symbols.
1545 compiler_extra:
1546 Extra compiler flags. Can be None.
1547 linker_extra:
1548 Extra linker flags. Can be None.
1549 swig:
1550 Swig command; if false we use 'swig'.
1551 cpp:
1552 If true we tell SWIG to generate C++ code instead of C.
1553 prerequisites_swig:
1554 prerequisites_compile:
1555 prerequisites_link:
1556
1557 [These are mainly for use on Windows. On other systems we
1558 automatically generate dynamic dependencies using swig/compile/link
1559 commands' `-MD` and `-MF` args.]
1560
1561 Sequences of extra input files/directories that should force
1562 running of swig, compile or link commands if they are newer than
1563 any existing generated SWIG `.i` file, compiled object file or
1564 shared library file.
1565
1566 If present, the first occurrence of `True` or `False` forces re-run
1567 or no re-run. Any occurrence of None is ignored. If an item is a
1568 directory path we look for newest file within the directory tree.
1569
1570 If not a sequence, we convert into a single-item list.
1571
1572 prerequisites_swig
1573
1574 We use swig's -MD and -MF args to generate dynamic dependencies
1575 automatically, so this is not usually required.
1576
1577 prerequisites_compile
1578 prerequisites_link
1579
1580 On non-Windows we use cc's -MD and -MF args to generate dynamic
1581 dependencies so this is not usually required.
1582 infer_swig_includes:
1583 If true, we extract `-I<path>` and `-I <path>` args from
1584 `compiler_extra` (also `/I` on Windows) and use them with swig so
1585 that it can see the same header files as C/C++. This is useful
1586 when using environment variables such as `CC` and `CXX` to set
1587 `compiler_extra`.
1588 py_limited_api:
1589 If true we build for current Python's limited API / stable ABI.
1590
1591 Returns the leafname of the generated library file within `outdir`, e.g.
1592 `_{name}.so` on Unix or `_{name}.cp311-win_amd64.pyd` on Windows.
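For example (a sketch; the module name, paths, defines and libraries are
hypothetical):

    so_leaf = build_extension(
            name = 'foo',
            path_i = 'foo.i',
            outdir = 'build',
            includes = ['include'],
            defines = ['FOO_BUILD=1'],
            libpaths = ['build'],
            libs = ['foo'],
            py_limited_api = True,
            )

`build/foo.py` and `build/{so_leaf}` can then be returned from a pipcl
`fn_build()` callback.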
1593 '''
1594 if compiler_extra is None:
1595 compiler_extra = ''
1596 if linker_extra is None:
1597 linker_extra = ''
1598 if builddir is None:
1599 builddir = outdir
1600 if not swig:
1601 swig = 'swig'
1602 includes_text = _flags( includes, '-I')
1603 defines_text = _flags( defines, '-D')
1604 libpaths_text = _flags( libpaths, '/LIBPATH:', '"') if windows() else _flags( libpaths, '-L')
1605 libs_text = _flags( libs, '' if windows() else '-l')
1606 path_cpp = f'{builddir}/{os.path.basename(path_i)}'
1607 path_cpp += '.cpp' if cpp else '.c'
1608 os.makedirs( outdir, exist_ok=True)
1609
1610 # Run SWIG.
1611
1612 if infer_swig_includes:
1613 # Extract include flags from `compiler_extra`.
1614 swig_includes_extra = ''
1615 compiler_extra_items = compiler_extra.split()
1616 i = 0
1617 while i < len(compiler_extra_items):
1618 item = compiler_extra_items[i]
1619 # Swig doesn't seem to like a space after `I`.
1620 if item == '-I' or (windows() and item == '/I'):
1621 swig_includes_extra += f' -I{compiler_extra_items[i+1]}'
1622 i += 1
1623 elif item.startswith('-I') or (windows() and item.startswith('/I')):
1624 swig_includes_extra += f' -I{compiler_extra_items[i][2:]}'
1625 i += 1
1626 swig_includes_extra = swig_includes_extra.strip()
1627 deps_path = f'{path_cpp}.d'
1628 prerequisites_swig2 = _get_prerequisites( deps_path)
1629 run_if(
1630 f'''
1631 {swig}
1632 -Wall
1633 {"-c++" if cpp else ""}
1634 -python
1635 -module {name}
1636 -outdir {outdir}
1637 -o {path_cpp}
1638 -MD -MF {deps_path}
1639 {includes_text}
1640 {swig_includes_extra}
1641 {path_i}
1642 '''
1643 ,
1644 path_cpp,
1645 path_i,
1646 prerequisites_swig,
1647 prerequisites_swig2,
1648 )
1649
1650 so_suffix = _so_suffix(use_so_versioning = not py_limited_api)
1651 path_so_leaf = f'_{name}{so_suffix}'
1652 path_so = f'{outdir}/{path_so_leaf}'
1653
1654 py_limited_api2 = current_py_limited_api() if py_limited_api else None
1655
1656 if windows():
1657 path_obj = f'{path_so}.obj'
1658
1659 permissive = '/permissive-'
1660 EHsc = '/EHsc'
1661 T = '/Tp' if cpp else '/Tc'
1662 optimise2 = '/DNDEBUG /O2' if optimise else '/D_DEBUG'
1663 debug2 = ''
1664 if debug:
1665 debug2 = '/Zi' # Generate .pdb.
1666 # debug2 = '/Z7' # Embed debug info in .obj files.
1667
1668 py_limited_api3 = f'/DPy_LIMITED_API={py_limited_api2}' if py_limited_api2 else ''
1669
1670 # As of 2023-08-23, it looks like VS tools create slightly
1671 # different .dll's each time, even with identical inputs.
1672 #
1673 # Some info about this is at:
1674 # https://nikhilism.com/post/2020/windows-deterministic-builds/.
1675 # E.g. an undocumented linker flag `/Brepro`.
1676 #
1677
1678 command, pythonflags = base_compiler(cpp=cpp)
1679 command = f'''
1680 {command}
1681 # General:
1682 /c # Compiles without linking.
1683 {EHsc} # Enable "Standard C++ exception handling".
1684
1685 #/MD # Creates a multithreaded DLL using MSVCRT.lib.
1686 {'/MDd' if debug else '/MD'}
1687
1688 # Input/output files:
1689 {T}{path_cpp} # /Tp specifies C++ source file.
1690 /Fo{path_obj} # Output file. codespell:ignore
1691
1692 # Include paths:
1693 {includes_text}
1694 {pythonflags.includes} # Include path for Python headers.
1695
1696 # Code generation:
1697 {optimise2}
1698 {debug2}
1699 {permissive} # Set standard-conformance mode.
1700
1701 # Diagnostics:
1702 #/FC # Display full path of source code files passed to cl.exe in diagnostic text.
1703 /W3 # Sets which warning level to output. /W3 is IDE default.
1704 /diagnostics:caret # Controls the format of diagnostic messages.
1705 /nologo #
1706
1707 {defines_text}
1708 {compiler_extra}
1709
1710 {py_limited_api3}
1711 '''
1712 run_if( command, path_obj, path_cpp, prerequisites_compile)
1713
1714 command, pythonflags = base_linker(cpp=cpp)
1715 debug2 = '/DEBUG' if debug else ''
1716 base, _ = os.path.splitext(path_so_leaf)
1717 command = f'''
1718 {command}
1719 /DLL # Builds a DLL.
1720 /EXPORT:PyInit__{name} # Exports a function.
1721 /IMPLIB:{base}.lib # Overrides the default import library name.
1722 {libpaths_text}
1723 {pythonflags.ldflags}
1724 /OUT:{path_so} # Specifies the output file name.
1725 {debug2}
1726 /nologo
1727 {libs_text}
1728 {path_obj}
1729 {linker_extra}
1730 '''
1731 run_if( command, path_so, path_obj, prerequisites_link)
1732
1733 else:
1734
1735 # Not Windows.
1736 #
1737 command, pythonflags = base_compiler(cpp=cpp)
1738
1739 # setuptools on Linux seems to use slightly different compile flags:
1740 #
1741 # -fwrapv -O3 -Wall -O2 -g0 -DPY_CALL_TRAMPOLINE
1742 #
1743
1744 general_flags = ''
1745 if debug:
1746 general_flags += ' -g'
1747 if optimise:
1748 general_flags += ' -O2 -DNDEBUG'
1749
1750 py_limited_api3 = f'-DPy_LIMITED_API={py_limited_api2}' if py_limited_api2 else ''
1751
1752 if darwin():
1753 # MacOS's linker does not like `-z origin`.
1754 rpath_flag = "-Wl,-rpath,@loader_path/"
1755
1756 # Avoid `Undefined symbols for ... "_PyArg_UnpackTuple" ...'.
1757 general_flags += ' -undefined dynamic_lookup'
1758 elif pyodide():
1759 # Setting `-Wl,-rpath,'$ORIGIN',-z,origin` gives:
1760 # emcc: warning: ignoring unsupported linker flag: `-rpath` [-Wlinkflags]
1761 # wasm-ld: error: unknown -z value: origin
1762 #
1763 log0(f'pyodide: PEP-3149 suffix untested, so omitting. {_so_suffix()=}.')
1764 path_so_leaf = f'_{name}.so'
1765 path_so = f'{outdir}/{path_so_leaf}'
1766
1767 rpath_flag = ''
1768 else:
1769 rpath_flag = "-Wl,-rpath,'$ORIGIN',-z,origin"
1770 path_so = f'{outdir}/{path_so_leaf}'
1771 # Fun fact - on Linux, if the -L and -l options are before '{path_cpp}'
1772 # they seem to be ignored...
1773 #
1774 prerequisites = list()
1775
1776 if pyodide():
1777 # Looks like pyodide's `cc` can't compile and link in one invocation.
1778 prerequisites_compile_path = f'{path_cpp}.o.d'
1779 prerequisites += _get_prerequisites( prerequisites_compile_path)
1780 command = f'''
1781 {command}
1782 -fPIC
1783 {general_flags.strip()}
1784 {pythonflags.includes}
1785 {includes_text}
1786 {defines_text}
1787 -MD -MF {prerequisites_compile_path}
1788 -c {path_cpp}
1789 -o {path_cpp}.o
1790 {compiler_extra}
1791 {py_limited_api3}
1792 '''
1793 prerequisites_link_path = f'{path_cpp}.o.d'
1794 prerequisites += _get_prerequisites( prerequisites_link_path)
1795 ld, _ = base_linker(cpp=cpp)
1796 command += f'''
1797 && {ld}
1798 {path_cpp}.o
1799 -o {path_so}
1800 -MD -MF {prerequisites_link_path}
1801 {rpath_flag}
1802 {libpaths_text}
1803 {libs_text}
1804 {linker_extra}
1805 {pythonflags.ldflags}
1806 '''
1807 else:
1808 # We use compiler to compile and link in one command.
1809 prerequisites_path = f'{path_so}.d'
1810 prerequisites = _get_prerequisites(prerequisites_path)
1811
1812 command = f'''
1813 {command}
1814 -fPIC
1815 -shared
1816 {general_flags.strip()}
1817 {pythonflags.includes}
1818 {includes_text}
1819 {defines_text}
1820 {path_cpp}
1821 -MD -MF {prerequisites_path}
1822 -o {path_so}
1823 {compiler_extra}
1824 {libpaths_text}
1825 {linker_extra}
1826 {pythonflags.ldflags}
1827 {libs_text}
1828 {rpath_flag}
1829 {py_limited_api3}
1830 '''
1831 command_was_run = run_if(
1832 command,
1833 path_so,
1834 path_cpp,
1835 prerequisites_compile,
1836 prerequisites_link,
1837 prerequisites,
1838 )
1839
1840 if command_was_run and darwin():
1841 # We need to patch up references to shared libraries in `libs`.
1842 sublibraries = list()
1843 for lib in () if libs is None else libs:
1844 for libpath in libpaths:
1845 found = list()
1846 for suffix in '.so', '.dylib':
1847 path = f'{libpath}/lib{os.path.basename(lib)}{suffix}'
1848 if os.path.exists( path):
1849 found.append( path)
1850 if found:
1851 assert len(found) == 1, f'More than one file matches lib={lib!r}: {found}'
1852 sublibraries.append( found[0])
1853 break
1854 else:
1855 log2(f'Warning: cannot find path of lib={lib!r} in libpaths={libpaths}')
1856 macos_patch( path_so, *sublibraries)
1857
1858 #run(f'ls -l {path_so}', check=0)
1859 #run(f'file {path_so}', check=0)
1860
1861 return path_so_leaf
1862
1863
1864 # Functions that might be useful.
1865 #
1866
1867
1868 def base_compiler(vs=None, pythonflags=None, cpp=False, use_env=True):
1869 '''
1870 Returns basic compiler command and PythonFlags.
1871
1872 Args:
1873 vs:
1874 Windows only. A `wdev.WindowsVS` instance or None to use default
1875 `wdev.WindowsVS` instance.
1876 pythonflags:
1877 A `pipcl.PythonFlags` instance or None to use default
1878 `pipcl.PythonFlags` instance.
1879 cpp:
1880 If true we return C++ compiler command instead of C. On Windows
1881 this has no effect - we always return `cl.exe`.
1882 use_env:
1883 If true we return '$CC' or '$CXX' if the corresponding
1884 environment variable is set (we return the literal text rather
1885 than evaluating it with `getenv()` or `os.environ`).
1886
1887 Returns `(cc, pythonflags)`:
1888 cc:
1889 C or C++ command. On Windows this is of the form
1890 `{vs.vcvars}&&{vs.cl}`; otherwise it is typically `cc` or `c++`.
1891 pythonflags:
1892 The `pythonflags` arg or a new `pipcl.PythonFlags` instance.
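
Illustrative sketch (not a doctest; `foo.cpp` and the output name are placeholders, non-Windows flags shown):

    cc, pyflags = base_compiler(cpp=True)
    run(f'{cc} -c -fPIC {pyflags.includes} foo.cpp -o foo.o')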
1893 '''
1894 if not pythonflags:
1895 pythonflags = PythonFlags()
1896 cc = None
1897 if use_env:
1898 if cpp:
1899 if os.environ.get( 'CXX'):
1900 cc = '$CXX'
1901 else:
1902 if os.environ.get( 'CC'):
1903 cc = '$CC'
1904 if cc:
1905 pass
1906 elif windows():
1907 if not vs:
1908 vs = wdev.WindowsVS()
1909 cc = f'"{vs.vcvars}"&&"{vs.cl}"'
1910 elif wasm():
1911 cc = 'em++' if cpp else 'emcc'
1912 else:
1913 cc = 'c++' if cpp else 'cc'
1914 cc = macos_add_cross_flags( cc)
1915 return cc, pythonflags
1916
1917
1918 def base_linker(vs=None, pythonflags=None, cpp=False, use_env=True):
1919 '''
1920 Returns basic linker command.
1921
1922 Args:
1923 vs:
1924 Windows only. A `wdev.WindowsVS` instance or None to use default
1925 `wdev.WindowsVS` instance.
1926 pythonflags:
1927 A `pipcl.PythonFlags` instance or None to use default
1928 `pipcl.PythonFlags` instance.
1929 cpp:
1930 If true we return C++ linker command instead of C. On Windows this
1931 has no effect - we always return `link.exe`.
1932 use_env:
1933 If true we use `os.environ['LD']` if set.
1934
1935 Returns `(linker, pythonflags)`:
1936 linker:
1937 Linker command. On Windows this is of the form
1938 `{vs.vcvars}&&{vs.link}`; otherwise it is typically `cc` or `c++`.
1939 pythonflags:
1940 The `pythonflags` arg or a new `pipcl.PythonFlags` instance.
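
Illustrative sketch (not a doctest; `foo.o` and `_foo.so` are placeholders, non-Windows flags shown):

    linker, pyflags = base_linker(cpp=True)
    run(f'{linker} -shared foo.o {pyflags.ldflags} -o _foo.so')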
1941 '''
1942 if not pythonflags:
1943 pythonflags = PythonFlags()
1944 linker = None
1945 if use_env:
1946 if os.environ.get( 'LD'):
1947 linker = '$LD'
1948 if linker:
1949 pass
1950 elif windows():
1951 if not vs:
1952 vs = wdev.WindowsVS()
1953 linker = f'"{vs.vcvars}"&&"{vs.link}"'
1954 elif wasm():
1955 linker = 'em++' if cpp else 'emcc'
1956 else:
1957 linker = 'c++' if cpp else 'cc'
1958 linker = macos_add_cross_flags( linker)
1959 return linker, pythonflags
1960
1961
1962 def git_info( directory):
1963 '''
1964 Returns `(sha, comment, diff, branch)`, all items are str or None if not
1965 available.
1966
1967 directory:
1968 Root of git checkout.
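
Illustrative use (not a doctest):

    sha, comment, diff, branch = git_info('.')
    if sha:
        log0(f'Building from git commit {sha} on branch {branch}.')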
1969 '''
1970 sha, comment, diff, branch = None, None, None, None
1971 e, out = run(
1972 f'cd {directory} && (PAGER= git show --pretty=oneline|head -n 1 && git diff)',
1973 capture=1,
1974 check=0
1975 )
1976 if not e:
1977 sha, _ = out.split(' ', 1)
1978 comment, diff = _.split('\n', 1)
1979 e, out = run(
1980 f'cd {directory} && git rev-parse --abbrev-ref HEAD',
1981 capture=1,
1982 check=0
1983 )
1984 if not e:
1985 branch = out.strip()
1986 log(f'git_info(): directory={directory!r} returning branch={branch!r} sha={sha!r} comment={comment!r}')
1987 return sha, comment, diff, branch
1988
1989
1990 def git_items( directory, submodules=False):
1991 '''
1992 Returns list of paths for all files known to git within a `directory`.
1993
1994 Args:
1995 directory:
1996 Must be somewhere within a git checkout.
1997 submodules:
1998 If true we also include git submodules.
1999
2000 Returns:
2001 A list of paths for all files known to git within `directory`. Each
2002 path is relative to `directory`. `directory` must be somewhere within a
2003 git checkout.
2004
2005 We run a `git ls-files` command internally.
2006
2007 This function can be useful for the `fn_sdist()` callback.
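
For example (illustrative, not a doctest) a `setup.py` could pass this to `pipcl.Package` via `fn_sdist`:

    def sdist(): return git_items('.', submodules=True)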
2008 '''
2009 command = 'cd ' + directory + ' && git ls-files'
2010 if submodules:
2011 command += ' --recurse-submodules'
2012 log1(f'Running {command=}')
2013 text = subprocess.check_output( command, shell=True)
2014 ret = []
2015 for path in text.decode('utf8').strip().split( '\n'):
2016 path2 = os.path.join(directory, path)
2017 # Sometimes git ls-files seems to list empty/non-existent directories
2018 # within submodules.
2019 #
2020 if not os.path.exists(path2):
2021 log2(f'Ignoring git ls-files item that does not exist: {path2}')
2022 elif os.path.isdir(path2):
2023 log2(f'Ignoring git ls-files item that is actually a directory: {path2}')
2024 else:
2025 ret.append(path)
2026 return ret
2027
2028
2029 def git_get(
2030 remote,
2031 local,
2032 *,
2033 branch=None,
2034 depth=1,
2035 env_extra=None,
2036 tag=None,
2037 update=True,
2038 submodules=True,
2039 default_remote=None,
2040 ):
2041 '''
2042 Ensures that <local> is a git checkout (at either <tag>, or <branch> HEAD)
2043 of a remote repository.
2044
2045 Exactly one of <branch> and <tag> must be specified, or <remote> must start
2046 with 'git:' and match the syntax described below.
2047
2048 Args:
2049 remote:
2050 Remote git repository, for example
2051 'https://github.com/ArtifexSoftware/mupdf.git'.
2052
2053 If starts with 'git:', the remaining text should be a command-line
2054 style string containing some or all of these args:
2055 --branch <branch>
2056 --tag <tag>
2057 <remote>
2058 These override <branch>, <tag> and <default_remote>.
2059
2060 For example these all clone/update/branch master of https://foo.bar/qwerty.git to local
2061 checkout 'foo-local':
2062
2063 git_get('https://foo.bar/qwerty.git', 'foo-local', branch='master')
2064 git_get('git:--branch master https://foo.bar/qwerty.git', 'foo-local')
2065 git_get('git:--branch master', 'foo-local', default_remote='https://foo.bar/qwerty.git')
2066 git_get('git:', 'foo-local', branch='master', default_remote='https://foo.bar/qwerty.git')
2067
2068 local:
2069 Local directory. If <local>/.git exists, we attempt to update it
2070 in place with `git fetch` and `git checkout`.
2071 branch:
2072 Branch to use. Is used as default if remote starts with 'git:'.
2073 depth:
2074 Depth of local checkout when cloning and fetching, or None.
2075 env_extra:
2076 Dict of extra name=value environment variables to use whenever we
2077 run git.
2078 tag:
2079 Tag to use. Is used as default if remote starts with 'git:'.
2080 update:
2081 If false we do not update existing repository. Might be useful if
2082 testing without network access.
2083 submodules:
2084 If true, we clone with `--recursive --shallow-submodules` and run
2085 `git submodule update --init --recursive` before returning.
2086 default_remote:
2087 The remote URL if <remote> starts with 'git:' but does not specify
2088 the remote URL.
2089 '''
2090 log0(f'{remote=} {local=} {branch=} {tag=}')
2091 if remote.startswith('git:'):
2092 remote0 = remote
2093 args = iter(shlex.split(remote0[len('git:'):]))
2094 remote = default_remote
2095 while 1:
2096 try:
2097 arg = next(args)
2098 except StopIteration:
2099 break
2100 if arg == '--branch':
2101 branch = next(args)
2102 tag = None
2103 elif arg == '--tag':
2104 tag = next(args)
2105 branch = None
2106 else:
2107 remote = arg
2108 assert remote, f'{default_remote=} and no remote specified in remote={remote0!r}.'
2109 assert branch or tag, f'{branch=} {tag=} and no branch/tag specified in remote={remote0!r}.'
2110
2111 assert (branch and not tag) or (not branch and tag), f'Must specify exactly one of <branch> and <tag>.'
2112
2113 depth_arg = f' --depth {depth}' if depth else ''
2114
2115 def do_update():
2116 # This seems to pull in the entire repository.
2117 log0(f'do_update(): attempting to update {local=}.')
2118 # Remove any local changes.
2119 run(f'cd {local} && git checkout .', env_extra=env_extra)
2120 if tag:
2121 # `-u` avoids `fatal: Refusing to fetch into current branch`.
2122 # Using '+' and `refs/tags/` prefix seems to avoid errors like:
2123 # error: cannot update ref 'refs/heads/v3.16.44':
2124 # trying to write non-commit object
2125 # 06c4ae5fe39a03b37a25a8b95214d9f8f8a867b8 to branch
2126 # 'refs/heads/v3.16.44'
2127 #
2128 run(f'cd {local} && git fetch -fuv{depth_arg} {remote} +refs/tags/{tag}:refs/tags/{tag}', env_extra=env_extra)
2129 run(f'cd {local} && git checkout {tag}', env_extra=env_extra)
2130 if branch:
2131 # `-u` avoids `fatal: Refusing to fetch into current branch`.
2132 run(f'cd {local} && git fetch -fuv{depth_arg} {remote} {branch}:{branch}', env_extra=env_extra)
2133 run(f'cd {local} && git checkout {branch}', env_extra=env_extra)
2134
2135 do_clone = True
2136 if os.path.isdir(f'{local}/.git'):
2137 if update:
2138 # Try to update existing checkout.
2139 try:
2140 do_update()
2141 do_clone = False
2142 except Exception as e:
2143 log0(f'Failed to update existing checkout {local}: {e}')
2144 else:
2145 do_clone = False
2146
2147 if do_clone:
2148 # No existing git checkout, so do a fresh clone.
2149 #_fs_remove(local)
2150 log0(f'Cloning to: {local}')
2151 command = f'git clone --config core.longpaths=true{depth_arg}'
2152 if submodules:
2153 command += f' --recursive --shallow-submodules'
2154 if branch:
2155 command += f' -b {branch}'
2156 if tag:
2157 command += f' -b {tag}'
2158 command += f' {remote} {local}'
2159 run(command, env_extra=env_extra)
2160 do_update()
2161
2162 if submodules:
2163 run(f'cd {local} && git submodule update --init --recursive', env_extra=env_extra)
2164
2165 # Show sha of checkout.
2166 run( f'cd {local} && git show --pretty=oneline|head -n 1', check=False)
2167
2168
2169 def run(
2170 command,
2171 *,
2172 capture=False,
2173 check=1,
2174 verbose=1,
2175 env=None,
2176 env_extra=None,
2177 timeout=None,
2178 caller=1,
2179 prefix=None,
2180 ):
2181 '''
2182 Runs a command using `subprocess.run()`.
2183
2184 Args:
2185 command:
2186 A string, the command to run.
2187
2188 Multiple lines in `command` are treated as a single command.
2189
2190 * If a line starts with `#` it is discarded.
2191 * If a line contains ` #`, the trailing text is discarded.
2192
2193 When running the command on Windows, newlines are replaced by
2194 spaces; otherwise each line is terminated by a backslash character.
2195 capture:
2196 If true, we include the command's output in our return value.
2197 check:
2198 If true we raise an exception on error; otherwise we include the
2199 command's returncode in our return value.
2200 verbose:
2201 If true we show the command.
2202 env:
2203 None or dict to use instead of <os.environ>.
2204 env_extra:
2205 None or dict to add to <os.environ> or <env>.
2206 timeout:
2207 If not None, timeout in seconds; passed directly to
2208 subprocess.run(). Note that on MacOS subprocess.run() seems to
2209 leave processes running if timeout expires.
2210 prefix:
2211 String prefix for each line of output.
2212
2213 If true:
2214 * We run command with stdout=subprocess.PIPE and
2215 stderr=subprocess.STDOUT, repeatedly reading the command's output
2216 and writing it to stdout with <prefix>.
2217 * We do not support <timeout>, which must be None.
2218 Returns:
2219 check capture Return
2220 --------------------------
2221 false false returncode
2222 false true (returncode, output)
2223 true false None or raise exception
2224 true true output or raise exception
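
Illustrative examples (not doctests; the commands are placeholders):

    e, text = run('git rev-parse HEAD', capture=1, check=0)
    run('make install', env_extra=dict(DESTDIR='install'), prefix='make: ')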
2225 '''
2226 if env is None:
2227 env = os.environ
2228 if env_extra:
2229 env = env.copy()
2230 env.update(env_extra)
2232 lines = _command_lines( command)
2233 if verbose:
2234 text = f'Running:'
2235 if env_extra:
2236 for k in sorted(env_extra.keys()):
2237 text += f' {k}={shlex.quote(env_extra[k])}'
2238 nl = '\n'
2239 text += f' {nl.join(lines)}'
2240 log1(text, caller=caller+1)
2241 sep = ' ' if windows() else ' \\\n'
2242 command2 = sep.join( lines)
2243
2244 if prefix:
2245 assert not timeout, f'Timeout not supported with prefix.'
2246 child = subprocess.Popen(
2247 command2,
2248 shell=True,
2249 stdout=subprocess.PIPE,
2250 stderr=subprocess.STDOUT,
2251 encoding='utf8',
2252 env=env,
2253 )
2254 if capture:
2255 capture_text = ''
2256 decoder = codecs.getincrementaldecoder('utf8')('replace')
2257 line_start = True
2258 while 1:
2259 raw = os.read( child.stdout.fileno(), 10000)
2260 text = decoder.decode(raw, final=not raw)
2261 if text:
2262 if capture:
2263 capture_text += text
2264 lines = text.split('\n')
2265 for i, line in enumerate(lines):
2266 if line_start:
2267 sys.stdout.write(prefix)
2268 line_start = False
2269 sys.stdout.write(line)
2270 if i < len(lines) - 1:
2271 sys.stdout.write('\n')
2272 line_start = True
2273 sys.stdout.flush()
2274 if not raw:
2275 break
2276 if not line_start:
2277 sys.stdout.write('\n')
2278 e = child.wait()
2279 if check and e:
2280 raise subprocess.CalledProcessError(e, command2, capture_text if capture else None)
2281 if check:
2282 return capture_text if capture else None
2283 else:
2284 return (e, capture_text) if capture else e
2285 else:
2286 cp = subprocess.run(
2287 command2,
2288 shell=True,
2289 stdout=subprocess.PIPE if capture else None,
2290 stderr=subprocess.STDOUT if capture else None,
2291 check=check,
2292 encoding='utf8',
2293 env=env,
2294 timeout=timeout,
2295 )
2296 if check:
2297 return cp.stdout if capture else None
2298 else:
2299 return (cp.returncode, cp.stdout) if capture else cp.returncode
2300
2301
2302 def darwin():
2303 return sys.platform.startswith( 'darwin')
2304
2305 def windows():
2306 return platform.system() == 'Windows'
2307
2308 def wasm():
2309 return os.environ.get( 'OS') in ('wasm', 'wasm-mt')
2310
2311 def pyodide():
2312 return os.environ.get( 'PYODIDE') == '1'
2313
2314 def linux():
2315 return platform.system() == 'Linux'
2316
2317 def openbsd():
2318 return platform.system() == 'OpenBSD'
2319
2320
2321 def show_system():
2322 '''
2323 Show useful information about the system plus argv and environ.
2324 '''
2325 def log(text):
2326 log0(text, caller=3)
2327
2328 #log(f'{__file__=}')
2329 #log(f'{__name__=}')
2330 log(f'{os.getcwd()=}')
2331 log(f'{platform.machine()=}')
2332 log(f'{platform.platform()=}')
2333 log(f'{platform.python_implementation()=}')
2334 log(f'{platform.python_version()=}')
2335 log(f'{platform.system()=}')
2336 if sys.implementation.name != 'graalpy':
2337 log(f'{platform.uname()=}')
2338 log(f'{sys.executable=}')
2339 log(f'{sys.version=}')
2340 log(f'{sys.version_info=}')
2341 log(f'{list(sys.version_info)=}')
2342
2343 log(f'CPU bits: {cpu_bits()}')
2344
2345 log(f'sys.argv ({len(sys.argv)}):')
2346 for i, arg in enumerate(sys.argv):
2347 log(f' {i}: {arg!r}')
2348
2349 log(f'os.environ ({len(os.environ)}):')
2350 for k in sorted( os.environ.keys()):
2351 v = os.environ[ k]
2352 log( f' {k}: {v!r}')
2353
2354
2355 class PythonFlags:
2356 '''
2357 Compile/link flags for the current python, for example the include path
2358 needed to get `Python.h`.
2359
2360 The 'PIPCL_PYTHON_CONFIG' environment variable can be used to
2361 override the location of the python-config executable.
2362
2363 Members:
2364 .includes:
2365 String containing compiler flags for include paths.
2366 .ldflags:
2367 String containing linker flags for library paths.
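
Illustrative use (not a doctest; `foo.c` is a placeholder, non-Windows flags shown):

    pf = PythonFlags()
    run(f'cc -fPIC -shared {pf.includes} foo.c {pf.ldflags} -o _foo.so')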
2368 '''
2369 def __init__(self):
2370
2371 # Experimental detection of python flags from sysconfig.*() instead of
2372 # python-config command.
2373 includes_, ldflags_ = sysconfig_python_flags()
2374
2375 if pyodide():
2376 _include_dir = os.environ[ 'PYO3_CROSS_INCLUDE_DIR']
2377 _lib_dir = os.environ[ 'PYO3_CROSS_LIB_DIR']
2378 self.includes = f'-I {_include_dir}'
2379 self.ldflags = f'-L {_lib_dir}'
2380
2381 elif 0:
2382
2383 self.includes = includes_
2384 self.ldflags = ldflags_
2385
2386 elif windows():
2387 wp = wdev.WindowsPython()
2388 self.includes = f'/I"{wp.include}"'
2389 self.ldflags = f'/LIBPATH:"{wp.libs}"'
2390
2391 elif pyodide():
2392 _include_dir = os.environ[ 'PYO3_CROSS_INCLUDE_DIR']
2393 _lib_dir = os.environ[ 'PYO3_CROSS_LIB_DIR']
2394 self.includes = f'-I {_include_dir}'
2395 self.ldflags = f'-L {_lib_dir}'
2396
2397 else:
2398 python_config = os.environ.get("PIPCL_PYTHON_CONFIG")
2399 if not python_config:
2400 # We use python-config which appears to work better than pkg-config
2401 # because it copes with multiple installed Pythons, e.g.
2402 # manylinux_2014's /opt/python/cp*-cp*/bin/python*.
2403 #
2404 # But... on non-macos it seems that we should not attempt to specify
2405 # libpython on the link command. The manylinux docker containers
2406 # don't actually contain libpython.so, and it seems that this is
2407 # deliberate. And the link command runs ok.
2408 #
2409 python_exe = os.path.realpath( sys.executable)
2410 if darwin():
2411 # Basic install of dev tools with `xcode-select --install` doesn't
2412 # seem to provide a `python3-config` or similar, but there is a
2413 # `python-config.py` accessible via sysconfig.
2414 #
2415 # We try different possibilities and use the last one that
2416 # works.
2417 #
2418 python_config = None
2419 for pc in (
2420 f'python3-config',
2421 f'{sys.executable} {sysconfig.get_config_var("srcdir")}/python-config.py',
2422 f'{python_exe}-config',
2423 ):
2424 e = subprocess.run(
2425 f'{pc} --includes',
2426 shell=1,
2427 stdout=subprocess.DEVNULL,
2428 stderr=subprocess.DEVNULL,
2429 check=0,
2430 ).returncode
2431 log2(f'{e=} from {pc!r}.')
2432 if e == 0:
2433 python_config = pc
2434 assert python_config, f'Cannot find python-config'
2435 else:
2436 python_config = f'{python_exe}-config'
2437 log2(f'Using {python_config=}.')
2438 try:
2439 self.includes = run( f'{python_config} --includes', capture=1, verbose=0).strip()
2440 except Exception as e:
2441 raise Exception('We require python development tools to be installed.') from e
2442 self.ldflags = run( f'{python_config} --ldflags', capture=1, verbose=0).strip()
2443 if linux():
2444 # It seems that with python-3.10 on Linux, we can get an
2445 # incorrect -lcrypt flag that on some systems (e.g. WSL)
2446 # causes:
2447 #
2448 # ImportError: libcrypt.so.2: cannot open shared object file: No such file or directory
2449 #
2450 ldflags2 = self.ldflags.replace(' -lcrypt ', ' ')
2451 if ldflags2 != self.ldflags:
2452 log2(f'### Have removed `-lcrypt` from ldflags: {self.ldflags!r} -> {ldflags2!r}')
2453 self.ldflags = ldflags2
2454
2455 log1(f'{self.includes=}')
2456 log1(f' {includes_=}')
2457 log1(f'{self.ldflags=}')
2458 log1(f' {ldflags_=}')
2459
2460
2461 def macos_add_cross_flags(command):
2462 '''
2463 If running on MacOS and the environment variable ARCHFLAGS is set
2464 (indicating we are cross-building, e.g. for arm64), returns
2465 `command` with extra flags appended. Otherwise returns unchanged
2466 `command`.
2467 '''
2468 if darwin():
2469 archflags = os.environ.get( 'ARCHFLAGS')
2470 if archflags:
2471 command = f'{command} {archflags}'
2472 log2(f'Appending ARCHFLAGS to command: {command}')
2473 return command
2474 return command
2475
2476
2477 def macos_patch( library, *sublibraries):
2478 '''
2479 If running on MacOS, patches `library` so that all references to items in
2480 `sublibraries` are changed to `@rpath/{leafname}`. Does nothing on other
2481 platforms.
2482
2483 library:
2484 Path of shared library.
2485 sublibraries:
2486 List of paths of shared libraries; these have typically been
2487 specified with `-l` when `library` was created.
2488 '''
2489 log2( f'macos_patch(): library={library} sublibraries={sublibraries}')
2490 if not darwin():
2491 return
2492 if not sublibraries:
2493 return
2494 subprocess.run( f'otool -L {library}', shell=1, check=1)
2495 command = 'install_name_tool'
2496 names = []
2497 for sublibrary in sublibraries:
2498 name = subprocess.run(
2499 f'otool -D {sublibrary}',
2500 shell=1,
2501 check=1,
2502 capture_output=1,
2503 encoding='utf8',
2504 ).stdout.strip()
2505 name = name.split('\n')
2506 assert len(name) == 2 and name[0] == f'{sublibrary}:', f'{name=}'
2507 name = name[1]
2508 # Strip any trailing version number from the leaf name.
2509 leaf = os.path.basename(name)
2510 m = re.match('^(.+[.]((so)|(dylib)))[0-9.]*$', leaf)
2511 assert m
2512 log2(f'Changing {leaf=} to {m.group(1)}')
2513 leaf = m.group(1)
2514 command += f' -change {name} @rpath/{leaf}'
2515 command += f' {library}'
2516 log2( f'Running: {command}')
2517 subprocess.run( command, shell=1, check=1)
2518 subprocess.run( f'otool -L {library}', shell=1, check=1)
2519
2520
2521 # Internal helpers.
2522 #
2523
2524 def _command_lines( command):
2525 '''
2526 Processes a multiline command: runs it through `textwrap.dedent()`,
2527 removes comments (lines starting with `#`, or text from ` #` to the end
2528 of the line) and removes entirely blank lines.
2529
2530 Returns list of lines.
2531 '''
2532 command = textwrap.dedent( command)
2533 lines = []
2534 for line in command.split( '\n'):
2535 if line.startswith( '#'):
2536 h = 0
2537 else:
2538 h = line.find( ' #')
2539 if h >= 0:
2540 line = line[:h]
2541 if line.strip():
2542 lines.append(line.rstrip())
2543 return lines
2544
2545
2546 def cpu_bits():
2547 return int.bit_length(sys.maxsize+1)
2548
2549
2550 def _cpu_name():
2551 '''
2552 Returns `x32` or `x64` depending on Python build.
2553 '''
2554 #log(f'sys.maxsize={hex(sys.maxsize)}')
2555 return f'x{32 if sys.maxsize == 2**31 - 1 else 64}'
2556
2557
2558 def run_if( command, out, *prerequisites):
2559 '''
2560 Runs a command only if the output file is not up to date.
2561
2562 Args:
2563 command:
2564 The command to run. We write this into a file <out>.cmd so that we
2565 know to run a command if the command itself has changed.
2566 out:
2567 Path of the output file.
2568
2569 prerequisites:
2570 List of prerequisite paths (or lists/tuples of paths) or
2571 true/false/None items. None and false items are ignored; a true
2572 non-string item forces the command to be run.
2573
2574 Returns:
2575 True if we ran the command, otherwise None.
2576
2577
2578 If the output file does not exist, the command is run:
2579
2580 >>> verbose(1)
2581 1
2582 >>> log_line_numbers(0)
2583 >>> out = 'run_if_test_out'
2584 >>> if os.path.exists( out):
2585 ... os.remove( out)
2586 >>> if os.path.exists( f'{out}.cmd'):
2587 ... os.remove( f'{out}.cmd')
2588 >>> run_if( f'touch {out}', out)
2589 pipcl.py:run_if(): Running command because: File does not exist: 'run_if_test_out'
2590 pipcl.py:run_if(): Running: touch run_if_test_out
2591 True
2592
2593 If we repeat, the output file will be up to date so the command is not run:
2594
2595 >>> run_if( f'touch {out}', out)
2596 pipcl.py:run_if(): Not running command because up to date: 'run_if_test_out'
2597
2598 If we change the command, the command is run:
2599
2600 >>> run_if( f'touch {out} # changed', out)
2601 pipcl.py:run_if(): Running command because: Command has changed
2602 pipcl.py:run_if(): Running: touch run_if_test_out
2603 True
2604
2605 If we add a prerequisite that is newer than the output, the command is run:
2606
2607 >>> time.sleep(1)
2608 >>> prerequisite = 'run_if_test_prerequisite'
2609 >>> run( f'touch {prerequisite}', caller=0)
2610 pipcl.py:run(): Running: touch run_if_test_prerequisite
2611 >>> run_if( f'touch {out} # changed', out, prerequisite)
2612 pipcl.py:run_if(): Running command because: Prerequisite is new: 'run_if_test_prerequisite'
2613 pipcl.py:run_if(): Running: touch run_if_test_out
2614 True
2615
2616 If we repeat, the output will be newer than the prerequisite, so the
2617 command is not run:
2618
2619 >>> run_if( f'touch {out} # changed', out, prerequisite)
2620 pipcl.py:run_if(): Not running command because up to date: 'run_if_test_out'
2621 '''
2622 doit = False
2623 cmd_path = f'{out}.cmd'
2624
2625 if not doit:
2626 out_mtime = _fs_mtime( out)
2627 if out_mtime == 0:
2628 doit = f'File does not exist: {out!r}'
2629
2630 if not doit:
2631 if os.path.isfile( cmd_path):
2632 with open( cmd_path) as f:
2633 cmd = f.read()
2634 else:
2635 cmd = None
2636 if command != cmd:
2637 if cmd is None:
2638 doit = 'No previous command stored'
2639 else:
2640 doit = f'Command has changed'
2641 if 0:
2642 doit += f': {cmd!r} => {command!r}'
2643
2644 if not doit:
2645 # See whether any prerequisites are newer than target.
2646 def _make_prerequisites(p):
2647 if isinstance( p, (list, tuple)):
2648 return list(p)
2649 else:
2650 return [p]
2651 prerequisites_all = list()
2652 for p in prerequisites:
2653 prerequisites_all += _make_prerequisites( p)
2654 if 0:
2655 log2( 'prerequisites_all:')
2656 for i in prerequisites_all:
2657 log2( f' {i!r}')
2658 pre_mtime = 0
2659 pre_path = None
2660 for prerequisite in prerequisites_all:
2661 if isinstance( prerequisite, str):
2662 mtime = _fs_mtime_newest( prerequisite)
2663 if mtime >= pre_mtime:
2664 pre_mtime = mtime
2665 pre_path = prerequisite
2666 elif prerequisite is None:
2667 pass
2668 elif prerequisite:
2669 doit = str(prerequisite)
2670 break
2671 if not doit:
2672 if pre_mtime > out_mtime:
2673 doit = f'Prerequisite is new: {pre_path!r}'
2674
2675 if doit:
2676 # Remove `cmd_path` before we run the command, so any failure
2677 # will force rerun next time.
2678 #
2679 try:
2680 os.remove( cmd_path)
2681 except Exception:
2682 pass
2683 log1( f'Running command because: {doit}')
2684
2685 run( command)
2686
2687 # Write the command we ran, into `cmd_path`.
2688 with open( cmd_path, 'w') as f:
2689 f.write( command)
2690 return True
2691 else:
2692 log1( f'Not running command because up to date: {out!r}')
2693
2694 if 0:
2695 log2( f'out_mtime={time.ctime(out_mtime)} pre_mtime={time.ctime(pre_mtime)}.'
2696 f' pre_path={pre_path!r}: returning {ret!r}.'
2697 )
2698
2699
2700 def _get_prerequisites(path):
2701 '''
2702 Returns list of prerequisites from Makefile-style dependency file, e.g.
2703 created by `cc -MD -MF <path>`.
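
For example (illustrative, not a doctest; names as in `build_extension()`), with a dependency file written by `-MD -MF`:

    prerequisites = _get_prerequisites(f'{path_so}.d')
    run_if(command, path_so, path_cpp, prerequisites)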
2704 '''
2705 ret = list()
2706 if os.path.isfile(path):
2707 with open(path) as f:
2708 for line in f:
2709 for item in line.split():
2710 if item.endswith( (':', '\\')):
2711 continue
2712 ret.append( item)
2713 return ret
2714
2715
2716 def _fs_mtime_newest( path):
2717 '''
2718 path:
2719 If a file, returns mtime of the file. If a directory, returns mtime of
2720 newest file anywhere within directory tree. Otherwise returns 0.
2721 '''
2722 ret = 0
2723 if os.path.isdir( path):
2724 for dirpath, dirnames, filenames in os.walk( path):
2725 for filename in filenames:
2726 path = os.path.join( dirpath, filename)
2727 ret = max( ret, _fs_mtime( path))
2728 else:
2729 ret = _fs_mtime( path)
2730 return ret
2731
2732
2733 def _flags( items, prefix='', quote=''):
2734 '''
2735 Turns sequence into string, prefixing/quoting each item.
2736 '''
2737 if not items:
2738 return ''
2739 if isinstance( items, str):
2740 items = items,
2741 ret = ''
2742 for item in items:
2743 if ret:
2744 ret += ' '
2745 ret += f'{prefix}{quote}{item}{quote}'
2746 return ret.strip()
2747
2748
2749 def _fs_mtime( filename, default=0):
2750 '''
2751 Returns mtime of file, or `default` if error - e.g. doesn't exist.
2752 '''
2753 try:
2754 return os.path.getmtime( filename)
2755 except OSError:
2756 return default
2757
2758
2759 def _normalise(name):
2760 # https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization
2761 return re.sub(r"[-_.]+", "-", name).lower()
2762
2763
2764 def _assert_version_pep_440(version):
2765 assert re.match(
2766 r'^([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$',
2767 version,
2768 ), \
2769 f'Bad version: {version!r}.'
2770
2771
2772 g_verbose = int(os.environ.get('PIPCL_VERBOSE', '1'))
2773
2774 def verbose(level=None):
2775 '''
2776 Sets verbose level if `level` is not None.
2777 Returns verbose level.
2778 '''
2779 global g_verbose
2780 if level is not None:
2781 g_verbose = level
2782 return g_verbose
2783
2784 g_log_line_numbers = True
2785
2786 def log_line_numbers(yes):
2787 '''
2788 Sets whether log output includes line numbers; disabling this helps keep doctest output stable.
2789 '''
2790 global g_log_line_numbers
2791 g_log_line_numbers = bool(yes)
2792
2793 def log0(text='', caller=1):
2794 _log(text, 0, caller+1)
2795
2796 def log1(text='', caller=1):
2797 _log(text, 1, caller+1)
2798
2799 def log2(text='', caller=1):
2800 _log(text, 2, caller+1)
2801
2802 def _log(text, level, caller):
2803 '''
2804 Logs lines with a prefix if <level> is less than or equal to <g_verbose>.
2805 '''
2806 if level <= g_verbose:
2807 fr = inspect.stack(context=0)[caller]
2808 filename = relpath(fr.filename)
2809 for line in text.split('\n'):
2810 if g_log_line_numbers:
2811 print(f'{filename}:{fr.lineno}:{fr.function}(): {line}', file=sys.stdout, flush=1)
2812 else:
2813 print(f'{filename}:{fr.function}(): {line}', file=sys.stdout, flush=1)
2814
2815
2816 def relpath(path, start=None):
2817 '''
2818 A safe alternative to os.path.relpath(), avoiding an exception on Windows
2819 if the drive needs to change - in this case we use os.path.abspath().
2820 '''
2821 if windows():
2822 try:
2823 return os.path.relpath(path, start)
2824 except ValueError:
2825 # os.path.relpath() fails if trying to change drives.
2826 return os.path.abspath(path)
2827 else:
2828 return os.path.relpath(path, start)
2829
2830
2831 def _so_suffix(use_so_versioning=True):
2832 '''
2833 Filename suffix for shared libraries is defined in pep-3149. The
2834 pep claims to only address posix systems, but the recommended
2835 sysconfig.get_config_var('EXT_SUFFIX') also seems to give the
2836 right string on Windows.
2837
2838 If use_so_versioning is false, we return only the last component of
2839 the suffix, which removes any version number, for example changing
2840 `.cp312-win_amd64.pyd` to `.pyd`.
2841 '''
2842 # Example values:
2843 # linux: .cpython-311-x86_64-linux-gnu.so
2844 # macos: .cpython-311-darwin.so
2845 # openbsd: .cpython-310.so
2846 # windows .cp311-win_amd64.pyd
2847 #
2848 # Only Linux and Windows seem to identify the cpu. For example shared
2849 # libraries in numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl are called
2850 # things like `numpy/core/_simd.cpython-311-darwin.so`.
2851 #
2852 ret = sysconfig.get_config_var('EXT_SUFFIX')
2853 if not use_so_versioning:
2854 # Use last component only.
2855 ret = os.path.splitext(ret)[1]
2856 return ret
2857
2858
2859 def get_soname(path):
2860 '''
2861 If we are on Linux and `path` is a softlink to a shared library for
2862 which `objdump -p` reports a 'SONAME', return the pointee; on OpenBSD
2863 return the newest matching versioned `.so`; otherwise return `path`.
2864 Useful if shared libraries have been created with `-Wl,-soname,...`, where we need to embed the versioned library.
2865 '''
2866 if linux() and os.path.islink(path):
2867 path2 = os.path.realpath(path)
2868 if subprocess.run(f'objdump -p {path2}|grep SONAME', shell=1, check=0).returncode == 0:
2869 return path2
2870 elif openbsd():
2871 # Return newest .so with version suffix.
2872 sos = glob.glob(f'{path}.*')
2873 log1(f'{sos=}')
2874 sos2 = list()
2875 for so in sos:
2876 suffix = so[len(path):]
2877 if not suffix or re.match('^[.][0-9.]*[0-9]$', suffix):
2878 sos2.append(so)
2879 sos2.sort(key=lambda p: os.path.getmtime(p))
2880 log1(f'{sos2=}')
2881 return sos2[-1]
2882 return path
2883
2884
2885 def current_py_limited_api():
2886 '''
2887 Returns the value of Py_LIMITED_API to build for the current Python.
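For example on Python 3.11 this returns '0x030b0000'.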
2888 '''
2889 a, b = map(int, platform.python_version().split('.')[:2])
2890 return f'0x{a:02x}{b:02x}0000'
2891
2892
2893 def install_dir(root=None):
2894 '''
2895 Returns install directory used by `install()`.
2896
2897 This will be `sysconfig.get_path('platlib')`, modified by `root` if not
2898 None.
2899 '''
2900 # todo: for pure-python we should use sysconfig.get_path('purelib') ?
2901 root2 = sysconfig.get_path('platlib')
2902 if root:
2903 if windows():
2904 # If we are in a venv, `sysconfig.get_path('platlib')`
2905 # can be absolute, e.g.
2906 # `C:\\...\\venv-pypackage-3.11.1-64\\Lib\\site-packages`, so it's
2907 # not clear how to append it to `root`. So we just use `root`.
2908 return root
2909 else:
2910 # E.g. if `root` is `install' and `sysconfig.get_path('platlib')`
2911 # is `/usr/local/lib/python3.9/site-packages`, we set `root2` to
2912 # `install/usr/local/lib/python3.9/site-packages`.
2913 #
2914 return os.path.join( root, root2.lstrip( os.sep))
2915 else:
2916 return root2
2917
2918
2919 class _Record:
2920 '''
2921 Internal - builds up text suitable for writing to a RECORD item, e.g.
2922 within a wheel.
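
Illustrative use (not a doctest; paths are placeholders):

    record = _Record()
    record.add_file('build/foo/__init__.py', 'foo/__init__.py')
    record.add_content('Hello', 'foo/hello.txt')
    text = record.get('foo-1.2.3.dist-info/RECORD')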
2923 '''
2924 def __init__(self):
2925 self.text = ''
2926
2927 def add_content(self, content, to_, verbose=True):
2928 if isinstance(content, str):
2929 content = content.encode('utf8')
2930
2931 # Specification for the line we write is supposed to be in
2932 # https://packaging.python.org/en/latest/specifications/binary-distribution-format
2933 # but it's not very clear.
2934 #
2935 h = hashlib.sha256(content)
2936 digest = h.digest()
2937 digest = base64.urlsafe_b64encode(digest)
2938 digest = digest.rstrip(b'=')
2939 digest = digest.decode('utf8')
2940
2941 self.text += f'{to_},sha256={digest},{len(content)}\n'
2942 if verbose:
2943 log2(f'Adding {to_}')
2944
2945 def add_file(self, from_, to_):
2946 log1(f'Adding file: {os.path.relpath(from_)} => {to_}')
2947 with open(from_, 'rb') as f:
2948 content = f.read()
2949 self.add_content(content, to_, verbose=False)
2950
2951 def get(self, record_path=None):
2952 '''
2953 Returns contents of the RECORD file. If `record_path` is
2954 specified we append a final line `<record_path>,,`; this can be
2955 used to include the RECORD file itself in the contents, with
2956 empty hash and size fields.
2957 '''
2958 ret = self.text
2959 if record_path:
2960 ret += f'{record_path},,\n'
2961 return ret
2962
2963
2964 class NewFiles:
2965 '''
2966 Detects new or modified files matching a glob pattern. Useful for
2967 detecting wheels created by pip or cibuildwheel etc.
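
Illustrative use (not a doctest; the pattern and command are placeholders):

    nf = NewFiles('dist/*.whl')
    run(f'{sys.executable} -m pip wheel -w dist .')
    wheel = nf.get_one()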
2968 '''
2969 def __init__(self, glob_pattern):
2970 # Find current matches of <glob_pattern>.
2971 self.glob_pattern = glob_pattern
2972 self.items0 = self._items()
2973 def get(self):
2974 '''
2975 Returns list of new matches of <glob_pattern> - paths of files that
2976 were not present previously, or have different mtimes or have different
2977 contents.
2978 '''
2979 ret = list()
2980 items = self._items()
2981 for path, id_ in items.items():
2982 id0 = self.items0.get(path)
2983 if id0 != id_:
2984 #mtime0, hash0 = id0
2985 #mtime1, hash1 = id_
2986 #log0(f'New/modified file {path=}.')
2987 #log0(f' {mtime0=} {"==" if mtime0==mtime1 else "!="} {mtime1=}.')
2988 #log0(f' {hash0=} {"==" if hash0==hash1 else "!="} {hash1=}.')
2989 ret.append(path)
2990 return ret
2991 def get_one(self):
2992 '''
2993 Returns new match of <glob_pattern>, asserting that there is exactly
2994 one.
2995 '''
2996 ret = self.get()
2997 assert len(ret) == 1, f'{len(ret)=}'
2998 return ret[0]
2999 def _file_id(self, path):
3000 mtime = os.stat(path).st_mtime
3001 with open(path, 'rb') as f:
3002 content = f.read()
3003 hash_ = hashlib.md5(content).digest()
3004 # With python >= 3.11 we can do:
3005 #hash_ = hashlib.file_digest(f, hashlib.md5).digest()
3006 return mtime, hash_
3007 def _items(self):
3008 ret = dict()
3009 for path in glob.glob(self.glob_pattern):
3010 if os.path.isfile(path):
3011 ret[path] = self._file_id(path)
3012 return ret
3013
3014
3015 def swig_get(swig, quick, swig_local='pipcl-swig-git'):
3016 '''
3017 Returns <swig> or a new swig binary.
3018
3019 If <swig> is true and starts with 'git:' (not Windows), the remaining text
3020 is passed to git_get() and we clone/update/build swig, and return the built
3021 binary. We default to the main swig repository, branch master, so for
3022 example 'git:' will return the latest swig from branch master.
3023
3024 Otherwise we simply return <swig>.
3025
3026 Args:
3027 swig:
3028 If starts with 'git:', passed as <remote> arg to git_get().
3029 quick:
3030 If true, we do not update/build local checkout if the binary is
3031 already present.
3032 swig_local:
3033 path to use for checkout.
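
Illustrative use (not a doctest; `foo.i` is a placeholder):

    swig = swig_get('git:--branch master', quick=True)
    run(f'{swig} -python -c++ -module foo -outdir build -o build/foo_wrap.cpp foo.i')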
3034 '''
3035 if swig and swig.startswith('git:'):
3036 assert platform.system() != 'Windows'
3037 swig_local = os.path.abspath(swig_local)
3038 # Note that {swig_local}/install/bin/swig doesn't work on MacOS because
3039 # {swig_local}/INSTALL is a file and the fs is case-insensitive.
3040 swig_binary = f'{swig_local}/install-dir/bin/swig'
3041 if quick and os.path.isfile(swig_binary):
3042 log1(f'{quick=} and {swig_binary=} already exists, so not downloading/building.')
3043 else:
3044 # Clone swig.
3045 swig_env_extra = None
3046 git_get(
3047 swig,
3048 swig_local,
3049 default_remote='https://github.com/swig/swig.git',
3050 branch='master',
3051 )
3052 if darwin():
3053 run(f'brew install automake')
3054 run(f'brew install pcre2')
3055 # Default bison doesn't work, and Brew's bison is not added to $PATH.
3056 #
3057 # > bison is keg-only, which means it was not symlinked into /opt/homebrew,
3058 # > because macOS already provides this software and installing another version in
3059 # > parallel can cause all kinds of trouble.
3060 # >
3061 # > If you need to have bison first in your PATH, run:
3062 # > echo 'export PATH="/opt/homebrew/opt/bison/bin:$PATH"' >> ~/.zshrc
3063 #
3064 run(f'brew install bison')
3065 PATH = os.environ['PATH']
3066 PATH = f'/opt/homebrew/opt/bison/bin:{PATH}'
3067 swig_env_extra = dict(PATH=PATH)
3068 # Build swig.
3069 run(f'cd {swig_local} && ./autogen.sh', env_extra=swig_env_extra)
3070 run(f'cd {swig_local} && ./configure --prefix={swig_local}/install-dir', env_extra=swig_env_extra)
3071 run(f'cd {swig_local} && make', env_extra=swig_env_extra)
3072 run(f'cd {swig_local} && make install', env_extra=swig_env_extra)
3073 assert os.path.isfile(swig_binary)
3074 return swig_binary
3075 else:
3076 return swig
3077
3078
3079 def _show_dict(d):
3080 ret = ''
3081 for n in sorted(d.keys()):
3082 v = d[n]
3083 ret += f' {n}: {v!r}\n'
3084 return ret
3085
3086 def show_sysconfig():
3087 '''
3088 Shows contents of sysconfig.get_paths() and sysconfig.get_config_vars() dicts.
3089 '''
3092 log0(f'show_sysconfig().')
3093 log0(f'sysconfig.get_paths():\n{_show_dict(sysconfig.get_paths())}')
3094 log0(f'sysconfig.get_config_vars():\n{_show_dict(sysconfig.get_config_vars())}')
3095
3096
3097 def sysconfig_python_flags():
3098 '''
3099 Returns include paths and library directory for Python.
3100
3101 Uses sysconfig.*(), overridden by environment variables
3102 PIPCL_SYSCONFIG_PATH_include, PIPCL_SYSCONFIG_PATH_platinclude and
3103 PIPCL_SYSCONFIG_CONFIG_VAR_LIBDIR if set.
3104 '''
3105 include1_ = os.environ.get('PIPCL_SYSCONFIG_PATH_include') or sysconfig.get_path('include')
3106 include2_ = os.environ.get('PIPCL_SYSCONFIG_PATH_platinclude') or sysconfig.get_path('platinclude')
3107 ldflags_ = os.environ.get('PIPCL_SYSCONFIG_CONFIG_VAR_LIBDIR') or sysconfig.get_config_var('LIBDIR')
3108
3109 includes_ = [include1_]
3110 if include2_ != include1_:
3111 includes_.append(include2_)
3112 if windows():
3113 includes_ = [f'/I"{i}"' for i in includes_]
3114 ldflags_ = f'/LIBPATH:"{ldflags_}"'
3115 else:
3116 includes_ = [f'-I {i}' for i in includes_]
3117 ldflags_ = f'-L {ldflags_}'
3118 includes_ = ' '.join(includes_)
3119 return includes_, ldflags_
3120
3121
3122 if __name__ == '__main__':
3123 # Internal-only limited command line support, used if
3124 # graal_legacy_python_config is true.
3125 #
3126 includes, ldflags = sysconfig_python_flags()
3127 if sys.argv[1:] == ['--graal-legacy-python-config', '--includes']:
3128 print(includes)
3129 elif sys.argv[1:] == ['--graal-legacy-python-config', '--ldflags']:
3130 print(ldflags)
3131 else:
3132 assert 0, f'Expected `--graal-legacy-python-config --includes|--ldflags` but {sys.argv=}'