changeset 284:b65d25882e44
REFACTOR: sdist generation: postprocess an sdist to include symbolic links as symbolic links.
A standard sdist contains no symlinks: setuptools dereferences every symlink it
encounters in the source tree and ships a copy of the link target instead.
| author | Franz Glasner <fzglas.hg@dom66.de> |
|---|---|
| date | Sun, 23 Feb 2025 21:27:48 +0100 |
| parents | 99b78fa04bc1 |
| children | 39a19c008708 |
| files | MANIFEST.in _postprocess-sdist.py setup.cfg |
| diffstat | 3 files changed, 109 insertions(+), 1 deletions(-) |
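The premise in the commit message is easy to demonstrate: Python's `tarfile` stores a symlink as a symlink entry by default, which is exactly what the postprocessing step below relies on. A minimal sketch (not part of this changeset; the `demo/` paths are made up):

```python
# Sketch: tarfile preserves symlinks as symlink entries, whereas the
# sdist machinery would have copied the link target's bytes instead.
import os
import tarfile

os.makedirs("demo", exist_ok=True)
with open("demo/target.txt", "w") as f:
    f.write("payload\n")
if not os.path.lexists("demo/link.txt"):
    os.symlink("target.txt", "demo/link.txt")

with tarfile.open("demo.tar", "w") as tf:
    tf.add("demo", recursive=True)

with tarfile.open("demo.tar", "r") as tf:
    info = tf.getmember("demo/link.txt")
    print(info.issym(), info.linkname)   # True target.txt
```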
```diff
--- a/MANIFEST.in	Sun Feb 23 14:45:26 2025 +0100
+++ b/MANIFEST.in	Sun Feb 23 21:27:48 2025 +0100
@@ -2,4 +2,5 @@
 graft docs
 prune docs/_build
 graft tests
+prune tests/data
 global-exclude *.pyc *.pyo
```
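The `prune tests/data` line above keeps setuptools from shipping dereferenced copies of the test data; the script in the next hunk re-adds the directory with its symlinks intact. That script reads and asserts a few `setup.cfg` keys. The setup.cfg hunk of this changeset is not shown here, but the keys the script uses imply an excerpt roughly like this (hypothetical reconstruction):

```ini
# Hypothetical setup.cfg excerpt implied by _postprocess-sdist.py; the
# project's actual file is not shown in this changeset.
[metadata]
name = py-cutils
version = attr: cutils.__version__

[sdist]
# an uncompressed tar is required so Python can append to the archive
formats = tar
```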
```diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/_postprocess-sdist.py	Sun Feb 23 21:27:48 2025 +0100
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+"""Postprocess a .tar-sdist to include tests/data with symlinks as symlinks
+
+"""
+
+from __future__ import print_function, absolute_import
+
+import ast
+try:
+    from configparser import ConfigParser
+except ImportError:
+    from ConfigParser import SafeConfigParser as ConfigParser
+import io
+import os
+import tarfile
+
+import cutils
+import cutils.util.walk
+
+
+def main():
+    with io.open("setup.cfg", "rt", encoding="utf-8") as cfgfile:
+        cp = ConfigParser()
+        if hasattr(cp, "read_file"):
+            cp.read_file(cfgfile, "setup.cfg")
+        else:
+            cp.readfp(cfgfile, "setup.cfg")
+    project_name = cp.get("metadata", "name")
+    project_version = cp.get("metadata", "version")
+    if project_version.startswith("attr:"):
+        assert project_version == "attr: cutils.__version__"
+        project_version = ast.literal_eval(repr(cutils.__version__))
+    #
+    # Compressed tar files cannot be modified by Python: make sure the
+    # originally generated archive is uncompressed.
+    #
+    assert cp.get("sdist", "formats") == "tar"
+
+    archive_name = "{}-{}.tar".format(project_name, project_version)
+    archive_path = "dist/" + archive_name
+    assert os.path.isfile(archive_path)
+
+    # the directory within the archive
+    archive_path_prefix = "{}-{}".format(project_name, project_version)
+
+    egg_directory = "{}.egg-info".format(project_name.replace("-", "_"))
+    assert os.path.isdir(egg_directory)
+    sources_txt_path = "{}/SOURCES.txt".format(egg_directory)
+    sources_txt_arcname = "{}/{}/SOURCES.txt".format(
+        archive_path_prefix,
+        egg_directory)
+
+    with tarfile.TarFile(archive_path, "r") as tf:
+        sf = tf.extractfile(sources_txt_arcname)
+        try:
+            sources_txt = sf.read()
+        finally:
+            sf.close()
+
+    with tarfile.TarFile(archive_path, "a") as tf:
+        arcname = "{}/tests/data".format(archive_path_prefix)
+        try:
+            info = tf.getmember(arcname)
+        except KeyError:
+            pass
+        else:
+            raise RuntimeError("already postprocessed")
+        pre_names = set(tf.getnames())
+        tf.add("tests/data", arcname=arcname, recursive=True)
+
+        #
+        # Determine the new files and symlinks that are to be added
+        # to SOURCES.txt. Skip directories.
+        #
+        post_names = set(tf.getnames())
+        new_names = list(post_names - pre_names)
+        new_names.sort()
+        new_sources = []
+
+        for np in new_names:
+            nn = np[len(archive_path_prefix)+1:]
+            info = tf.getmember(np)
+            if not info.isdir():
+                new_sources.append(nn)
+
+        # Augment SOURCES.txt and add it to the archive
+        sources_info = tf.gettarinfo(
+            sources_txt_path, arcname=sources_txt_arcname)
+        sf = io.BytesIO()
+        sf.write(sources_txt)
+        if not sources_txt.endswith(b'\n'):
+            sf.write(b'\n')
+        sf.write(b('\n'.join(new_sources)))
+        sources_info.size = len(sf.getvalue())
+        sf.seek(0)
+        tf.addfile(sources_info, sf)
+
+
+def b(buf, encoding="ascii"):
+    if isinstance(buf, bytes):
+        return buf
+    else:
+        return buf.encode(encoding)
+
+
+if __name__ == "__main__":
+    main()
```
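Once the script has run, the result can be checked by listing the symlink members of the postprocessed archive. A quick sketch, assuming the `{name}-{version}.tar` naming used above (the version `0.0.0` is a placeholder):

```python
# Sketch: confirm the postprocessed sdist carries symlinks as symlinks.
import tarfile

with tarfile.open("dist/py-cutils-0.0.0.tar", "r") as tf:
    symlinks = [m for m in tf.getmembers() if m.issym()]
    for m in symlinks:
        print("{} -> {}".format(m.name, m.linkname))
    assert symlinks, "no symlinks found; postprocessing may not have run"
```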
