changeset 407:d3429477cd55 build-2.7

MERGE: current trunk
author Franz Glasner <fzglas.hg@dom66.de>
date Tue, 17 Feb 2026 16:19:53 +0100
parents 6930917a3752 (current diff) deabdfed3b96 (diff)
children 8809d79777c3
files Makefile pyproject.toml
diffstat 4 files changed, 171 insertions(+), 6 deletions(-) [+]
line wrap: on
line diff
--- a/MANIFEST.in	Tue Feb 17 15:31:50 2026 +0100
+++ b/MANIFEST.in	Tue Feb 17 16:19:53 2026 +0100
@@ -1,5 +1,6 @@
 include .hg* *.txt *.py
 include cutils/crcmod/REUSE.toml
+include intree-build-helper/*.py
 graft LICENSES
 graft docs
 prune docs/_build
--- a/README.txt	Tue Feb 17 15:31:50 2026 +0100
+++ b/README.txt	Tue Feb 17 16:19:53 2026 +0100
@@ -7,8 +7,14 @@
 :Date:      2025-04-02
 :Copyright: © 2020-2026 Franz Glasner.
 :License:   BSD 3-Clause "New" or "Revised" License.
-            See LICENSE.txt for details. If you cannot find LICENSE.txt
-	    see <https://opensource.org/licenses/BSD-3-Clause>.
+            See BSD-3-Clause.txt for details.
+	    If you cannot find BSD-3-Clause.txt see
+	    <https://opensource.org/licenses/BSD-3-Clause>.
+
+            Parts of the code have the following licenses:
+
+            - MIT License
+
 :ID:        @(#) $HGid$
 
 Pure Python implementations of some (Unix) coreutils and some additional
--- a/cutils/__init__.py	Tue Feb 17 15:31:50 2026 +0100
+++ b/cutils/__init__.py	Tue Feb 17 16:19:53 2026 +0100
@@ -1,14 +1,14 @@
 # -*- coding: utf-8 -*-
 # :-
-# SPDX-FileCopyrightText: © 2020-2025 Franz Glasner
+# SPDX-FileCopyrightText: © 2020-2026 Franz Glasner
 # SPDX-License-Identifier: BSD-3-Clause
 # :-
 r"""
 :Author:    Franz Glasner
-:Copyright: © 2020-2025 Franz Glasner
+:Copyright: © 2020-2026 Franz Glasner
 :License:   BSD 3-Clause "New" or "Revised" License.
-            See :ref:`LICENSE.txt <license>` for details.
-            If you cannot find LICENSE.txt see
+            See :file:`BSD-3-Clause.txt` for details.
+            If you cannot find BSD-3-Clause.txt see
             <https://opensource.org/licenses/BSD-3-Clause>.
 :ID:        @(#) $HGid$
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/intree-build-helper/cutils_build.py	Tue Feb 17 16:19:53 2026 +0100
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+# :-
+# SPDX-FileCopyrightText: © 2025-2026 Franz Glasner
+# SPDX-License-Identifier: BSD-3-Clause
+# :-
+r"""An intree build backend that is mostly just a wrapper for
+:mod:`setuptools.build_meta`.
+
+Augments :func:`build_sdist` to automatically postprocess it
+(i.e. add symlinks as symlinks).
+
+"""
+
+# Firstly, we are just a wrapper for setuptools.build_meta
+from setuptools.build_meta import *       # noqa:F403,F401
+from setuptools.build_meta import build_sdist as _orig_build_sdist
+
+import gzip as _gzip
+import io as _io
+import logging as _logging
+import os as _os
+import tarfile as _tarfile
+
+
+_log = _logging.getLogger(__name__)
+
+
+def _postprocess_sdist(sdist_directory, sdist_archive, config_settings):
+    _log.info("post-processing the sdist %r ...", sdist_archive)
+    #
+    # PEP 625 requires that sdist archive filenames are of the form
+    # <normalized_project_name>-<project_version>.tar.gz
+    #
+    if sdist_archive.endswith(".tar.gz"):
+        uncompressed_sdist_archive = sdist_archive[:-3]
+        # the directory prefix within the archive
+        archive_path_prefix = uncompressed_sdist_archive[:-4]
+        normalized_project_name, sep, project_version = \
+            archive_path_prefix.rpartition('-')
+        if not sep:
+            raise ValueError(
+                "unexpected archive path prefix: %s" % (archive_path_prefix,))
+    else:
+        raise ValueError("unexpected archive name: %s" % (sdist_archive,))
+
+    uncompressed_sdist_path = f"{sdist_directory}/{uncompressed_sdist_archive}"
+
+    # Metadata directories in the FS and the archive
+    egg_directory = f"{normalized_project_name}.egg-info"
+    if not _os.path.isdir(egg_directory):
+        raise RuntimeError("directory does not exist: %s" % (egg_directory,))
+    sources_txt_path = f"{egg_directory}/SOURCES.txt"
+    sources_txt_arcname = f"{archive_path_prefix}/{egg_directory}/SOURCES.txt"
+
+    if _os.path.isfile(uncompressed_sdist_path):
+        _log.warning("warning: overwriting existing %r",
+                     uncompressed_sdist_path)
+
+    # Uncompress
+    _log.info("uncompressing the created archive %r into %r",
+              f"{sdist_directory}/{sdist_archive}",
+              uncompressed_sdist_path)
+    with _gzip.GzipFile(f"{sdist_directory}/{sdist_archive}",
+                        mode="rb") as ca:
+        with open(uncompressed_sdist_path, "wb") as uca:
+            while True:
+                data = ca.read(64*1024)
+                if not data:
+                    break
+                uca.write(data)
+
+    # Get SOURCES.txt from the metadata within the sdist
+    with _tarfile.TarFile(uncompressed_sdist_path, "r") as tf:
+        sf = tf.extractfile(sources_txt_arcname)
+        try:
+            sources_txt = sf.read()
+        finally:
+            sf.close()
+
+    with _tarfile.TarFile(uncompressed_sdist_path, "a") as tf:
+        arcname = "{}/tests/data".format(archive_path_prefix)
+        try:
+            info = tf.getmember(arcname)
+        except KeyError:
+            pass
+        else:
+            raise RuntimeError("already postprocessed")
+        pre_names = set(tf.getnames())
+        tf.add("tests/data", arcname=arcname, recursive=True)
+
+        #
+        # Determine the new files and symlinks that are to be added
+        # to SOURCES.txt. Skip directories.
+        #
+        post_names = set(tf.getnames())
+        new_names = list(post_names - pre_names)
+        new_names.sort()
+        new_sources = []
+
+        for np in new_names:
+            nn = np[len(archive_path_prefix)+1:]
+            info = tf.getmember(np)
+            if not info.isdir():
+                _log.info("adding %s -> %s", nn, np)
+                new_sources.append(nn)
+
+        # Augment SOURCES.txt and add it to the archive
+        sources_info = tf.gettarinfo(
+            sources_txt_path, arcname=sources_txt_arcname)
+        sf = _io.BytesIO()
+        sf.write(sources_txt)
+        if not sources_txt.endswith(b'\n'):
+            sf.write(b'\n')
+        sf.write(_b('\n'.join(new_sources)))
+        sources_info.size = len(sf.getvalue())
+        sf.seek(0)
+        #
+        # This adds SOURCES.txt a 2nd time -- effectively overwriting
+        # the "earlier" one.
+        #
+        tf.addfile(sources_info, sf)
+
+    # Compress
+    _log.info("recompressing the augmented archive %r into %r",
+              uncompressed_sdist_path,
+              f"{sdist_directory}/{sdist_archive}")
+    with open(uncompressed_sdist_path, "rb") as uca:
+        with open(f"{sdist_directory}/{sdist_archive}", "wb") as ca:
+            with _gzip.GzipFile(filename=uncompressed_sdist_archive,
+                                fileobj=ca,
+                                mode="wb") as gzfile:
+                while True:
+                    data = uca.read(64*1024)
+                    if not data:
+                        break
+                    gzfile.write(data)
+
+    _log.info("post-processing the sdist done.")
+    return sdist_archive
+
+
+def _b(buf, encoding="ascii"):
+    if isinstance(buf, bytes):
+        return buf
+    else:
+        return buf.encode(encoding)
+
+
+def build_sdist(sdist_directory, config_settings=None):
+    # NOTE: logging is apparently still at the default WARNING level here
+    _log.debug(
+        "debug: build_sdist in cutils_build called with params"
+        " sdist_directory=%r, config_settings=%r",
+        sdist_directory, config_settings)
+    # NOTE: orig_build_sdist re-configures logging to level INFO
+    sdist_archive = _orig_build_sdist(
+        sdist_directory, config_settings=config_settings)
+    return _postprocess_sdist(sdist_directory, sdist_archive, config_settings)