view intree-build-helper/cutils_build.py @ 408:3e28e5aacb8a default

A separate "wheel" package is not used by modern build backends any more.
author Franz Glasner <fzglas.hg@dom66.de>
date Tue, 17 Feb 2026 17:16:27 +0100
parents deabdfed3b96
children
line wrap: on
line source

# -*- coding: utf-8 -*-
# :-
# SPDX-FileCopyrightText: © 2025-2026 Franz Glasner
# SPDX-License-Identifier: BSD-3-Clause
# :-
r"""An intree build backend that is mostly just a wrapper for
:mod:`setuptools.build_meta`.

Augments :func:`build_sdist` to automatically postprocess it
(i.e. add symlinks as symlinks).

"""

# Firstly, we are just a wrapper for setuptools.build_meta
from setuptools.build_meta import *       # noqa:F403,F401
from setuptools.build_meta import build_sdist as _orig_build_sdist

import gzip as _gzip
import io as _io
import logging as _logging
import os as _os
import tarfile as _tarfile


_log = _logging.getLogger(__name__)


def _postprocess_sdist(sdist_directory, sdist_archive, config_settings):
    """Append ``tests/data`` (with symlinks preserved) to a freshly built sdist.

    The setuptools-produced ``.tar.gz`` is gunzipped to a plain ``.tar``
    next to it, the local ``tests/data`` tree is added to the tar archive
    (``tarfile`` stores symlinks as symlinks, which is the whole point of
    this postprocessing), the embedded ``SOURCES.txt`` is extended with
    the names of the newly added members, and the archive is re-gzipped
    in place over the original ``.tar.gz``.

    :param sdist_directory: directory containing *sdist_archive*
    :param sdist_archive: bare filename of the sdist (``*.tar.gz``)
    :param config_settings: build-backend config settings (unused here)
    :return: *sdist_archive* unchanged (the file content was rewritten)
    :raises ValueError: if the archive name does not match the PEP 625
        ``<name>-<version>.tar.gz`` pattern
    :raises RuntimeError: if the ``*.egg-info`` directory is missing or
        the archive was already postprocessed
    """
    _log.info("post-processing the sdist %r ...", sdist_archive)
    #
    # PEP 625 requires that sdist archive filenames are of the form
    # <normalized_project_name>-<project_version>.tar.gz
    #
    if sdist_archive.endswith(".tar.gz"):
        # strip ".gz" -> name of the intermediate uncompressed tar
        uncompressed_sdist_archive = sdist_archive[:-3]
        # the directory prefix within the archive (strip ".tar" as well)
        archive_path_prefix = uncompressed_sdist_archive[:-4]
        # rpartition: the version itself must not contain '-', while the
        # normalized project name may (hence split at the LAST dash)
        normalized_project_name, sep, project_version = \
            archive_path_prefix.rpartition('-')
        if not sep:
            raise ValueError(
                "unexpected archive path prefix: %s" % (archive_path_prefix,))
    else:
        raise ValueError("unexpected archive name: %s" % (sdist_archive,))

    uncompressed_sdist_path = f"{sdist_directory}/{uncompressed_sdist_archive}"

    # Metadata directories in the FS and the archive
    # NOTE(review): assumes the cwd is the project root where setuptools
    # left the *.egg-info directory -- true when invoked as a build
    # backend, but worth confirming for other call sites.
    egg_directory = f"{normalized_project_name}.egg-info"
    if not _os.path.isdir(egg_directory):
        raise RuntimeError("directory does not exist: %s" % (egg_directory,))
    sources_txt_path = f"{egg_directory}/SOURCES.txt"
    sources_txt_arcname = f"{archive_path_prefix}/{egg_directory}/SOURCES.txt"

    if _os.path.isfile(uncompressed_sdist_path):
        _log.warning("warning: overwriting existing %r",
                     uncompressed_sdist_path)

    # Uncompress the .tar.gz into a plain .tar so that tarfile can open
    # it in append mode ("a" is not supported for compressed archives).
    _log.info("uncompressing the created archive %r into %r",
              f"{sdist_directory}/{sdist_archive}",
              uncompressed_sdist_path)
    with _gzip.GzipFile(f"{sdist_directory}/{sdist_archive}",
                        mode="rb") as ca:
        with open(uncompressed_sdist_path, "wb") as uca:
            while True:
                data = ca.read(64*1024)
                if not data:
                    break
                uca.write(data)

    # Get SOURCES.txt from the metadata within the sdist
    with _tarfile.TarFile(uncompressed_sdist_path, "r") as tf:
        sf = tf.extractfile(sources_txt_arcname)
        try:
            sources_txt = sf.read()
        finally:
            sf.close()

    # Reopen in append mode to add tests/data and the updated SOURCES.txt.
    with _tarfile.TarFile(uncompressed_sdist_path, "a") as tf:
        arcname = "{}/tests/data".format(archive_path_prefix)
        try:
            info = tf.getmember(arcname)
        except KeyError:
            # tests/data not yet in the archive -- the normal case
            pass
        else:
            # guard against running this postprocessing twice
            raise RuntimeError("already postprocessed")
        pre_names = set(tf.getnames())
        tf.add("tests/data", arcname=arcname, recursive=True)

        #
        # Determine the new files and symlinks that are to be added
        # to SOURCES.txt. Skip directories.
        #
        post_names = set(tf.getnames())
        new_names = list(post_names - pre_names)
        new_names.sort()
        new_sources = []

        for np in new_names:
            # strip "<archive_path_prefix>/" -- SOURCES.txt entries are
            # relative to the project root
            nn = np[len(archive_path_prefix)+1:]
            info = tf.getmember(np)
            if not info.isdir():
                _log.info("adding %s -> %s", nn, np)
                new_sources.append(nn)

        # Augment SOURCES.txt and add it to the archive.  The tarinfo
        # (mode, owner, mtime) is taken from the on-disk SOURCES.txt;
        # the content is the in-archive one plus the new entries.
        sources_info = tf.gettarinfo(
            sources_txt_path, arcname=sources_txt_arcname)
        sf = _io.BytesIO()
        sf.write(sources_txt)
        if not sources_txt.endswith(b'\n'):
            sf.write(b'\n')
        sf.write(_b('\n'.join(new_sources)))
        # size must reflect the augmented content, not the on-disk file
        sources_info.size = len(sf.getvalue())
        sf.seek(0)
        #
        # This adds SOURCES.txt a 2nd time -- effectively overwriting
        # the "earlier" one.
        #
        tf.addfile(sources_info, sf)

    # Compress the augmented .tar back over the original .tar.gz.
    # (The intermediate .tar is intentionally left in sdist_directory;
    # see the overwrite warning above.)
    _log.info("recompressing the augmented archive %r into %r",
              uncompressed_sdist_path,
              f"{sdist_directory}/{sdist_archive}")
    with open(uncompressed_sdist_path, "rb") as uca:
        with open(f"{sdist_directory}/{sdist_archive}", "wb") as ca:
            # embed the tar's name (not the .gz name) in the gzip header
            with _gzip.GzipFile(filename=uncompressed_sdist_archive,
                                fileobj=ca,
                                mode="wb") as gzfile:
                while True:
                    data = uca.read(64*1024)
                    if not data:
                        break
                    gzfile.write(data)

    _log.info("post-processing the sdist done.")
    return sdist_archive


def _b(buf, encoding="ascii"):
    if isinstance(buf, bytes):
        return buf
    else:
        return buf.encode(encoding)


def build_sdist(sdist_directory, config_settings=None):
    """PEP 517 ``build_sdist`` hook with in-tree postprocessing.

    Delegates to :func:`setuptools.build_meta.build_sdist` and then runs
    :func:`_postprocess_sdist` on the resulting archive so that symlinks
    are stored as symlinks.

    :param sdist_directory: directory in which the sdist is created
    :param config_settings: optional backend config settings, passed
        through to setuptools
    :return: the bare filename of the (postprocessed) sdist archive
    """
    # NOTE: logging is obviously set to level WARN (default?)
    _log.debug(
        "debug: build_sdist in cutils_build called with params"
        " sdist_directory=%r, config_settings=%r",
        sdist_directory, config_settings)
    # NOTE: orig_build_sdist re-configures logging to level INFO
    sdist_archive = _orig_build_sdist(
        sdist_directory, config_settings=config_settings)
    return _postprocess_sdist(sdist_directory, sdist_archive, config_settings)