From 2b4e6e7ae06963ce7fd696a6e7d8759b85fbf830 Mon Sep 17 00:00:00 2001
From: Emmett Butler
Date: Fri, 19 May 2023 12:31:47 -0700
Subject: [PATCH 01/33] add linting workflow

---
 .github/workflows/lint.yml | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)
 create mode 100644 .github/workflows/lint.yml

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000..2582b0d
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,25 @@
+name: lint
+on:
+  pull_request:
+    types: ['opened', 'edited', 'reopened', 'synchronize']
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@v4
+        name: Install Python
+        with:
+          python-version: '3.8'
+      - name: Install Dependencies
+        run: pip install black isort flake8
+
+      - name: Lint Python Code
+        uses: actions/github-script@v6.4.1
+        with:
+          script: |
+            black --check pynixify
+            isort --check pynixify
+            flake8 pynixify

From 5a35eaf1a231c4f56a72e7913e0b1f758503e33c Mon Sep 17 00:00:00 2001
From: Emmett Butler
Date: Fri, 19 May 2023 12:36:51 -0700
Subject: [PATCH 02/33] no flake8

---
 .github/workflows/lint.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 2582b0d..846b729 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -14,7 +14,7 @@ jobs:
         with:
           python-version: '3.8'
       - name: Install Dependencies
-        run: pip install black isort flake8
+        run: pip install black isort
 
       - name: Lint Python Code
         uses: actions/github-script@v6.4.1
@@ -22,4 +22,3 @@ with:
         script: |
           black --check pynixify
           isort --check pynixify
-          flake8 pynixify

From 96ae2e4a013e92f840237999893dc6bc973b3c07 Mon Sep 17 00:00:00 2001
From: Emmett Butler
Date: Fri, 19 May 2023 12:38:07 -0700
Subject: [PATCH 03/33] run instead of script

---
 .github/workflows/lint.yml | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 846b729..b7db832 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -18,7 +18,6 @@
 
       - name: Lint Python Code
         uses: actions/github-script@v6.4.1
-        with:
-          script: |
-            black --check pynixify
-            isort --check pynixify
+        run: |
+          black --check pynixify
+          isort --check pynixify

From 1178fa48e9a82bd30a590d6b7e5ba8d4c0972eb6 Mon Sep 17 00:00:00 2001
From: Emmett Butler
Date: Fri, 19 May 2023 12:40:34 -0700
Subject: [PATCH 04/33] noscript

---
 .github/workflows/lint.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index b7db832..8977986 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -17,7 +17,6 @@ jobs:
         run: pip install black isort
 
       - name: Lint Python Code
-        uses: actions/github-script@v6.4.1
         run: |
           black --check pynixify
           isort --check pynixify

From 9130df2369d5a0af867fbac43d166a496bd81b72 Mon Sep 17 00:00:00 2001
From: Emmett Butler
Date: Fri, 19 May 2023 12:41:23 -0700
Subject: [PATCH 05/33] apply black and isort

---
 pynixify/base.py                 |  34 ++--
 pynixify/command.py              | 276 +++++++++++++++----------------
 pynixify/exceptions.py           |   4 +
 pynixify/expression_builder.py   | 115 +++++++------
 pynixify/nixpkgs_sources.py      |  70 ++++----
 pynixify/package_requirements.py |  40 ++---
 pynixify/pypi_api.py             |  92 ++++++-----
 pynixify/version_chooser.py      | 110 ++++++------
 8 files changed, 394 insertions(+), 347 deletions(-)

diff --git a/pynixify/base.py b/pynixify/base.py
index
494f98d..ea11013 100644 --- a/pynixify/base.py +++ b/pynixify/base.py @@ -15,10 +15,13 @@ # along with this program. If not, see . import json -from pathlib import Path from dataclasses import dataclass -from typing import Optional, Dict -from packaging.version import Version, LegacyVersion, parse as parse_original +from pathlib import Path +from typing import Dict, Optional + +from packaging.version import LegacyVersion, Version +from packaging.version import parse as parse_original + @dataclass class PackageMetadata: @@ -26,6 +29,7 @@ class PackageMetadata: license: Optional[str] url: Optional[str] + @dataclass class Package: version: Version @@ -38,9 +42,10 @@ def attr(self) -> str: raise NotImplementedError() async def metadata(self) -> PackageMetadata: - from pynixify.package_requirements import run_nix_build, NixBuildError + from pynixify.package_requirements import NixBuildError, run_nix_build + source = await self.source() - if source.name.endswith('.whl'): + if source.name.endswith(".whl"): # Some nixpkgs packages use a wheel as source, which don't have a # setup.py file. For now, ignore them assume they have no metadata return PackageMetadata( @@ -52,23 +57,23 @@ async def metadata(self) -> PackageMetadata: assert nix_expression_path.exists() nix_store_path = await run_nix_build( str(nix_expression_path), - '--no-out-link', - '--no-build-output', - '--arg', - 'file', - str(source.resolve()) + "--no-out-link", + "--no-build-output", + "--arg", + "file", + str(source.resolve()), ) - if (nix_store_path / 'failed').exists(): - print(f'Error parsing metadata of {source}. Assuming it has no metadata.') + if (nix_store_path / "failed").exists(): + print(f"Error parsing metadata of {source}. Assuming it has no metadata.") return PackageMetadata( url=None, description=None, license=None, ) - with (nix_store_path / 'meta.json').open() as fp: + with (nix_store_path / "meta.json").open() as fp: metadata = json.load(fp) try: - version: Optional[str] = metadata.pop('version') + version: Optional[str] = metadata.pop("version") except KeyError: pass else: @@ -78,6 +83,7 @@ async def metadata(self) -> PackageMetadata: self.version = Version(version) return PackageMetadata(**metadata) + # mypy hack def parse_version(version: str) -> Version: v = parse_original(version) diff --git a/pynixify/command.py b/pynixify/command.py index cfcc6b8..a66437a 100644 --- a/pynixify/command.py +++ b/pynixify/command.py @@ -14,63 +14,53 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import re -import os -import asyncio import argparse +import asyncio +import os +import re from pathlib import Path +from typing import Dict, List, Optional, Tuple from urllib.parse import urlparse -from typing import List, Dict, Optional, Tuple + +from packaging.requirements import Requirement +from packaging.utils import canonicalize_name from pkg_resources import parse_requirements + import pynixify.nixpkgs_sources from pynixify.base import Package -from pynixify.nixpkgs_sources import ( - NixpkgsData, - load_nixpkgs_data, - set_max_jobs, -) -from pynixify.pypi_api import ( - PyPICache, - PyPIData, -) -from pynixify.version_chooser import ( - VersionChooser, - ChosenPackageRequirements, - evaluate_package_requirements, -) -from pynixify.expression_builder import ( - build_nix_expression, - build_overlayed_nixpkgs, - build_overlay_expr, - build_shell_nix_expression, - nixfmt, -) -from pynixify.pypi_api import ( - PyPIPackage, - get_path_hash, -) -from packaging.requirements import Requirement -from packaging.utils import canonicalize_name +from pynixify.expression_builder import (build_nix_expression, + build_overlay_expr, + build_overlayed_nixpkgs, + build_shell_nix_expression, nixfmt) +from pynixify.nixpkgs_sources import (NixpkgsData, load_nixpkgs_data, + set_max_jobs) +from pynixify.pypi_api import PyPICache, PyPIData, PyPIPackage, get_path_hash +from pynixify.version_chooser import (ChosenPackageRequirements, + VersionChooser, + evaluate_package_requirements) async def _build_version_chooser( - load_test_requirements_for: List[str], - ignore_test_requirements_for: List[str], - load_all_test_requirements: bool) -> VersionChooser: + load_test_requirements_for: List[str], + ignore_test_requirements_for: List[str], + load_all_test_requirements: bool, +) -> VersionChooser: nixpkgs_data = NixpkgsData(await load_nixpkgs_data({})) pypi_cache = PyPICache() pypi_data = PyPIData(pypi_cache) + def should_load_tests(package_name): if canonicalize_name(package_name) in [ - canonicalize_name(n) - for n in ignore_test_requirements_for - ]: + canonicalize_name(n) for n in ignore_test_requirements_for + ]: return False return load_all_test_requirements or canonicalize_name(package_name) in [ - canonicalize_name(n) - for n in load_test_requirements_for] + canonicalize_name(n) for n in load_test_requirements_for + ] + version_chooser = VersionChooser( - nixpkgs_data, pypi_data, + nixpkgs_data, + pypi_data, req_evaluate=evaluate_package_requirements, should_load_tests=should_load_tests, ) @@ -79,106 +69,120 @@ def should_load_tests(package_name): def main(): parser = argparse.ArgumentParser( - description=( - 'Nix expression generator for Python packages.' - )) - parser.add_argument('requirement', nargs='*') + description=("Nix expression generator for Python packages.") + ) + parser.add_argument("requirement", nargs="*") parser.add_argument( - '-l', '--local', - metavar='NAME', + "-l", + "--local", + metavar="NAME", help=( 'Create a "python.pkgs.NAME" derivation using the current ' - 'directory as source. Useful for packaging projects with a ' - 'setup.py.' - )) + "directory as source. Useful for packaging projects with a " + "setup.py." + ), + ) parser.add_argument( - '--nixpkgs', + "--nixpkgs", help=( - 'URL to a tarball containing the nixpkgs source. When specified, ' - 'the generated expressions will use it instead of , ' - 'improving reproducibility.' - )) + "URL to a tarball containing the nixpkgs source. 
When specified, " + "the generated expressions will use it instead of , " + "improving reproducibility." + ), + ) parser.add_argument( - '-o', '--output', - metavar='DIR', - default='pynixify/', + "-o", + "--output", + metavar="DIR", + default="pynixify/", help=( "Directory in which pynixify will save the generated Nix " "expressions. If if doesn't exist, it will be automatically " "created. [default. pynixify/]" - )) + ), + ) parser.add_argument( - '-O', '--overlay-only', - action='store_true', - help=( - "Generate only overlay expresion." - )) + "-O", + "--overlay-only", + action="store_true", + help=("Generate only overlay expresion."), + ) parser.add_argument( - '--all-tests', - action='store_true', + "--all-tests", + action="store_true", help=( "Include test requirements in all generated expressions, " "except for those explicitly excluded with --ignore-tests." - )) + ), + ) parser.add_argument( - '--ignore-tests', - metavar='PACKAGES', + "--ignore-tests", + metavar="PACKAGES", help=( "Comma-separated list of packages for which we don't want " "their test requirements to be loaded." - )) + ), + ) parser.add_argument( - '--tests', - metavar='PACKAGES', + "--tests", + metavar="PACKAGES", help=( "Comma-separated list of packages for which we do want " "their test requirements to be loaded." - )) + ), + ) parser.add_argument( - '-r', - metavar='REQUIREMENTS_FILE', - action='append', + "-r", + metavar="REQUIREMENTS_FILE", + action="append", help=( "A filename whose content is a PEP 508 compliant list of " "dependencies. It can be specified multiple times to use more " "than one file. Note that pip-specific options, such as " "'-e git+https....' are not supported." - )) + ), + ) parser.add_argument( - '--max-jobs', + "--max-jobs", type=int, help=( "Sets the maximum number of concurrent nix-build processes " "executed by pynixify. If it isn't specified, it will be set to " "the number of CPUs in the system." 
- )) + ), + ) args = parser.parse_args() - asyncio.run(_main_async( - requirements=args.requirement, - requirement_files=args.r or [], - local=args.local, - output_dir=args.output, - nixpkgs=args.nixpkgs, - load_all_test_requirements=args.all_tests, - load_test_requirements_for=args.tests.split(',') if args.tests else [], - ignore_test_requirements_for=args.ignore_tests.split(',') if args.ignore_tests else [], - max_jobs=args.max_jobs, - generate_only_overlay=args.overlay_only, - )) + asyncio.run( + _main_async( + requirements=args.requirement, + requirement_files=args.r or [], + local=args.local, + output_dir=args.output, + nixpkgs=args.nixpkgs, + load_all_test_requirements=args.all_tests, + load_test_requirements_for=args.tests.split(",") if args.tests else [], + ignore_test_requirements_for=args.ignore_tests.split(",") + if args.ignore_tests + else [], + max_jobs=args.max_jobs, + generate_only_overlay=args.overlay_only, + ) + ) -async def _main_async( - requirements: List[str], - requirement_files: List[str], - local: Optional[str], - nixpkgs: Optional[str], - output_dir: Optional[str], - load_test_requirements_for: List[str], - ignore_test_requirements_for: List[str], - load_all_test_requirements: bool, - max_jobs: Optional[int], - generate_only_overlay:bool): +async def _main_async( + requirements: List[str], + requirement_files: List[str], + local: Optional[str], + nixpkgs: Optional[str], + output_dir: Optional[str], + load_test_requirements_for: List[str], + ignore_test_requirements_for: List[str], + load_all_test_requirements: bool, + max_jobs: Optional[int], + generate_only_overlay: bool, +): if nixpkgs is not None: pynixify.nixpkgs_sources.NIXPKGS_URL = nixpkgs @@ -186,8 +190,10 @@ async def _main_async( set_max_jobs(max_jobs) version_chooser: VersionChooser = await _build_version_chooser( - load_test_requirements_for, ignore_test_requirements_for, - load_all_test_requirements) + load_test_requirements_for, + ignore_test_requirements_for, + load_all_test_requirements, + ) if local is not None: await version_chooser.require_local(local, Path.cwd()) @@ -201,14 +207,11 @@ async def _main_async( for req_ in requirements: all_requirements.append(Requirement(req_)) - await asyncio.gather(*( - version_chooser.require(req) - for req in all_requirements - )) + await asyncio.gather(*(version_chooser.require(req) for req in all_requirements)) - output_dir = output_dir or 'pynixify' + output_dir = output_dir or "pynixify" base_path = Path.cwd() / output_dir - packages_path = base_path / 'packages' + packages_path = base_path / "packages" packages_path.mkdir(parents=True, exist_ok=True) overlays: Dict[str, Path] = {} @@ -226,37 +229,36 @@ async def write_package_expression(package: PyPIPackage): meta = await package.metadata() try: (pname, ext) = await get_pypi_data( - package.download_url, - str(package.version), - sha256 + package.download_url, str(package.version), sha256 ) except RuntimeError: - expr = build_nix_expression( - package, reqs, meta, sha256) + expr = build_nix_expression(package, reqs, meta, sha256) else: expr = build_nix_expression( - package, reqs, meta, sha256, fetchPypi=(pname, ext)) - expression_dir = (packages_path / f'{package.pypi_name}/') + package, reqs, meta, sha256, fetchPypi=(pname, ext) + ) + expression_dir = packages_path / f"{package.pypi_name}/" expression_dir.mkdir(exist_ok=True) - expression_path = expression_dir / 'default.nix' - with expression_path.open('w') as fp: + expression_path = expression_dir / "default.nix" + with expression_path.open("w") as fp: 
fp.write(await nixfmt(expr)) expression_path = expression_path.relative_to(base_path) overlays[package.attr] = expression_path - await asyncio.gather(*( - write_package_expression(package) - for package in version_chooser.all_pypi_packages() - )) + await asyncio.gather( + *( + write_package_expression(package) + for package in version_chooser.all_pypi_packages() + ) + ) if generate_only_overlay: - with (base_path / 'overlay.nix').open('w') as fp: + with (base_path / "overlay.nix").open("w") as fp: expr = build_overlay_expr(overlays) fp.write(await nixfmt(expr)) return - - with (base_path / 'nixpkgs.nix').open('w') as fp: + with (base_path / "nixpkgs.nix").open("w") as fp: if nixpkgs is None: expr = build_overlayed_nixpkgs(overlays) else: @@ -274,15 +276,15 @@ async def write_package_expression(package: PyPIPackage): assert p is not None packages.append(p) - with (base_path / 'shell.nix').open('w') as fp: + with (base_path / "shell.nix").open("w") as fp: expr = build_shell_nix_expression(packages) fp.write(await nixfmt(expr)) async def get_url_hash(url: str, unpack=True) -> str: - cmd = ['nix-prefetch-url'] + cmd = ["nix-prefetch-url"] if unpack: - cmd.append('--unpack') + cmd.append("--unpack") cmd.append(url) proc = await asyncio.create_subprocess_exec( @@ -293,7 +295,7 @@ async def get_url_hash(url: str, unpack=True) -> str: (stdout, _) = await proc.communicate() status = await proc.wait() if status != 0: - raise RuntimeError(f'Could not get hash of URL: {url}') + raise RuntimeError(f"Could not get hash of URL: {url}") return stdout.decode().strip() @@ -306,21 +308,19 @@ async def get_pypi_data(url: str, version: str, sha256: str) -> Tuple[str, str]: builtins.fetchurl, so our generated expression should do it too. """ filename = Path(urlparse(url).path).name - match = re.match( - f'(?P.+)-{re.escape(version)}\\.(?P.+)', - filename - ) + match = re.match(f"(?P.+)-{re.escape(version)}\\.(?P.+)", filename) if match is None: - raise RuntimeError(f'Cannot build mirror://pypi URL from original URL: {url}') + raise RuntimeError(f"Cannot build mirror://pypi URL from original URL: {url}") - pname, ext = match.group('pname'), match.group('ext') + pname, ext = match.group("pname"), match.group("ext") # See /pkgs/development/python-modules/ansiwrap/default.nix # "mirror://pypi/${builtins.substring 0 1 pname}/${pname}/${pname}-${version}.${extension}"; - url = f'mirror://pypi/{pname[0]}/{pname}/{pname}-{version}.{ext}' + url = f"mirror://pypi/{pname[0]}/{pname}/{pname}-{version}.{ext}" newhash = await get_url_hash(url, unpack=False) if newhash != sha256: - raise RuntimeError(f'Invalid hash for URL: {url}') + raise RuntimeError(f"Invalid hash for URL: {url}") return (pname, ext) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/pynixify/exceptions.py b/pynixify/exceptions.py index 190954f..60240d8 100644 --- a/pynixify/exceptions.py +++ b/pynixify/exceptions.py @@ -14,14 +14,18 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
+ class PackageNotFound(Exception): pass + class NoMatchingVersionFound(Exception): pass + class IntegrityError(Exception): pass + class NixBuildError(Exception): pass diff --git a/pynixify/expression_builder.py b/pynixify/expression_builder.py index cbd89d6..213038e 100644 --- a/pynixify/expression_builder.py +++ b/pynixify/expression_builder.py @@ -16,14 +16,13 @@ import asyncio from pathlib import Path -from typing import Iterable, Mapping, List, Set, Optional, Tuple +from typing import Iterable, List, Mapping, Optional, Set, Tuple + from mako.template import Template -from pynixify.version_chooser import ( - VersionChooser, - ChosenPackageRequirements, -) -from pynixify.base import PackageMetadata, Package + +from pynixify.base import Package, PackageMetadata from pynixify.pypi_api import PyPIPackage +from pynixify.version_chooser import ChosenPackageRequirements, VersionChooser DISCLAIMER = """ # WARNING: This file was automatically generated. You should avoid editing it. @@ -32,7 +31,8 @@ """ -expression_template = Template("""${DISCLAIMER} +expression_template = Template( + """${DISCLAIMER} { ${', '.join(args)} }: buildPythonPackage rec { @@ -88,9 +88,11 @@ % endif }; } -""") +""" +) -overlayed_nixpkgs_template = Template("""${DISCLAIMER} +overlayed_nixpkgs_template = Template( + """${DISCLAIMER} { overlays ? [ ], ... }@args: let pynixifyOverlay = self: super: { @@ -120,9 +122,11 @@ }; in import nixpkgs (args // { overlays = [ pynixifyOverlay ] ++ overlays; }) -""") +""" +) -shell_nix_template = Template("""${DISCLAIMER} +shell_nix_template = Template( + """${DISCLAIMER} { python ? "python3" }: let pkgs = import ./nixpkgs.nix {}; @@ -138,34 +142,42 @@ ])) ]; } -""") +""" +) + def build_nix_expression( - package: PyPIPackage, - requirements: ChosenPackageRequirements, - metadata: PackageMetadata, - sha256: str, - fetchPypi: Optional[Tuple[str, str]] = None, - ) -> str: - non_python_dependencies = ['lib', 'fetchPypi', 'buildPythonPackage'] + package: PyPIPackage, + requirements: ChosenPackageRequirements, + metadata: PackageMetadata, + sha256: str, + fetchPypi: Optional[Tuple[str, str]] = None, +) -> str: + non_python_dependencies = ["lib", "fetchPypi", "buildPythonPackage"] runtime_requirements: List[str] = [ - p.attr for p in requirements.runtime_requirements] - build_requirements: List[str] = [ - p.attr for p in requirements.build_requirements] - test_requirements: List[str] = [ - p.attr for p in requirements.test_requirements] + p.attr for p in requirements.runtime_requirements + ] + build_requirements: List[str] = [p.attr for p in requirements.build_requirements] + test_requirements: List[str] = [p.attr for p in requirements.test_requirements] args: List[str] - args = sorted(set( - non_python_dependencies + runtime_requirements + - test_requirements + build_requirements)) + args = sorted( + set( + non_python_dependencies + + runtime_requirements + + test_requirements + + build_requirements + ) + ) version = str(package.version) nix = escape_string return expression_template.render(DISCLAIMER=DISCLAIMER, **locals()) + def build_overlay_expr(overlays: Mapping[str, Path]): - return Template(""" + return Template( + """ self: super: { % for (package_name, path) in overlays.items(): ${package_name} = @@ -173,32 +185,30 @@ def build_overlay_expr(overlays: Mapping[str, Path]): ${'' if path.is_absolute() else './'}${str(path).replace('/default.nix', '')} {}; % endfor - }""").render(overlays=overlays) + }""" + ).render(overlays=overlays) + def build_overlayed_nixpkgs( - overlays: 
Mapping[str, Path], - nixpkgs: Optional[Tuple[str, str]] = None - ) -> str: + overlays: Mapping[str, Path], nixpkgs: Optional[Tuple[str, str]] = None +) -> str: nix = escape_string # Sort dictionary keys to ensure pynixify/nixpkgs.nix will have the # same contents in different pynixify runs. - overlays = { - k: overlays[k] - for k in sorted(overlays.keys()) - } + overlays = {k: overlays[k] for k in sorted(overlays.keys())} # Taken from Interpreters section in https://nixos.org/nixpkgs/manual/#reference interpreters = [ - 'python2', - 'python27', - 'python3', - 'python35', - 'python36', - 'python37', - 'python38', - 'python39', - 'python310' + "python2", + "python27", + "python3", + "python35", + "python36", + "python37", + "python38", + "python39", + "python310", ] return overlayed_nixpkgs_template.render(DISCLAIMER=DISCLAIMER, **locals()) @@ -210,7 +220,7 @@ def build_shell_nix_expression(packages: List[Package]) -> str: async def nixfmt(expr: str) -> str: proc = await asyncio.create_subprocess_exec( - 'nixfmt', + "nixfmt", stdout=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE, ) @@ -219,15 +229,16 @@ async def nixfmt(expr: str) -> str: (stdout, _) = await proc.communicate() status = await proc.wait() if status: - raise TypeError(f'nixfmt failed') + raise TypeError(f"nixfmt failed") return stdout.decode() + def escape_string(string: str) -> str: # Based on the documentation in https://nixos.org/nix/manual/#idm140737322106128 - string = string.replace('\\', '\\\\') + string = string.replace("\\", "\\\\") string = string.replace('"', '\\"') - string = string.replace('\n', '\\n') - string = string.replace('\t', '\\t') - string = string.replace('\r', '\\r') - string = string.replace('${', '\\${') + string = string.replace("\n", "\\n") + string = string.replace("\t", "\\t") + string = string.replace("\r", "\\r") + string = string.replace("${", "\\${") return f'"{string}"' diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index f2f435a..1bf034c 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -14,21 +14,24 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import sys -import json import asyncio -from pathlib import Path -from typing import Sequence, Any, Optional +import json +import sys from collections import defaultdict from multiprocessing import cpu_count -from packaging.utils import canonicalize_name +from pathlib import Path +from typing import Any, Optional, Sequence + from packaging.requirements import Requirement +from packaging.utils import canonicalize_name from packaging.version import Version + from pynixify.base import Package, parse_version -from pynixify.exceptions import PackageNotFound, NixBuildError +from pynixify.exceptions import NixBuildError, PackageNotFound NIXPKGS_URL: Optional[str] = None + class NixPackage(Package): def __init__(self, *, attr: str, version: Version): self.version = version @@ -55,24 +58,21 @@ async def source(self, extra_args=[]): name = "ATTR_dummy_src"; destination = "/setup.py"; } - """.replace('ATTR', self.attr) - args = [ - '--no-out-link', - '--no-build-output', - '-E', - expr - ] + """.replace( + "ATTR", self.attr + ) + args = ["--no-out-link", "--no-build-output", "-E", expr] args += extra_args return await run_nix_build(*args) def __str__(self): - return f'NixPackage(attr={self.attr}, version={self.version})' + return f"NixPackage(attr={self.attr}, version={self.version})" class NixpkgsData: def __init__(self, data): data_defaultdict: Any = defaultdict(list) - for (k, v) in data.items(): + for k, v in data.items(): data_defaultdict[canonicalize_name(k)] += v self.__data = dict(data_defaultdict) @@ -80,9 +80,9 @@ def from_pypi_name(self, name: str) -> Sequence[NixPackage]: try: data = self.__data[canonicalize_name(name)] except KeyError: - raise PackageNotFound(f'{name} is not defined in nixpkgs') + raise PackageNotFound(f"{name} is not defined in nixpkgs") return [ - NixPackage(attr=drv['attr'], version=parse_version(drv['version'])) + NixPackage(attr=drv["attr"], version=parse_version(drv["version"])) for drv in data ] @@ -94,16 +94,17 @@ def from_requirement(self, req: Requirement) -> Sequence[NixPackage]: async def load_nixpkgs_data(extra_args): nix_expression_path = Path(__file__).parent / "data" / "pythonPackages.nix" args = [ - '--eval', - '--strict', - '--json', + "--eval", + "--strict", + "--json", str(nix_expression_path), ] args += extra_args if NIXPKGS_URL is not None: - args += ['-I', f'nixpkgs={NIXPKGS_URL}'] + args += ["-I", f"nixpkgs={NIXPKGS_URL}"] proc = await asyncio.create_subprocess_exec( - 'nix-instantiate', *args, stdout=asyncio.subprocess.PIPE) + "nix-instantiate", *args, stdout=asyncio.subprocess.PIPE + ) (stdout, _) = await proc.communicate() status = await proc.wait() assert status == 0 @@ -114,35 +115,34 @@ async def load_nixpkgs_data(extra_args): async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path: if NIXPKGS_URL is not None: # TODO fix mypy hack - args_ = list(args) + ['-I', f'nixpkgs={NIXPKGS_URL}'] + args_ = list(args) + ["-I", f"nixpkgs={NIXPKGS_URL}"] else: args_ = list(args) # TODO remove mypy ignore below and fix compatibility with mypy 0.790 proc = await asyncio.create_subprocess_exec( - 'nix-build', *args_, stdout=asyncio.subprocess.PIPE, # type: ignore - stderr=asyncio.subprocess.PIPE) + "nix-build", + *args_, + stdout=asyncio.subprocess.PIPE, # type: ignore + stderr=asyncio.subprocess.PIPE, + ) (stdout, stderr) = await proc.communicate() status = await proc.wait() - if b'all build users are currently in use' in stderr and retries < max_retries: + if b"all build users are currently in use" in stderr and retries < 
max_retries: # perform an expotential backoff and retry # TODO think a way to avoid relying in the error message sys.stderr.write( - f'warning: All build users are currently in use. ' - f'Retrying in {2**retries} seconds\n' + f"warning: All build users are currently in use. " + f"Retrying in {2**retries} seconds\n" ) await asyncio.sleep(2**retries) - return await run_nix_build( - *args, - retries=retries+1, - max_retries=max_retries - ) + return await run_nix_build(*args, retries=retries + 1, max_retries=max_retries) elif retries >= max_retries: - sys.stderr.write(f'error: Giving up after {max_retries} failed retries\n') + sys.stderr.write(f"error: Giving up after {max_retries} failed retries\n") if status: print(stderr.decode(), file=sys.stderr) - raise NixBuildError(f'nix-build failed with code {status}') + raise NixBuildError(f"nix-build failed with code {status}") return Path(stdout.strip().decode()) diff --git a/pynixify/package_requirements.py b/pynixify/package_requirements.py index 5a17cc5..85a820a 100644 --- a/pynixify/package_requirements.py +++ b/pynixify/package_requirements.py @@ -14,13 +14,15 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +from dataclasses import dataclass from pathlib import Path from typing import List -from dataclasses import dataclass + from packaging.requirements import Requirement from pkg_resources import parse_requirements -from pynixify.nixpkgs_sources import run_nix_build + from pynixify.exceptions import NixBuildError +from pynixify.nixpkgs_sources import run_nix_build @dataclass @@ -32,12 +34,12 @@ class PackageRequirements: @classmethod def from_result_path(cls, result_path: Path): attr_mapping = { - 'build_requirements': Path('setup_requires.txt'), - 'test_requirements': Path('tests_requires.txt'), - 'runtime_requirements': Path('install_requires.txt'), + "build_requirements": Path("setup_requires.txt"), + "test_requirements": Path("tests_requires.txt"), + "runtime_requirements": Path("install_requires.txt"), } kwargs = {} - for (attr, filename) in attr_mapping.items(): + for attr, filename in attr_mapping.items(): with (result_path / filename).open() as fp: # Convert from Requirement.parse to Requirement reqs = [Requirement(str(r)) for r in parse_requirements(fp)] @@ -47,27 +49,27 @@ def from_result_path(cls, result_path: Path): async def eval_path_requirements(path: Path) -> PackageRequirements: nix_expression_path = Path(__file__).parent / "data" / "parse_setuppy_data.nix" - if path.name.endswith('.whl'): + if path.name.endswith(".whl"): # Some nixpkgs packages use a wheel as source, which don't have a # setup.py file. For now, ignore them assume they have no dependencies - print(f'{path} is a wheel file instead of a source distribution. ' - f'Assuming it has no dependencies.') + print( + f"{path} is a wheel file instead of a source distribution. " + f"Assuming it has no dependencies." + ) return PackageRequirements( - build_requirements=[], - test_requirements=[], - runtime_requirements=[] + build_requirements=[], test_requirements=[], runtime_requirements=[] ) assert nix_expression_path.exists() nix_store_path = await run_nix_build( str(nix_expression_path), - '--no-out-link', - '--no-build-output', - '--arg', - 'file', - str(path.resolve()) + "--no-out-link", + "--no-build-output", + "--arg", + "file", + str(path.resolve()), ) - if (nix_store_path / 'failed').exists(): - print(f'Error parsing requirements of {path}. 
Assuming it has no dependencies.') + if (nix_store_path / "failed").exists(): + print(f"Error parsing requirements of {path}. Assuming it has no dependencies.") return PackageRequirements( build_requirements=[], test_requirements=[], diff --git a/pynixify/pypi_api.py b/pynixify/pypi_api.py index 3b2a960..c613172 100644 --- a/pynixify/pypi_api.py +++ b/pynixify/pypi_api.py @@ -14,26 +14,25 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import os -import sys -import json import asyncio import hashlib -import aiohttp -import aiofiles -from typing import Sequence, Optional, List -from pathlib import Path -from dataclasses import dataclass, field -from urllib.parse import urlunparse +import json +import os +import sys from abc import ABCMeta, abstractmethod -from urllib.parse import quote, urlparse -from packaging.utils import canonicalize_name +from dataclasses import dataclass, field +from pathlib import Path +from typing import List, Optional, Sequence +from urllib.parse import quote, urlparse, urlunparse + +import aiofiles +import aiohttp from packaging.requirements import Requirement +from packaging.utils import canonicalize_name from packaging.version import Version, parse + from pynixify.base import Package, parse_version -from pynixify.exceptions import ( - IntegrityError -) +from pynixify.exceptions import IntegrityError class ABCPyPICache(metaclass=ABCMeta): @@ -58,9 +57,10 @@ async def source(self, extra_args=[]) -> Path: if self.local_source is not None: return self.local_source downloaded_file: Path = await self.pypi_cache.fetch_url( - self.download_url, self.sha256) + self.download_url, self.sha256 + ) h = hashlib.sha256() - with downloaded_file.open('rb') as fp: + with downloaded_file.open("rb") as fp: while True: data = fp.read(65536) if not data: @@ -83,7 +83,7 @@ def attr(self): return self.pypi_name def __str__(self): - return f'PyPIPackage(attr={self.attr}, version={self.version})' + return f"PyPIPackage(attr={self.attr}, version={self.version})" class PyPIData: @@ -93,31 +93,34 @@ def __init__(self, pypi_cache): async def from_requirement(self, req: Requirement) -> Sequence[PyPIPackage]: response = await self.pypi_cache.fetch(canonicalize_name(req.name)) matching = [] - for (version, version_dist) in response['releases'].items(): + for version, version_dist in response["releases"].items(): try: - data = next(e for e in version_dist if e['packagetype'] == 'sdist') + data = next(e for e in version_dist if e["packagetype"] == "sdist") except StopIteration: continue if version in req.specifier: - matching.append(PyPIPackage( - sha256=data['digests']['sha256'], - version=parse_version(version), - download_url=data['url'], - pypi_name=canonicalize_name(req.name), - pypi_cache=self.pypi_cache, - )) + matching.append( + PyPIPackage( + sha256=data["digests"]["sha256"], + version=parse_version(version), + download_url=data["url"], + pypi_name=canonicalize_name(req.name), + pypi_cache=self.pypi_cache, + ) + ) return matching class PyPICache: async def fetch(self, package_name): - url = f'https://pypi.org/pypi/{quote(package_name)}/json' + url = f"https://pypi.org/pypi/{quote(package_name)}/json" async with aiohttp.ClientSession(raise_for_status=True) as session: async with session.get(url) as response: return await response.json() async def fetch_url(self, url, sha256) -> Path: from pynixify.expression_builder import escape_string + expr = f""" builtins.fetchurl {{ url = {escape_string(url)}; @@ -132,12 +135,16 @@ async 
def fetch_url(self, url, sha256) -> Path: async def nix_instantiate(expr: str, attr=None, **kwargs): extra_args: List[str] = [] if attr is not None: - extra_args += ['--attr', attr] - for (k, v) in kwargs.items(): - extra_args += ['--arg', k, v] + extra_args += ["--attr", attr] + for k, v in kwargs.items(): + extra_args += ["--arg", k, v] proc = await asyncio.create_subprocess_exec( - 'nix-instantiate', '--json', '--eval', '-', *extra_args, + "nix-instantiate", + "--json", + "--eval", + "-", + *extra_args, stdin=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE, ) @@ -150,16 +157,19 @@ async def nix_instantiate(expr: str, attr=None, **kwargs): async def get_path_hash(path: Path) -> str: - url = urlunparse(( - 'file', - '', - str(path.resolve()), - '', - '', - '', - )) + url = urlunparse( + ( + "file", + "", + str(path.resolve()), + "", + "", + "", + ) + ) proc = await asyncio.create_subprocess_exec( - 'nix-prefetch-url', url, + "nix-prefetch-url", + url, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, ) @@ -167,5 +177,5 @@ async def get_path_hash(path: Path) -> str: status = await proc.wait() if status: print(stderr.decode(), file=sys.stderr) - raise RuntimeError(f'nix-prefetch-url failed with code {status}') + raise RuntimeError(f"nix-prefetch-url failed with code {status}") return stdout.decode().strip() diff --git a/pynixify/version_chooser.py b/pynixify/version_chooser.py index debe2ee..0484443 100644 --- a/pynixify/version_chooser.py +++ b/pynixify/version_chooser.py @@ -16,29 +16,30 @@ import asyncio import operator -from pathlib import Path from dataclasses import dataclass -from typing import Any, Dict, Callable, Awaitable, Optional, List, Tuple +from pathlib import Path +from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple + from packaging.requirements import Requirement -from packaging.utils import canonicalize_name from packaging.specifiers import SpecifierSet +from packaging.utils import canonicalize_name + from pynixify.base import Package, parse_version -from pynixify.nixpkgs_sources import NixpkgsData, NixPackage +from pynixify.exceptions import NoMatchingVersionFound, PackageNotFound +from pynixify.nixpkgs_sources import NixPackage, NixpkgsData +from pynixify.package_requirements import (PackageRequirements, + eval_path_requirements) from pynixify.pypi_api import PyPIData, PyPIPackage -from pynixify.package_requirements import ( - PackageRequirements, - eval_path_requirements, -) -from pynixify.exceptions import ( - NoMatchingVersionFound, - PackageNotFound, -) + class VersionChooser: - def __init__(self, nixpkgs_data: NixpkgsData, pypi_data: PyPIData, - req_evaluate: Callable[[Package], Awaitable[PackageRequirements]], - should_load_tests: Callable[[str], bool] = lambda _: False, - ): + def __init__( + self, + nixpkgs_data: NixpkgsData, + pypi_data: PyPIData, + req_evaluate: Callable[[Package], Awaitable[PackageRequirements]], + should_load_tests: Callable[[str], bool] = lambda _: False, + ): self.nixpkgs_data = nixpkgs_data self.pypi_data = pypi_data self._choosed_packages: Dict[str, Tuple[Package, SpecifierSet]] = {} @@ -46,7 +47,7 @@ def __init__(self, nixpkgs_data: NixpkgsData, pypi_data: PyPIData, self.evaluate_requirements = req_evaluate self.should_load_tests = should_load_tests - async def require(self, r: Requirement, coming_from: Optional[Package]=None): + async def require(self, r: Requirement, coming_from: Optional[Package] = None): pkg: Package if r.marker and not r.marker.evaluate(): @@ -58,16 +59,20 @@ async def 
require(self, r: Requirement, coming_from: Optional[Package]=None): is_in_nixpkgs = False else: is_in_nixpkgs = True - if (isinstance(coming_from, NixPackage) and - is_in_nixpkgs and - not self.nixpkgs_data.from_requirement(r)): + if ( + isinstance(coming_from, NixPackage) + and is_in_nixpkgs + and not self.nixpkgs_data.from_requirement(r) + ): # This shouldn't happen in an ideal world. Unfortunately, # nixpkgs does some patching to packages to disable some # requirements. Because we don't use these patches, the # dependency resolution would fail if we don't ignore the # requirement. - print(f"warning: ignoring requirement {r} from {coming_from} " - f"because there is no matching version in nixpkgs packages") + print( + f"warning: ignoring requirement {r} from {coming_from} " + f"because there is no matching version in nixpkgs packages" + ) return print(f'Resolving {r}{f" (from {coming_from})" if coming_from else ""}') @@ -81,9 +86,9 @@ async def require(self, r: Requirement, coming_from: Optional[Package]=None): self._choosed_packages[canonicalize_name(r.name)] = (pkg, specifier) if pkg.version not in specifier: raise NoMatchingVersionFound( - f'New requirement ' + f"New requirement " f'{r}{f" (from {coming_from})" if coming_from else ""} ' - f'does not match already installed {r.name}=={str(pkg.version)}' + f"does not match already installed {r.name}=={str(pkg.version)}" ) return @@ -112,32 +117,38 @@ async def require(self, r: Requirement, coming_from: Optional[Package]=None): found_pypi = False if not found_nixpkgs and not found_pypi: - raise PackageNotFound(f'{r.name} not found in PyPI nor nixpkgs') + raise PackageNotFound(f"{r.name} not found in PyPI nor nixpkgs") if not pkgs: raise NoMatchingVersionFound(str(r)) - pkg = max(pkgs, key=operator.attrgetter('version')) + pkg = max(pkgs, key=operator.attrgetter("version")) self._choosed_packages[canonicalize_name(r.name)] = (pkg, r.specifier) reqs: PackageRequirements = await self.evaluate_requirements(pkg) if isinstance(pkg, NixPackage) or ( - not self.should_load_tests(canonicalize_name(r.name))): + not self.should_load_tests(canonicalize_name(r.name)) + ): reqs.test_requirements = [] - await asyncio.gather(*( - self.require(req, coming_from=pkg) - for req in (reqs.runtime_requirements + reqs.test_requirements + - reqs.build_requirements) - )) + await asyncio.gather( + *( + self.require(req, coming_from=pkg) + for req in ( + reqs.runtime_requirements + + reqs.test_requirements + + reqs.build_requirements + ) + ) + ) async def require_local(self, pypi_name: str, src: Path): assert pypi_name not in self._choosed_packages package = PyPIPackage( pypi_name=pypi_name, - download_url='', - sha256='', - version=parse_version('0.1dev'), + download_url="", + sha256="", + version=parse_version("0.1dev"), pypi_cache=self.pypi_data.pypi_cache, local_source=src, ) @@ -153,13 +164,15 @@ def package_for(self, package_name: str) -> Optional[Package]: def all_pypi_packages(self) -> List[PyPIPackage]: return [ - v[0] for v in self._choosed_packages.values() + v[0] + for v in self._choosed_packages.values() if isinstance(v[0], PyPIPackage) ] async def evaluate_package_requirements( - pkg: Package, extra_args=[]) -> PackageRequirements: + pkg: Package, extra_args=[] +) -> PackageRequirements: src = await pkg.source(extra_args) return await eval_path_requirements(src) @@ -172,22 +185,23 @@ class ChosenPackageRequirements: @classmethod def from_package_requirements( - cls, - package_requirements: PackageRequirements, - version_chooser: VersionChooser, - 
load_tests: bool): + cls, + package_requirements: PackageRequirements, + version_chooser: VersionChooser, + load_tests: bool, + ): kwargs: Any = {} - kwargs['build_requirements'] = [] + kwargs["build_requirements"] = [] for req in package_requirements.build_requirements: if req.marker and not req.marker.evaluate(): continue package = version_chooser.package_for(req.name) if package is None: raise PackageNotFound( - f'Package {req.name} not found in the version chooser' + f"Package {req.name} not found in the version chooser" ) - kwargs['build_requirements'].append(package) + kwargs["build_requirements"].append(package) # tests_requirements uses the packages in the version chooser packages: List[Package] = [] @@ -198,10 +212,10 @@ def from_package_requirements( package = version_chooser.package_for(req.name) if package is None: raise PackageNotFound( - f'Package {req.name} not found in the version chooser' + f"Package {req.name} not found in the version chooser" ) packages.append(package) - kwargs['test_requirements'] = packages + kwargs["test_requirements"] = packages # runtime_requirements uses the packages in the version chooser packages = [] @@ -211,9 +225,9 @@ def from_package_requirements( package = version_chooser.package_for(req.name) if package is None: raise PackageNotFound( - f'Package {req.name} not found in the version chooser' + f"Package {req.name} not found in the version chooser" ) packages.append(package) - kwargs['runtime_requirements'] = packages + kwargs["runtime_requirements"] = packages return cls(**kwargs) From 8e0ebd62be9069b3667e8e41ce9aa369af535834 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 12:42:57 -0700 Subject: [PATCH 06/33] workflow tweaks --- .github/workflows/lint.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 8977986..e134ad6 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,12 +7,12 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - with: - fetch-depth: 0 + - uses: actions/setup-python@v4 name: Install Python with: - python-version: '3.8' + python-version: '3.10' + - name: Install Dependencies run: pip install black isort From b523dd868bc632c7aada215d1fbef70b3784e698 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 12:45:57 -0700 Subject: [PATCH 07/33] proper checkout --- .github/workflows/lint.yml | 2 ++ .github/workflows/test.yml | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index e134ad6..44fb911 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,6 +7,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} - uses: actions/setup-python@v4 name: Install Python diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 08bc33b..36ba0ce 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,6 +1,5 @@ name: "Test" on: - pull_request: push: schedule: - cron: '5 19 * * 5' # At 19:05 on Friday From 0834fe042e01e24cc1019b559ba80083e363de83 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 12:50:57 -0700 Subject: [PATCH 08/33] use noqa to stop black and isort from fighting each other --- pynixify/command.py | 23 ++++++++++++----------- pynixify/version_chooser.py | 6 +++--- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/pynixify/command.py 
b/pynixify/command.py index a66437a..8381b57 100644 --- a/pynixify/command.py +++ b/pynixify/command.py @@ -16,7 +16,6 @@ import argparse import asyncio -import os import re from pathlib import Path from typing import Dict, List, Optional, Tuple @@ -28,16 +27,18 @@ import pynixify.nixpkgs_sources from pynixify.base import Package -from pynixify.expression_builder import (build_nix_expression, - build_overlay_expr, - build_overlayed_nixpkgs, - build_shell_nix_expression, nixfmt) -from pynixify.nixpkgs_sources import (NixpkgsData, load_nixpkgs_data, - set_max_jobs) -from pynixify.pypi_api import PyPICache, PyPIData, PyPIPackage, get_path_hash -from pynixify.version_chooser import (ChosenPackageRequirements, - VersionChooser, - evaluate_package_requirements) +from pynixify.expression_builder import build_nix_expression # noqa +from pynixify.expression_builder import build_overlay_expr # noqa +from pynixify.expression_builder import build_overlayed_nixpkgs # noqa +from pynixify.expression_builder import build_shell_nix_expression # noqa +from pynixify.expression_builder import nixfmt # noqa; noqa +from pynixify.nixpkgs_sources import set_max_jobs # noqa +from pynixify.nixpkgs_sources import NixpkgsData, load_nixpkgs_data # noqa +from pynixify.pypi_api import get_path_hash # noqa +from pynixify.pypi_api import PyPICache, PyPIData, PyPIPackage # noqa +from pynixify.version_chooser import ChosenPackageRequirements # noqa +from pynixify.version_chooser import VersionChooser # noqa +from pynixify.version_chooser import evaluate_package_requirements # noqa async def _build_version_chooser( diff --git a/pynixify/version_chooser.py b/pynixify/version_chooser.py index 0484443..866879e 100644 --- a/pynixify/version_chooser.py +++ b/pynixify/version_chooser.py @@ -27,9 +27,9 @@ from pynixify.base import Package, parse_version from pynixify.exceptions import NoMatchingVersionFound, PackageNotFound from pynixify.nixpkgs_sources import NixPackage, NixpkgsData -from pynixify.package_requirements import (PackageRequirements, - eval_path_requirements) -from pynixify.pypi_api import PyPIData, PyPIPackage +from pynixify.package_requirements import PackageRequirements # noqa +from pynixify.package_requirements import eval_path_requirements # noqa +from pynixify.pypi_api import PyPIData, PyPIPackage # noqa class VersionChooser: From 0de114474826a34b12e46e13e596a296dd610cfd Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Tue, 16 May 2023 11:12:33 -0700 Subject: [PATCH 09/33] make dependencies check attempt both setuptools and hatch builds --- pynixify/data/parse_setuppy_data.nix | 92 +++++++++++++++++++++++----- pynixify/nixpkgs_sources.py | 3 +- 2 files changed, 79 insertions(+), 16 deletions(-) diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index b2194c0..860f09f 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -1,43 +1,105 @@ -{ file, stdenv ? (import { }).stdenv, lib ? (import { }).lib -, unzip ? (import { }).unzip, python ? (import { }).python3 -}: +{ file, pkgs ? import { } }: let removeExt = fileName: builtins.elemAt (builtins.split "\\." 
fileName) 0; - patchedSetuptools = python.pkgs.setuptools.overrideAttrs (ps: { + patchedSetuptools = pkgs.python3.pkgs.setuptools.overrideAttrs (ps: { # src = (import {}).lib.cleanSource ./setuptools; patches = [ ./setuptools_patch.diff ]; - patchFlags = lib.optionals (lib.versionOlder "61" python.pkgs.setuptools.version) ["--merge" "-p1"]; + patchFlags = pkgs.lib.optionals + (pkgs.lib.versionOlder "61" pkgs.python3.pkgs.setuptools.version) [ + "--merge" + "-p1" + ]; }); - pythonWithPackages = python.withPackages (ps: [ patchedSetuptools ]); + setuptoolsscm = pkgs.python3.pkgs.buildPythonPackage rec { + pname = "setuptools-scm"; + version = "7.0.5"; + + src = pkgs.python3.pkgs.fetchPypi { + pname = "setuptools_scm"; + inherit version; + sha256 = "sha256-Ax4Tr3cdb4krlBrbbqBFRbv5Hrxc5ox4qvP/9uH7SEQ="; + }; + + propagatedBuildInputs = [ + pkgs.python3.pkgs.packaging + pkgs.python3.pkgs.typing-extensions + pkgs.python3.pkgs.tomli + patchedSetuptools + ]; + + pythonImportsCheck = [ "setuptools_scm" ]; + + # check in passthru.tests.pytest to escape infinite recursion on pytest + doCheck = false; + }; + hatchvcs = pkgs.python3.pkgs.buildPythonPackage rec { + pname = "hatch-vcs"; + version = "0.2.0"; + format = "pyproject"; + + disabled = pkgs.python3.pkgs.pythonOlder "3.7"; + + src = pkgs.python3.pkgs.fetchPypi { + pname = "hatch_vcs"; + inherit version; + sha256 = "sha256-mRPXM7NO7JuwNF0GJsoyFlpK0t4V0c5kPDbQnKkIq/8="; + }; + + nativeBuildInputs = [ pkgs.python3.pkgs.hatchling ]; + + propagatedBuildInputs = [ pkgs.python3.pkgs.hatchling setuptoolsscm ]; + + checkInputs = [ pkgs.git pkgs.python3.pkgs.pytestCheckHook ]; + + disabledTests = [ + # incompatible with setuptools-scm>=7 + # https://github.com/ofek/hatch-vcs/issues/8 + "test_write" + ]; + + pythonImportsCheck = [ "hatch_vcs" ]; + }; + + pythonWithPackages = pkgs.python3.withPackages + (ps: [ patchedSetuptools pkgs.python3.pkgs.hatchling hatchvcs ]); cleanSource = src: - lib.cleanSourceWith { + pkgs.lib.cleanSourceWith { filter = name: type: - lib.cleanSourceFilter name type && builtins.baseNameOf (toString name) - != "pynixify"; + pkgs.lib.cleanSourceFilter name type + && builtins.baseNameOf (toString name) != "pynixify"; name = builtins.baseNameOf src; inherit src; }; -in stdenv.mkDerivation { +in pkgs.stdenv.mkDerivation { name = "setup.py_data_${removeExt (builtins.baseNameOf file)}"; src = cleanSource file; - nativeBuildInputs = [ unzip ]; - buildInputs = [ pythonWithPackages ]; + nativeBuildInputs = [ pkgs.unzip ]; + buildInputs = [ pythonWithPackages pkgs.hatch ]; configurePhase = '' true # We don't want to execute ./configure ''; buildPhase = '' mkdir -p $out - if ! 
PYNIXIFY=1 python setup.py install; then - # Indicate that fetching the result failed, but let the build succeed - touch $out/failed + if PYNIXIFY=1 python setup.py install; then + exit 0 + fi + if test -f pyproject.toml && grep "hatchling.build" pyproject.toml; then + echo 'mode = "local"' > config.toml + #echo '[build-system]' >> config.toml + #echo 'build-backend = "hatchling.build"' >> config.toml + cat config.toml + hatch --config config.toml --data-dir $out/data --cache-dir $out/cache build + exit 0 fi + # Indicate that fetching the result failed, but let the build succeed + touch $out/failed ''; dontInstall = true; } diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index 1bf034c..30b83f8 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -126,6 +126,7 @@ async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path stderr=asyncio.subprocess.PIPE, ) (stdout, stderr) = await proc.communicate() + # sys.stdout.write("%s\n%s" % (stdout.decode(), stderr.decode())) status = await proc.wait() if b"all build users are currently in use" in stderr and retries < max_retries: @@ -135,7 +136,7 @@ async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path f"warning: All build users are currently in use. " f"Retrying in {2**retries} seconds\n" ) - await asyncio.sleep(2**retries) + await asyncio.sleep(2 ** retries) return await run_nix_build(*args, retries=retries + 1, max_retries=max_retries) elif retries >= max_retries: sys.stderr.write(f"error: Giving up after {max_retries} failed retries\n") From 1318d8c54ae35db3b7f2991efaccf0f3dddbbeb1 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Tue, 16 May 2023 13:50:13 -0700 Subject: [PATCH 10/33] patch hatchling to output dependency data --- pynixify/data/hatchling_patch.diff | 41 ++++++++++++++++++++++++++++ pynixify/data/parse_setuppy_data.nix | 18 ++++++------ 2 files changed, 50 insertions(+), 9 deletions(-) create mode 100644 pynixify/data/hatchling_patch.diff diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff new file mode 100644 index 0000000..bd66acf --- /dev/null +++ b/pynixify/data/hatchling_patch.diff @@ -0,0 +1,41 @@ +diff --git a/src/hatchling/metadata/core.py b/src/hatchling/metadata/core.py +index 24544ad1..f255bfec 100644 +--- a/src/hatchling/metadata/core.py ++++ b/src/hatchling/metadata/core.py +@@ -11,6 +11,14 @@ if sys.version_info >= (3, 11): + else: + import tomli as tomllib + ++if 'PYNIXIFY' in os.environ: ++ from pathlib import Path ++ try: ++ pynix_out = Path(os.environ['out']) ++ except KeyError: ++ print("out environment variable not defined") ++ sys.exit(1) ++ + + def load_toml(path): + with open(path, encoding='utf-8') as f: +@@ -250,6 +258,9 @@ class BuildMetadata: + raise ValueError(f'Dependency #{i} of field `build-system.requires` is invalid: {e}') + + self._requires_complex = requires_complex ++ if 'PYNIXIFY' in os.environ: ++ with (pynix_out / "setup_requires.txt").open("w") as fp: ++ fp.write('\n'.join([str(req) for req in self._requires_complex])) + + return self._requires_complex + +@@ -1074,6 +1085,11 @@ class CoreMetadata: + dependencies_complex[get_normalized_dependency(requirement)] = requirement + + self._dependencies_complex = dict(sorted(dependencies_complex.items())) ++ if 'PYNIXIFY' in os.environ: ++ with (pynix_out / "install_requires.txt").open("w") as fp: ++ fp.write('\n'.join([str(req) for req in self._dependencies_complex])) ++ with (pynix_out / "tests_requires.txt").open("w") 
as fp: ++ fp.write('\n'.join([str(req) for req in self._dependencies_complex])) + + return self._dependencies_complex + diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 860f09f..0109c7a 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -37,6 +37,8 @@ let # check in passthru.tests.pytest to escape infinite recursion on pytest doCheck = false; }; + hatchling = pkgs.python3.pkgs.hatchling.overrideAttrs + (ps: { patches = [ ./hatchling_patch.diff ]; }); hatchvcs = pkgs.python3.pkgs.buildPythonPackage rec { pname = "hatch-vcs"; version = "0.2.0"; @@ -50,9 +52,9 @@ let sha256 = "sha256-mRPXM7NO7JuwNF0GJsoyFlpK0t4V0c5kPDbQnKkIq/8="; }; - nativeBuildInputs = [ pkgs.python3.pkgs.hatchling ]; + nativeBuildInputs = [ hatchling ]; - propagatedBuildInputs = [ pkgs.python3.pkgs.hatchling setuptoolsscm ]; + propagatedBuildInputs = [ hatchling setuptoolsscm ]; checkInputs = [ pkgs.git pkgs.python3.pkgs.pytestCheckHook ]; @@ -65,8 +67,8 @@ let pythonImportsCheck = [ "hatch_vcs" ]; }; - pythonWithPackages = pkgs.python3.withPackages - (ps: [ patchedSetuptools pkgs.python3.pkgs.hatchling hatchvcs ]); + pythonWithPackages = + pkgs.python3.withPackages (ps: [ patchedSetuptools hatchling hatchvcs ]); cleanSource = src: pkgs.lib.cleanSourceWith { @@ -92,11 +94,9 @@ in pkgs.stdenv.mkDerivation { fi if test -f pyproject.toml && grep "hatchling.build" pyproject.toml; then echo 'mode = "local"' > config.toml - #echo '[build-system]' >> config.toml - #echo 'build-backend = "hatchling.build"' >> config.toml - cat config.toml - hatch --config config.toml --data-dir $out/data --cache-dir $out/cache build - exit 0 + if PYNIXIFY=1 hatch --config config.toml --data-dir $out/data --cache-dir $out/cache build; then + exit 0 + fi fi # Indicate that fetching the result failed, but let the build succeed touch $out/failed From 62de1af712c12b38f27faba55e6dc9ef9d752995 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Tue, 16 May 2023 14:10:10 -0700 Subject: [PATCH 11/33] generate metadata --- pynixify/data/hatchling_patch.diff | 31 ++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index bd66acf..3abf012 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -39,3 +39,34 @@ index 24544ad1..f255bfec 100644 return self._dependencies_complex +diff --git a/src/hatchling/metadata/spec.py b/src/hatchling/metadata/spec.py +index 43a0fa67..2a971bba 100644 +--- a/src/hatchling/metadata/spec.py ++++ b/src/hatchling/metadata/spec.py +@@ -1,3 +1,15 @@ ++import json ++import os ++import sys ++ ++if 'PYNIXIFY' in os.environ: ++ from pathlib import Path ++ try: ++ pynix_out = Path(os.environ['out']) ++ except KeyError: ++ print("out environment variable not defined") ++ sys.exit(1) ++ + DEFAULT_METADATA_VERSION = '2.1' + + +@@ -147,6 +159,10 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): + metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' + metadata_file += f'\n{metadata.core.readme}' + ++ if 'PYNIXIFY' in os.environ: ++ with (pynix_out / 'meta.json').open('w') as fp: ++ json.dump({"version": None, "url": None, "license": None, "description": None}, fp) ++ + return metadata_file + + From 770345f9d277805d76d657510e603aed1cba0916 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Tue, 16 May 2023 15:04:47 -0700 Subject: [PATCH 12/33] set format attribute in generated 
nix expressions --- pynixify/base.py | 1 + pynixify/data/hatchling_patch.diff | 2 +- pynixify/data/setuptools_patch.diff | 3 ++- pynixify/expression_builder.py | 1 + pynixify/nixpkgs_sources.py | 1 - 5 files changed, 5 insertions(+), 3 deletions(-) diff --git a/pynixify/base.py b/pynixify/base.py index ea11013..49775aa 100644 --- a/pynixify/base.py +++ b/pynixify/base.py @@ -28,6 +28,7 @@ class PackageMetadata: description: Optional[str] license: Optional[str] url: Optional[str] + _fmt: Optional[str] = "pyproject" @dataclass diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index 3abf012..a57cb65 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -65,7 +65,7 @@ index 43a0fa67..2a971bba 100644 + if 'PYNIXIFY' in os.environ: + with (pynix_out / 'meta.json').open('w') as fp: -+ json.dump({"version": None, "url": None, "license": None, "description": None}, fp) ++ json.dump({"version": None, "url": None, "license": None, "description": None, "_fmt": "pyproject"}, fp) + return metadata_file diff --git a/pynixify/data/setuptools_patch.diff b/pynixify/data/setuptools_patch.diff index a3e4532..59915d0 100644 --- a/pynixify/data/setuptools_patch.diff +++ b/pynixify/data/setuptools_patch.diff @@ -2,7 +2,7 @@ diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 83882511..259effd5 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py -@@ -155,14 +155,63 @@ def _install_setup_requires(attrs): +@@ -155,14 +155,64 @@ def _install_setup_requires(attrs): # Honor setup.cfg's options. dist.parse_config_files(ignore_option_errors=True) @@ -56,6 +56,7 @@ index 83882511..259effd5 100644 + meta_attrs = {'description', 'url', 'license', 'version'} + for meta_attr in meta_attrs: + meta[meta_attr] = attrs.get(meta_attr) ++ meta['_fmt'] = 'setuptools' + with (out / 'meta.json').open('w') as fp: + json.dump(meta, fp) + else: diff --git a/pynixify/expression_builder.py b/pynixify/expression_builder.py index 213038e..2007e97 100644 --- a/pynixify/expression_builder.py +++ b/pynixify/expression_builder.py @@ -38,6 +38,7 @@ buildPythonPackage rec { pname = ${package.pypi_name | nix}; version = ${version | nix}; + format = ${metadata._fmt | nix}; % if package.local_source: src = lib.cleanSource ../../..; diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index 30b83f8..d6301ea 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -126,7 +126,6 @@ async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path stderr=asyncio.subprocess.PIPE, ) (stdout, stderr) = await proc.communicate() - # sys.stdout.write("%s\n%s" % (stdout.decode(), stderr.decode())) status = await proc.wait() if b"all build users are currently in use" in stderr and retries < max_retries: From d7c29fab9a9b30c08317c93826d1c2d882b11528 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Tue, 16 May 2023 15:07:41 -0700 Subject: [PATCH 13/33] undo black and isort --- pynixify/nixpkgs_sources.py | 70 ++++++++++++++++++------------------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index d6301ea..498dab9 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -14,24 +14,21 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import asyncio -import json import sys +import json +import asyncio +from pathlib import Path +from typing import Sequence, Any, Optional from collections import defaultdict from multiprocessing import cpu_count -from pathlib import Path -from typing import Any, Optional, Sequence - -from packaging.requirements import Requirement from packaging.utils import canonicalize_name +from packaging.requirements import Requirement from packaging.version import Version - from pynixify.base import Package, parse_version -from pynixify.exceptions import NixBuildError, PackageNotFound +from pynixify.exceptions import PackageNotFound, NixBuildError NIXPKGS_URL: Optional[str] = None - class NixPackage(Package): def __init__(self, *, attr: str, version: Version): self.version = version @@ -58,15 +55,18 @@ async def source(self, extra_args=[]): name = "ATTR_dummy_src"; destination = "/setup.py"; } - """.replace( - "ATTR", self.attr - ) - args = ["--no-out-link", "--no-build-output", "-E", expr] + """.replace('ATTR', self.attr) + args = [ + '--no-out-link', + '--no-build-output', + '-E', + expr + ] args += extra_args return await run_nix_build(*args) def __str__(self): - return f"NixPackage(attr={self.attr}, version={self.version})" + return f'NixPackage(attr={self.attr}, version={self.version})' class NixpkgsData: @@ -80,9 +80,9 @@ def from_pypi_name(self, name: str) -> Sequence[NixPackage]: try: data = self.__data[canonicalize_name(name)] except KeyError: - raise PackageNotFound(f"{name} is not defined in nixpkgs") + raise PackageNotFound(f'{name} is not defined in nixpkgs') return [ - NixPackage(attr=drv["attr"], version=parse_version(drv["version"])) + NixPackage(attr=drv['attr'], version=parse_version(drv['version'])) for drv in data ] @@ -94,17 +94,16 @@ def from_requirement(self, req: Requirement) -> Sequence[NixPackage]: async def load_nixpkgs_data(extra_args): nix_expression_path = Path(__file__).parent / "data" / "pythonPackages.nix" args = [ - "--eval", - "--strict", - "--json", + '--eval', + '--strict', + '--json', str(nix_expression_path), ] args += extra_args if NIXPKGS_URL is not None: - args += ["-I", f"nixpkgs={NIXPKGS_URL}"] + args += ['-I', f'nixpkgs={NIXPKGS_URL}'] proc = await asyncio.create_subprocess_exec( - "nix-instantiate", *args, stdout=asyncio.subprocess.PIPE - ) + 'nix-instantiate', *args, stdout=asyncio.subprocess.PIPE) (stdout, _) = await proc.communicate() status = await proc.wait() assert status == 0 @@ -115,34 +114,35 @@ async def load_nixpkgs_data(extra_args): async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path: if NIXPKGS_URL is not None: # TODO fix mypy hack - args_ = list(args) + ["-I", f"nixpkgs={NIXPKGS_URL}"] + args_ = list(args) + ['-I', f'nixpkgs={NIXPKGS_URL}'] else: args_ = list(args) # TODO remove mypy ignore below and fix compatibility with mypy 0.790 proc = await asyncio.create_subprocess_exec( - "nix-build", - *args_, - stdout=asyncio.subprocess.PIPE, # type: ignore - stderr=asyncio.subprocess.PIPE, - ) + 'nix-build', *args_, stdout=asyncio.subprocess.PIPE, # type: ignore + stderr=asyncio.subprocess.PIPE) (stdout, stderr) = await proc.communicate() status = await proc.wait() - if b"all build users are currently in use" in stderr and retries < max_retries: + if b'all build users are currently in use' in stderr and retries < max_retries: # perform an expotential backoff and retry # TODO think a way to avoid relying in the error message sys.stderr.write( - f"warning: All build users are currently in use. 
" - f"Retrying in {2**retries} seconds\n" + f'warning: All build users are currently in use. ' + f'Retrying in {2**retries} seconds\n' + ) + await asyncio.sleep(2**retries) + return await run_nix_build( + *args, + retries=retries+1, + max_retries=max_retries ) - await asyncio.sleep(2 ** retries) - return await run_nix_build(*args, retries=retries + 1, max_retries=max_retries) elif retries >= max_retries: - sys.stderr.write(f"error: Giving up after {max_retries} failed retries\n") + sys.stderr.write(f'error: Giving up after {max_retries} failed retries\n') if status: print(stderr.decode(), file=sys.stderr) - raise NixBuildError(f"nix-build failed with code {status}") + raise NixBuildError(f'nix-build failed with code {status}') return Path(stdout.strip().decode()) From fe5f1706f8833b8adde27a857cc37fff4d7d2b45 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Wed, 17 May 2023 16:40:07 -0700 Subject: [PATCH 14/33] moving toward generically supporting pep517 build backends --- pynixify/data/flitcore_patch.diff | 83 ++++++++++++++++++++++++++++ pynixify/data/hatchling_patch.diff | 27 +++++++-- pynixify/data/parse_setuppy_data.nix | 44 +++++++++++---- pynixify/data/patchedpip.nix | 24 ++++++++ pynixify/data/pip_patch.diff | 72 ++++++++++++++++++++++++ pynixify/data/pip_patch_final.diff | 72 ++++++++++++++++++++++++ pynixify/nixpkgs_sources.py | 1 + 7 files changed, 308 insertions(+), 15 deletions(-) create mode 100644 pynixify/data/flitcore_patch.diff create mode 100644 pynixify/data/patchedpip.nix create mode 100644 pynixify/data/pip_patch.diff create mode 100644 pynixify/data/pip_patch_final.diff diff --git a/pynixify/data/flitcore_patch.diff b/pynixify/data/flitcore_patch.diff new file mode 100644 index 0000000..612ef3b --- /dev/null +++ b/pynixify/data/flitcore_patch.diff @@ -0,0 +1,83 @@ +diff --git a/flit_core/flit_core/buildapi.py b/flit_core/flit_core/buildapi.py +index 963bf61..f48cbf8 100644 +--- a/flit_core/flit_core/buildapi.py ++++ b/flit_core/flit_core/buildapi.py +@@ -3,6 +3,7 @@ import logging + import io + import os + import os.path as osp ++import sys + from pathlib import Path + + from .common import ( +@@ -13,6 +14,13 @@ from .config import read_flit_config + from .wheel import make_wheel_in, _write_wheel_file + from .sdist import SdistBuilder + ++if 'PYNIXIFY' in os.environ: ++ try: ++ pynix_out = Path(os.environ['out']) ++ except KeyError: ++ print("out environment variable not defined") ++ sys.exit(1) ++ + log = logging.getLogger(__name__) + + # PEP 517 specifies that the CWD will always be the source tree +@@ -32,9 +40,13 @@ def get_requires_for_build_wheel(config_settings=None): + docstring, version = get_docstring_and_version_via_ast(module) + + if (want_summary and not docstring) or (want_version and not version): +- return info.metadata.get('requires_dist', []) ++ requires = info.metadata.get('requires_dist', []) + else: +- return [] ++ requires = [] ++ if 'PYNIXIFY' in os.environ: ++ with (pynix_out / "setup_requires.txt").open("w") as fp: ++ fp.write('\n'.join(requires)) ++ return requires + + # Requirements to build an sdist are the same as for a wheel + get_requires_for_build_sdist = get_requires_for_build_wheel +diff --git a/flit_core/flit_core/config.py b/flit_core/flit_core/config.py +index 1292956..1afbba6 100644 +--- a/flit_core/flit_core/config.py ++++ b/flit_core/flit_core/config.py +@@ -4,6 +4,8 @@ import errno + import logging + import os + import os.path as osp ++import sys ++import json + from pathlib import Path + import re + +@@ -21,6 +23,13 @@ from 
.versionno import normalise_version + + log = logging.getLogger(__name__) + ++if 'PYNIXIFY' in os.environ: ++ try: ++ pynix_out = Path(os.environ['out']) ++ except KeyError: ++ print("out environment variable not defined") ++ sys.exit(1) ++ + + class ConfigError(ValueError): + pass +@@ -175,6 +184,13 @@ def prep_toml_config(d, path): + loaded_cfg.data_directory = path.parent / data_dir + if not loaded_cfg.data_directory.is_dir(): + raise ConfigError(f"{toml_key} must refer to a directory") ++ if 'PYNIXIFY' in os.environ: ++ metadata = {} ++ for attr in ("version", "url", "license", "description"): ++ metadata[attr] = loaded_cfg.metadata[attr] ++ metadata["_fmt"] = "pyproject" ++ with (pynix_out / 'meta.json').open('w') as fp: ++ json.dump(metadata, fp) + + return loaded_cfg + diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index a57cb65..58f89dc 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -1,12 +1,29 @@ +diff --git a/src/hatchling/build.py b/src/hatchling/build.py +index d79c1e2e..02eb3fd9 100644 +--- a/src/hatchling/build.py ++++ b/src/hatchling/build.py +@@ -37,6 +37,11 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): + """ + from hatchling.builders.wheel import WheelBuilder + ++ if "PYNIXIFY" in config_settings: ++ import os ++ os.environ["PYNIXIFY"] = config_settings["PYNIXIFY"] ++ os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] ++ + builder = WheelBuilder(os.getcwd()) + return os.path.basename(next(builder.build(wheel_directory, ['standard']))) + diff --git a/src/hatchling/metadata/core.py b/src/hatchling/metadata/core.py -index 24544ad1..f255bfec 100644 +index 24544ad1..a14c4d13 100644 --- a/src/hatchling/metadata/core.py +++ b/src/hatchling/metadata/core.py -@@ -11,6 +11,14 @@ if sys.version_info >= (3, 11): +@@ -11,6 +11,15 @@ if sys.version_info >= (3, 11): else: import tomli as tomllib +if 'PYNIXIFY' in os.environ: ++ assert False + from pathlib import Path + try: + pynix_out = Path(os.environ['out']) @@ -17,7 +34,7 @@ index 24544ad1..f255bfec 100644 def load_toml(path): with open(path, encoding='utf-8') as f: -@@ -250,6 +258,9 @@ class BuildMetadata: +@@ -250,6 +259,9 @@ class BuildMetadata: raise ValueError(f'Dependency #{i} of field `build-system.requires` is invalid: {e}') self._requires_complex = requires_complex @@ -27,7 +44,7 @@ index 24544ad1..f255bfec 100644 return self._requires_complex -@@ -1074,6 +1085,11 @@ class CoreMetadata: +@@ -1074,6 +1086,11 @@ class CoreMetadata: dependencies_complex[get_normalized_dependency(requirement)] = requirement self._dependencies_complex = dict(sorted(dependencies_complex.items())) @@ -65,7 +82,7 @@ index 43a0fa67..2a971bba 100644 + if 'PYNIXIFY' in os.environ: + with (pynix_out / 'meta.json').open('w') as fp: -+ json.dump({"version": None, "url": None, "license": None, "description": None, "_fmt": "pyproject"}, fp) ++ json.dump({"version": None, "url": None, "license": None, "description": None}, fp) + return metadata_file diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 0109c7a..399c238 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -4,15 +4,12 @@ let removeExt = fileName: builtins.elemAt (builtins.split "\\." 
fileName) 0; patchedSetuptools = pkgs.python3.pkgs.setuptools.overrideAttrs (ps: { - # src = (import {}).lib.cleanSource ./setuptools; - patches = [ ./setuptools_patch.diff ]; patchFlags = pkgs.lib.optionals (pkgs.lib.versionOlder "61" pkgs.python3.pkgs.setuptools.version) [ "--merge" "-p1" ]; - }); setuptoolsscm = pkgs.python3.pkgs.buildPythonPackage rec { @@ -66,9 +63,39 @@ let pythonImportsCheck = [ "hatch_vcs" ]; }; + patchedflitcore = pkgs.python3.pkgs.flit-core.overrideAttrs + (ps: { patches = [ ./flitcore_patch.diff ]; }); + flitscm = pkgs.python3.pkgs.buildPythonPackage rec { + pname = "flit-scm"; + version = "1.7.0"; + + format = "pyproject"; - pythonWithPackages = - pkgs.python3.withPackages (ps: [ patchedSetuptools hatchling hatchvcs ]); + src = pkgs.fetchFromGitLab { + owner = "WillDaSilva"; + repo = "flit_scm"; + rev = version; + sha256 = "sha256-K5sH+oHgX/ftvhkY+vIg6wUokAP96YxrTWds3tnEtyg="; + leaveDotGit = true; + }; + + nativeBuildInputs = + [ patchedflitcore setuptoolsscm pkgs.python3.pkgs.tomli pkgs.git ]; + propagatedBuildInputs = [ patchedflitcore setuptoolsscm ] + ++ pkgs.lib.optionals (pkgs.python3.pkgs.pythonOlder "3.11") + [ pkgs.python3.pkgs.tomli ]; + }; + patchedpip = pkgs.python3.pkgs.pip.overrideAttrs + (ps: { patches = [ ./pip_patch_final.diff ]; }); + + pythonWithPackages = pkgs.python3.withPackages (ps: [ + patchedSetuptools + setuptoolsscm + hatchling + hatchvcs + flitscm + patchedpip + ]); cleanSource = src: pkgs.lib.cleanSourceWith { @@ -92,11 +119,8 @@ in pkgs.stdenv.mkDerivation { if PYNIXIFY=1 python setup.py install; then exit 0 fi - if test -f pyproject.toml && grep "hatchling.build" pyproject.toml; then - echo 'mode = "local"' > config.toml - if PYNIXIFY=1 hatch --config config.toml --data-dir $out/data --cache-dir $out/cache build; then - exit 0 - fi + if ${pythonWithPackages.pkgs.pip}/bin/pip --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation --prefix $out --install-option="--install-dir=$out" --root $out $PWD; then + exit 0 fi # Indicate that fetching the result failed, but let the build succeed touch $out/failed diff --git a/pynixify/data/patchedpip.nix b/pynixify/data/patchedpip.nix new file mode 100644 index 0000000..9daf709 --- /dev/null +++ b/pynixify/data/patchedpip.nix @@ -0,0 +1,24 @@ +{ buildPythonPackage, fetchFromGitHub, lib }: + +buildPythonPackage rec { + pname = "pip"; + version = "22.2.2"; + format = "other"; + + src = fetchFromGitHub { + owner = "pypa"; + repo = pname; + rev = version; + sha256 = "sha256-SLjmxFUFmvgy8E8kxfc6lxxCRo+GN4L77pqkWkRR8aE="; + name = "${pname}-${version}-source"; + }; + + postPatch = '' + # Remove vendored Windows PE binaries + # Note: These are unused but make the package unreproducible. 
+ find -type f -name '*.exe' -delete + ''; + + patches = [ ./pip_patch.diff ]; + phases = [ "unpackPhase" "patchPhase" ]; +} diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff new file mode 100644 index 0000000..7af8bc9 --- /dev/null +++ b/pynixify/data/pip_patch.diff @@ -0,0 +1,72 @@ +diff --git a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py b/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py +index b0d2fc9ea..adee7f05e 100644 +--- a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py ++++ b/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py +@@ -24,12 +24,14 @@ def build_wheel_pep517( + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( +- f"Building wheel for {name} (pyproject.toml)" ++ f"Emmett is Building wheel for {name} (pyproject.toml)" + ) ++ import pudb;pudb.set_trace() + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, ++ # XXX shouldn't config_settings be here too? + ) + except Exception: + logger.error("Failed building wheel for %s", name) +diff --git a/pip-22.2.2-source/src/pip/_internal/req/req_install.py b/pip-22.2.2-source/src/pip/_internal/req/req_install.py +index a1e376c89..c070976a1 100644 +--- a/pip-22.2.2-source/src/pip/_internal/req/req_install.py ++++ b/pip-22.2.2-source/src/pip/_internal/req/req_install.py +@@ -480,6 +480,7 @@ class InstallRequirement: + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires ++ assert False + self.pep517_backend = ConfiguredPep517HookCaller( + self, + self.unpacked_source_directory, +diff --git a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py +index 77a17ff0f..73ab75f58 100644 +--- a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py ++++ b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py +@@ -240,6 +240,7 @@ def _build_one_inside_env( + ) -> Optional[str]: + with TempDirectory(kind="wheel") as temp_dir: + assert req.name ++ assert False + if req.use_pep517: + assert req.metadata_directory + assert req.pep517_backend +@@ -331,6 +332,7 @@ def build( + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ ++ assert False + if not requirements: + return [], [] + +diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py +index e031ed708..72e1aaa03 100644 +--- a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py ++++ b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py +@@ -205,6 +205,7 @@ class Pep517HookCaller(object): + """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) ++ assert False + return self._call_hook('build_wheel', { + 'wheel_directory': abspath(wheel_directory), + 'config_settings': config_settings, +@@ -299,6 +300,7 @@ class Pep517HookCaller(object): + # Python identifier, so non-ASCII content is wrong on Python 2 in + # any case). + # For backend_path, we use sys.getfilesystemencoding. 
++ assert False + if sys.version_info[0] == 2: + build_backend = self.build_backend.encode('ASCII') + else: diff --git a/pynixify/data/pip_patch_final.diff b/pynixify/data/pip_patch_final.diff new file mode 100644 index 0000000..82cae24 --- /dev/null +++ b/pynixify/data/pip_patch_final.diff @@ -0,0 +1,72 @@ +diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py +index b0d2fc9ea..adee7f05e 100644 +--- a/src/pip/_internal/operations/build/wheel.py ++++ b/src/pip/_internal/operations/build/wheel.py +@@ -24,12 +24,14 @@ def build_wheel_pep517( + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( +- f"Building wheel for {name} (pyproject.toml)" ++ f"Emmett is Building wheel for {name} (pyproject.toml)" + ) ++ import pudb;pudb.set_trace() + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, ++ # XXX shouldn't config_settings be here too? + ) + except Exception: + logger.error("Failed building wheel for %s", name) +diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py +index a1e376c89..c070976a1 100644 +--- a/src/pip/_internal/req/req_install.py ++++ b/src/pip/_internal/req/req_install.py +@@ -480,6 +480,7 @@ class InstallRequirement: + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires ++ assert False + self.pep517_backend = ConfiguredPep517HookCaller( + self, + self.unpacked_source_directory, +diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py +index 77a17ff0f..73ab75f58 100644 +--- a/src/pip/_internal/wheel_builder.py ++++ b/src/pip/_internal/wheel_builder.py +@@ -240,6 +240,7 @@ def _build_one_inside_env( + ) -> Optional[str]: + with TempDirectory(kind="wheel") as temp_dir: + assert req.name ++ assert False + if req.use_pep517: + assert req.metadata_directory + assert req.pep517_backend +@@ -331,6 +332,7 @@ def build( + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ ++ assert False + if not requirements: + return [], [] + +diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pep517/wrappers.py +index e031ed708..72e1aaa03 100644 +--- a/src/pip/_vendor/pep517/wrappers.py ++++ b/src/pip/_vendor/pep517/wrappers.py +@@ -205,6 +205,7 @@ class Pep517HookCaller(object): + """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) ++ assert False + return self._call_hook('build_wheel', { + 'wheel_directory': abspath(wheel_directory), + 'config_settings': config_settings, +@@ -299,6 +300,7 @@ class Pep517HookCaller(object): + # Python identifier, so non-ASCII content is wrong on Python 2 in + # any case). + # For backend_path, we use sys.getfilesystemencoding. 
++ assert False + if sys.version_info[0] == 2: + build_backend = self.build_backend.encode('ASCII') + else: diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index 498dab9..59a2915 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -122,6 +122,7 @@ async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path 'nix-build', *args_, stdout=asyncio.subprocess.PIPE, # type: ignore stderr=asyncio.subprocess.PIPE) (stdout, stderr) = await proc.communicate() + #print("%s\n%s" % (stdout.decode(), stderr.decode())) status = await proc.wait() if b'all build users are currently in use' in stderr and retries < max_retries: From b15cc06faaef075eca7d5f9a3e677b610b942221 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 08:42:36 -0700 Subject: [PATCH 15/33] pin nixpkgs --- pynixify/data/parse_setuppy_data.nix | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 399c238..d6e75c8 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -1,4 +1,10 @@ -{ file, pkgs ? import { } }: +{ file, pkgs ? import (builtins.fetchGit { + name = "nixos-22.11"; + url = "https://github.com/nixos/nixpkgs/"; + # `git ls-remote https://github.com/nixos/nixpkgs nixos-unstable` + ref = "refs/heads/nixos-22.11"; + rev = "6c591e7adc514090a77209f56c9d0c551ab8530d"; +}) { } }: let removeExt = fileName: builtins.elemAt (builtins.split "\\." fileName) 0; From 7e183a4e47bb52f5e4888bc917eeb94a9d504676 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 09:05:25 -0700 Subject: [PATCH 16/33] fail during bootstrappedpip install phase with proof that patching works --- pynixify/data/parse_setuppy_data.nix | 112 ++++++++++++++++++++++++--- 1 file changed, 101 insertions(+), 11 deletions(-) diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index d6e75c8..412ba48 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -91,17 +91,107 @@ let ++ pkgs.lib.optionals (pkgs.python3.pkgs.pythonOlder "3.11") [ pkgs.python3.pkgs.tomli ]; }; - patchedpip = pkgs.python3.pkgs.pip.overrideAttrs - (ps: { patches = [ ./pip_patch_final.diff ]; }); - - pythonWithPackages = pkgs.python3.withPackages (ps: [ - patchedSetuptools - setuptoolsscm - hatchling - hatchvcs - flitscm - patchedpip - ]); + patchedbootstrappip = pkgs.stdenv.mkDerivation rec { + pname = "pip"; + inherit (pip) version; + name = "${pkgs.python3.libPrefix}-bootstrapped-${pname}-${version}"; + + srcs = [ pkgs.python3.pkgs.wheel.src pip.src patchedSetuptools.src ]; + sourceRoot = "."; + patches = [ ./pip_patch.diff ]; + + dontUseSetuptoolsBuild = true; + dontUsePipInstall = true; + + # Should be propagatedNativeBuildInputs + propagatedBuildInputs = [ + # Override to remove dependencies to prevent infinite recursion. 
+ (pkgs.python3.pkgs.pipInstallHook.override { pip = null; }) + (pkgs.python3.pkgs.setuptoolsBuildHook.override { + setuptools = null; + wheel = null; + }) + ]; + + postPatch = '' + mkdir -p $out/bin + '' + pip.postPatch; + + nativeBuildInputs = [ pkgs.makeWrapper pkgs.unzip ]; + buildInputs = [ pkgs.python3 ]; + + dontBuild = true; + + installPhase = + pkgs.lib.optionalString (!pkgs.stdenv.hostPlatform.isWindows) '' + export SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES=0 + '' + '' + # Give folders a known name + mv pip* pip + grep -R Emmett pip + exit 1 + mv setuptools* setuptools + mv wheel* wheel + # Set up PYTHONPATH. The above folders need to be on PYTHONPATH + # $out is where we are installing to and takes precedence + export PYTHONPATH="$out/${pkgs.python3.sitePackages}:$(pwd)/pip/src:$(pwd)/setuptools:$(pwd)/setuptools/pkg_resources:$(pwd)/wheel:$PYTHONPATH" + + echo "Building setuptools wheel..." + pushd setuptools + rm pyproject.toml + ${pkgs.python3.pythonForBuild.interpreter} -m pip install --no-build-isolation --no-index --prefix=$out --ignore-installed --no-dependencies --no-cache . + popd + + echo "Building wheel wheel..." + pushd wheel + ${pkgs.python3.pythonForBuild.interpreter} -m pip install --no-build-isolation --no-index --prefix=$out --ignore-installed --no-dependencies --no-cache . + popd + + echo "Building pip wheel..." + pushd pip + ${pkgs.python3.pythonForBuild.interpreter} -m pip install --no-build-isolation --no-index --prefix=$out --ignore-installed --no-dependencies --no-cache . + popd + ''; + }; + pip = pkgs.python3.pkgs.buildPythonPackage rec { + pname = "pip"; + version = "22.2.2"; + format = "other"; + + src = pkgs.fetchFromGitHub { + owner = "pypa"; + repo = pname; + rev = version; + sha256 = "sha256-SLjmxFUFmvgy8E8kxfc6lxxCRo+GN4L77pqkWkRR8aE="; + name = "${pname}-${version}-source"; + }; + + nativeBuildInputs = [ patchedbootstrappip ]; + + postPatch = '' + # Remove vendored Windows PE binaries + # Note: These are unused but make the package unreproducible. + find -type f -name '*.exe' -delete + ''; + + # pip detects that we already have bootstrapped_pip "installed", so we need + # to force it a little. 
+ pipInstallFlags = [ "--ignore-installed" ]; + + checkInputs = [ + pkgs.python3.pkgs.mock + pkgs.python3.pkgs.scripttest + pkgs.python3.pkgs.virtualenv + pkgs.python3.pkgs.pretend + pkgs.python3.pkgs.pytest + ]; + # Pip wants pytest, but tests are not distributed + doCheck = false; + patches = [ ./pip_patch_final.diff ]; + }; + + pythonWithPackages = pkgs.python3.withPackages + (ps: [ patchedSetuptools setuptoolsscm hatchling hatchvcs flitscm pip ]); cleanSource = src: pkgs.lib.cleanSourceWith { From 98b70b8226645f2d10e5c17de2e82a73cee51616 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 09:51:12 -0700 Subject: [PATCH 17/33] simplify with overrideattrs --- pynixify/data/hatchling_patch.diff | 4 +- pynixify/data/parse_setuppy_data.nix | 110 +++------------------------ pynixify/data/pip_patch_final.diff | 12 +++ 3 files changed, 26 insertions(+), 100 deletions(-) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index 58f89dc..6c6bd4f 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -1,12 +1,12 @@ diff --git a/src/hatchling/build.py b/src/hatchling/build.py -index d79c1e2e..02eb3fd9 100644 +index d79c1e2e..6b5ff25e 100644 --- a/src/hatchling/build.py +++ b/src/hatchling/build.py @@ -37,6 +37,11 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): """ from hatchling.builders.wheel import WheelBuilder -+ if "PYNIXIFY" in config_settings: ++ if config_settings is not None and "PYNIXIFY" in config_settings: + import os + os.environ["PYNIXIFY"] = config_settings["PYNIXIFY"] + os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 412ba48..9857c8a 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -91,107 +91,21 @@ let ++ pkgs.lib.optionals (pkgs.python3.pkgs.pythonOlder "3.11") [ pkgs.python3.pkgs.tomli ]; }; - patchedbootstrappip = pkgs.stdenv.mkDerivation rec { - pname = "pip"; - inherit (pip) version; - name = "${pkgs.python3.libPrefix}-bootstrapped-${pname}-${version}"; - - srcs = [ pkgs.python3.pkgs.wheel.src pip.src patchedSetuptools.src ]; - sourceRoot = "."; - patches = [ ./pip_patch.diff ]; - - dontUseSetuptoolsBuild = true; - dontUsePipInstall = true; - - # Should be propagatedNativeBuildInputs - propagatedBuildInputs = [ - # Override to remove dependencies to prevent infinite recursion. - (pkgs.python3.pkgs.pipInstallHook.override { pip = null; }) - (pkgs.python3.pkgs.setuptoolsBuildHook.override { - setuptools = null; - wheel = null; - }) - ]; - - postPatch = '' - mkdir -p $out/bin - '' + pip.postPatch; - - nativeBuildInputs = [ pkgs.makeWrapper pkgs.unzip ]; - buildInputs = [ pkgs.python3 ]; - - dontBuild = true; - - installPhase = - pkgs.lib.optionalString (!pkgs.stdenv.hostPlatform.isWindows) '' - export SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES=0 - '' + '' - # Give folders a known name - mv pip* pip - grep -R Emmett pip - exit 1 - mv setuptools* setuptools - mv wheel* wheel - # Set up PYTHONPATH. The above folders need to be on PYTHONPATH - # $out is where we are installing to and takes precedence - export PYTHONPATH="$out/${pkgs.python3.sitePackages}:$(pwd)/pip/src:$(pwd)/setuptools:$(pwd)/setuptools/pkg_resources:$(pwd)/wheel:$PYTHONPATH" - - echo "Building setuptools wheel..." 
- pushd setuptools - rm pyproject.toml - ${pkgs.python3.pythonForBuild.interpreter} -m pip install --no-build-isolation --no-index --prefix=$out --ignore-installed --no-dependencies --no-cache . - popd - - echo "Building wheel wheel..." - pushd wheel - ${pkgs.python3.pythonForBuild.interpreter} -m pip install --no-build-isolation --no-index --prefix=$out --ignore-installed --no-dependencies --no-cache . - popd - - echo "Building pip wheel..." - pushd pip - ${pkgs.python3.pythonForBuild.interpreter} -m pip install --no-build-isolation --no-index --prefix=$out --ignore-installed --no-dependencies --no-cache . - popd - ''; - }; - pip = pkgs.python3.pkgs.buildPythonPackage rec { - pname = "pip"; - version = "22.2.2"; - format = "other"; - - src = pkgs.fetchFromGitHub { - owner = "pypa"; - repo = pname; - rev = version; - sha256 = "sha256-SLjmxFUFmvgy8E8kxfc6lxxCRo+GN4L77pqkWkRR8aE="; - name = "${pname}-${version}-source"; - }; - + patchedbootstrappip = pkgs.python3.pkgs.bootstrapped-pip.overrideAttrs + (ps: { patches = [ ./pip_patch.diff ]; }); + patchedpip = pkgs.python3.pkgs.pip.overrideAttrs (ps: { nativeBuildInputs = [ patchedbootstrappip ]; - - postPatch = '' - # Remove vendored Windows PE binaries - # Note: These are unused but make the package unreproducible. - find -type f -name '*.exe' -delete - ''; - - # pip detects that we already have bootstrapped_pip "installed", so we need - # to force it a little. - pipInstallFlags = [ "--ignore-installed" ]; - - checkInputs = [ - pkgs.python3.pkgs.mock - pkgs.python3.pkgs.scripttest - pkgs.python3.pkgs.virtualenv - pkgs.python3.pkgs.pretend - pkgs.python3.pkgs.pytest - ]; - # Pip wants pytest, but tests are not distributed - doCheck = false; patches = [ ./pip_patch_final.diff ]; - }; + }); - pythonWithPackages = pkgs.python3.withPackages - (ps: [ patchedSetuptools setuptoolsscm hatchling hatchvcs flitscm pip ]); + pythonWithPackages = pkgs.python3.withPackages (ps: [ + patchedSetuptools + setuptoolsscm + hatchling + hatchvcs + flitscm + patchedpip + ]); cleanSource = src: pkgs.lib.cleanSourceWith { diff --git a/pynixify/data/pip_patch_final.diff b/pynixify/data/pip_patch_final.diff index 82cae24..6bc2dfe 100644 --- a/pynixify/data/pip_patch_final.diff +++ b/pynixify/data/pip_patch_final.diff @@ -50,6 +50,18 @@ index 77a17ff0f..73ab75f58 100644 if not requirements: return [], [] +diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py +index 954a4ab05..851d6f460 100644 +--- a/src/pip/_vendor/pep517/in_process/_in_process.py ++++ b/src/pip/_vendor/pep517/in_process/_in_process.py +@@ -151,6 +151,7 @@ def prepare_metadata_for_build_wheel( + Implements a fallback by building a wheel if the hook isn't defined, + unless _allow_fallback is False in which case HookMissing is raised. 
+ """ ++ assert False + backend = _build_backend() + try: + hook = backend.prepare_metadata_for_build_wheel diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pep517/wrappers.py index e031ed708..72e1aaa03 100644 --- a/src/pip/_vendor/pep517/wrappers.py From 94d9a9c586eeb1683091eb5327553150f13642ea Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 10:11:17 -0700 Subject: [PATCH 18/33] checkin --- pynixify/data/hatchling_patch.diff | 5 ++--- pynixify/data/parse_setuppy_data.nix | 2 +- pynixify/data/pip_patch.diff | 28 ++++++++++++++++++++++++++-- pynixify/data/pip_patch_final.diff | 20 ++++++++++++++++---- 4 files changed, 45 insertions(+), 10 deletions(-) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index 6c6bd4f..2443c38 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -1,13 +1,12 @@ diff --git a/src/hatchling/build.py b/src/hatchling/build.py -index d79c1e2e..6b5ff25e 100644 +index d79c1e2e..c88ba500 100644 --- a/src/hatchling/build.py +++ b/src/hatchling/build.py -@@ -37,6 +37,11 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): +@@ -37,6 +37,10 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): """ from hatchling.builders.wheel import WheelBuilder + if config_settings is not None and "PYNIXIFY" in config_settings: -+ import os + os.environ["PYNIXIFY"] = config_settings["PYNIXIFY"] + os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] + diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 9857c8a..443e63e 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -129,7 +129,7 @@ in pkgs.stdenv.mkDerivation { if PYNIXIFY=1 python setup.py install; then exit 0 fi - if ${pythonWithPackages.pkgs.pip}/bin/pip --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation --prefix $out --install-option="--install-dir=$out" --root $out $PWD; then + if ${patchedpip} --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation --prefix $out --install-option="--install-dir=$out" --root $out $PWD; then exit 0 fi # Indicate that fetching the result failed, but let the build succeed diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff index 7af8bc9..3f7855d 100644 --- a/pynixify/data/pip_patch.diff +++ b/pynixify/data/pip_patch.diff @@ -1,3 +1,15 @@ +diff --git a/pip-22.2.2-source/src/pip/_internal/commands/install.py b/pip-22.2.2-source/src/pip/_internal/commands/install.py +index 29907645c..851f58536 100644 +--- a/pip-22.2.2-source/src/pip/_internal/commands/install.py ++++ b/pip-22.2.2-source/src/pip/_internal/commands/install.py +@@ -413,6 +413,7 @@ class InstallCommand(RequirementCommand): + for r in requirement_set.requirements.values() + if should_build_for_install_command(r, check_binary_allowed) + ] ++ import pdb;pdb.set_trace() + + _, build_failures = build( + reqs_to_build, diff --git a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py b/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py index b0d2fc9ea..adee7f05e 100644 --- a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py @@ -31,7 +43,7 @@ index a1e376c89..c070976a1 100644 self, self.unpacked_source_directory, diff --git a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py 
b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py -index 77a17ff0f..73ab75f58 100644 +index 77a17ff0f..f628462b3 100644 --- a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py +++ b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py @@ -240,6 +240,7 @@ def _build_one_inside_env( @@ -46,10 +58,22 @@ index 77a17ff0f..73ab75f58 100644 :return: The list of InstallRequirement that succeeded to build and the list of InstallRequirement that failed to build. """ -+ assert False ++ raise Exception if not requirements: return [], [] +diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py b/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py +index 954a4ab05..851d6f460 100644 +--- a/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py ++++ b/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py +@@ -151,6 +151,7 @@ def prepare_metadata_for_build_wheel( + Implements a fallback by building a wheel if the hook isn't defined, + unless _allow_fallback is False in which case HookMissing is raised. + """ ++ assert False + backend = _build_backend() + try: + hook = backend.prepare_metadata_for_build_wheel diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py index e031ed708..72e1aaa03 100644 --- a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py diff --git a/pynixify/data/pip_patch_final.diff b/pynixify/data/pip_patch_final.diff index 6bc2dfe..0772346 100644 --- a/pynixify/data/pip_patch_final.diff +++ b/pynixify/data/pip_patch_final.diff @@ -1,10 +1,22 @@ +diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py +index 29907645c..851f58536 100644 +--- a/src/pip/_internal/commands/install.py ++++ b/src/pip/_internal/commands/install.py +@@ -413,6 +413,7 @@ class InstallCommand(RequirementCommand): + for r in requirement_set.requirements.values() + if should_build_for_install_command(r, check_binary_allowed) + ] ++ import pdb;pdb.set_trace() + + _, build_failures = build( + reqs_to_build, diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py index b0d2fc9ea..adee7f05e 100644 --- a/src/pip/_internal/operations/build/wheel.py +++ b/src/pip/_internal/operations/build/wheel.py @@ -24,12 +24,14 @@ def build_wheel_pep517( logger.debug("Destination directory: %s", tempd) - + runner = runner_with_spinner_message( - f"Building wheel for {name} (pyproject.toml)" + f"Emmett is Building wheel for {name} (pyproject.toml)" @@ -31,7 +43,7 @@ index a1e376c89..c070976a1 100644 self, self.unpacked_source_directory, diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py -index 77a17ff0f..73ab75f58 100644 +index 77a17ff0f..f628462b3 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -240,6 +240,7 @@ def _build_one_inside_env( @@ -46,10 +58,10 @@ index 77a17ff0f..73ab75f58 100644 :return: The list of InstallRequirement that succeeded to build and the list of InstallRequirement that failed to build. 
""" -+ assert False ++ raise Exception if not requirements: return [], [] - + diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py index 954a4ab05..851d6f460 100644 --- a/src/pip/_vendor/pep517/in_process/_in_process.py From fbd5974f63ada359eba10993f1f32da75d4c30dd Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 11:00:56 -0700 Subject: [PATCH 19/33] checkin --- pynixify/data/parse_setuppy_data.nix | 6 +- pynixify/data/pip_patch.diff | 136 ++++++++++++++++++--------- pynixify/data/pip_patch_final.diff | 136 ++++++++++++++++++--------- 3 files changed, 190 insertions(+), 88 deletions(-) diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 443e63e..8af6507 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -94,7 +94,7 @@ let patchedbootstrappip = pkgs.python3.pkgs.bootstrapped-pip.overrideAttrs (ps: { patches = [ ./pip_patch.diff ]; }); patchedpip = pkgs.python3.pkgs.pip.overrideAttrs (ps: { - nativeBuildInputs = [ patchedbootstrappip ]; + #nativeBuildInputs = [ patchedbootstrappip ]; patches = [ ./pip_patch_final.diff ]; }); @@ -129,9 +129,7 @@ in pkgs.stdenv.mkDerivation { if PYNIXIFY=1 python setup.py install; then exit 0 fi - if ${patchedpip} --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation --prefix $out --install-option="--install-dir=$out" --root $out $PWD; then - exit 0 - fi + ${patchedpip}/bin/pip --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation --prefix $out --install-option="--install-dir=$out" --root $out $PWD # Indicate that fetching the result failed, but let the build succeed touch $out/failed ''; diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff index 3f7855d..eca7ed2 100644 --- a/pynixify/data/pip_patch.diff +++ b/pynixify/data/pip_patch.diff @@ -1,96 +1,148 @@ diff --git a/pip-22.2.2-source/src/pip/_internal/commands/install.py b/pip-22.2.2-source/src/pip/_internal/commands/install.py -index 29907645c..851f58536 100644 +index 29907645c..67f29c65d 100644 --- a/pip-22.2.2-source/src/pip/_internal/commands/install.py +++ b/pip-22.2.2-source/src/pip/_internal/commands/install.py -@@ -413,6 +413,7 @@ class InstallCommand(RequirementCommand): +@@ -413,12 +413,13 @@ class InstallCommand(RequirementCommand): for r in requirement_set.requirements.values() if should_build_for_install_command(r, check_binary_allowed) ] -+ import pdb;pdb.set_trace() - +- ++ if options.config_settings is not None: ++ raise Exception _, build_failures = build( reqs_to_build, + wheel_cache=wheel_cache, + verify=True, +- build_options=[], ++ build_options=[options.config_settings], + global_options=[], + ) + +diff --git a/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py b/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py +index e2b7b4445..cd144e024 100644 +--- a/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py ++++ b/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py +@@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory + + + def generate_metadata( +- build_env: BuildEnvironment, backend: Pep517HookCaller, details: str ++ build_env: BuildEnvironment, backend: Pep517HookCaller, details: str, config_settings + ) -> str: + """Generate metadata using mechanisms described in PEP 517. 
+ +@@ -32,7 +32,7 @@ def generate_metadata( + runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") + with backend.subprocess_runner(runner): + try: +- distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) ++ distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir, config_settings) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + diff --git a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py b/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py -index b0d2fc9ea..adee7f05e 100644 +index b0d2fc9ea..767966e66 100644 --- a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py +++ b/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py -@@ -24,12 +24,14 @@ def build_wheel_pep517( +@@ -14,6 +14,7 @@ def build_wheel_pep517( + backend: Pep517HookCaller, + metadata_directory: str, + tempd: str, ++ config_settings, + ) -> Optional[str]: + """Build one InstallRequirement using the PEP 517 build process. + +@@ -24,12 +25,13 @@ def build_wheel_pep517( logger.debug("Destination directory: %s", tempd) runner = runner_with_spinner_message( - f"Building wheel for {name} (pyproject.toml)" + f"Emmett is Building wheel for {name} (pyproject.toml)" ) -+ import pudb;pudb.set_trace() with backend.subprocess_runner(runner): wheel_name = backend.build_wheel( tempd, metadata_directory=metadata_directory, -+ # XXX shouldn't config_settings be here too? ++ config_settings=config_settings, ) except Exception: logger.error("Failed building wheel for %s", name) diff --git a/pip-22.2.2-source/src/pip/_internal/req/req_install.py b/pip-22.2.2-source/src/pip/_internal/req/req_install.py -index a1e376c89..c070976a1 100644 +index a1e376c89..948b092ac 100644 --- a/pip-22.2.2-source/src/pip/_internal/req/req_install.py +++ b/pip-22.2.2-source/src/pip/_internal/req/req_install.py -@@ -480,6 +480,7 @@ class InstallRequirement: - requires, backend, check, backend_path = pyproject_toml_data - self.requirements_to_check = check - self.pyproject_requires = requires -+ assert False - self.pep517_backend = ConfiguredPep517HookCaller( - self, - self.unpacked_source_directory, +@@ -534,6 +534,7 @@ class InstallRequirement: + build_env=self.build_env, + backend=self.pep517_backend, + details=details, ++ config_settings=self.config_settings, + ) + else: + self.metadata_directory = generate_metadata_legacy( diff --git a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py -index 77a17ff0f..f628462b3 100644 +index 77a17ff0f..72e93a580 100644 --- a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py +++ b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py -@@ -240,6 +240,7 @@ def _build_one_inside_env( - ) -> Optional[str]: - with TempDirectory(kind="wheel") as temp_dir: - assert req.name -+ assert False - if req.use_pep517: - assert req.metadata_directory - assert req.pep517_backend -@@ -331,6 +332,7 @@ def build( +@@ -264,6 +264,7 @@ def _build_one_inside_env( + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, ++ config_settings=req.config_settings, + ) + else: + wheel_path = build_wheel_legacy( +@@ -331,6 +332,9 @@ def build( :return: The list of InstallRequirement that succeeded to build and the list of InstallRequirement that failed to build. 
""" -+ raise Exception ++ if build_options and build_options[0] is not None: ++ print(build_options) ++ raise Exception if not requirements: return [], [] diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py b/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py -index 954a4ab05..851d6f460 100644 +index 954a4ab05..c49ced4e0 100644 --- a/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py +++ b/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py -@@ -151,6 +151,7 @@ def prepare_metadata_for_build_wheel( +@@ -151,6 +151,8 @@ def prepare_metadata_for_build_wheel( Implements a fallback by building a wheel if the hook isn't defined, unless _allow_fallback is False in which case HookMissing is raised. """ -+ assert False ++ if config_settings is not None: ++ raise Exception backend = _build_backend() try: hook = backend.prepare_metadata_for_build_wheel +@@ -253,6 +255,8 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): + prepare_metadata_for_build_wheel fallback, this + will copy it rather than rebuilding the wheel. + """ ++ if config_settings is not None: ++ raise Exception + prebuilt_whl = _find_already_built_wheel(metadata_directory) + if prebuilt_whl: + shutil.copy2(prebuilt_whl, wheel_directory) diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py -index e031ed708..72e1aaa03 100644 +index e031ed708..83862d2ed 100644 --- a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py +++ b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py -@@ -205,6 +205,7 @@ class Pep517HookCaller(object): +@@ -185,6 +185,9 @@ class Pep517HookCaller(object): + and the dist-info extracted from that (unless _allow_fallback is + False). + """ ++ if config_settings is not None: ++ print(config_settings) ++ raise Exception + return self._call_hook('prepare_metadata_for_build_wheel', { + 'metadata_directory': abspath(metadata_directory), + 'config_settings': config_settings, +@@ -205,6 +208,8 @@ class Pep517HookCaller(object): """ if metadata_directory is not None: metadata_directory = abspath(metadata_directory) -+ assert False ++ if config_settings is not None: ++ raise Exception return self._call_hook('build_wheel', { 'wheel_directory': abspath(wheel_directory), 'config_settings': config_settings, -@@ -299,6 +300,7 @@ class Pep517HookCaller(object): - # Python identifier, so non-ASCII content is wrong on Python 2 in - # any case). - # For backend_path, we use sys.getfilesystemencoding. 
-+ assert False - if sys.version_info[0] == 2: - build_backend = self.build_backend.encode('ASCII') - else: diff --git a/pynixify/data/pip_patch_final.diff b/pynixify/data/pip_patch_final.diff index 0772346..009f2de 100644 --- a/pynixify/data/pip_patch_final.diff +++ b/pynixify/data/pip_patch_final.diff @@ -1,96 +1,148 @@ diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py -index 29907645c..851f58536 100644 +index 29907645c..67f29c65d 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py -@@ -413,6 +413,7 @@ class InstallCommand(RequirementCommand): +@@ -413,12 +413,13 @@ class InstallCommand(RequirementCommand): for r in requirement_set.requirements.values() if should_build_for_install_command(r, check_binary_allowed) ] -+ import pdb;pdb.set_trace() - +- ++ if options.config_settings is not None: ++ raise Exception _, build_failures = build( reqs_to_build, + wheel_cache=wheel_cache, + verify=True, +- build_options=[], ++ build_options=[options.config_settings], + global_options=[], + ) + +diff --git a/src/pip/_internal/operations/build/metadata.py b/src/pip/_internal/operations/build/metadata.py +index e2b7b4445..cd144e024 100644 +--- a/src/pip/_internal/operations/build/metadata.py ++++ b/src/pip/_internal/operations/build/metadata.py +@@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory + + + def generate_metadata( +- build_env: BuildEnvironment, backend: Pep517HookCaller, details: str ++ build_env: BuildEnvironment, backend: Pep517HookCaller, details: str, config_settings + ) -> str: + """Generate metadata using mechanisms described in PEP 517. + +@@ -32,7 +32,7 @@ def generate_metadata( + runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") + with backend.subprocess_runner(runner): + try: +- distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) ++ distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir, config_settings) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py -index b0d2fc9ea..adee7f05e 100644 +index b0d2fc9ea..767966e66 100644 --- a/src/pip/_internal/operations/build/wheel.py +++ b/src/pip/_internal/operations/build/wheel.py -@@ -24,12 +24,14 @@ def build_wheel_pep517( +@@ -14,6 +14,7 @@ def build_wheel_pep517( + backend: Pep517HookCaller, + metadata_directory: str, + tempd: str, ++ config_settings, + ) -> Optional[str]: + """Build one InstallRequirement using the PEP 517 build process. + +@@ -24,12 +25,13 @@ def build_wheel_pep517( logger.debug("Destination directory: %s", tempd) runner = runner_with_spinner_message( - f"Building wheel for {name} (pyproject.toml)" + f"Emmett is Building wheel for {name} (pyproject.toml)" ) -+ import pudb;pudb.set_trace() with backend.subprocess_runner(runner): wheel_name = backend.build_wheel( tempd, metadata_directory=metadata_directory, -+ # XXX shouldn't config_settings be here too? 
++ config_settings=config_settings, ) except Exception: logger.error("Failed building wheel for %s", name) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py -index a1e376c89..c070976a1 100644 +index a1e376c89..948b092ac 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py -@@ -480,6 +480,7 @@ class InstallRequirement: - requires, backend, check, backend_path = pyproject_toml_data - self.requirements_to_check = check - self.pyproject_requires = requires -+ assert False - self.pep517_backend = ConfiguredPep517HookCaller( - self, - self.unpacked_source_directory, +@@ -534,6 +534,7 @@ class InstallRequirement: + build_env=self.build_env, + backend=self.pep517_backend, + details=details, ++ config_settings=self.config_settings, + ) + else: + self.metadata_directory = generate_metadata_legacy( diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py -index 77a17ff0f..f628462b3 100644 +index 77a17ff0f..72e93a580 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py -@@ -240,6 +240,7 @@ def _build_one_inside_env( - ) -> Optional[str]: - with TempDirectory(kind="wheel") as temp_dir: - assert req.name -+ assert False - if req.use_pep517: - assert req.metadata_directory - assert req.pep517_backend -@@ -331,6 +332,7 @@ def build( +@@ -264,6 +264,7 @@ def _build_one_inside_env( + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, ++ config_settings=req.config_settings, + ) + else: + wheel_path = build_wheel_legacy( +@@ -331,6 +332,9 @@ def build( :return: The list of InstallRequirement that succeeded to build and the list of InstallRequirement that failed to build. """ -+ raise Exception ++ if build_options and build_options[0] is not None: ++ print(build_options) ++ raise Exception if not requirements: return [], [] diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py -index 954a4ab05..851d6f460 100644 +index 954a4ab05..c49ced4e0 100644 --- a/src/pip/_vendor/pep517/in_process/_in_process.py +++ b/src/pip/_vendor/pep517/in_process/_in_process.py -@@ -151,6 +151,7 @@ def prepare_metadata_for_build_wheel( +@@ -151,6 +151,8 @@ def prepare_metadata_for_build_wheel( Implements a fallback by building a wheel if the hook isn't defined, unless _allow_fallback is False in which case HookMissing is raised. """ -+ assert False ++ if config_settings is not None: ++ raise Exception backend = _build_backend() try: hook = backend.prepare_metadata_for_build_wheel +@@ -253,6 +255,8 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): + prepare_metadata_for_build_wheel fallback, this + will copy it rather than rebuilding the wheel. + """ ++ if config_settings is not None: ++ raise Exception + prebuilt_whl = _find_already_built_wheel(metadata_directory) + if prebuilt_whl: + shutil.copy2(prebuilt_whl, wheel_directory) diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pep517/wrappers.py -index e031ed708..72e1aaa03 100644 +index e031ed708..83862d2ed 100644 --- a/src/pip/_vendor/pep517/wrappers.py +++ b/src/pip/_vendor/pep517/wrappers.py -@@ -205,6 +205,7 @@ class Pep517HookCaller(object): +@@ -185,6 +185,9 @@ class Pep517HookCaller(object): + and the dist-info extracted from that (unless _allow_fallback is + False). 
+ """ ++ if config_settings is not None: ++ print(config_settings) ++ raise Exception + return self._call_hook('prepare_metadata_for_build_wheel', { + 'metadata_directory': abspath(metadata_directory), + 'config_settings': config_settings, +@@ -205,6 +208,8 @@ class Pep517HookCaller(object): """ if metadata_directory is not None: metadata_directory = abspath(metadata_directory) -+ assert False ++ if config_settings is not None: ++ raise Exception return self._call_hook('build_wheel', { 'wheel_directory': abspath(wheel_directory), 'config_settings': config_settings, -@@ -299,6 +300,7 @@ class Pep517HookCaller(object): - # Python identifier, so non-ASCII content is wrong on Python 2 in - # any case). - # For backend_path, we use sys.getfilesystemencoding. -+ assert False - if sys.version_info[0] == 2: - build_backend = self.build_backend.encode('ASCII') - else: From a120933d40c2db70393d0556029b5f6b33774922 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 11:28:19 -0700 Subject: [PATCH 20/33] simplify hatch patch --- pynixify/data/hatchling_patch.diff | 22 ++++++------ pynixify/data/parse_setuppy_data.nix | 2 +- pynixify/data/pip_patch.diff | 54 +++------------------------- pynixify/data/pip_patch_final.diff | 54 +++------------------------- 4 files changed, 22 insertions(+), 110 deletions(-) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index 2443c38..2c697c4 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -14,18 +14,17 @@ index d79c1e2e..c88ba500 100644 return os.path.basename(next(builder.build(wheel_directory, ['standard']))) diff --git a/src/hatchling/metadata/core.py b/src/hatchling/metadata/core.py -index 24544ad1..a14c4d13 100644 +index 24544ad1..2f19d057 100644 --- a/src/hatchling/metadata/core.py +++ b/src/hatchling/metadata/core.py -@@ -11,6 +11,15 @@ if sys.version_info >= (3, 11): +@@ -11,6 +11,14 @@ if sys.version_info >= (3, 11): else: import tomli as tomllib +if 'PYNIXIFY' in os.environ: -+ assert False + from pathlib import Path + try: -+ pynix_out = Path(os.environ['out']) ++ pynix_out = Path(os.environ['PYNIXIFY_OUT']) + except KeyError: + print("out environment variable not defined") + sys.exit(1) @@ -33,7 +32,7 @@ index 24544ad1..a14c4d13 100644 def load_toml(path): with open(path, encoding='utf-8') as f: -@@ -250,6 +259,9 @@ class BuildMetadata: +@@ -250,6 +258,9 @@ class BuildMetadata: raise ValueError(f'Dependency #{i} of field `build-system.requires` is invalid: {e}') self._requires_complex = requires_complex @@ -43,7 +42,7 @@ index 24544ad1..a14c4d13 100644 return self._requires_complex -@@ -1074,6 +1086,11 @@ class CoreMetadata: +@@ -1074,6 +1085,11 @@ class CoreMetadata: dependencies_complex[get_normalized_dependency(requirement)] = requirement self._dependencies_complex = dict(sorted(dependencies_complex.items())) @@ -56,18 +55,19 @@ index 24544ad1..a14c4d13 100644 return self._dependencies_complex diff --git a/src/hatchling/metadata/spec.py b/src/hatchling/metadata/spec.py -index 43a0fa67..2a971bba 100644 +index 43a0fa67..d083efce 100644 --- a/src/hatchling/metadata/spec.py +++ b/src/hatchling/metadata/spec.py -@@ -1,3 +1,15 @@ +@@ -1,3 +1,16 @@ +import json +import os +import sys + ++pynix_out = None +if 'PYNIXIFY' in os.environ: + from pathlib import Path + try: -+ pynix_out = Path(os.environ['out']) ++ pynix_out = Path(os.environ['PYNIXIFY_OUT']) + except KeyError: + print("out environment variable not defined") + sys.exit(1) @@ -75,11 +75,11 @@ index 
43a0fa67..2a971bba 100644 DEFAULT_METADATA_VERSION = '2.1' -@@ -147,6 +159,10 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): +@@ -147,6 +160,10 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' metadata_file += f'\n{metadata.core.readme}' -+ if 'PYNIXIFY' in os.environ: ++ if pynix_out is not None: + with (pynix_out / 'meta.json').open('w') as fp: + json.dump({"version": None, "url": None, "license": None, "description": None}, fp) + diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 8af6507..6226f49 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -129,7 +129,7 @@ in pkgs.stdenv.mkDerivation { if PYNIXIFY=1 python setup.py install; then exit 0 fi - ${patchedpip}/bin/pip --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation --prefix $out --install-option="--install-dir=$out" --root $out $PWD + ${patchedpip}/bin/pip --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation $PWD # Indicate that fetching the result failed, but let the build succeed touch $out/failed ''; diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff index eca7ed2..85ce1e4 100644 --- a/pynixify/data/pip_patch.diff +++ b/pynixify/data/pip_patch.diff @@ -1,14 +1,12 @@ diff --git a/pip-22.2.2-source/src/pip/_internal/commands/install.py b/pip-22.2.2-source/src/pip/_internal/commands/install.py -index 29907645c..67f29c65d 100644 +index 29907645c..ae0bb944a 100644 --- a/pip-22.2.2-source/src/pip/_internal/commands/install.py +++ b/pip-22.2.2-source/src/pip/_internal/commands/install.py -@@ -413,12 +413,13 @@ class InstallCommand(RequirementCommand): +@@ -413,12 +413,11 @@ class InstallCommand(RequirementCommand): for r in requirement_set.requirements.values() if should_build_for_install_command(r, check_binary_allowed) ] - -+ if options.config_settings is not None: -+ raise Exception _, build_failures = build( reqs_to_build, wheel_cache=wheel_cache, @@ -80,7 +78,7 @@ index a1e376c89..948b092ac 100644 else: self.metadata_directory = generate_metadata_legacy( diff --git a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py -index 77a17ff0f..72e93a580 100644 +index 77a17ff0f..f9550055c 100644 --- a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py +++ b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py @@ -264,6 +264,7 @@ def _build_one_inside_env( @@ -91,53 +89,11 @@ index 77a17ff0f..72e93a580 100644 ) else: wheel_path = build_wheel_legacy( -@@ -331,6 +332,9 @@ def build( - :return: The list of InstallRequirement that succeeded to build and - the list of InstallRequirement that failed to build. 
- """ -+ if build_options and build_options[0] is not None: -+ print(build_options) -+ raise Exception - if not requirements: - return [], [] - -diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py b/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py -index 954a4ab05..c49ced4e0 100644 ---- a/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py -+++ b/pip-22.2.2-source/src/pip/_vendor/pep517/in_process/_in_process.py -@@ -151,6 +151,8 @@ def prepare_metadata_for_build_wheel( - Implements a fallback by building a wheel if the hook isn't defined, - unless _allow_fallback is False in which case HookMissing is raised. - """ -+ if config_settings is not None: -+ raise Exception - backend = _build_backend() - try: - hook = backend.prepare_metadata_for_build_wheel -@@ -253,6 +255,8 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): - prepare_metadata_for_build_wheel fallback, this - will copy it rather than rebuilding the wheel. - """ -+ if config_settings is not None: -+ raise Exception - prebuilt_whl = _find_already_built_wheel(metadata_directory) - if prebuilt_whl: - shutil.copy2(prebuilt_whl, wheel_directory) diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py -index e031ed708..83862d2ed 100644 +index e031ed708..a55dc3770 100644 --- a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py +++ b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py -@@ -185,6 +185,9 @@ class Pep517HookCaller(object): - and the dist-info extracted from that (unless _allow_fallback is - False). - """ -+ if config_settings is not None: -+ print(config_settings) -+ raise Exception - return self._call_hook('prepare_metadata_for_build_wheel', { - 'metadata_directory': abspath(metadata_directory), - 'config_settings': config_settings, -@@ -205,6 +208,8 @@ class Pep517HookCaller(object): +@@ -205,6 +205,8 @@ class Pep517HookCaller(object): """ if metadata_directory is not None: metadata_directory = abspath(metadata_directory) diff --git a/pynixify/data/pip_patch_final.diff b/pynixify/data/pip_patch_final.diff index 009f2de..8a5c555 100644 --- a/pynixify/data/pip_patch_final.diff +++ b/pynixify/data/pip_patch_final.diff @@ -1,14 +1,12 @@ diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py -index 29907645c..67f29c65d 100644 +index 29907645c..ae0bb944a 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py -@@ -413,12 +413,13 @@ class InstallCommand(RequirementCommand): +@@ -413,12 +413,11 @@ class InstallCommand(RequirementCommand): for r in requirement_set.requirements.values() if should_build_for_install_command(r, check_binary_allowed) ] - -+ if options.config_settings is not None: -+ raise Exception _, build_failures = build( reqs_to_build, wheel_cache=wheel_cache, @@ -80,7 +78,7 @@ index a1e376c89..948b092ac 100644 else: self.metadata_directory = generate_metadata_legacy( diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py -index 77a17ff0f..72e93a580 100644 +index 77a17ff0f..f9550055c 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -264,6 +264,7 @@ def _build_one_inside_env( @@ -91,53 +89,11 @@ index 77a17ff0f..72e93a580 100644 ) else: wheel_path = build_wheel_legacy( -@@ -331,6 +332,9 @@ def build( - :return: The list of InstallRequirement that succeeded to build and - the list of InstallRequirement that failed to 
build. - """ -+ if build_options and build_options[0] is not None: -+ print(build_options) -+ raise Exception - if not requirements: - return [], [] - -diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py -index 954a4ab05..c49ced4e0 100644 ---- a/src/pip/_vendor/pep517/in_process/_in_process.py -+++ b/src/pip/_vendor/pep517/in_process/_in_process.py -@@ -151,6 +151,8 @@ def prepare_metadata_for_build_wheel( - Implements a fallback by building a wheel if the hook isn't defined, - unless _allow_fallback is False in which case HookMissing is raised. - """ -+ if config_settings is not None: -+ raise Exception - backend = _build_backend() - try: - hook = backend.prepare_metadata_for_build_wheel -@@ -253,6 +255,8 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): - prepare_metadata_for_build_wheel fallback, this - will copy it rather than rebuilding the wheel. - """ -+ if config_settings is not None: -+ raise Exception - prebuilt_whl = _find_already_built_wheel(metadata_directory) - if prebuilt_whl: - shutil.copy2(prebuilt_whl, wheel_directory) diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pep517/wrappers.py -index e031ed708..83862d2ed 100644 +index e031ed708..a55dc3770 100644 --- a/src/pip/_vendor/pep517/wrappers.py +++ b/src/pip/_vendor/pep517/wrappers.py -@@ -185,6 +185,9 @@ class Pep517HookCaller(object): - and the dist-info extracted from that (unless _allow_fallback is - False). - """ -+ if config_settings is not None: -+ print(config_settings) -+ raise Exception - return self._call_hook('prepare_metadata_for_build_wheel', { - 'metadata_directory': abspath(metadata_directory), - 'config_settings': config_settings, -@@ -205,6 +208,8 @@ class Pep517HookCaller(object): +@@ -205,6 +205,8 @@ class Pep517HookCaller(object): """ if metadata_directory is not None: metadata_directory = abspath(metadata_directory) From d9471e00a447738c772072e7b4e076c1317e82a1 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 14:07:43 -0700 Subject: [PATCH 21/33] checkin --- pynixify/data/hatchling_patch.diff | 140 ++++++++++++++++++++++----- pynixify/data/parse_setuppy_data.nix | 13 ++- pynixify/data/pip_patch.diff | 129 +++++++++++++++--------- pynixify/data/pip_patch_final.diff | 85 +++++++++++----- 4 files changed, 268 insertions(+), 99 deletions(-) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index 2c697c4..d6232a6 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -1,52 +1,144 @@ diff --git a/src/hatchling/build.py b/src/hatchling/build.py -index d79c1e2e..c88ba500 100644 +index d79c1e2e..1a616e4a 100644 --- a/src/hatchling/build.py +++ b/src/hatchling/build.py -@@ -37,6 +37,10 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): +@@ -1,6 +1,17 @@ + import os + + ++def _write_pynixify_files(config_settings, deps): ++ if config_settings is not None and "PYNIXIFY_OUT" in config_settings: ++ from pathlib import Path ++ pynix_out = Path(config_settings['PYNIXIFY_OUT']) ++ for target in ("tests", "setup", "install"): ++ fp = (pynix_out / "%s_requires.txt" % target).open("w") ++ fp.write('\n'.join([str(req) for req in deps])) ++ fp.write('\n#\n') ++ fp.close() ++ ++ + def get_requires_for_build_sdist(config_settings=None): """ + https://peps.python.org/pep-0517/#get-requires-for-build-sdist +@@ -8,6 +19,7 @@ def get_requires_for_build_sdist(config_settings=None): + from 
hatchling.builders.sdist import SdistBuilder + + builder = SdistBuilder(os.getcwd()) ++ _write_pynixify_files(config_settings, builder.config.dependencies) + return builder.config.dependencies + + +@@ -18,6 +30,7 @@ def build_sdist(sdist_directory, config_settings=None): + from hatchling.builders.sdist import SdistBuilder + + builder = SdistBuilder(os.getcwd()) ++ _write_pynixify_files(config_settings, builder.config.dependencies) + return os.path.basename(next(builder.build(sdist_directory, ['standard']))) + + +@@ -28,7 +41,10 @@ def get_requires_for_build_wheel(config_settings=None): from hatchling.builders.wheel import WheelBuilder -+ if config_settings is not None and "PYNIXIFY" in config_settings: -+ os.environ["PYNIXIFY"] = config_settings["PYNIXIFY"] -+ os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] + builder = WheelBuilder(os.getcwd()) +- return builder.config.dependencies ++ deps = builder.config.dependencies ++ _write_pynixify_files(config_settings, deps) + ++ return deps + + + def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): +@@ -38,6 +54,8 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): + from hatchling.builders.wheel import WheelBuilder + builder = WheelBuilder(os.getcwd()) ++ deps = builder.config.dependencies ++ _write_pynixify_files(config_settings, deps) return os.path.basename(next(builder.build(wheel_directory, ['standard']))) + +@@ -48,6 +66,7 @@ def get_requires_for_build_editable(config_settings=None): + from hatchling.builders.wheel import WheelBuilder + + builder = WheelBuilder(os.getcwd()) ++ _write_pynixify_files(config_settings, builder.config.dependencies) + return builder.config.dependencies + + +@@ -58,6 +77,7 @@ def build_editable(wheel_directory, config_settings=None, metadata_directory=Non + from hatchling.builders.wheel import WheelBuilder + + builder = WheelBuilder(os.getcwd()) ++ _write_pynixify_files(config_settings, builder.config.dependencies) + return os.path.basename(next(builder.build(wheel_directory, ['editable']))) + + +@@ -80,6 +100,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: + https://peps.python.org/pep-0517/#prepare-metadata-for-build-wheel + """ + from hatchling.builders.wheel import WheelBuilder ++ if config_settings is not None and "PYNIXIFY_OUT" in config_settings: ++ os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] + + builder = WheelBuilder(os.getcwd()) + +@@ -89,6 +111,7 @@ if 'PIP_BUILD_TRACKER' not in os.environ: + + with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: + f.write(builder.config.core_metadata_constructor(builder.metadata)) ++ _write_pynixify_files(config_settings, builder.config.dependencies) + + return os.path.basename(directory) + +@@ -97,6 +120,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: + https://peps.python.org/pep-0660/#prepare-metadata-for-build-editable + """ + from hatchling.builders.wheel import EDITABLES_MINIMUM_VERSION, WheelBuilder ++ if config_settings is not None and "PYNIXIFY_OUT" in config_settings: ++ os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] + + builder = WheelBuilder(os.getcwd()) + +@@ -110,5 +135,6 @@ if 'PIP_BUILD_TRACKER' not in os.environ: + + with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: + f.write(builder.config.core_metadata_constructor(builder.metadata, extra_dependencies=extra_dependencies)) ++ _write_pynixify_files(config_settings, builder.config.dependencies) + + return os.path.basename(directory) diff --git 
a/src/hatchling/metadata/core.py b/src/hatchling/metadata/core.py -index 24544ad1..2f19d057 100644 +index 24544ad1..daa91f87 100644 --- a/src/hatchling/metadata/core.py +++ b/src/hatchling/metadata/core.py -@@ -11,6 +11,14 @@ if sys.version_info >= (3, 11): +@@ -11,6 +11,11 @@ if sys.version_info >= (3, 11): else: import tomli as tomllib -+if 'PYNIXIFY' in os.environ: ++pynix_out = None ++if 'PYNIXIFY_OUT' in os.environ: + from pathlib import Path -+ try: -+ pynix_out = Path(os.environ['PYNIXIFY_OUT']) -+ except KeyError: -+ print("out environment variable not defined") -+ sys.exit(1) ++ pynix_out = Path(os.environ['PYNIXIFY_OUT']) + def load_toml(path): with open(path, encoding='utf-8') as f: -@@ -250,6 +258,9 @@ class BuildMetadata: +@@ -250,6 +255,9 @@ class BuildMetadata: raise ValueError(f'Dependency #{i} of field `build-system.requires` is invalid: {e}') self._requires_complex = requires_complex -+ if 'PYNIXIFY' in os.environ: ++ if pynix_out: + with (pynix_out / "setup_requires.txt").open("w") as fp: + fp.write('\n'.join([str(req) for req in self._requires_complex])) return self._requires_complex -@@ -1074,6 +1085,11 @@ class CoreMetadata: +@@ -1074,6 +1082,13 @@ class CoreMetadata: dependencies_complex[get_normalized_dependency(requirement)] = requirement self._dependencies_complex = dict(sorted(dependencies_complex.items())) -+ if 'PYNIXIFY' in os.environ: ++ if pynix_out: ++ with (pynix_out / "setup_requires.txt").open("w") as fp: ++ fp.write('\n'.join([str(req) for req in self._dependencies_complex])) + with (pynix_out / "install_requires.txt").open("w") as fp: + fp.write('\n'.join([str(req) for req in self._dependencies_complex])) + with (pynix_out / "tests_requires.txt").open("w") as fp: @@ -55,27 +147,23 @@ index 24544ad1..2f19d057 100644 return self._dependencies_complex diff --git a/src/hatchling/metadata/spec.py b/src/hatchling/metadata/spec.py -index 43a0fa67..d083efce 100644 +index 43a0fa67..3d0b560e 100644 --- a/src/hatchling/metadata/spec.py +++ b/src/hatchling/metadata/spec.py -@@ -1,3 +1,16 @@ +@@ -1,3 +1,12 @@ +import json +import os +import sys + +pynix_out = None -+if 'PYNIXIFY' in os.environ: ++if 'PYNIXIFY_OUT' in os.environ: + from pathlib import Path -+ try: -+ pynix_out = Path(os.environ['PYNIXIFY_OUT']) -+ except KeyError: -+ print("out environment variable not defined") -+ sys.exit(1) ++ pynix_out = Path(os.environ['PYNIXIFY_OUT']) + DEFAULT_METADATA_VERSION = '2.1' -@@ -147,6 +160,10 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): +@@ -147,6 +156,10 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' metadata_file += f'\n{metadata.core.readme}' diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 6226f49..e20b5ec 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -91,12 +91,8 @@ let ++ pkgs.lib.optionals (pkgs.python3.pkgs.pythonOlder "3.11") [ pkgs.python3.pkgs.tomli ]; }; - patchedbootstrappip = pkgs.python3.pkgs.bootstrapped-pip.overrideAttrs - (ps: { patches = [ ./pip_patch.diff ]; }); - patchedpip = pkgs.python3.pkgs.pip.overrideAttrs (ps: { - #nativeBuildInputs = [ patchedbootstrappip ]; - patches = [ ./pip_patch_final.diff ]; - }); + patchedpip = pkgs.python3.pkgs.pip.overrideAttrs + (ps: { patches = [ ./pip_patch_final.diff ]; }); pythonWithPackages = pkgs.python3.withPackages (ps: [ patchedSetuptools @@ -129,7 +125,10 @@ in 
pkgs.stdenv.mkDerivation { if PYNIXIFY=1 python setup.py install; then exit 0 fi - ${patchedpip}/bin/pip --no-cache-dir install --config-settings PYNIXIFY_OUT=$out --config-settings PYNIXIFY=1 --no-build-isolation $PWD + #${patchedpip}/bin/pip --no-cache-dir wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD + if ${patchedpip}/bin/pip --no-cache-dir wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD; then + exit 0 + fi # Indicate that fetching the result failed, but let the build succeed touch $out/failed ''; diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff index 85ce1e4..c0681c4 100644 --- a/pynixify/data/pip_patch.diff +++ b/pynixify/data/pip_patch.diff @@ -1,10 +1,13 @@ -diff --git a/pip-22.2.2-source/src/pip/_internal/commands/install.py b/pip-22.2.2-source/src/pip/_internal/commands/install.py -index 29907645c..ae0bb944a 100644 ---- a/pip-22.2.2-source/src/pip/_internal/commands/install.py -+++ b/pip-22.2.2-source/src/pip/_internal/commands/install.py -@@ -413,12 +413,11 @@ class InstallCommand(RequirementCommand): +diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py +index 29907645c..d71b4e603 100644 +--- a/src/pip/_internal/commands/install.py ++++ b/src/pip/_internal/commands/install.py +@@ -411,14 +411,13 @@ class InstallCommand(RequirementCommand): + reqs_to_build = [ + r for r in requirement_set.requirements.values() - if should_build_for_install_command(r, check_binary_allowed) +- if should_build_for_install_command(r, check_binary_allowed) ++ if True or should_build_for_install_command(r, check_binary_allowed) ] - _, build_failures = build( @@ -15,20 +18,43 @@ index 29907645c..ae0bb944a 100644 + build_options=[options.config_settings], global_options=[], ) - -diff --git a/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py b/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py + +diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py +index 9dd6c82f2..c54f67554 100644 +--- a/src/pip/_internal/commands/wheel.py ++++ b/src/pip/_internal/commands/wheel.py +@@ -148,15 +148,16 @@ class WheelCommand(RequirementCommand): + for req in requirement_set.requirements.values(): + if req.is_wheel: + preparer.save_linked_requirement(req) +- elif should_build_for_wheel_command(req): ++ elif True or should_build_for_wheel_command(req): + reqs_to_build.append(req) + + # build wheels ++ build_options = dict(list((options.config_settings or {}).items()) + list((options.build_options or {}).items())) + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=(not options.no_verify), +- build_options=options.build_options or [], ++ build_options=build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: +diff --git a/src/pip/_internal/operations/build/metadata.py b/src/pip/_internal/operations/build/metadata.py index e2b7b4445..cd144e024 100644 ---- a/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py -+++ b/pip-22.2.2-source/src/pip/_internal/operations/build/metadata.py +--- a/src/pip/_internal/operations/build/metadata.py ++++ b/src/pip/_internal/operations/build/metadata.py @@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory - - + + def generate_metadata( - build_env: BuildEnvironment, backend: Pep517HookCaller, details: str + build_env: BuildEnvironment, backend: Pep517HookCaller, details: str, config_settings ) -> str: 
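
Context for the config-settings plumbing in the surrounding hunks: pip hands the --config-settings mapping through to the PEP 517 hooks unchanged, which is what lets the Nix builder's `pip wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD` invocation reach the patched hatchling/flit backends. A rough, illustrative sketch of a backend-side hook using that key (the function body and placeholder dependency list are assumptions for illustration, not the patched code):

    # Illustrative only: a PEP 517-style hook receiving the PYNIXIFY_OUT key
    # supplied via `pip wheel --config-settings PYNIXIFY_OUT=<dir>`.
    from pathlib import Path

    def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
        config_settings = config_settings or {}
        out = config_settings.get("PYNIXIFY_OUT")
        if out is not None:
            out_dir = Path(out)
            deps = ["packaging", "requests"]  # placeholder dependency list
            for target in ("tests", "setup", "install"):
                # mirror the <target>_requires.txt layout used by the patches in this series
                (out_dir / f"{target}_requires.txt").write_text("\n".join(deps) + "\n")
        # a real backend would build the wheel and return its filename here
        return "example-0.0.0-py3-none-any.whl"
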
"""Generate metadata using mechanisms described in PEP 517. - + @@ -32,7 +32,7 @@ def generate_metadata( runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") with backend.subprocess_runner(runner): @@ -37,11 +63,11 @@ index e2b7b4445..cd144e024 100644 + distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir, config_settings) except InstallationSubprocessError as error: raise MetadataGenerationFailed(package_details=details) from error - -diff --git a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py b/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py -index b0d2fc9ea..767966e66 100644 ---- a/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py -+++ b/pip-22.2.2-source/src/pip/_internal/operations/build/wheel.py + +diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py +index b0d2fc9ea..d44b64ec9 100644 +--- a/src/pip/_internal/operations/build/wheel.py ++++ b/src/pip/_internal/operations/build/wheel.py @@ -14,6 +14,7 @@ def build_wheel_pep517( backend: Pep517HookCaller, metadata_directory: str, @@ -49,10 +75,10 @@ index b0d2fc9ea..767966e66 100644 + config_settings, ) -> Optional[str]: """Build one InstallRequirement using the PEP 517 build process. - -@@ -24,12 +25,13 @@ def build_wheel_pep517( + +@@ -24,14 +25,16 @@ def build_wheel_pep517( logger.debug("Destination directory: %s", tempd) - + runner = runner_with_spinner_message( - f"Building wheel for {name} (pyproject.toml)" + f"Emmett is Building wheel for {name} (pyproject.toml)" @@ -64,11 +90,15 @@ index b0d2fc9ea..767966e66 100644 + config_settings=config_settings, ) except Exception: - logger.error("Failed building wheel for %s", name) -diff --git a/pip-22.2.2-source/src/pip/_internal/req/req_install.py b/pip-22.2.2-source/src/pip/_internal/req/req_install.py +- logger.error("Failed building wheel for %s", name) ++ logger.error("Emmett Failed building wheel for %s", name) ++ raise + return None + return os.path.join(tempd, wheel_name) +diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index a1e376c89..948b092ac 100644 ---- a/pip-22.2.2-source/src/pip/_internal/req/req_install.py -+++ b/pip-22.2.2-source/src/pip/_internal/req/req_install.py +--- a/src/pip/_internal/req/req_install.py ++++ b/src/pip/_internal/req/req_install.py @@ -534,6 +534,7 @@ class InstallRequirement: build_env=self.build_env, backend=self.pep517_backend, @@ -77,28 +107,39 @@ index a1e376c89..948b092ac 100644 ) else: self.metadata_directory = generate_metadata_legacy( -diff --git a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py -index 77a17ff0f..f9550055c 100644 ---- a/pip-22.2.2-source/src/pip/_internal/wheel_builder.py -+++ b/pip-22.2.2-source/src/pip/_internal/wheel_builder.py -@@ -264,6 +264,7 @@ def _build_one_inside_env( +diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py +index 77a17ff0f..ef1009dba 100644 +--- a/src/pip/_internal/wheel_builder.py ++++ b/src/pip/_internal/wheel_builder.py +@@ -257,6 +257,7 @@ def _build_one_inside_env( + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, ++ config_settings=build_options, + ) + else: + wheel_path = build_wheel_pep517( +@@ -264,6 +265,7 @@ def _build_one_inside_env( backend=req.pep517_backend, metadata_directory=req.metadata_directory, tempd=temp_dir.path, -+ config_settings=req.config_settings, ++ 
config_settings=build_options, ) else: wheel_path = build_wheel_legacy( -diff --git a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py -index e031ed708..a55dc3770 100644 ---- a/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py -+++ b/pip-22.2.2-source/src/pip/_vendor/pep517/wrappers.py -@@ -205,6 +205,8 @@ class Pep517HookCaller(object): - """ - if metadata_directory is not None: - metadata_directory = abspath(metadata_directory) -+ if config_settings is not None: -+ raise Exception - return self._call_hook('build_wheel', { - 'wheel_directory': abspath(wheel_directory), - 'config_settings': config_settings, +diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py +index 954a4ab05..03299cdbc 100644 +--- a/src/pip/_vendor/pep517/in_process/_in_process.py ++++ b/src/pip/_vendor/pep517/in_process/_in_process.py +@@ -253,11 +253,6 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): + prepare_metadata_for_build_wheel fallback, this + will copy it rather than rebuilding the wheel. + """ +- prebuilt_whl = _find_already_built_wheel(metadata_directory) +- if prebuilt_whl: +- shutil.copy2(prebuilt_whl, wheel_directory) +- return os.path.basename(prebuilt_whl) +- + return _build_backend().build_wheel(wheel_directory, config_settings, + metadata_directory) + diff --git a/pynixify/data/pip_patch_final.diff b/pynixify/data/pip_patch_final.diff index 8a5c555..c0681c4 100644 --- a/pynixify/data/pip_patch_final.diff +++ b/pynixify/data/pip_patch_final.diff @@ -1,10 +1,13 @@ diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py -index 29907645c..ae0bb944a 100644 +index 29907645c..d71b4e603 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py -@@ -413,12 +413,11 @@ class InstallCommand(RequirementCommand): +@@ -411,14 +411,13 @@ class InstallCommand(RequirementCommand): + reqs_to_build = [ + r for r in requirement_set.requirements.values() - if should_build_for_install_command(r, check_binary_allowed) +- if should_build_for_install_command(r, check_binary_allowed) ++ if True or should_build_for_install_command(r, check_binary_allowed) ] - _, build_failures = build( @@ -16,6 +19,29 @@ index 29907645c..ae0bb944a 100644 global_options=[], ) +diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py +index 9dd6c82f2..c54f67554 100644 +--- a/src/pip/_internal/commands/wheel.py ++++ b/src/pip/_internal/commands/wheel.py +@@ -148,15 +148,16 @@ class WheelCommand(RequirementCommand): + for req in requirement_set.requirements.values(): + if req.is_wheel: + preparer.save_linked_requirement(req) +- elif should_build_for_wheel_command(req): ++ elif True or should_build_for_wheel_command(req): + reqs_to_build.append(req) + + # build wheels ++ build_options = dict(list((options.config_settings or {}).items()) + list((options.build_options or {}).items())) + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=(not options.no_verify), +- build_options=options.build_options or [], ++ build_options=build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: diff --git a/src/pip/_internal/operations/build/metadata.py b/src/pip/_internal/operations/build/metadata.py index e2b7b4445..cd144e024 100644 --- a/src/pip/_internal/operations/build/metadata.py @@ -39,7 +65,7 @@ index e2b7b4445..cd144e024 
100644 raise MetadataGenerationFailed(package_details=details) from error diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py -index b0d2fc9ea..767966e66 100644 +index b0d2fc9ea..d44b64ec9 100644 --- a/src/pip/_internal/operations/build/wheel.py +++ b/src/pip/_internal/operations/build/wheel.py @@ -14,6 +14,7 @@ def build_wheel_pep517( @@ -50,7 +76,7 @@ index b0d2fc9ea..767966e66 100644 ) -> Optional[str]: """Build one InstallRequirement using the PEP 517 build process. -@@ -24,12 +25,13 @@ def build_wheel_pep517( +@@ -24,14 +25,16 @@ def build_wheel_pep517( logger.debug("Destination directory: %s", tempd) runner = runner_with_spinner_message( @@ -64,7 +90,11 @@ index b0d2fc9ea..767966e66 100644 + config_settings=config_settings, ) except Exception: - logger.error("Failed building wheel for %s", name) +- logger.error("Failed building wheel for %s", name) ++ logger.error("Emmett Failed building wheel for %s", name) ++ raise + return None + return os.path.join(tempd, wheel_name) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index a1e376c89..948b092ac 100644 --- a/src/pip/_internal/req/req_install.py @@ -78,27 +108,38 @@ index a1e376c89..948b092ac 100644 else: self.metadata_directory = generate_metadata_legacy( diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py -index 77a17ff0f..f9550055c 100644 +index 77a17ff0f..ef1009dba 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py -@@ -264,6 +264,7 @@ def _build_one_inside_env( +@@ -257,6 +257,7 @@ def _build_one_inside_env( + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, ++ config_settings=build_options, + ) + else: + wheel_path = build_wheel_pep517( +@@ -264,6 +265,7 @@ def _build_one_inside_env( backend=req.pep517_backend, metadata_directory=req.metadata_directory, tempd=temp_dir.path, -+ config_settings=req.config_settings, ++ config_settings=build_options, ) else: wheel_path = build_wheel_legacy( -diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pep517/wrappers.py -index e031ed708..a55dc3770 100644 ---- a/src/pip/_vendor/pep517/wrappers.py -+++ b/src/pip/_vendor/pep517/wrappers.py -@@ -205,6 +205,8 @@ class Pep517HookCaller(object): - """ - if metadata_directory is not None: - metadata_directory = abspath(metadata_directory) -+ if config_settings is not None: -+ raise Exception - return self._call_hook('build_wheel', { - 'wheel_directory': abspath(wheel_directory), - 'config_settings': config_settings, +diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py +index 954a4ab05..03299cdbc 100644 +--- a/src/pip/_vendor/pep517/in_process/_in_process.py ++++ b/src/pip/_vendor/pep517/in_process/_in_process.py +@@ -253,11 +253,6 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): + prepare_metadata_for_build_wheel fallback, this + will copy it rather than rebuilding the wheel. 
+ """ +- prebuilt_whl = _find_already_built_wheel(metadata_directory) +- if prebuilt_whl: +- shutil.copy2(prebuilt_whl, wheel_directory) +- return os.path.basename(prebuilt_whl) +- + return _build_backend().build_wheel(wheel_directory, config_settings, + metadata_directory) + From d41d200b5207ca040899148236bc1e22f19e7128 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Thu, 18 May 2023 14:19:20 -0700 Subject: [PATCH 22/33] fix crash --- pynixify/data/hatchling_patch.diff | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index d6232a6..78ab559 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -1,5 +1,5 @@ diff --git a/src/hatchling/build.py b/src/hatchling/build.py -index d79c1e2e..1a616e4a 100644 +index d79c1e2e..29d31be8 100644 --- a/src/hatchling/build.py +++ b/src/hatchling/build.py @@ -1,6 +1,17 @@ @@ -11,7 +11,7 @@ index d79c1e2e..1a616e4a 100644 + from pathlib import Path + pynix_out = Path(config_settings['PYNIXIFY_OUT']) + for target in ("tests", "setup", "install"): -+ fp = (pynix_out / "%s_requires.txt" % target).open("w") ++ fp = (pynix_out / ("%s_requires.txt" % target)).open("w") + fp.write('\n'.join([str(req) for req in deps])) + fp.write('\n#\n') + fp.close() From 492057b2a42f351c53a8244841e343fc600721c6 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 08:03:15 -0700 Subject: [PATCH 23/33] prefix overrides with an underscore to avoid infinite recursion --- pynixify/data/hatchling_patch.diff | 36 +++++++++++++++--------------- pynixify/expression_builder.py | 2 +- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index 78ab559..0cf1001 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -1,26 +1,29 @@ diff --git a/src/hatchling/build.py b/src/hatchling/build.py -index d79c1e2e..29d31be8 100644 +index d79c1e2e..d966e687 100644 --- a/src/hatchling/build.py +++ b/src/hatchling/build.py -@@ -1,6 +1,17 @@ +@@ -1,6 +1,20 @@ import os +def _write_pynixify_files(config_settings, deps): + if config_settings is not None and "PYNIXIFY_OUT" in config_settings: + from pathlib import Path ++ import json + pynix_out = Path(config_settings['PYNIXIFY_OUT']) + for target in ("tests", "setup", "install"): + fp = (pynix_out / ("%s_requires.txt" % target)).open("w") + fp.write('\n'.join([str(req) for req in deps])) -+ fp.write('\n#\n') ++ fp.write('\nhatchling\nhatch-vcs') + fp.close() ++ with (pynix_out / 'meta.json').open('w') as fp: ++ json.dump({"version": None, "url": None, "license": None, "description": None}, fp) + + def get_requires_for_build_sdist(config_settings=None): """ https://peps.python.org/pep-0517/#get-requires-for-build-sdist -@@ -8,6 +19,7 @@ def get_requires_for_build_sdist(config_settings=None): +@@ -8,6 +22,7 @@ def get_requires_for_build_sdist(config_settings=None): from hatchling.builders.sdist import SdistBuilder builder = SdistBuilder(os.getcwd()) @@ -28,7 +31,7 @@ index d79c1e2e..29d31be8 100644 return builder.config.dependencies -@@ -18,6 +30,7 @@ def build_sdist(sdist_directory, config_settings=None): +@@ -18,6 +33,7 @@ def build_sdist(sdist_directory, config_settings=None): from hatchling.builders.sdist import SdistBuilder builder = SdistBuilder(os.getcwd()) @@ -36,7 +39,7 @@ index d79c1e2e..29d31be8 100644 return os.path.basename(next(builder.build(sdist_directory, ['standard']))) -@@ 
-28,7 +41,10 @@ def get_requires_for_build_wheel(config_settings=None): +@@ -28,7 +44,10 @@ def get_requires_for_build_wheel(config_settings=None): from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) @@ -48,7 +51,7 @@ index d79c1e2e..29d31be8 100644 def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): -@@ -38,6 +54,8 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): +@@ -38,6 +57,8 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) @@ -57,7 +60,7 @@ index d79c1e2e..29d31be8 100644 return os.path.basename(next(builder.build(wheel_directory, ['standard']))) -@@ -48,6 +66,7 @@ def get_requires_for_build_editable(config_settings=None): +@@ -48,6 +69,7 @@ def get_requires_for_build_editable(config_settings=None): from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) @@ -65,7 +68,7 @@ index d79c1e2e..29d31be8 100644 return builder.config.dependencies -@@ -58,6 +77,7 @@ def build_editable(wheel_directory, config_settings=None, metadata_directory=Non +@@ -58,6 +80,7 @@ def build_editable(wheel_directory, config_settings=None, metadata_directory=Non from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) @@ -73,7 +76,7 @@ index d79c1e2e..29d31be8 100644 return os.path.basename(next(builder.build(wheel_directory, ['editable']))) -@@ -80,6 +100,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: +@@ -80,6 +103,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: https://peps.python.org/pep-0517/#prepare-metadata-for-build-wheel """ from hatchling.builders.wheel import WheelBuilder @@ -82,7 +85,7 @@ index d79c1e2e..29d31be8 100644 builder = WheelBuilder(os.getcwd()) -@@ -89,6 +111,7 @@ if 'PIP_BUILD_TRACKER' not in os.environ: +@@ -89,6 +114,7 @@ if 'PIP_BUILD_TRACKER' not in os.environ: with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: f.write(builder.config.core_metadata_constructor(builder.metadata)) @@ -90,7 +93,7 @@ index d79c1e2e..29d31be8 100644 return os.path.basename(directory) -@@ -97,6 +120,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: +@@ -97,6 +123,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: https://peps.python.org/pep-0660/#prepare-metadata-for-build-editable """ from hatchling.builders.wheel import EDITABLES_MINIMUM_VERSION, WheelBuilder @@ -99,7 +102,7 @@ index d79c1e2e..29d31be8 100644 builder = WheelBuilder(os.getcwd()) -@@ -110,5 +135,6 @@ if 'PIP_BUILD_TRACKER' not in os.environ: +@@ -110,5 +138,6 @@ if 'PIP_BUILD_TRACKER' not in os.environ: with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: f.write(builder.config.core_metadata_constructor(builder.metadata, extra_dependencies=extra_dependencies)) @@ -147,7 +150,7 @@ index 24544ad1..daa91f87 100644 return self._dependencies_complex diff --git a/src/hatchling/metadata/spec.py b/src/hatchling/metadata/spec.py -index 43a0fa67..3d0b560e 100644 +index 43a0fa67..2182e3c6 100644 --- a/src/hatchling/metadata/spec.py +++ b/src/hatchling/metadata/spec.py @@ -1,3 +1,12 @@ @@ -163,13 +166,10 @@ index 43a0fa67..3d0b560e 100644 DEFAULT_METADATA_VERSION = '2.1' -@@ -147,6 +156,10 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): +@@ -147,6 +156,7 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' metadata_file += 
f'\n{metadata.core.readme}' -+ if pynix_out is not None: -+ with (pynix_out / 'meta.json').open('w') as fp: -+ json.dump({"version": None, "url": None, "license": None, "description": None}, fp) + return metadata_file diff --git a/pynixify/expression_builder.py b/pynixify/expression_builder.py index 2007e97..3f192f7 100644 --- a/pynixify/expression_builder.py +++ b/pynixify/expression_builder.py @@ -115,7 +115,7 @@ packageOverrides = self: super: { % for (package_name, path) in overlays.items(): - ${package_name} = + ${"_" + package_name} = self.callPackage ${'' if path.is_absolute() else './'}${str(path).replace('/default.nix', '')} {}; From 2d078d70c2baa2b9a21a6920df4564ed6b13284e Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 08:05:48 -0700 Subject: [PATCH 24/33] prefix overrides with an underscore to avoid infinite recursion --- pynixify/expression_builder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pynixify/expression_builder.py b/pynixify/expression_builder.py index 3f192f7..72d6e61 100644 --- a/pynixify/expression_builder.py +++ b/pynixify/expression_builder.py @@ -115,7 +115,7 @@ packageOverrides = self: super: { % for (package_name, path) in overlays.items(): - ${"_" + package_name} = + ${package_name} = self.callPackage ${'' if path.is_absolute() else './'}${str(path).replace('/default.nix', '')} {}; @@ -181,7 +181,7 @@ def build_overlay_expr(overlays: Mapping[str, Path]): """ self: super: { % for (package_name, path) in overlays.items(): - ${package_name} = + ${"_" + package_name} = self.callPackage ${'' if path.is_absolute() else './'}${str(path).replace('/default.nix', '')} {}; From e8161051b7f3d8cf949f5b99d3040bb07c7f4af9 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 08:15:49 -0700 Subject: [PATCH 25/33] correct underscore placement --- pynixify/data/flitcore_patch.diff | 26 ++++++++++++++++++++------ pynixify/expression_builder.py | 2 +- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/pynixify/data/flitcore_patch.diff b/pynixify/data/flitcore_patch.diff index 612ef3b..edb4130 100644 --- a/pynixify/data/flitcore_patch.diff +++ b/pynixify/data/flitcore_patch.diff @@ -1,5 +1,5 @@ diff --git a/flit_core/flit_core/buildapi.py b/flit_core/flit_core/buildapi.py -index 963bf61..f48cbf8 100644 +index 963bf61..6e52511 100644 --- a/flit_core/flit_core/buildapi.py +++ b/flit_core/flit_core/buildapi.py @@ -3,6 +3,7 @@ import logging @@ -10,7 +10,7 @@ index 963bf61..f48cbf8 100644 from pathlib import Path from .common import ( -@@ -13,6 +14,13 @@ from .config import read_flit_config +@@ -13,6 +14,29 @@ from .config import read_flit_config from .wheel import make_wheel_in, _write_wheel_file from .sdist import SdistBuilder @@ -20,11 +20,27 @@ index 963bf61..f48cbf8 100644 + except KeyError: + print("out environment variable not defined") + sys.exit(1) ++ ++ ++def _write_pynixify_files(config_settings, deps): ++ if config_settings is not None and "PYNIXIFY_OUT" in config_settings: ++ from pathlib import Path ++ import json ++ print(deps) ++ raise Exception ++ pynix_out = Path(config_settings['PYNIXIFY_OUT']) ++ for target in ("tests", "setup", "install"): ++ fp = (pynix_out / ("%s_requires.txt" % target)).open("w") ++ fp.write('\n'.join([str(req) for req in deps])) ++ fp.write('\nflit_core') ++ fp.close() ++ with (pynix_out / 'meta.json').open('w') as fp: ++ json.dump({"version": None, "url": None, "license": None, "description": None}, fp) + log = logging.getLogger(__name__) # PEP 517 specifies that the 
CWD will always be the source tree -@@ -32,9 +40,13 @@ def get_requires_for_build_wheel(config_settings=None): +@@ -32,9 +56,11 @@ def get_requires_for_build_wheel(config_settings=None): docstring, version = get_docstring_and_version_via_ast(module) if (want_summary and not docstring) or (want_version and not version): @@ -33,9 +49,7 @@ index 963bf61..f48cbf8 100644 else: - return [] + requires = [] -+ if 'PYNIXIFY' in os.environ: -+ with (pynix_out / "setup_requires.txt").open("w") as fp: -+ fp.write('\n'.join(requires)) ++ _write_pynixify_files(config_settings, requires) + return requires # Requirements to build an sdist are the same as for a wheel diff --git a/pynixify/expression_builder.py b/pynixify/expression_builder.py index 72d6e61..3b4e33e 100644 --- a/pynixify/expression_builder.py +++ b/pynixify/expression_builder.py @@ -115,7 +115,7 @@ packageOverrides = self: super: { % for (package_name, path) in overlays.items(): - ${package_name} = + ${"_" + package_name} = self.callPackage ${'' if path.is_absolute() else './'}${str(path).replace('/default.nix', '')} {}; From 355722ef63596fd9bb57a2e3940dea93aabd73b2 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 08:22:08 -0700 Subject: [PATCH 26/33] hook into correct spot in flit_core --- pynixify/data/flitcore_patch.diff | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pynixify/data/flitcore_patch.diff b/pynixify/data/flitcore_patch.diff index edb4130..2e50f48 100644 --- a/pynixify/data/flitcore_patch.diff +++ b/pynixify/data/flitcore_patch.diff @@ -1,5 +1,5 @@ diff --git a/flit_core/flit_core/buildapi.py b/flit_core/flit_core/buildapi.py -index 963bf61..6e52511 100644 +index 963bf61..a8802ca 100644 --- a/flit_core/flit_core/buildapi.py +++ b/flit_core/flit_core/buildapi.py @@ -3,6 +3,7 @@ import logging @@ -54,6 +54,14 @@ index 963bf61..6e52511 100644 # Requirements to build an sdist are the same as for a wheel get_requires_for_build_sdist = get_requires_for_build_wheel +@@ -70,6 +96,7 @@ prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel + def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): + """Builds a wheel, places it in wheel_directory""" + info = make_wheel_in(pyproj_toml, Path(wheel_directory)) ++ _write_pynixify_files(config_settings, []) + return info.file.name + + def build_editable(wheel_directory, config_settings=None, metadata_directory=None): diff --git a/flit_core/flit_core/config.py b/flit_core/flit_core/config.py index 1292956..1afbba6 100644 --- a/flit_core/flit_core/config.py From de8bbde1026402e71acb9c0ada4b3069d9cb6f7b Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 09:46:26 -0700 Subject: [PATCH 27/33] cleanup --- pynixify/data/flitcore_patch.diff | 71 +------------ pynixify/data/hatchling_patch.diff | 98 +++--------------- pynixify/data/parse_setuppy_data.nix | 3 +- pynixify/data/patchedpip.nix | 24 ----- pynixify/data/pip_patch.diff | 39 ++----- pynixify/data/pip_patch_final.diff | 145 --------------------------- pynixify/nixpkgs_sources.py | 1 - 7 files changed, 29 insertions(+), 352 deletions(-) delete mode 100644 pynixify/data/patchedpip.nix delete mode 100644 pynixify/data/pip_patch_final.diff diff --git a/pynixify/data/flitcore_patch.diff b/pynixify/data/flitcore_patch.diff index 2e50f48..c703f33 100644 --- a/pynixify/data/flitcore_patch.diff +++ b/pynixify/data/flitcore_patch.diff @@ -1,5 +1,5 @@ diff --git a/flit_core/flit_core/buildapi.py b/flit_core/flit_core/buildapi.py -index 
963bf61..a8802ca 100644 +index 963bf61..5190b7e 100644 --- a/flit_core/flit_core/buildapi.py +++ b/flit_core/flit_core/buildapi.py @@ -3,6 +3,7 @@ import logging @@ -10,24 +10,14 @@ index 963bf61..a8802ca 100644 from pathlib import Path from .common import ( -@@ -13,6 +14,29 @@ from .config import read_flit_config +@@ -13,6 +14,19 @@ from .config import read_flit_config from .wheel import make_wheel_in, _write_wheel_file from .sdist import SdistBuilder -+if 'PYNIXIFY' in os.environ: -+ try: -+ pynix_out = Path(os.environ['out']) -+ except KeyError: -+ print("out environment variable not defined") -+ sys.exit(1) -+ -+ +def _write_pynixify_files(config_settings, deps): + if config_settings is not None and "PYNIXIFY_OUT" in config_settings: + from pathlib import Path + import json -+ print(deps) -+ raise Exception + pynix_out = Path(config_settings['PYNIXIFY_OUT']) + for target in ("tests", "setup", "install"): + fp = (pynix_out / ("%s_requires.txt" % target)).open("w") @@ -40,21 +30,7 @@ index 963bf61..a8802ca 100644 log = logging.getLogger(__name__) # PEP 517 specifies that the CWD will always be the source tree -@@ -32,9 +56,11 @@ def get_requires_for_build_wheel(config_settings=None): - docstring, version = get_docstring_and_version_via_ast(module) - - if (want_summary and not docstring) or (want_version and not version): -- return info.metadata.get('requires_dist', []) -+ requires = info.metadata.get('requires_dist', []) - else: -- return [] -+ requires = [] -+ _write_pynixify_files(config_settings, requires) -+ return requires - - # Requirements to build an sdist are the same as for a wheel - get_requires_for_build_sdist = get_requires_for_build_wheel -@@ -70,6 +96,7 @@ prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel +@@ -70,6 +84,7 @@ prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): """Builds a wheel, places it in wheel_directory""" info = make_wheel_in(pyproj_toml, Path(wheel_directory)) @@ -62,44 +38,3 @@ index 963bf61..a8802ca 100644 return info.file.name def build_editable(wheel_directory, config_settings=None, metadata_directory=None): -diff --git a/flit_core/flit_core/config.py b/flit_core/flit_core/config.py -index 1292956..1afbba6 100644 ---- a/flit_core/flit_core/config.py -+++ b/flit_core/flit_core/config.py -@@ -4,6 +4,8 @@ import errno - import logging - import os - import os.path as osp -+import sys -+import json - from pathlib import Path - import re - -@@ -21,6 +23,13 @@ from .versionno import normalise_version - - log = logging.getLogger(__name__) - -+if 'PYNIXIFY' in os.environ: -+ try: -+ pynix_out = Path(os.environ['out']) -+ except KeyError: -+ print("out environment variable not defined") -+ sys.exit(1) -+ - - class ConfigError(ValueError): - pass -@@ -175,6 +184,13 @@ def prep_toml_config(d, path): - loaded_cfg.data_directory = path.parent / data_dir - if not loaded_cfg.data_directory.is_dir(): - raise ConfigError(f"{toml_key} must refer to a directory") -+ if 'PYNIXIFY' in os.environ: -+ metadata = {} -+ for attr in ("version", "url", "license", "description"): -+ metadata[attr] = loaded_cfg.metadata[attr] -+ metadata["_fmt"] = "pyproject" -+ with (pynix_out / 'meta.json').open('w') as fp: -+ json.dump(metadata, fp) - - return loaded_cfg - diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index 0cf1001..bc50843 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ 
-1,5 +1,5 @@ diff --git a/src/hatchling/build.py b/src/hatchling/build.py -index d79c1e2e..d966e687 100644 +index d79c1e2e..c85a837e 100644 --- a/src/hatchling/build.py +++ b/src/hatchling/build.py @@ -1,6 +1,20 @@ @@ -39,28 +39,23 @@ index d79c1e2e..d966e687 100644 return os.path.basename(next(builder.build(sdist_directory, ['standard']))) -@@ -28,7 +44,10 @@ def get_requires_for_build_wheel(config_settings=None): +@@ -28,6 +44,7 @@ def get_requires_for_build_wheel(config_settings=None): from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) -- return builder.config.dependencies -+ deps = builder.config.dependencies -+ _write_pynixify_files(config_settings, deps) -+ -+ return deps ++ _write_pynixify_files(config_settings, builder.config.dependencies) + return builder.config.dependencies - def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): -@@ -38,6 +57,8 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): +@@ -38,6 +55,7 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) -+ deps = builder.config.dependencies -+ _write_pynixify_files(config_settings, deps) ++ _write_pynixify_files(config_settings, builder.config.dependencies) return os.path.basename(next(builder.build(wheel_directory, ['standard']))) -@@ -48,6 +69,7 @@ def get_requires_for_build_editable(config_settings=None): +@@ -48,6 +66,7 @@ def get_requires_for_build_editable(config_settings=None): from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) @@ -68,7 +63,7 @@ index d79c1e2e..d966e687 100644 return builder.config.dependencies -@@ -58,6 +80,7 @@ def build_editable(wheel_directory, config_settings=None, metadata_directory=Non +@@ -58,6 +77,7 @@ def build_editable(wheel_directory, config_settings=None, metadata_directory=Non from hatchling.builders.wheel import WheelBuilder builder = WheelBuilder(os.getcwd()) @@ -76,16 +71,7 @@ index d79c1e2e..d966e687 100644 return os.path.basename(next(builder.build(wheel_directory, ['editable']))) -@@ -80,6 +103,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: - https://peps.python.org/pep-0517/#prepare-metadata-for-build-wheel - """ - from hatchling.builders.wheel import WheelBuilder -+ if config_settings is not None and "PYNIXIFY_OUT" in config_settings: -+ os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] - - builder = WheelBuilder(os.getcwd()) - -@@ -89,6 +114,7 @@ if 'PIP_BUILD_TRACKER' not in os.environ: +@@ -89,6 +109,7 @@ if 'PIP_BUILD_TRACKER' not in os.environ: with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: f.write(builder.config.core_metadata_constructor(builder.metadata)) @@ -93,84 +79,30 @@ index d79c1e2e..d966e687 100644 return os.path.basename(directory) -@@ -97,6 +123,8 @@ if 'PIP_BUILD_TRACKER' not in os.environ: - https://peps.python.org/pep-0660/#prepare-metadata-for-build-editable - """ - from hatchling.builders.wheel import EDITABLES_MINIMUM_VERSION, WheelBuilder -+ if config_settings is not None and "PYNIXIFY_OUT" in config_settings: -+ os.environ["PYNIXIFY_OUT"] = config_settings["PYNIXIFY_OUT"] - - builder = WheelBuilder(os.getcwd()) - -@@ -110,5 +138,6 @@ if 'PIP_BUILD_TRACKER' not in os.environ: +@@ -110,5 +131,6 @@ if 'PIP_BUILD_TRACKER' not in os.environ: with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: 
f.write(builder.config.core_metadata_constructor(builder.metadata, extra_dependencies=extra_dependencies)) + _write_pynixify_files(config_settings, builder.config.dependencies) return os.path.basename(directory) -diff --git a/src/hatchling/metadata/core.py b/src/hatchling/metadata/core.py -index 24544ad1..daa91f87 100644 ---- a/src/hatchling/metadata/core.py -+++ b/src/hatchling/metadata/core.py -@@ -11,6 +11,11 @@ if sys.version_info >= (3, 11): - else: - import tomli as tomllib - -+pynix_out = None -+if 'PYNIXIFY_OUT' in os.environ: -+ from pathlib import Path -+ pynix_out = Path(os.environ['PYNIXIFY_OUT']) -+ - - def load_toml(path): - with open(path, encoding='utf-8') as f: -@@ -250,6 +255,9 @@ class BuildMetadata: - raise ValueError(f'Dependency #{i} of field `build-system.requires` is invalid: {e}') - - self._requires_complex = requires_complex -+ if pynix_out: -+ with (pynix_out / "setup_requires.txt").open("w") as fp: -+ fp.write('\n'.join([str(req) for req in self._requires_complex])) - - return self._requires_complex - -@@ -1074,6 +1082,13 @@ class CoreMetadata: - dependencies_complex[get_normalized_dependency(requirement)] = requirement - - self._dependencies_complex = dict(sorted(dependencies_complex.items())) -+ if pynix_out: -+ with (pynix_out / "setup_requires.txt").open("w") as fp: -+ fp.write('\n'.join([str(req) for req in self._dependencies_complex])) -+ with (pynix_out / "install_requires.txt").open("w") as fp: -+ fp.write('\n'.join([str(req) for req in self._dependencies_complex])) -+ with (pynix_out / "tests_requires.txt").open("w") as fp: -+ fp.write('\n'.join([str(req) for req in self._dependencies_complex])) - - return self._dependencies_complex - diff --git a/src/hatchling/metadata/spec.py b/src/hatchling/metadata/spec.py -index 43a0fa67..2182e3c6 100644 +index 43a0fa67..6f0874a0 100644 --- a/src/hatchling/metadata/spec.py +++ b/src/hatchling/metadata/spec.py -@@ -1,3 +1,12 @@ +@@ -1,3 +1,7 @@ +import json +import os +import sys -+ -+pynix_out = None -+if 'PYNIXIFY_OUT' in os.environ: -+ from pathlib import Path -+ pynix_out = Path(os.environ['PYNIXIFY_OUT']) + DEFAULT_METADATA_VERSION = '2.1' -@@ -147,6 +156,7 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): +@@ -146,7 +150,6 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): + if metadata.core.readme: metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' metadata_file += f'\n{metadata.core.readme}' - -+ +- return metadata_file diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index e20b5ec..91a3931 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -92,7 +92,7 @@ let [ pkgs.python3.pkgs.tomli ]; }; patchedpip = pkgs.python3.pkgs.pip.overrideAttrs - (ps: { patches = [ ./pip_patch_final.diff ]; }); + (ps: { patches = [ ./pip_patch.diff ]; }); pythonWithPackages = pkgs.python3.withPackages (ps: [ patchedSetuptools @@ -125,7 +125,6 @@ in pkgs.stdenv.mkDerivation { if PYNIXIFY=1 python setup.py install; then exit 0 fi - #${patchedpip}/bin/pip --no-cache-dir wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD if ${patchedpip}/bin/pip --no-cache-dir wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD; then exit 0 fi diff --git a/pynixify/data/patchedpip.nix b/pynixify/data/patchedpip.nix deleted file mode 100644 index 9daf709..0000000 --- a/pynixify/data/patchedpip.nix +++ /dev/null @@ -1,24 +0,0 @@ -{ buildPythonPackage, fetchFromGitHub, 
lib }: - -buildPythonPackage rec { - pname = "pip"; - version = "22.2.2"; - format = "other"; - - src = fetchFromGitHub { - owner = "pypa"; - repo = pname; - rev = version; - sha256 = "sha256-SLjmxFUFmvgy8E8kxfc6lxxCRo+GN4L77pqkWkRR8aE="; - name = "${pname}-${version}-source"; - }; - - postPatch = '' - # Remove vendored Windows PE binaries - # Note: These are unused but make the package unreproducible. - find -type f -name '*.exe' -delete - ''; - - patches = [ ./pip_patch.diff ]; - phases = [ "unpackPhase" "patchPhase" ]; -} diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff index c0681c4..9bff8c1 100644 --- a/pynixify/data/pip_patch.diff +++ b/pynixify/data/pip_patch.diff @@ -1,13 +1,10 @@ diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py -index 29907645c..d71b4e603 100644 +index 29907645c..ae0bb944a 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py -@@ -411,14 +411,13 @@ class InstallCommand(RequirementCommand): - reqs_to_build = [ - r +@@ -413,12 +413,11 @@ class InstallCommand(RequirementCommand): for r in requirement_set.requirements.values() -- if should_build_for_install_command(r, check_binary_allowed) -+ if True or should_build_for_install_command(r, check_binary_allowed) + if should_build_for_install_command(r, check_binary_allowed) ] - _, build_failures = build( @@ -20,15 +17,10 @@ index 29907645c..d71b4e603 100644 ) diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py -index 9dd6c82f2..c54f67554 100644 +index 9dd6c82f2..78ff796c5 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py -@@ -148,15 +148,16 @@ class WheelCommand(RequirementCommand): - for req in requirement_set.requirements.values(): - if req.is_wheel: - preparer.save_linked_requirement(req) -- elif should_build_for_wheel_command(req): -+ elif True or should_build_for_wheel_command(req): +@@ -152,11 +152,12 @@ class WheelCommand(RequirementCommand): reqs_to_build.append(req) # build wheels @@ -43,7 +35,7 @@ index 9dd6c82f2..c54f67554 100644 ) for req in build_successes: diff --git a/src/pip/_internal/operations/build/metadata.py b/src/pip/_internal/operations/build/metadata.py -index e2b7b4445..cd144e024 100644 +index e2b7b4445..a184dbc7c 100644 --- a/src/pip/_internal/operations/build/metadata.py +++ b/src/pip/_internal/operations/build/metadata.py @@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory @@ -51,7 +43,7 @@ index e2b7b4445..cd144e024 100644 def generate_metadata( - build_env: BuildEnvironment, backend: Pep517HookCaller, details: str -+ build_env: BuildEnvironment, backend: Pep517HookCaller, details: str, config_settings ++ build_env: BuildEnvironment, backend: Pep517HookCaller, details: str, config_settings ) -> str: """Generate metadata using mechanisms described in PEP 517. @@ -65,7 +57,7 @@ index e2b7b4445..cd144e024 100644 raise MetadataGenerationFailed(package_details=details) from error diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py -index b0d2fc9ea..d44b64ec9 100644 +index b0d2fc9ea..fafa8a8ac 100644 --- a/src/pip/_internal/operations/build/wheel.py +++ b/src/pip/_internal/operations/build/wheel.py @@ -14,6 +14,7 @@ def build_wheel_pep517( @@ -76,25 +68,14 @@ index b0d2fc9ea..d44b64ec9 100644 ) -> Optional[str]: """Build one InstallRequirement using the PEP 517 build process. 
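
For orientation while reading this cleanup commit: the patched backends write setup_requires.txt, install_requires.txt, tests_requires.txt and meta.json into the directory named by PYNIXIFY_OUT, and the Nix builder touches a `failed` marker when parsing did not work. A rough sketch of a reader for that layout (assumed for illustration; the actual consumer inside pynixify may differ):

    # Illustrative only: loading the files the patched backends leave in PYNIXIFY_OUT.
    import json
    from pathlib import Path

    def read_pynixify_output(out_dir: str):
        out = Path(out_dir)
        if (out / "failed").exists():
            return None  # the builder marked this source as unparsable
        requirements = {}
        for target in ("setup", "install", "tests"):
            path = out / f"{target}_requires.txt"
            lines = path.read_text().splitlines() if path.exists() else []
            requirements[target] = [l for l in lines if l and not l.startswith("#")]
        meta_path = out / "meta.json"
        metadata = json.loads(meta_path.read_text()) if meta_path.exists() else {}
        return requirements, metadata
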
-@@ -24,14 +25,16 @@ def build_wheel_pep517( - logger.debug("Destination directory: %s", tempd) - - runner = runner_with_spinner_message( -- f"Building wheel for {name} (pyproject.toml)" -+ f"Emmett is Building wheel for {name} (pyproject.toml)" - ) - with backend.subprocess_runner(runner): +@@ -30,6 +31,7 @@ def build_wheel_pep517( wheel_name = backend.build_wheel( tempd, metadata_directory=metadata_directory, + config_settings=config_settings, ) except Exception: -- logger.error("Failed building wheel for %s", name) -+ logger.error("Emmett Failed building wheel for %s", name) -+ raise - return None - return os.path.join(tempd, wheel_name) + logger.error("Failed building wheel for %s", name) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index a1e376c89..948b092ac 100644 --- a/src/pip/_internal/req/req_install.py diff --git a/pynixify/data/pip_patch_final.diff b/pynixify/data/pip_patch_final.diff deleted file mode 100644 index c0681c4..0000000 --- a/pynixify/data/pip_patch_final.diff +++ /dev/null @@ -1,145 +0,0 @@ -diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py -index 29907645c..d71b4e603 100644 ---- a/src/pip/_internal/commands/install.py -+++ b/src/pip/_internal/commands/install.py -@@ -411,14 +411,13 @@ class InstallCommand(RequirementCommand): - reqs_to_build = [ - r - for r in requirement_set.requirements.values() -- if should_build_for_install_command(r, check_binary_allowed) -+ if True or should_build_for_install_command(r, check_binary_allowed) - ] -- - _, build_failures = build( - reqs_to_build, - wheel_cache=wheel_cache, - verify=True, -- build_options=[], -+ build_options=[options.config_settings], - global_options=[], - ) - -diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py -index 9dd6c82f2..c54f67554 100644 ---- a/src/pip/_internal/commands/wheel.py -+++ b/src/pip/_internal/commands/wheel.py -@@ -148,15 +148,16 @@ class WheelCommand(RequirementCommand): - for req in requirement_set.requirements.values(): - if req.is_wheel: - preparer.save_linked_requirement(req) -- elif should_build_for_wheel_command(req): -+ elif True or should_build_for_wheel_command(req): - reqs_to_build.append(req) - - # build wheels -+ build_options = dict(list((options.config_settings or {}).items()) + list((options.build_options or {}).items())) - build_successes, build_failures = build( - reqs_to_build, - wheel_cache=wheel_cache, - verify=(not options.no_verify), -- build_options=options.build_options or [], -+ build_options=build_options or [], - global_options=options.global_options or [], - ) - for req in build_successes: -diff --git a/src/pip/_internal/operations/build/metadata.py b/src/pip/_internal/operations/build/metadata.py -index e2b7b4445..cd144e024 100644 ---- a/src/pip/_internal/operations/build/metadata.py -+++ b/src/pip/_internal/operations/build/metadata.py -@@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory - - - def generate_metadata( -- build_env: BuildEnvironment, backend: Pep517HookCaller, details: str -+ build_env: BuildEnvironment, backend: Pep517HookCaller, details: str, config_settings - ) -> str: - """Generate metadata using mechanisms described in PEP 517. 
- -@@ -32,7 +32,7 @@ def generate_metadata( - runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") - with backend.subprocess_runner(runner): - try: -- distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) -+ distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir, config_settings) - except InstallationSubprocessError as error: - raise MetadataGenerationFailed(package_details=details) from error - -diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py -index b0d2fc9ea..d44b64ec9 100644 ---- a/src/pip/_internal/operations/build/wheel.py -+++ b/src/pip/_internal/operations/build/wheel.py -@@ -14,6 +14,7 @@ def build_wheel_pep517( - backend: Pep517HookCaller, - metadata_directory: str, - tempd: str, -+ config_settings, - ) -> Optional[str]: - """Build one InstallRequirement using the PEP 517 build process. - -@@ -24,14 +25,16 @@ def build_wheel_pep517( - logger.debug("Destination directory: %s", tempd) - - runner = runner_with_spinner_message( -- f"Building wheel for {name} (pyproject.toml)" -+ f"Emmett is Building wheel for {name} (pyproject.toml)" - ) - with backend.subprocess_runner(runner): - wheel_name = backend.build_wheel( - tempd, - metadata_directory=metadata_directory, -+ config_settings=config_settings, - ) - except Exception: -- logger.error("Failed building wheel for %s", name) -+ logger.error("Emmett Failed building wheel for %s", name) -+ raise - return None - return os.path.join(tempd, wheel_name) -diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py -index a1e376c89..948b092ac 100644 ---- a/src/pip/_internal/req/req_install.py -+++ b/src/pip/_internal/req/req_install.py -@@ -534,6 +534,7 @@ class InstallRequirement: - build_env=self.build_env, - backend=self.pep517_backend, - details=details, -+ config_settings=self.config_settings, - ) - else: - self.metadata_directory = generate_metadata_legacy( -diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py -index 77a17ff0f..ef1009dba 100644 ---- a/src/pip/_internal/wheel_builder.py -+++ b/src/pip/_internal/wheel_builder.py -@@ -257,6 +257,7 @@ def _build_one_inside_env( - backend=req.pep517_backend, - metadata_directory=req.metadata_directory, - tempd=temp_dir.path, -+ config_settings=build_options, - ) - else: - wheel_path = build_wheel_pep517( -@@ -264,6 +265,7 @@ def _build_one_inside_env( - backend=req.pep517_backend, - metadata_directory=req.metadata_directory, - tempd=temp_dir.path, -+ config_settings=build_options, - ) - else: - wheel_path = build_wheel_legacy( -diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py -index 954a4ab05..03299cdbc 100644 ---- a/src/pip/_vendor/pep517/in_process/_in_process.py -+++ b/src/pip/_vendor/pep517/in_process/_in_process.py -@@ -253,11 +253,6 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): - prepare_metadata_for_build_wheel fallback, this - will copy it rather than rebuilding the wheel. 
- """ -- prebuilt_whl = _find_already_built_wheel(metadata_directory) -- if prebuilt_whl: -- shutil.copy2(prebuilt_whl, wheel_directory) -- return os.path.basename(prebuilt_whl) -- - return _build_backend().build_wheel(wheel_directory, config_settings, - metadata_directory) - diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index 59a2915..498dab9 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -122,7 +122,6 @@ async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path 'nix-build', *args_, stdout=asyncio.subprocess.PIPE, # type: ignore stderr=asyncio.subprocess.PIPE) (stdout, stderr) = await proc.communicate() - #print("%s\n%s" % (stdout.decode(), stderr.decode())) status = await proc.wait() if b'all build users are currently in use' in stderr and retries < max_retries: From 632808a44d55d77059ab6a673e6b25e1e6add163 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 09:52:20 -0700 Subject: [PATCH 28/33] blank diff --- pynixify/data/hatchling_patch.diff | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff index bc50843..bb1f3a4 100644 --- a/pynixify/data/hatchling_patch.diff +++ b/pynixify/data/hatchling_patch.diff @@ -86,23 +86,3 @@ index d79c1e2e..c85a837e 100644 + _write_pynixify_files(config_settings, builder.config.dependencies) return os.path.basename(directory) -diff --git a/src/hatchling/metadata/spec.py b/src/hatchling/metadata/spec.py -index 43a0fa67..6f0874a0 100644 ---- a/src/hatchling/metadata/spec.py -+++ b/src/hatchling/metadata/spec.py -@@ -1,3 +1,7 @@ -+import json -+import os -+import sys -+ - DEFAULT_METADATA_VERSION = '2.1' - - -@@ -146,7 +150,6 @@ def construct_metadata_file_2_1(metadata, extra_dependencies=()): - if metadata.core.readme: - metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' - metadata_file += f'\n{metadata.core.readme}' -- - return metadata_file - - From 47ed0606b9e4fc5b4902f21515003e62ac4588b4 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 10:05:33 -0700 Subject: [PATCH 29/33] shrink pip patch --- pynixify/data/pip_patch.diff | 34 ---------------------------------- 1 file changed, 34 deletions(-) diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff index 9bff8c1..f220a5d 100644 --- a/pynixify/data/pip_patch.diff +++ b/pynixify/data/pip_patch.diff @@ -1,21 +1,3 @@ -diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py -index 29907645c..ae0bb944a 100644 ---- a/src/pip/_internal/commands/install.py -+++ b/src/pip/_internal/commands/install.py -@@ -413,12 +413,11 @@ class InstallCommand(RequirementCommand): - for r in requirement_set.requirements.values() - if should_build_for_install_command(r, check_binary_allowed) - ] -- - _, build_failures = build( - reqs_to_build, - wheel_cache=wheel_cache, - verify=True, -- build_options=[], -+ build_options=[options.config_settings], - global_options=[], - ) - diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index 9dd6c82f2..78ff796c5 100644 --- a/src/pip/_internal/commands/wheel.py @@ -108,19 +90,3 @@ index 77a17ff0f..ef1009dba 100644 ) else: wheel_path = build_wheel_legacy( -diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py -index 954a4ab05..03299cdbc 100644 ---- a/src/pip/_vendor/pep517/in_process/_in_process.py -+++ 
b/src/pip/_vendor/pep517/in_process/_in_process.py -@@ -253,11 +253,6 @@ def build_wheel(wheel_directory, config_settings, metadata_directory=None): - prepare_metadata_for_build_wheel fallback, this - will copy it rather than rebuilding the wheel. - """ -- prebuilt_whl = _find_already_built_wheel(metadata_directory) -- if prebuilt_whl: -- shutil.copy2(prebuilt_whl, wheel_directory) -- return os.path.basename(prebuilt_whl) -- - return _build_backend().build_wheel(wheel_directory, config_settings, - metadata_directory) - From fe0b94c7a52b18a455f048a290764d5ba4a4bb27 Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 11:13:12 -0700 Subject: [PATCH 30/33] patching pip is unnecessary --- pynixify/data/parse_setuppy_data.nix | 6 +- pynixify/data/pip_patch.diff | 92 ---------------------------- 2 files changed, 2 insertions(+), 96 deletions(-) delete mode 100644 pynixify/data/pip_patch.diff diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix index 91a3931..11b798b 100644 --- a/pynixify/data/parse_setuppy_data.nix +++ b/pynixify/data/parse_setuppy_data.nix @@ -91,8 +91,6 @@ let ++ pkgs.lib.optionals (pkgs.python3.pkgs.pythonOlder "3.11") [ pkgs.python3.pkgs.tomli ]; }; - patchedpip = pkgs.python3.pkgs.pip.overrideAttrs - (ps: { patches = [ ./pip_patch.diff ]; }); pythonWithPackages = pkgs.python3.withPackages (ps: [ patchedSetuptools @@ -100,7 +98,7 @@ let hatchling hatchvcs flitscm - patchedpip + pkgs.python3.pkgs.pip ]); cleanSource = src: @@ -125,7 +123,7 @@ in pkgs.stdenv.mkDerivation { if PYNIXIFY=1 python setup.py install; then exit 0 fi - if ${patchedpip}/bin/pip --no-cache-dir wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD; then + if ${pkgs.python3.pkgs.pip}/bin/pip --no-cache-dir wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD; then exit 0 fi # Indicate that fetching the result failed, but let the build succeed diff --git a/pynixify/data/pip_patch.diff b/pynixify/data/pip_patch.diff deleted file mode 100644 index f220a5d..0000000 --- a/pynixify/data/pip_patch.diff +++ /dev/null @@ -1,92 +0,0 @@ -diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py -index 9dd6c82f2..78ff796c5 100644 ---- a/src/pip/_internal/commands/wheel.py -+++ b/src/pip/_internal/commands/wheel.py -@@ -152,11 +152,12 @@ class WheelCommand(RequirementCommand): - reqs_to_build.append(req) - - # build wheels -+ build_options = dict(list((options.config_settings or {}).items()) + list((options.build_options or {}).items())) - build_successes, build_failures = build( - reqs_to_build, - wheel_cache=wheel_cache, - verify=(not options.no_verify), -- build_options=options.build_options or [], -+ build_options=build_options or [], - global_options=options.global_options or [], - ) - for req in build_successes: -diff --git a/src/pip/_internal/operations/build/metadata.py b/src/pip/_internal/operations/build/metadata.py -index e2b7b4445..a184dbc7c 100644 ---- a/src/pip/_internal/operations/build/metadata.py -+++ b/src/pip/_internal/operations/build/metadata.py -@@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory - - - def generate_metadata( -- build_env: BuildEnvironment, backend: Pep517HookCaller, details: str -+ build_env: BuildEnvironment, backend: Pep517HookCaller, details: str, config_settings - ) -> str: - """Generate metadata using mechanisms described in PEP 517. 
- -@@ -32,7 +32,7 @@ def generate_metadata( - runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") - with backend.subprocess_runner(runner): - try: -- distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) -+ distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir, config_settings) - except InstallationSubprocessError as error: - raise MetadataGenerationFailed(package_details=details) from error - -diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py -index b0d2fc9ea..fafa8a8ac 100644 ---- a/src/pip/_internal/operations/build/wheel.py -+++ b/src/pip/_internal/operations/build/wheel.py -@@ -14,6 +14,7 @@ def build_wheel_pep517( - backend: Pep517HookCaller, - metadata_directory: str, - tempd: str, -+ config_settings, - ) -> Optional[str]: - """Build one InstallRequirement using the PEP 517 build process. - -@@ -30,6 +31,7 @@ def build_wheel_pep517( - wheel_name = backend.build_wheel( - tempd, - metadata_directory=metadata_directory, -+ config_settings=config_settings, - ) - except Exception: - logger.error("Failed building wheel for %s", name) -diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py -index a1e376c89..948b092ac 100644 ---- a/src/pip/_internal/req/req_install.py -+++ b/src/pip/_internal/req/req_install.py -@@ -534,6 +534,7 @@ class InstallRequirement: - build_env=self.build_env, - backend=self.pep517_backend, - details=details, -+ config_settings=self.config_settings, - ) - else: - self.metadata_directory = generate_metadata_legacy( -diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py -index 77a17ff0f..ef1009dba 100644 ---- a/src/pip/_internal/wheel_builder.py -+++ b/src/pip/_internal/wheel_builder.py -@@ -257,6 +257,7 @@ def _build_one_inside_env( - backend=req.pep517_backend, - metadata_directory=req.metadata_directory, - tempd=temp_dir.path, -+ config_settings=build_options, - ) - else: - wheel_path = build_wheel_pep517( -@@ -264,6 +265,7 @@ def _build_one_inside_env( - backend=req.pep517_backend, - metadata_directory=req.metadata_directory, - tempd=temp_dir.path, -+ config_settings=build_options, - ) - else: - wheel_path = build_wheel_legacy( From 3a33ef81f955840118fe7aa7fcc7615c2990132e Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 12:10:34 -0700 Subject: [PATCH 31/33] allow specifying python version at cli apply black and isort --- pynixify/command.py | 14 +++++++++++++- pynixify/expression_builder.py | 8 +++++--- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/pynixify/command.py b/pynixify/command.py index 8381b57..d033378 100644 --- a/pynixify/command.py +++ b/pynixify/command.py @@ -152,6 +152,15 @@ def main(): "the number of CPUs in the system." ), ) + parser.add_argument( + "-p", + "--py", + default="python3", + help=( + "Name of the nixpkgs python interpreter package to install in the " + "generated shell.nix. Defaults to 'python3'." 
+ ), + ) args = parser.parse_args() asyncio.run( @@ -168,6 +177,7 @@ def main(): else [], max_jobs=args.max_jobs, generate_only_overlay=args.overlay_only, + interpreter=args.py, ) ) @@ -183,7 +193,9 @@ async def _main_async( load_all_test_requirements: bool, max_jobs: Optional[int], generate_only_overlay: bool, + interpreter: str, ): + if nixpkgs is not None: pynixify.nixpkgs_sources.NIXPKGS_URL = nixpkgs @@ -278,7 +290,7 @@ async def write_package_expression(package: PyPIPackage): packages.append(p) with (base_path / "shell.nix").open("w") as fp: - expr = build_shell_nix_expression(packages) + expr = build_shell_nix_expression(packages, interpreter) fp.write(await nixfmt(expr)) diff --git a/pynixify/expression_builder.py b/pynixify/expression_builder.py index 3b4e33e..c03c6f9 100644 --- a/pynixify/expression_builder.py +++ b/pynixify/expression_builder.py @@ -128,7 +128,7 @@ shell_nix_template = Template( """${DISCLAIMER} - { python ? "python3" }: + { python ? "${interpreter}" }: let pkgs = import ./nixpkgs.nix {}; pythonPkg = builtins.getAttr python pkgs; @@ -215,8 +215,10 @@ def build_overlayed_nixpkgs( return overlayed_nixpkgs_template.render(DISCLAIMER=DISCLAIMER, **locals()) -def build_shell_nix_expression(packages: List[Package]) -> str: - return shell_nix_template.render(DISCLAIMER=DISCLAIMER, packages=packages) +def build_shell_nix_expression(packages: List[Package], interpreter: str) -> str: + return shell_nix_template.render( + DISCLAIMER=DISCLAIMER, packages=packages, interpreter=interpreter + ) async def nixfmt(expr: str) -> str: From 4892241b9025f7265d48e956aefc1fdcb9f7edaa Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 13:03:34 -0700 Subject: [PATCH 32/33] fmt --- pynixify/nixpkgs_sources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index 1bf034c..99369fa 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -135,7 +135,7 @@ async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path f"warning: All build users are currently in use. " f"Retrying in {2**retries} seconds\n" ) - await asyncio.sleep(2**retries) + await asyncio.sleep(2**retries) # noqa return await run_nix_build(*args, retries=retries + 1, max_retries=max_retries) elif retries >= max_retries: sys.stderr.write(f"error: Giving up after {max_retries} failed retries\n") From ea7482827744625532834919c7ecdf1b0112265f Mon Sep 17 00:00:00 2001 From: Emmett Butler Date: Fri, 19 May 2023 13:06:13 -0700 Subject: [PATCH 33/33] black --- pynixify/command.py | 1 - pynixify/nixpkgs_sources.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pynixify/command.py b/pynixify/command.py index d033378..b2e0c9d 100644 --- a/pynixify/command.py +++ b/pynixify/command.py @@ -195,7 +195,6 @@ async def _main_async( generate_only_overlay: bool, interpreter: str, ): - if nixpkgs is not None: pynixify.nixpkgs_sources.NIXPKGS_URL = nixpkgs diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py index 2553c76..99369fa 100644 --- a/pynixify/nixpkgs_sources.py +++ b/pynixify/nixpkgs_sources.py @@ -135,7 +135,7 @@ async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path f"warning: All build users are currently in use. 
" f"Retrying in {2**retries} seconds\n" ) - await asyncio.sleep(2 ** retries) # noqa + await asyncio.sleep(2**retries) # noqa return await run_nix_build(*args, retries=retries + 1, max_retries=max_retries) elif retries >= max_retries: sys.stderr.write(f"error: Giving up after {max_retries} failed retries\n")