From 718610f8a5f94de623eb76aea0780a61b925331b Mon Sep 17 00:00:00 2001 From: thesayyn Date: Fri, 5 Sep 2025 14:31:01 -0700 Subject: [PATCH 01/19] refactor: implement the new bzlmod api --- MODULE.bazel | 116 +++++-- apt.lock.json | 409 +++++++++++++++++++++++ apt/extensions.bzl | 279 ++++++++++------ apt/private/apt_deb_repository.bzl | 50 +-- apt/private/apt_dep_resolver.bzl | 1 + apt/private/deb_import.bzl | 20 +- apt/private/deb_resolve.bzl | 162 --------- apt/private/deb_translate_lock.bzl | 240 ------------- apt/private/lockfile.bzl | 112 ++++--- apt/private/translate_dependency_set.bzl | 145 ++++++++ apt/private/util.bzl | 4 +- 11 files changed, 917 insertions(+), 621 deletions(-) create mode 100755 apt.lock.json delete mode 100644 apt/private/deb_resolve.bzl delete mode 100644 apt/private/deb_translate_lock.bzl create mode 100644 apt/private/translate_dependency_set.bzl diff --git a/MODULE.bazel b/MODULE.bazel index 76a9ba4c..db940997 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -15,13 +15,6 @@ bazel_dep(name = "rules_shell", version = "0.4.1") bazel_lib_toolchains = use_extension("@aspect_bazel_lib//lib:extensions.bzl", "toolchains") use_repo(bazel_lib_toolchains, "zstd_toolchains") use_repo(bazel_lib_toolchains, "bsd_tar_toolchains") -use_repo(bazel_lib_toolchains, "yq_darwin_amd64") -use_repo(bazel_lib_toolchains, "yq_darwin_arm64") -use_repo(bazel_lib_toolchains, "yq_linux_amd64") -use_repo(bazel_lib_toolchains, "yq_linux_arm64") -use_repo(bazel_lib_toolchains, "yq_linux_ppc64le") -use_repo(bazel_lib_toolchains, "yq_linux_s390x") -use_repo(bazel_lib_toolchains, "yq_windows_amd64") # Dev dependencies bazel_dep(name = "gazelle", version = "0.34.0", dev_dependency = True, repo_name = "bazel_gazelle") @@ -58,39 +51,90 @@ apt = use_extension( "apt", dev_dependency = True, ) -apt.install( - name = "bullseye", - lock = "//examples/debian_snapshot:bullseye.lock.json", - manifest = "//examples/debian_snapshot:bullseye.yaml", -) -apt.install( - name = 
"bullseye_nolock", - manifest = "//examples/debian_snapshot:bullseye.yaml", - nolock = True, +apt.sources_list( + architectures = ["amd64"], + components = ["main"], + suites = [ + "noble", + "noble-security", + "noble-updates", + ], + types = ["deb"], + uris = [ + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", + "mirror+https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", + ], + # TODO: signed_by ) -apt.install( - name = "noble", - lock = "//examples/ubuntu_snapshot:noble.lock.json", - manifest = "//examples/ubuntu_snapshot:noble.yaml", +apt.sources_list( + architectures = ["amd64"], + components = ["main"], + suites = ["cloud-sdk"], + types = ["deb"], + uris = ["https://packages.cloud.google.com/apt"], ) -apt.install( - name = "resolution_test", - manifest = "apt/tests/resolution/security.yaml", - nolock = True, +apt.sources_list( + architectures = ["amd64"], + components = ["main"], + suites = ["cloud-sdk"], + types = ["deb"], + uris = ["https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/"], ) -apt.install( - name = "resolution_test_empty_lock", - lock = "//apt/tests/resolution:empty.lock.json", - manifest = "apt/tests/resolution/security.yaml", +apt.sources_list( + architectures = [ + "amd64", + "arm64", + "i386", + ], + components = ["main"], + suites = [ + "bookworm", + "bookworm-updates", + ], + types = ["deb"], + uris = ["https://snapshot.debian.org/archive/debian/20240210T223313Z"], ) -apt.install( - name = "arch_all_test", - manifest = "apt/tests/resolution/arch_all.yaml", - nolock = True, +apt.sources_list( + architectures = [ + "amd64", + "arm64", + "i386", + ], + components = ["main"], + suites = ["bookworm-security"], + types = ["deb"], + uris = ["https://snapshot.debian.org/archive/debian-security/20240210T223313Z"], ) apt.install( - name = "clang", - manifest = "apt/tests/resolution/clang.yaml", - nolock = True, + # dependency_set isolates these installs into their own scope. 
+ # installing onto default dependency_set only allowed in the + # root module + dependency_set = "bookworm", + packages = [ + "base-files", + "coreutils:arm64", + "google-cloud-cli:amd64", + "libncurses6", + "libstdc++6:i386", + "ncurses-base", + "tzdata", + ], + suites = [ + "bookworm", + "bookworm-updates", + "bookworm-security", + "cloud-sdk", + ], ) -use_repo(apt, "arch_all_test", "arch_all_test_resolve", "bullseye", "bullseye_nolock", "clang", "noble", "resolution_test", "resolution_test_empty_lock_resolve", "resolution_test_resolve") +use_repo(apt, "bookworm") + +# TODO: support this style of source_list +# +# apt.sources_list( +# sources = [ +# "deb [arch=amd64,arm64] https://snapshot.ubuntu.com/ubuntu/20240301T030400Z noble main", +# "deb [arch=amd64,arm64] https://snapshot.ubuntu.com/ubuntu/20240301T030400Z noble-security main", +# "deb [arch=amd64,arm64] https://snapshot.ubuntu.com/ubuntu/20240301T030400Z noble-updates main", +# ], +# ) +# diff --git a/apt.lock.json b/apt.lock.json new file mode 100755 index 00000000..08a26a94 --- /dev/null +++ b/apt.lock.json @@ -0,0 +1,409 @@ +{ + "dependency_sets": { + "bookworm": { + "sets": { + "amd64": { + "/cloud-sdk/google-cloud-cli:amd64": "537.0.0-0", + "/noble/base-files:amd64": "13ubuntu7", + "/noble/libncurses6:amd64": "6.4+20240113-1ubuntu1", + "/noble/ncurses-base:all": "6.4+20240113-1ubuntu1", + "/noble/tzdata:all": "2024a-1ubuntu1" + }, + "arm64": { + "/bookworm/coreutils:arm64": "9.1-1" + }, + "i386": { + "/bookworm/libstdc++6:i386": "12.2.0-14" + } + } + } + }, + "packages": { + "/bookworm-security/libc6:arm64=2.36-9+deb12u4": { + "architecture": "arm64", + "depends_on": [], + "filename": "pool/updates/main/g/glibc/libc6_2.36-9+deb12u4_arm64.deb", + "name": "libc6", + "sha256": "71302ee1bf4374aa4df7bcb5f58155e56a03c282f4076cdd07cf5bdfbca21fab", + "size": 2321668, + "suite": "bookworm-security", + "version": "2.36-9+deb12u4" + }, + "/bookworm-security/libc6:i386=2.36-9+deb12u4": { + "architecture": 
"i386", + "depends_on": [], + "filename": "pool/updates/main/g/glibc/libc6_2.36-9+deb12u4_i386.deb", + "name": "libc6", + "sha256": "02a6f5c6548577cc77a25cc3fe1bdc85cebe553603f647a0315939b3fed99394", + "size": 2625992, + "suite": "bookworm-security", + "version": "2.36-9+deb12u4" + }, + "/bookworm/coreutils:arm64=9.1-1": { + "architecture": "arm64", + "depends_on": [ + "/bookworm/libselinux1:arm64=3.4-1+b6", + "/bookworm/libpcre2-8-0:arm64=10.42-1", + "/bookworm-security/libc6:arm64=2.36-9+deb12u4", + "/bookworm/libgcc-s1:arm64=12.2.0-14", + "/bookworm/gcc-12-base:arm64=12.2.0-14", + "/bookworm/libgmp10:arm64=2:6.2.1+dfsg1-1.1", + "/bookworm/libattr1:arm64=1:2.5.1-4", + "/bookworm/libacl1:arm64=2.3.1-3" + ], + "filename": "pool/main/c/coreutils/coreutils_9.1-1_arm64.deb", + "name": "coreutils", + "sha256": "ec8f090a14c684879dce251254d8d9ed0876d4480f750d5807ef04e5435e1c4d", + "size": 2815252, + "suite": "bookworm", + "version": "9.1-1" + }, + "/bookworm/gcc-12-base:arm64=12.2.0-14": { + "architecture": "arm64", + "depends_on": [], + "filename": "pool/main/g/gcc-12/gcc-12-base_12.2.0-14_arm64.deb", + "name": "gcc-12-base", + "sha256": "e1f2fb7212546c0e360af8df26303608f7b09e123ac9c96e15872d1ec1ce3275", + "size": 37504, + "suite": "bookworm", + "version": "12.2.0-14" + }, + "/bookworm/gcc-12-base:i386=12.2.0-14": { + "architecture": "i386", + "depends_on": [], + "filename": "pool/main/g/gcc-12/gcc-12-base_12.2.0-14_i386.deb", + "name": "gcc-12-base", + "sha256": "a68aa1dfa176765f7b36a570a8298ab18ab5b82272826fe2fdd25158315d0026", + "size": 37488, + "suite": "bookworm", + "version": "12.2.0-14" + }, + "/bookworm/libacl1:arm64=2.3.1-3": { + "architecture": "arm64", + "depends_on": [], + "filename": "pool/main/a/acl/libacl1_2.3.1-3_arm64.deb", + "name": "libacl1", + "sha256": "2b0eef11a2e271e7355adaf1f6cbf8d2e83835ae1b6cf15165d59b8289c08342", + "size": 30768, + "suite": "bookworm", + "version": "2.3.1-3" + }, + "/bookworm/libattr1:arm64=1:2.5.1-4": { + "architecture": 
"arm64", + "depends_on": [], + "filename": "pool/main/a/attr/libattr1_2.5.1-4_arm64.deb", + "name": "libattr1", + "sha256": "481e1c3fcad6773ba1c9d7f1de59e4fc80d95326c9b20192e13b7111013c932a", + "size": 21888, + "suite": "bookworm", + "version": "1:2.5.1-4" + }, + "/bookworm/libgcc-s1:arm64=12.2.0-14": { + "architecture": "arm64", + "depends_on": [], + "filename": "pool/main/g/gcc-12/libgcc-s1_12.2.0-14_arm64.deb", + "name": "libgcc-s1", + "sha256": "6fce2268d8f3152a4e84634f5a24133d3c62903b2f9b11b9c59235cbbc1b23a8", + "size": 34836, + "suite": "bookworm", + "version": "12.2.0-14" + }, + "/bookworm/libgcc-s1:i386=12.2.0-14": { + "architecture": "i386", + "depends_on": [], + "filename": "pool/main/g/gcc-12/libgcc-s1_12.2.0-14_i386.deb", + "name": "libgcc-s1", + "sha256": "c22f4115a4b66b56256a6138457fb7630a73bbaa1fc63639060527f7366f0cb1", + "size": 59544, + "suite": "bookworm", + "version": "12.2.0-14" + }, + "/bookworm/libgmp10:arm64=2:6.2.1+dfsg1-1.1": { + "architecture": "arm64", + "depends_on": [], + "filename": "pool/main/g/gmp/libgmp10_6.2.1+dfsg1-1.1_arm64.deb", + "name": "libgmp10", + "sha256": "9906387c1dd806518c915bd8616d072c741061d7fa26b222e52763456060b31a", + "size": 537920, + "suite": "bookworm", + "version": "2:6.2.1+dfsg1-1.1" + }, + "/bookworm/libpcre2-8-0:arm64=10.42-1": { + "architecture": "arm64", + "depends_on": [], + "filename": "pool/main/p/pcre2/libpcre2-8-0_10.42-1_arm64.deb", + "name": "libpcre2-8-0", + "sha256": "b2448d0a8a3db7fbeac231e7ef93811346c1fb5f96ccf6f631701d8a4eb39206", + "size": 230728, + "suite": "bookworm", + "version": "10.42-1" + }, + "/bookworm/libselinux1:arm64=3.4-1+b6": { + "architecture": "arm64", + "depends_on": [], + "filename": "pool/main/libs/libselinux/libselinux1_3.4-1+b6_arm64.deb", + "name": "libselinux1", + "sha256": "29201edf23ebae40844d6c289afdb9bba52f927d55096ed1b1cd37e040135edc", + "size": 68800, + "suite": "bookworm", + "version": "3.4-1+b6" + }, + "/bookworm/libstdc++6:i386=12.2.0-14": { + "architecture": 
"i386", + "depends_on": [ + "/bookworm/libgcc-s1:i386=12.2.0-14", + "/bookworm-security/libc6:i386=2.36-9+deb12u4", + "/bookworm/gcc-12-base:i386=12.2.0-14" + ], + "filename": "pool/main/g/gcc-12/libstdc++6_12.2.0-14_i386.deb", + "name": "libstdc++6", + "sha256": "65c7019980b204ac150faae399eac921574e0ef273f83c0485a21daae1c57551", + "size": 661288, + "suite": "bookworm", + "version": "12.2.0-14" + }, + "/cloud-sdk/google-cloud-cli:amd64=537.0.0-0": { + "architecture": "amd64", + "depends_on": [], + "filename": "pool/cloud-sdk/google-cloud-cli_537.0.0-0_amd64_536d1a83f33cd095096cd748bd9024e3.deb", + "name": "google-cloud-cli", + "sha256": "efdc2b82ea94f0dbe0944274743179db7008a08afb66e2ee14f72db60a25d73b", + "size": 124102410, + "suite": "cloud-sdk", + "version": "537.0.0-0" + }, + "/noble/base-files:amd64=13ubuntu7": { + "architecture": "amd64", + "depends_on": [ + "/noble/libcrypt1:amd64=1:4.4.36-4", + "/noble/libc6:amd64=2.39-0ubuntu2", + "/noble/libgcc-s1:amd64=14-20240221-2.1ubuntu1", + "/noble/gcc-14-base:amd64=14-20240221-2.1ubuntu1", + "/noble/mawk:amd64=1.3.4.20240123-1" + ], + "filename": "pool/main/b/base-files/base-files_13ubuntu7_amd64.deb", + "name": "base-files", + "sha256": "d2fe9680dea0b8f6d6d675eceaf2bf00da8d1b3da1604f0e3b47ee26866feadd", + "size": 74224, + "suite": "noble", + "version": "13ubuntu7" + }, + "/noble/debconf:all=1.5.86": { + "architecture": "all", + "depends_on": [], + "filename": "pool/main/d/debconf/debconf_1.5.86_all.deb", + "name": "debconf", + "sha256": "725da1e474ff8ce916e7954ed262273a02e4f74ee1f6cd342b19ff283617d91b", + "size": 123988, + "suite": "noble", + "version": "1.5.86" + }, + "/noble/gcc-14-base:amd64=14-20240221-2.1ubuntu1": { + "architecture": "amd64", + "depends_on": [], + "filename": "pool/main/g/gcc-14/gcc-14-base_14-20240221-2.1ubuntu1_amd64.deb", + "name": "gcc-14-base", + "sha256": "2e1ae2c2ccf2d1b6d09c657af1492a8b7a348e899f9ad25d4925b170571a0887", + "size": 46862, + "suite": "noble", + "version": 
"14-20240221-2.1ubuntu1" + }, + "/noble/libc6:amd64=2.39-0ubuntu2": { + "architecture": "amd64", + "depends_on": [], + "filename": "pool/main/g/glibc/libc6_2.39-0ubuntu2_amd64.deb", + "name": "libc6", + "sha256": "4bd128b75db38b7e9147c0333908e2c7fbc41631f284360f95118fe1c6c162f3", + "size": 3262066, + "suite": "noble", + "version": "2.39-0ubuntu2" + }, + "/noble/libcrypt1:amd64=1:4.4.36-4": { + "architecture": "amd64", + "depends_on": [], + "filename": "pool/main/libx/libxcrypt/libcrypt1_4.4.36-4_amd64.deb", + "name": "libcrypt1", + "sha256": "51ad101808e6a9d6b9c21bcf0b6f27c8ab34f6af53184fc6305f96770cc3a8d9", + "size": 95284, + "suite": "noble", + "version": "1:4.4.36-4" + }, + "/noble/libgcc-s1:amd64=14-20240221-2.1ubuntu1": { + "architecture": "amd64", + "depends_on": [], + "filename": "pool/main/g/gcc-14/libgcc-s1_14-20240221-2.1ubuntu1_amd64.deb", + "name": "libgcc-s1", + "sha256": "ffc195df7e897aaec468e8f62b08660cc711c7449113102491fdd6baa6901f6d", + "size": 78084, + "suite": "noble", + "version": "14-20240221-2.1ubuntu1" + }, + "/noble/libncurses6:amd64=6.4+20240113-1ubuntu1": { + "architecture": "amd64", + "depends_on": [ + "/noble/libc6:amd64=2.39-0ubuntu2", + "/noble/libgcc-s1:amd64=14-20240221-2.1ubuntu1", + "/noble/gcc-14-base:amd64=14-20240221-2.1ubuntu1", + "/noble/libtinfo6:amd64=6.4+20240113-1ubuntu1" + ], + "filename": "pool/main/n/ncurses/libncurses6_6.4+20240113-1ubuntu1_amd64.deb", + "name": "libncurses6", + "sha256": "b5669082396328597c62e51caeb2ee258015e92bd87f6670acee9f396a30b978", + "size": 111894, + "suite": "noble", + "version": "6.4+20240113-1ubuntu1" + }, + "/noble/libtinfo6:amd64=6.4+20240113-1ubuntu1": { + "architecture": "amd64", + "depends_on": [], + "filename": "pool/main/n/ncurses/libtinfo6_6.4+20240113-1ubuntu1_amd64.deb", + "name": "libtinfo6", + "sha256": "80378382ba4f672f8d5579cb953fc43edfe246eb96ee4d453af1ac3d7768c8aa", + "size": 108264, + "suite": "noble", + "version": "6.4+20240113-1ubuntu1" + }, + 
"/noble/mawk:amd64=1.3.4.20240123-1": { + "architecture": "amd64", + "depends_on": [], + "filename": "pool/main/m/mawk/mawk_1.3.4.20240123-1_amd64.deb", + "name": "mawk", + "sha256": "53512ca310cc01f4a462753a29dd7a1180f2e584941f9d8477c77802b1cff1f8", + "size": 127350, + "suite": "noble", + "version": "1.3.4.20240123-1" + }, + "/noble/ncurses-base:all=6.4+20240113-1ubuntu1": { + "architecture": "all", + "depends_on": [], + "filename": "pool/main/n/ncurses/ncurses-base_6.4+20240113-1ubuntu1_all.deb", + "name": "ncurses-base", + "sha256": "1ea2be0cadf1299e5ed2967269c01e1935ddf5a733a496893b4334994aea2755", + "size": 25426, + "suite": "noble", + "version": "6.4+20240113-1ubuntu1" + }, + "/noble/tzdata:all=2024a-1ubuntu1": { + "architecture": "all", + "depends_on": [ + "/noble/debconf:all=1.5.86" + ], + "filename": "pool/main/t/tzdata/tzdata_2024a-1ubuntu1_all.deb", + "name": "tzdata", + "sha256": "26cdb43f541d5b7d089d2c1cf7d50b4c5e630c79a6d4d6ce34e20dcace4f0d29", + "size": 273120, + "suite": "noble", + "version": "2024a-1ubuntu1" + } + }, + "sources": { + "bookworm": { + "architectures": [ + "amd64", + "arm64", + "i386" + ], + "components": [ + "main" + ], + "types": [ + "deb" + ], + "uris": [ + "https://snapshot.debian.org/archive/debian/20240210T223313Z" + ] + }, + "bookworm-security": { + "architectures": [ + "amd64", + "arm64", + "i386" + ], + "components": [ + "main" + ], + "types": [ + "deb" + ], + "uris": [ + "https://snapshot.debian.org/archive/debian-security/20240210T223313Z" + ] + }, + "bookworm-updates": { + "architectures": [ + "amd64", + "arm64", + "i386" + ], + "components": [ + "main" + ], + "types": [ + "deb" + ], + "uris": [ + "https://snapshot.debian.org/archive/debian/20240210T223313Z" + ] + }, + "cloud-sdk": { + "architectures": [ + "amd64" + ], + "components": [ + "main" + ], + "types": [ + "deb" + ], + "uris": [ + "https://packages.cloud.google.com/apt" + ] + }, + "noble": { + "architectures": [ + "amd64" + ], + "components": [ + "main" + ], + 
"types": [ + "deb" + ], + "uris": [ + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z" + ] + }, + "noble-security": { + "architectures": [ + "amd64" + ], + "components": [ + "main" + ], + "types": [ + "deb" + ], + "uris": [ + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z" + ] + }, + "noble-updates": { + "architectures": [ + "amd64" + ], + "components": [ + "main" + ], + "types": [ + "deb" + ], + "uris": [ + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z" + ] + } + }, + "version": 2 +} \ No newline at end of file diff --git a/apt/extensions.bzl b/apt/extensions.bzl index 7d91e599..d0ac0fcd 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -1,10 +1,31 @@ "apt extensions" -load("@bazel_features//:features.bzl", "bazel_features") +load("//apt/private:apt_deb_repository.bzl", "deb_repository") +load("//apt/private:apt_dep_resolver.bzl", "dependency_resolver") load("//apt/private:deb_import.bzl", "deb_import") -load("//apt/private:deb_resolve.bzl", "deb_resolve", "internal_resolve") -load("//apt/private:deb_translate_lock.bzl", "deb_translate_lock") load("//apt/private:lockfile.bzl", "lockfile") +load("//apt/private:translate_dependency_set.bzl", "translate_dependency_set") +load("//apt/private:util.bzl", "util") +load("//apt/private:version_constraint.bzl", "version_constraint") + +# https://wiki.debian.org/SupportedArchitectures +ALL_SUPPORTED_ARCHES = ["armel", "armhf", "arm64", "i386", "amd64", "mips64el", "ppc64el", "x390x"] + +def _parse_source(src): + parts = src.split(" ") + kind = parts.pop(0) + if parts[0].startswith("["): + # skip arch for now. 
+ arch = parts.pop(0) + url = parts.pop(0) + dist = parts.pop(0) + components = parts + return struct( + kind = kind, + url = url, + dist = dist, + components = components, + ) def _distroless_extension(module_ctx): root_direct_deps = [] @@ -12,67 +33,123 @@ def _distroless_extension(module_ctx): reproducible = False for mod in module_ctx.modules: - for install in mod.tags.install: - lockf = None - if not install.lock: - lockf = internal_resolve( - module_ctx, - "yq", - install.manifest, - install.resolve_transitive, + deb_repo = deb_repository.new(module_ctx) + resolver = dependency_resolver.new(deb_repo) + lockf = lockfile.empty(module_ctx) + + for sl in mod.tags.sources_list: + uris = [uri.removeprefix("mirror+") for uri in sl.uris] + architectures = sl.architectures + + for suite in sl.suites: + lockf.add_source( + suite, + uris = uris, + types = sl.types, + components = sl.components, + architectures = architectures, ) - - if not install.nolock: - # buildifier: disable=print - print("\nNo lockfile was given, please run `bazel run @%s//:lock` to create the lockfile." 
% install.name) - else: - lockf = lockfile.from_json(module_ctx, module_ctx.read(install.lock)) - reproducible = True - - for (package) in lockf.packages(): - package_key = lockfile.make_package_key( - package["name"], - package["version"], - package["arch"], + deb_repo.add_source( + (uris, suite, sl.components, architectures), ) - deb_import( - name = "%s_%s" % (install.name, package_key), - urls = package["urls"], - sha256 = package["sha256"], - mergedusr = install.mergedusr, - ) + deb_repo.fetch_and_parse() - deb_resolve( - name = install.name + "_resolve", - manifest = install.manifest, - resolve_transitive = install.resolve_transitive, - ) + sources = lockf.sources() + dependency_sets = lockf.dependency_sets() + for install in mod.tags.install: + dependency_set = dependency_sets.setdefault(install.dependency_set, { + "sets": {}, + }) + for dep_constraint in install.packages: + constraint = version_constraint.parse_dep(dep_constraint) - deb_translate_lock( - name = install.name, - lock = install.lock, - lock_content = lockf.as_json(), - package_template = install.package_template, - ) + architectures = [] - if mod.is_root: - if module_ctx.is_dev_dependency(install): - root_direct_dev_deps.append(install.name) + if constraint["arch"]: + architectures = constraint["arch"] else: - root_direct_deps.append(install.name) + architectures = ["amd64"] + + for _ in range(len(ALL_SUPPORTED_ARCHES)): + if len(architectures) == 0: + break + arch = architectures.pop() + resolved_count = 0 + + module_ctx.report_progress("Resolving %s:%s" % (dep_constraint, arch)) + (package, dependencies, unmet_dependencies) = resolver.resolve_all( + name = constraint["name"], + version = constraint["version"], + arch = arch, + include_transitive = install.include_transitive, + ) + + if not package: + fail( + "\n\nUnable to locate package `%s` for %s. It may only exist for specific set of architectures. 
\n" % (dep_constraint, arch) + + " 1 - Ensure that the package is available for the specified architecture. \n" + + " 2 - Ensure that the specified version of the package is available for the specified architecture. \n" + + " 3 - Ensure that an apt.source_list added for the specified architecture.", + ) + + if len(unmet_dependencies): + # buildifier: disable=print + util.warning(module_ctx, "Following dependencies could not be resolved for %s: %s" % (constraint["name"], ",".join([up[0] for up in unmet_dependencies]))) + + lockf.add_package(package) + + resolved_count += len(dependencies) + 1 + + for dep in dependencies: + lockf.add_package(dep) + lockf.add_package_dependency(package, dep) + + # Add it to dependency set + arch_set = dependency_set["sets"].setdefault(arch, {}) + arch_set[lockfile.short_package_key(package)] = package["Version"] + + # For cases where architecture for the package is not specified we need + # to first find out which source contains the package. and in order to do + # that we first need to resolve the package for amd64 architecture. + # Once the repository is found, then resolve the package for all the + # architectures the repository supports. + if not constraint["arch"] and arch == "amd64": + source = sources[package["Dist"]] + architectures = [a for a in source["architectures"] if a != "amd64"] + + module_ctx.report_progress("Resolved %d packages for %s" % (resolved_count, arch)) + + # Generate a hub repo for every dependency set + lock_content = lockf.as_json() + for depset_name in dependency_sets.keys(): + translate_dependency_set( + name = depset_name, + depset_name = depset_name, + lock_content = lock_content, + ) - metadata_kwargs = {} - if bazel_features.external_deps.extension_metadata_has_reproducible: - metadata_kwargs["reproducible"] = reproducible + # Generate a repo per package which will be aliased by hub repo. 
+ for (package_key, package) in lockf.packages().items(): + deb_import( + name = util.sanitize(package_key), + urls = [ + uri + "/" + package["filename"] + for uri in sources[package["suite"]]["uris"] + ], + sha256 = package["sha256"], + mergedusr = False, + depends_on = ["@" + util.sanitize(dep_key) for dep_key in package["depends_on"]], + ) - return module_ctx.extension_metadata( - root_module_direct_deps = root_direct_deps, - root_module_direct_dev_deps = root_direct_dev_deps, - **metadata_kwargs - ) + lock_tmp = module_ctx.path("apt.lock.json") + lockf.write(lock_tmp) + lockf_wksp = module_ctx.path(Label("@//:apt.lock.json")) + module_ctx.execute( + ["mv", lock_tmp, lockf_wksp], + ) -_install_doc = """ +_doc = """ Module extension to create Debian repositories. Create Debian repositories with packages "installed" in them and available @@ -83,33 +160,33 @@ Here's an example how to create a Debian repo: ```starlark apt = use_extension("@rules_distroless//apt:extensions.bzl", "apt") +apt.sources_list( + types = ["deb"], + uris = [ + "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", + "mirror+https://snapshot.ubuntu.com/ubuntu/20240301T030400Z" + ], + suites = ["noble", "noble-security", "noble-updates"], + components = ["main"], + architectures = ["all"] +) apt.install( - name = "bullseye", - lock = "//examples/apt:bullseye.lock.json", - manifest = "//examples/apt:bullseye.yaml", + # dependency set isolates these installs into their own scope. + dependency_set = "noble", + target_release = "noble", + packages = [ + "ncurses-base", + "libncurses6", + "tzdata", + "coreutils:arm64", + "libstdc++6:i386" + ] ) -use_repo(apt, "bullseye") ``` -Note that, for the initial setup (or if we want to run without a lock) the -lockfile attribute can be omitted. 
All you need is a YAML -[manifest](/examples/debian_snapshot/bullseye.yaml): -```yaml -version: 1 -sources: - - channel: bullseye main - url: https://snapshot-cloudflare.debian.org/archive/debian/20240210T223313Z - -archs: - - amd64 - -packages: - - perl -``` - -`apt.install` will parse the manifest and will fetch and install the packages -for the given architectures in the Bazel repo `@`. +`apt.install` will install generate a package repository for each package and architecture +combination in the form of `@__`. Each `/` has two targets that match the usual structure of a Debian package: `data` and `control`. @@ -159,46 +236,36 @@ For more infomation, please check https://snapshot.debian.org and/or https://snapshot.ubuntu.com. """ -install = tag_class( +sources_list = tag_class( attrs = { - "name": attr.string( - doc = "Name of the generated repository", - mandatory = True, + "sources": attr.string_list( + # mandatory = True, ), - "manifest": attr.label( - doc = "The file used to generate the lock file", + "types": attr.string_list(), + "uris": attr.string_list(), + "suites": attr.string_list(), + "components": attr.string_list(), + "architectures": attr.string_list(), + }, +) + +install = tag_class( + attrs = { + "packages": attr.string_list( mandatory = True, + allow_empty = False, ), - "lock": attr.label( - doc = "The lock file to use for the index.", - ), - "nolock": attr.bool( - doc = "If you explicitly want to run without a lock, set it " + - "to `True` to avoid the DEBUG messages.", - default = False, - ), - "package_template": attr.label( - doc = "(EXPERIMENTAL!) 
a template file for generated BUILD " + - "files.", - ), - "resolve_transitive": attr.bool( - doc = "Whether dependencies of dependencies should be " + - "resolved and added to the lockfile.", - default = True, - ), - "mergedusr": attr.bool( - doc = "Whether packges should be normalized following mergedusr conventions.\n" + - "Turning this on might fix the following error thrown by docker for ambigious paths: `duplicate of paths are supported.` \n" + - "For more context please see https://salsa.debian.org/md/usrmerge/-/raw/master/debian/README.Debian?ref_type=heads", - default = False, - ), + "dependency_set": attr.string(), + "target_release": attr.string(mandatory = True), + "include_transitive": attr.bool(default = True), }, - doc = _install_doc, ) apt = module_extension( + doc = _doc, implementation = _distroless_extension, tag_classes = { "install": install, + "sources_list": sources_list, }, ) diff --git a/apt/private/apt_deb_repository.bzl b/apt/private/apt_deb_repository.bzl index 95657f59..fb213e84 100644 --- a/apt/private/apt_deb_repository.bzl +++ b/apt/private/apt_deb_repository.bzl @@ -48,7 +48,7 @@ def _fetch_package_index(rctx, urls, dist, comp, arch, integrity): ) decompress_r = None if download.success: - decompress_r = rctx.execute(cmd + [output]) + decompress_r = mctx.execute(cmd + [output]) if decompress_r.return_code == 0: integrity = download.integrity break @@ -70,14 +70,14 @@ def _fetch_package_index(rctx, urls, dist, comp, arch, integrity): attempt_messages.append("""\n*) Failed '{}'\n\n{}""".format(failed_url, reason)) fail(""" -** Tried to download {} different package indices and all failed. +** Tried to download {} different package indices and all failed. 
{} """.format(len(failed_attempts), "\n".join(attempt_messages))) return ("{}/Packages".format(target_triple), url, integrity) -def _parse_repository(state, contents, roots): +def _parse_repository(state, contents, roots, dist): last_key = "" pkg = {} for group in contents.split("\n\n"): @@ -106,6 +106,7 @@ def _parse_repository(state, contents, roots): if "Package" not in pkg: fail("Invalid debian package index format. No 'Package' key found in entry: {}".format(pkg)) pkg["Roots"] = roots + pkg["Dist"] = dist _add_package(state, pkg) last_key = "" pkg = {} @@ -160,29 +161,38 @@ def _package_versions(state, name, arch): def _package(state, name, version, arch): return util.get_dict(state.packages, keys = (arch, name, version)) -def _create(rctx, sources, archs): +def _fetch_and_parse_sources(state): + mctx = state.mctx + for source in state.sources: + (urls, dist, components, architectures) = source + + for arch in architectures: + for comp in components: + # We assume that `url` does not contain a trailing forward slash when passing to + # functions below. If one is present, remove it. Some HTTP servers do not handle + # redirects properly when a path contains "//" + # (ie. https://mymirror.com/ubuntu//dists/noble/stable/... may return a 404 + # on misconfigured HTTP servers) + urls = [url.rstrip("/") for url in urls] + + # TODO: make parallel + mctx.report_progress("Fetching package index: {}/{} for {}".format(dist, comp, arch)) + (output, _, _) = _fetch_package_index(mctx, urls, dist, comp, arch, "") + + mctx.report_progress("Parsing package index: {}/{} for {}".format(dist, comp, arch)) + _parse_repository(state, mctx.read(output), urls, dist) + +def _create(mctx): state = struct( + mctx = mctx, + sources = list(), packages = dict(), virtual_packages = dict(), ) - for arch in archs: - for (urls, dist, comp) in sources: - # We assume that `url` does not contain a trailing forward slash when passing to - # functions below. If one is present, remove it. 
Some HTTP servers do not handle - # redirects properly when a path contains "//" - # (ie. https://mymirror.com/ubuntu//dists/noble/stable/... may return a 404 - # on misconfigured HTTP servers) - urls = [url.rstrip("/") for url in urls] - - rctx.report_progress("Fetching package index: {}/{} for {}".format(dist, comp, arch)) - (output, _, _) = _fetch_package_index(rctx, urls, dist, comp, arch, "") - - # TODO: this is expensive to perform. - rctx.report_progress("Parsing package index: {}/{} for {}".format(dist, comp, arch)) - _parse_repository(state, rctx.read(output), urls) - return struct( + add_source = lambda source: state.sources.append(source), + fetch_and_parse = lambda: _fetch_and_parse_sources(state), package_versions = lambda **kwargs: _package_versions(state, **kwargs), virtual_packages = lambda **kwargs: _virtual_packages(state, **kwargs), package = lambda **kwargs: _package(state, **kwargs), diff --git a/apt/private/apt_dep_resolver.bzl b/apt/private/apt_dep_resolver.bzl index 802a706f..f1d248ff 100644 --- a/apt/private/apt_dep_resolver.bzl +++ b/apt/private/apt_dep_resolver.bzl @@ -97,6 +97,7 @@ def _resolve_all(state, name, version, arch, include_transitive = True): if dependency_group_idx > -1 and dependency_group[dependency_group_idx][0]: continue + # TODO: only resolve in specified suites package = _resolve_package(state, name, version, arch) # If this package is not found and is part of a dependency group, then just skip it. 
diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 2c182103..9ee71252 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -9,9 +9,8 @@ load("@rules_distroless//apt/private:deb_postfix.bzl", "deb_postfix") deb_postfix( name = "data", srcs = glob(["data.tar*"]), - outs = ["layer.tar.gz"], - mergedusr = {}, - + outs = ["content.tar.gz"], + mergedusr = {mergedusr}, visibility = ["//visibility:public"], ) @@ -20,10 +19,21 @@ filegroup( srcs = glob(["control.tar.*"]), visibility = ["//visibility:public"], ) + +filegroup( + name = "{target_name}", + srcs = {depends_on} + [":data"], + visibility = ["//visibility:public"], +) ''' -def deb_import(mergedusr = False, **kwargs): +def deb_import(name, depends_on = [], mergedusr = False, **kwargs): http_archive( - build_file_content = _DEB_IMPORT_BUILD_TMPL.format(mergedusr), + build_file_content = _DEB_IMPORT_BUILD_TMPL.format( + mergedusr = mergedusr, + depends_on = depends_on, + target_name = name, + ), + name = name, **kwargs ) diff --git a/apt/private/deb_resolve.bzl b/apt/private/deb_resolve.bzl deleted file mode 100644 index fb4030b7..00000000 --- a/apt/private/deb_resolve.bzl +++ /dev/null @@ -1,162 +0,0 @@ -"repository rule for resolving and generating lockfile" - -load("@aspect_bazel_lib//lib:repo_utils.bzl", "repo_utils") -load(":apt_deb_repository.bzl", "deb_repository") -load(":apt_dep_resolver.bzl", "dependency_resolver") -load(":lockfile.bzl", "lockfile") -load(":util.bzl", "util") -load(":version_constraint.bzl", "version_constraint") - -def _parse_manifest(rctx, yq_toolchain_prefix, manifest): - is_windows = repo_utils.is_windows(rctx) - host_yq = Label("@{}_{}//:yq{}".format(yq_toolchain_prefix, repo_utils.platform(rctx), ".exe" if is_windows else "")) - - if hasattr(rctx, "watch"): - rctx.watch(manifest) - - yq_args = [ - str(rctx.path(host_yq)), - str(rctx.path(manifest)), - "-o=json", - ] - result = rctx.execute(yq_args) - if result.return_code: - fail("failed 
to parse manifest yq. '{}' exited with {}: \nSTDOUT:\n{}\nSTDERR:\n{}".format(" ".join(yq_args), result.return_code, result.stdout, result.stderr)) - - return json.decode(result.stdout if result.stdout != "null" else "{}") - -# This function is shared between BZLMOD and WORKSPACE implementations. -# INTERNAL: DO NOT DEPEND! -# buildifier: disable=function-docstring-args -def internal_resolve(rctx, yq_toolchain_prefix, manifest, include_transitive): - manifest = _parse_manifest(rctx, yq_toolchain_prefix, manifest) - - if manifest["version"] != 1: - fail("Unsupported manifest version, {}. Please use `version: 1` manifest.".format(manifest["version"])) - - if type(manifest["sources"]) != "list": - fail("`sources` should be an array") - - if type(manifest["archs"]) != "list": - fail("`archs` should be an array") - - if type(manifest["packages"]) != "list": - fail("`packages` should be an array") - - sources = [] - - for src in manifest["sources"]: - distr, components = src["channel"].split(" ", 1) - for comp in components.split(" "): - # TODO: only support urls before 1.0 - if "urls" in src: - urls = src["urls"] - elif "url" in src: - urls = [src["url"]] - else: - fail("Source missing 'url' or 'urls' field") - - sources.append(( - urls, - distr, - comp, - )) - - repository = deb_repository.new(rctx, sources = sources, archs = manifest["archs"]) - resolver = dependency_resolver.new(repository) - lockf = lockfile.empty(rctx) - - resolved_count = 0 - - for arch in manifest["archs"]: - resolved_count = 0 - dep_constraint_set = {} - for dep_constraint in manifest["packages"]: - if dep_constraint in dep_constraint_set: - fail("Duplicate package, {}. 
Please remove it from your manifest".format(dep_constraint)) - dep_constraint_set[dep_constraint] = True - - constraint = version_constraint.parse_depends(dep_constraint).pop() - - rctx.report_progress("Resolving %s for %s" % (dep_constraint, arch)) - (package, dependencies, unmet_dependencies) = resolver.resolve_all( - name = constraint["name"], - version = constraint["version"], - arch = arch, - include_transitive = include_transitive, - ) - - if not package: - fail("Unable to locate package `%s` for architecture: %s. It may only exist for specific set of architectures." % (dep_constraint, arch)) - - if len(unmet_dependencies): - # buildifier: disable=print - util.warning(rctx, "Following dependencies could not be resolved for %s: %s" % (constraint["name"], ",".join([up[0] for up in unmet_dependencies]))) - - lockf.add_package(package, arch) - - resolved_count += len(dependencies) + 1 - - for dep in dependencies: - lockf.add_package(dep, arch) - lockf.add_package_dependency(package, dep, arch) - - rctx.report_progress("Resolved %d packages for %s" % (resolved_count, arch)) - return lockf - -_BUILD_TMPL = """ -load("@rules_shell//shell:sh_binary.bzl", "sh_binary") - -filegroup( - name = "lockfile", - srcs = ["lock.json"], - tags = ["manual"], - visibility = ["//visibility:public"] -) - -sh_binary( - name = "lock", - srcs = ["copy.sh"], - data = ["lock.json"], - tags = ["manual"], - args = ["$(location :lock.json)"], - visibility = ["//visibility:public"] -) -""" - -def _deb_resolve_impl(rctx): - lockf = internal_resolve(rctx, rctx.attr.yq_toolchain_prefix, rctx.attr.manifest, rctx.attr.resolve_transitive) - lockf.write("lock.json") - - lock_filename = rctx.attr.manifest.name.replace(".yaml", ".lock.json") - lock_label = rctx.attr.manifest.relative(lock_filename) - workspace_relative_path = "{}{}".format( - ("%s/" % lock_label.package) if lock_label.package else "", - lock_label.name, - ) - - rctx.file( - "copy.sh", - rctx.read(rctx.attr._copy_sh_tmpl).format( - 
repo_name = util.get_repo_name(rctx.name).replace("_resolve", ""), - lock_label = lock_label, - workspace_relative_path = workspace_relative_path, - ), - executable = True, - ) - - rctx.file("BUILD.bazel", _BUILD_TMPL) - -deb_resolve = repository_rule( - implementation = _deb_resolve_impl, - attrs = { - "manifest": attr.label(), - "resolve_transitive": attr.bool(default = True), - "yq_toolchain_prefix": attr.string(default = "yq"), - "_copy_sh_tmpl": attr.label( - default = "//apt/private:copy.sh.tmpl", - doc = "INTERNAL, DO NOT USE - " + - "private attribute label to prevent repo restart", - ), - }, -) diff --git a/apt/private/deb_translate_lock.bzl b/apt/private/deb_translate_lock.bzl deleted file mode 100644 index 34ce22cc..00000000 --- a/apt/private/deb_translate_lock.bzl +++ /dev/null @@ -1,240 +0,0 @@ -"repository rule for generating a dependency graph from a lockfile." - -load(":lockfile.bzl", "lockfile") -load(":starlark_codegen_utils.bzl", "starlark_codegen_utils") -load(":util.bzl", "util") - -# header template for packages.bzl file -_DEB_IMPORT_HEADER_TMPL = '''\ -"""Generated by rules_distroless. DO NOT EDIT.""" -load("@rules_distroless//apt/private:deb_import.bzl", "deb_import") - -# buildifier: disable=function-docstring -def {}_packages(): -''' - -# deb_import template for packages.bzl file -_DEB_IMPORT_TMPL = '''\ - deb_import( - name = "{name}", - urls = {urls}, - sha256 = "{sha256}", - ) -''' - -_PACKAGE_TEMPLATE = '''\ -"""Generated by rules_distroless. DO NOT EDIT.""" - -alias( - name = "data", - actual = select({data_targets}), - visibility = ["//visibility:public"], -) - -alias( - name = "control", - actual = select({control_targets}), - visibility = ["//visibility:public"], -) - -filegroup( - name = "{target_name}", - srcs = select({deps}) + [":data"], - visibility = ["//visibility:public"], -) -''' - -_ROOT_BUILD_TMPL = """\ -"Generated by rules_distroless. DO NOT EDIT." 
- -load("@rules_distroless//apt:defs.bzl", "dpkg_status") -load("@rules_distroless//distroless:defs.bzl", "flatten") - -exports_files(['packages.bzl']) - -# Map Debian architectures to platform CPUs. -# -# For more info on Debian architectures, see: -# * https://wiki.debian.org/SupportedArchitectures -# * https://wiki.debian.org/ArchitectureSpecificsMemo -# * https://www.debian.org/releases/stable/amd64/ch02s01.en.html#idm186 -# -# For more info on Bazel's platforms CPUs see: -# * https://github.com/bazelbuild/platforms/blob/main/cpu/BUILD -_ARCHITECTURE_MAP = {{ - "amd64": "x86_64", - "arm64": "arm64", - "ppc64el": "ppc64le", - "mips64el": "mips64", - "s390x": "s390x", - "i386": "x86_32", - "armhf": "armv7e-mf", - "all": "all", -}} - -_ARCHITECTURES = {architectures} - -[ - config_setting( - name = os + "_" + arch, - constraint_values = [ - "@platforms//os:" + os, - "@platforms//cpu:" + _ARCHITECTURE_MAP[arch], - ], - ) - for os in ["linux"] - for arch in _ARCHITECTURES -] - - -alias( - name = "lock", - actual = "@{target_name}_resolve//:lock", - visibility = ["//visibility:public"], -) - -# List of installed packages. For now it's private. -_PACKAGES = {packages} - -# Creates /var/lib/dpkg/status with installed package information. -dpkg_status( - name = "dpkg_status", - controls = select({{ - "//:linux_%s" % arch: ["//%s:control" % package for package in packages] - for arch, packages in _PACKAGES.items() - }}) if _PACKAGES else {{}}, - visibility = ["//visibility:public"], -) - -filegroup( - name = "packages", - srcs = select({{ - "//:linux_%s" % arch: ["//%s" % package for package in packages] - for arch, packages in _PACKAGES.items() - }}) if _PACKAGES else {{}}, - visibility = ["//visibility:public"], -) - - -# A filegroup that contains all the packages and the dpkg status file. 
-filegroup( - name = "{target_name}", - srcs = [ - ":dpkg_status", - ":packages", - ], - visibility = ["//visibility:public"], -) - -flatten( - name = "flat", - tars = [ - "{target_name}", - ], - deduplicate = True, - visibility = ["//visibility:public"], -) -""" - -def _deb_translate_lock_impl(rctx): - lock_content = rctx.attr.lock_content - package_template = rctx.read(rctx.attr.package_template) - lockf = lockfile.from_json(rctx, lock_content if lock_content else rctx.read(rctx.attr.lock)) - - package_defs = [] - - if not lock_content: - package_defs = [_DEB_IMPORT_HEADER_TMPL.format(rctx.attr.name)] - - if len(lockf.packages()) < 1: - package_defs.append(" pass") - - # TODO: rework lockfile to include architecure information - architectures = {} - packages = {} - - for (package) in lockf.packages(): - package_key = lockfile.make_package_key( - package["name"], - package["version"], - package["arch"], - ) - - if package["arch"] not in architectures: - architectures[package["arch"]] = [] - - if package["name"] not in architectures[package["arch"]]: - architectures[package["arch"]].append(package["name"]) - - if package["name"] not in packages: - packages[package["name"]] = [] - if package["arch"] not in packages[package["name"]]: - packages[package["name"]].append(package["arch"]) - - if not lock_content: - package_defs.append( - _DEB_IMPORT_TMPL.format( - name = "%s_%s" % (rctx.attr.name, package_key), - package_name = package["name"], - urls = package["urls"], - sha256 = package["sha256"], - ), - ) - - repo_name = "%s%s_%s" % ("@" if lock_content else "", rctx.attr.name, package_key) - - rctx.file( - "%s/%s/BUILD.bazel" % (package["name"], package["arch"]), - package_template.format( - target_name = package["arch"], - data_targets = '"@%s//:data"' % repo_name, - control_targets = '"@%s//:control"' % repo_name, - src = '"@%s//:data"' % repo_name, - deps = starlark_codegen_utils.to_list_attr([ - "//%s/%s" % (dep["name"], package["arch"]) - for dep in 
package["dependencies"] - ]), - urls = package["urls"], - name = package["name"], - arch = package["arch"], - sha256 = package["sha256"], - repo_name = "%s" % repo_name, - ), - ) - - # TODO: rework lockfile to include architecure information and merge these two loops - for package_name, package_archs in packages.items(): - rctx.file( - "%s/BUILD.bazel" % (package_name), - _PACKAGE_TEMPLATE.format( - target_name = package_name, - data_targets = starlark_codegen_utils.to_dict_attr({ - "//:linux_%s" % arch: "//%s/%s:data" % (package_name, arch) - for arch in package_archs - }), - control_targets = starlark_codegen_utils.to_dict_attr({ - "//:linux_%s" % arch: "//%s/%s:control" % (package_name, arch) - for arch in package_archs - }), - deps = starlark_codegen_utils.to_dict_list_attr({ - "//:linux_%s" % arch: ["//%s/%s" % (package_name, arch)] - for arch in package_archs - }), - ), - ) - - rctx.file("packages.bzl", "\n".join(package_defs)) - rctx.file("BUILD.bazel", _ROOT_BUILD_TMPL.format( - target_name = util.get_repo_name(rctx.attr.name), - packages = starlark_codegen_utils.to_dict_list_attr(architectures), - architectures = starlark_codegen_utils.to_list_attr(architectures.keys()), - )) - -deb_translate_lock = repository_rule( - implementation = _deb_translate_lock_impl, - attrs = { - "lock": attr.label(), - "lock_content": attr.string(doc = "INTERNAL: DO NOT USE"), - "package_template": attr.label(default = "//apt/private:package.BUILD.tmpl"), - }, -) diff --git a/apt/private/lockfile.bzl b/apt/private/lockfile.bzl index a351d48e..16e45efb 100644 --- a/apt/private/lockfile.bzl +++ b/apt/private/lockfile.bzl @@ -1,92 +1,102 @@ "lock" -load(":util.bzl", "util") - -def _make_package_key(name, version, arch): - return "%s_%s_%s" % ( - util.sanitize(name), - util.sanitize(version), +def _make_package_key(suite, name, version, arch): + return "/%s/%s:%s=%s" % ( + suite, + name, arch, + version, + ) + +def _short_package_key(package): + return "/%s/%s:%s" % ( + 
package["Dist"], + package["Package"], + package["Architecture"], ) -def _package_key(package, arch): - return _make_package_key(package["Package"], package["Version"], arch) +def _package_key(package): + return _make_package_key(package["Dist"], package["Package"], package["Version"], package["Architecture"]) -def _add_package(lock, package, arch): - k = _package_key(package, arch) - if k in lock.fast_package_lookup: +def _add_package(lock, package): + k = _package_key(package) + if k in lock.packages: return - lock.packages.append({ - "key": k, + lock.packages[k] = { "name": package["Package"], "version": package["Version"], - "urls": [ - "%s/%s" % (root, package["Filename"]) - for root in package["Roots"] - ], + "architecture": package["Architecture"], "sha256": package["SHA256"], - "arch": arch, - "dependencies": [], - }) - lock.fast_package_lookup[k] = len(lock.packages) - 1 + "filename": package["Filename"], + "suite": package["Dist"], + "size": int(package["Size"]), + "depends_on": [], + } -def _add_package_dependency(lock, package, dependency, arch): - k = _package_key(package, arch) - if k not in lock.fast_package_lookup: - fail("Broken state: %s is not in the lockfile." % package["Package"]) - i = lock.fast_package_lookup[k] - lock.packages[i]["dependencies"].append(dict( - key = _package_key(dependency, arch), - name = dependency["Package"], - version = dependency["Version"], - )) +def _add_package_dependency(lock, package, dependency): + k = _package_key(package) + if k not in lock.packages: + fail("illegal state: %s is not in the lockfile." 
% package["Package"]) + sk = _package_key(dependency) + if sk in lock.packages[k]["depends_on"]: + return + lock.packages[k]["depends_on"].append(sk) -def _has_package(lock, name, version, arch): - key = "%s_%s_%s" % (util.sanitize(name), util.sanitize(version), arch) - return key in lock.fast_package_lookup +def _has_package(lock, suite, name, version, arch): + return _make_package_key(suite, name, version, arch) in lock.packages + +def _add_source(lock, suite, types, uris, components, architectures): + lock.sources[suite] = { + "types": types, + "uris": uris, + "components": components, + "architectures": architectures, + } def _create(rctx, lock): return struct( has_package = lambda *args, **kwargs: _has_package(lock, *args, **kwargs), + add_source = lambda *args, **kwargs: _add_source(lock, *args, **kwargs), add_package = lambda *args, **kwargs: _add_package(lock, *args, **kwargs), add_package_dependency = lambda *args, **kwargs: _add_package_dependency(lock, *args, **kwargs), packages = lambda: lock.packages, - write = lambda out: rctx.file(out, json.encode_indent(struct(version = lock.version, packages = lock.packages))), - as_json = lambda: json.encode_indent(struct(version = lock.version, packages = lock.packages)), + sources = lambda: lock.sources, + dependency_sets = lambda: lock.dependency_sets, + write = lambda out: rctx.file(out, _encode_compact(lock)), + as_json = lambda: _encode_compact(lock), ) def _empty(rctx): lock = struct( - version = 1, - packages = list(), - fast_package_lookup = dict(), + version = 2, + dependency_sets = dict(), + packages = dict(), + sources = dict(), ) return _create(rctx, lock) +def _encode_compact(lock): + return json.encode_indent(lock) + def _from_json(rctx, content): if not content: return _empty(rctx) lock = json.decode(content) - if lock["version"] != 1: - fail("invalid lockfile version") + if lock["version"] != 2: + fail("lock file version %d is not supported anymore. 
please upgrade your lock file" % lock["version"]) lock = struct( version = lock["version"], - packages = lock["packages"], - fast_package_lookup = dict(), + dependency_sets = lock["dependency_sets"] if "dependency_sets" in lock else dict(), + packages = lock["packages"] if "packages" in lock else dict(), + sources = lock["sources"] if "sources" in lock else dict(), ) - for (i, package) in enumerate(lock.packages): - # TODO: only support urls before 1.0 - if "url" in package: - package["urls"] = [package.pop("url")] - - lock.packages[i] = package - lock.fast_package_lookup[package["key"]] = i return _create(rctx, lock) lockfile = struct( empty = _empty, from_json = _from_json, - make_package_key = _make_package_key, + package_key = _package_key, + short_package_key = _short_package_key, ) diff --git a/apt/private/translate_dependency_set.bzl b/apt/private/translate_dependency_set.bzl new file mode 100644 index 00000000..34913ef6 --- /dev/null +++ b/apt/private/translate_dependency_set.bzl @@ -0,0 +1,145 @@ +"repository rule for generating a dependency graph from a lockfile." + +load(":lockfile.bzl", "lockfile") +load(":starlark_codegen_utils.bzl", "starlark_codegen_utils") +load(":util.bzl", "util") + +_ROOT_BUILD_TMPL = """\ +"Generated by rules_distroless. DO NOT EDIT." + +load("@rules_distroless//apt:defs.bzl", "dpkg_status") +load("@rules_distroless//distroless:defs.bzl", "flatten") + +exports_files(['packages.bzl']) + +# Map Debian architectures to platform CPUs. 
+# +# For more info on Debian architectures, see: +# * https://wiki.debian.org/SupportedArchitectures +# * https://wiki.debian.org/ArchitectureSpecificsMemo +# * https://www.debian.org/releases/stable/amd64/ch02s01.en.html#idm186 +# +# For more info on Bazel's platforms CPUs see: +# * https://github.com/bazelbuild/platforms/blob/main/cpu/BUILD +_ARCHITECTURE_MAP = {{ + "amd64": "x86_64", + "arm64": "arm64", + "ppc64el": "ppc64le", + "mips64el": "mips64", + "s390x": "s390x", + "i386": "x86_32", + "armhf": "armv7e-mf", + "all": "all", +}} + +_ARCHITECTURES = {architectures} + +[ + config_setting( + name = os + "_" + arch, + constraint_values = [ + "@platforms//os:" + os, + "@platforms//cpu:" + _ARCHITECTURE_MAP[arch], + ], + ) + for os in ["linux"] + for arch in _ARCHITECTURES +] + + +# alias( +# name = "lock", +# actual = "@{target_name}_resolve//:lock", +# visibility = ["//visibility:public"], +# ) + +# List of installed packages. For now it's private. +_PACKAGES = {packages} + +# Creates /var/lib/dpkg/status with installed package information. +dpkg_status( + name = "dpkg_status", + controls = select({{ + "//:linux_%s" % arch: ["//%s:control" % package for package in packages] + for arch, packages in _PACKAGES.items() + }}) if _PACKAGES else {{}}, + visibility = ["//visibility:public"], +) + +filegroup( + name = "packages", + srcs = select({{ + "//:linux_%s" % arch: ["//%s" % package for package in packages] + for arch, packages in _PACKAGES.items() + }}) if _PACKAGES else {{}}, + visibility = ["//visibility:public"], +) + + +# A filegroup that contains all the packages and the dpkg status file. 
+filegroup( + name = "{target_name}", + srcs = [ + ":dpkg_status", + ":packages", + ], + visibility = ["//visibility:public"], +) + +flatten( + name = "flat", + tars = [ + "{target_name}", + ], + deduplicate = True, + visibility = ["//visibility:public"], +) +""" + +def _translate_dependency_set_impl(rctx): + package_template = rctx.read(rctx.attr.package_template) + lockf = lockfile.from_json(rctx, rctx.attr.lock_content) + + sources = lockf.sources() + packages = lockf.packages() + dependency_sets = lockf.dependency_sets() + dependency_set = dependency_sets[rctx.attr.depset_name] + + for architecture in dependency_set["sets"].keys(): + for (short_key, version) in dependency_set["sets"][architecture].items(): + package_key = short_key + "=" + version + repo_name = util.sanitize(package_key) + package = packages[package_key] + rctx.file( + "%s/%s/BUILD.bazel" % (package["name"], architecture), + package_template.format( + target_name = architecture, + data_targets = '"@%s//:data"' % repo_name, + control_targets = '"@%s//:control"' % repo_name, + src = '"@%s//:data"' % repo_name, + deps = ["@" + util.sanitize(dep_key) for dep_key in package["depends_on"]], + urls = [ + uri + "/" + package["filename"] + for uri in sources[package["suite"]]["uris"] + ], + name = package["name"], + arch = package["architecture"], + sha256 = package["sha256"], + repo_name = repo_name, + ), + ) + + rctx.file("BUILD.bazel", _ROOT_BUILD_TMPL.format( + target_name = util.get_repo_name(rctx.attr.name), + packages = starlark_codegen_utils.to_dict_list_attr({}), + architectures = starlark_codegen_utils.to_list_attr([]), + )) + +translate_dependency_set = repository_rule( + implementation = _translate_dependency_set_impl, + attrs = { + "depset_name": attr.string(doc = "INTERNAL: DO NOT USE"), + "lock_content": attr.string(doc = "INTERNAL: DO NOT USE"), + "package_template": attr.label(default = "//apt/private:package.BUILD.tmpl"), + }, +) diff --git a/apt/private/util.bzl b/apt/private/util.bzl 
index fdf8d198..4261bc6c 100644 --- a/apt/private/util.bzl +++ b/apt/private/util.bzl @@ -13,6 +13,8 @@ def _set_dict(struct, value = None, keys = []): def _get_dict(struct, keys = [], default_value = None): value = struct for k in keys: + if type(k) != "string": + fail("Invalid key type: {} {}".format(type(k), k)) if k in value: value = value[k] else: @@ -21,7 +23,7 @@ def _get_dict(struct, keys = [], default_value = None): return value def _sanitize(str): - return str.replace("+", "-p-").replace(":", "-").replace("~", "_") + return str.removeprefix("/").replace("+", "-").replace(":", "-").replace("~", "_").replace("/", "_").replace("=", "_") def _get_repo_name(st): if st.find("+") != -1: From 158226fee9bf7f833186120853090eedf98744e9 Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Tue, 23 Sep 2025 21:57:17 -0700 Subject: [PATCH 02/19] more fixes --- MODULE.bazel | 1 + apt/extensions.bzl | 64 +++-- apt/private/apt_cursed_symlink.bzl | 32 +++ apt/private/apt_deb_repository.bzl | 17 +- apt/private/apt_dep_resolver.bzl | 19 +- apt/private/deb_cc_export.bzl | 24 ++ apt/private/deb_import.bzl | 303 ++++++++++++++++++++++- apt/private/lockfile.bzl | 25 +- apt/private/package.BUILD.tmpl | 2 +- apt/private/pkgconfig.bzl | 65 +++++ apt/private/translate_dependency_set.bzl | 100 +++++++- 11 files changed, 598 insertions(+), 54 deletions(-) create mode 100644 apt/private/apt_cursed_symlink.bzl create mode 100644 apt/private/deb_cc_export.bzl create mode 100644 apt/private/pkgconfig.bzl diff --git a/MODULE.bazel b/MODULE.bazel index db940997..51883bc0 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -5,6 +5,7 @@ module( compatibility_level = 1, ) +bazel_dep(name = "rules_cc", version = "0.2.8") bazel_dep(name = "platforms", version = "0.0.10") bazel_dep(name = "bazel_features", version = "1.20.0") bazel_dep(name = "bazel_skylib", version = "1.5.0") diff --git a/apt/extensions.bzl b/apt/extensions.bzl index d0ac0fcd..109dac84 100644 --- a/apt/extensions.bzl +++ 
b/apt/extensions.bzl @@ -27,17 +27,25 @@ def _parse_source(src): components = components, ) -def _distroless_extension(module_ctx): +def _distroless_extension(mctx): root_direct_deps = [] root_direct_dev_deps = [] reproducible = False - for mod in module_ctx.modules: - deb_repo = deb_repository.new(module_ctx) + for mod in mctx.modules: + lockf = lockfile.empty(mctx) + + # if mod.is_root: + + if len(mod.tags.lock): + lock = mod.tags.lock[0] + lockf = lockfile.from_json(mctx, mctx.read(lock.into)) + + deb_repo = deb_repository.new(mctx, lockf.facts()) resolver = dependency_resolver.new(deb_repo) - lockf = lockfile.empty(module_ctx) for sl in mod.tags.sources_list: + continue uris = [uri.removeprefix("mirror+") for uri in sl.uris] architectures = sl.architectures @@ -58,6 +66,7 @@ def _distroless_extension(module_ctx): sources = lockf.sources() dependency_sets = lockf.dependency_sets() for install in mod.tags.install: + continue dependency_set = dependency_sets.setdefault(install.dependency_set, { "sets": {}, }) @@ -77,8 +86,8 @@ def _distroless_extension(module_ctx): arch = architectures.pop() resolved_count = 0 - module_ctx.report_progress("Resolving %s:%s" % (dep_constraint, arch)) - (package, dependencies, unmet_dependencies) = resolver.resolve_all( + mctx.report_progress("Resolving %s:%s" % (dep_constraint, arch)) + (package, dependencies, unmet_dependencies, warnings) = resolver.resolve_all( name = constraint["name"], version = constraint["version"], arch = arch, @@ -93,9 +102,11 @@ def _distroless_extension(module_ctx): " 3 - Ensure that an apt.source_list added for the specified architecture.", ) + for warning in warnings: + util.warning(mctx, warning) + if len(unmet_dependencies): - # buildifier: disable=print - util.warning(module_ctx, "Following dependencies could not be resolved for %s: %s" % (constraint["name"], ",".join([up[0] for up in unmet_dependencies]))) + util.warning(mctx, "Following dependencies could not be resolved for %s: %s" % 
(constraint["name"], ",".join([up[0] for up in unmet_dependencies]))) lockf.add_package(package) @@ -110,7 +121,7 @@ def _distroless_extension(module_ctx): arch_set[lockfile.short_package_key(package)] = package["Version"] # For cases where architecture for the package is not specified we need - # to first find out which source contains the package. and in order to do + # to first find out which source contains the package. in order to do # that we first need to resolve the package for amd64 architecture. # Once the repository is found, then resolve the package for all the # architectures the repository supports. @@ -118,7 +129,7 @@ def _distroless_extension(module_ctx): source = sources[package["Dist"]] architectures = [a for a in source["architectures"] if a != "amd64"] - module_ctx.report_progress("Resolved %d packages for %s" % (resolved_count, arch)) + mctx.report_progress("Resolved %d packages for %s" % (resolved_count, arch)) # Generate a hub repo for every dependency set lock_content = lockf.as_json() @@ -131,23 +142,33 @@ def _distroless_extension(module_ctx): # Generate a repo per package which will be aliased by hub repo. 
for (package_key, package) in lockf.packages().items(): + # dependent_packages = None + # if package["name"].endswith("-dev") + # packages = lockf.packages() + # dependent_packages = json.encode([ + + # ]) + deb_import( name = util.sanitize(package_key), + target_name = util.sanitize(package_key), urls = [ uri + "/" + package["filename"] for uri in sources[package["suite"]]["uris"] ], sha256 = package["sha256"], mergedusr = False, - depends_on = ["@" + util.sanitize(dep_key) for dep_key in package["depends_on"]], + depends_on = package["depends_on"], + package_name = package["name"], ) - lock_tmp = module_ctx.path("apt.lock.json") - lockf.write(lock_tmp) - lockf_wksp = module_ctx.path(Label("@//:apt.lock.json")) - module_ctx.execute( - ["mv", lock_tmp, lockf_wksp], - ) + for lock in mod.tags.lock: + lock_tmp = mctx.path("apt.lock.json") + lockf.write(lock_tmp) + lockf_wksp = mctx.path(lock.into) + mctx.execute( + ["cp", "-f", lock_tmp, lockf_wksp], + ) _doc = """ Module extension to create Debian repositories. @@ -261,11 +282,20 @@ install = tag_class( }, ) +lock = tag_class( + attrs = { + "into": attr.label( + mandatory = True, + ), + }, +) + apt = module_extension( doc = _doc, implementation = _distroless_extension, tag_classes = { "install": install, "sources_list": sources_list, + "lock": lock, }, ) diff --git a/apt/private/apt_cursed_symlink.bzl b/apt/private/apt_cursed_symlink.bzl new file mode 100644 index 00000000..4f0d6926 --- /dev/null +++ b/apt/private/apt_cursed_symlink.bzl @@ -0,0 +1,32 @@ +def _apt_cursed_symlink(ctx): + own_path = ctx.attr.own_path.removeprefix(".") + own_dirname = own_path[:own_path.rfind("/") + 1] + candidate_full_path = own_dirname + ctx.attr.candidate_path + + found = None + + for file in ctx.files.candidates: + if file.path.endswith(candidate_full_path): + found = file + break + + if not found: + fail("Failed to find the candidate so library. 
file an issue.") + + ctx.actions.symlink( + output = ctx.outputs.out, + target_file = file, + ) + return DefaultInfo( + files = depset([ctx.outputs.out]), + ) + +apt_cursed_symlink = rule( + implementation = _apt_cursed_symlink, + attrs = { + "candidates": attr.label_list(), + "candidate_path": attr.string(), + "own_path": attr.string(), + "out": attr.output(), + }, +) diff --git a/apt/private/apt_deb_repository.bzl b/apt/private/apt_deb_repository.bzl index fb213e84..8cc60640 100644 --- a/apt/private/apt_deb_repository.bzl +++ b/apt/private/apt_deb_repository.bzl @@ -12,7 +12,7 @@ def _get_auth(ctx, urls): netrc = read_user_netrc(ctx) return use_netrc(netrc, urls, {}) -def _fetch_package_index(rctx, urls, dist, comp, arch, integrity): +def _fetch_package_index(mctx, urls, dist, comp, arch, integrity): target_triple = "{dist}/{comp}/{arch}".format(dist = dist, comp = comp, arch = arch) # See https://linux.die.net/man/1/xz , https://linux.die.net/man/1/gzip , and https://linux.die.net/man/1/bzip2 @@ -30,7 +30,7 @@ def _fetch_package_index(rctx, urls, dist, comp, arch, integrity): failed_attempts = [] url = None - base_auth = _get_auth(rctx, urls) + base_auth = _get_auth(mctx, urls) for url in urls: download = None for (ext, cmd) in supported_extensions: @@ -39,7 +39,7 @@ def _fetch_package_index(rctx, urls, dist, comp, arch, integrity): auth = {} if url in base_auth: auth = {dist_url: base_auth[url]} - download = rctx.download( + download = mctx.download( url = dist_url, output = output, integrity = integrity, @@ -163,6 +163,7 @@ def _package(state, name, version, arch): def _fetch_and_parse_sources(state): mctx = state.mctx + facts = state.facts for source in state.sources: (urls, dist, components, architectures) = source @@ -175,19 +176,25 @@ def _fetch_and_parse_sources(state): # on misconfigured HTTP servers) urls = [url.rstrip("/") for url in urls] + fact_key = dist + "/" + comp + "/" + arch + fact_value = facts.get(fact_key, "") + # TODO: make parallel 
mctx.report_progress("Fetching package index: {}/{} for {}".format(dist, comp, arch)) - (output, _, _) = _fetch_package_index(mctx, urls, dist, comp, arch, "") + (output, url, integrity) = _fetch_package_index(mctx, urls, dist, comp, arch, fact_value) + + facts[fact_key] = integrity mctx.report_progress("Parsing package index: {}/{} for {}".format(dist, comp, arch)) _parse_repository(state, mctx.read(output), urls, dist) -def _create(mctx): +def _create(mctx, facts): state = struct( mctx = mctx, sources = list(), packages = dict(), virtual_packages = dict(), + facts = facts, ) return struct( diff --git a/apt/private/apt_dep_resolver.bzl b/apt/private/apt_dep_resolver.bzl index f1d248ff..5d6da4df 100644 --- a/apt/private/apt_dep_resolver.bzl +++ b/apt/private/apt_dep_resolver.bzl @@ -17,6 +17,8 @@ def _resolve_package(state, name, version, arch): ) ] + warning = None + if len(candidates) == 1: return candidates[0] @@ -36,11 +38,10 @@ def _resolve_package(state, name, version, arch): # Otherwise, we can't disambiguate the virtual package providers so # choose none and warn. 
- # buildifier: disable=print - print("\nMultiple candidates for virtual package '{}': {}".format( + warning = "Multiple candidates for virtual package '{}': {}".format( name, - [package["Package"] for package in candidates], - )) + ", ".join([package["Package"] for package in candidates]), + ) # Get available versions of the package versions_by_arch = state.repository.package_versions(name = name, arch = arch) @@ -68,7 +69,7 @@ def _resolve_package(state, name, version, arch): if not package: package = state.repository.package(name = name, version = selected_version, arch = "all") - return package + return (package, warning) _ITERATION_MAX_ = 2147483646 @@ -85,6 +86,8 @@ def _resolve_all(state, name, version, arch, include_transitive = True): dependency_group = [] stack = [(name, version, -1)] + warnings = [] + for i in range(0, _ITERATION_MAX_ + 1): if not len(stack): break @@ -98,7 +101,9 @@ def _resolve_all(state, name, version, arch, include_transitive = True): continue # TODO: only resolve in specified suites - package = _resolve_package(state, name, version, arch) + (package, warning) = _resolve_package(state, name, version, arch) + if warning: + warnings.append(warning) # If this package is not found and is part of a dependency group, then just skip it. if not package and dependency_group_idx > -1: @@ -158,7 +163,7 @@ def _resolve_all(state, name, version, arch, include_transitive = True): if not met: unmet_dependencies.append((dep, None)) - return (root_package, dependencies, unmet_dependencies) + return (root_package, dependencies, unmet_dependencies, warnings) def _create_resolution(repository): state = struct(repository = repository) diff --git a/apt/private/deb_cc_export.bzl b/apt/private/deb_cc_export.bzl new file mode 100644 index 00000000..a397711f --- /dev/null +++ b/apt/private/deb_cc_export.bzl @@ -0,0 +1,24 @@ +"normalization rules" + +# buildifier: disable=function-docstring-args +def deb_cc_export(name, src, outs, **kwargs): + """Private. 
DO NOT USE.""" + if len(outs) == 0: + native.filegroup(name = name, srcs = [], **kwargs) + return + toolchains = ["@bsd_tar_toolchains//:resolved_toolchain"] + + cmd = """ +$(BSDTAR_BIN) -xf "$<" -C $(RULEDIR) {} \ +""".format( + " ".join(outs), + ) + native.genrule( + name = name, + srcs = [src], + outs = [out.removeprefix("./") for out in outs], + cmd = cmd, + toolchains = toolchains, + output_to_bindir = True, + **kwargs + ) diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 9ee71252..b32996ca 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -1,10 +1,16 @@ "deb_import" -load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load(":lockfile.bzl", "lockfile") +load(":pkgconfig.bzl", "parse_pc") +load(":util.bzl", "util") # BUILD.bazel template _DEB_IMPORT_BUILD_TMPL = ''' load("@rules_distroless//apt/private:deb_postfix.bzl", "deb_postfix") +load("@rules_distroless//apt/private:deb_cc_export.bzl", "deb_cc_export") +load("@rules_distroless//apt/private:apt_cursed_symlink.bzl", "apt_cursed_symlink") +load("@rules_cc//cc/private/rules_impl:cc_import.bzl", "cc_import") +load("@rules_cc//cc:cc_library.bzl", "cc_library") deb_postfix( name = "data", @@ -25,15 +31,292 @@ filegroup( srcs = {depends_on} + [":data"], visibility = ["//visibility:public"], ) + + +deb_cc_export( + name = "cc_export", + src = glob(["data.tar*"])[0], + outs = {outs}, + visibility = ["//visibility:public"] +) + +{cc_import_targets} ''' -def deb_import(name, depends_on = [], mergedusr = False, **kwargs): - http_archive( - build_file_content = _DEB_IMPORT_BUILD_TMPL.format( - mergedusr = mergedusr, - depends_on = depends_on, - target_name = name, - ), - name = name, - **kwargs +_CC_IMPORT_TMPL = """ +cc_import( + name = "{name}", + hdrs = {hdrs}, + linkopts = {linkopts}, + includes = {includes}, + shared_library = {shared_lib}, + static_library = {static_lib}, + visibility = ["//visibility:public"], +) +""" + +_CC_LIBRARY_TMPL = """ 
+cc_library( + name = "{name}", + hdrs = {hdrs}, + strip_include_prefix = "usr/include", + visibility = ["//visibility:public"], +) +""" + +_CC_LIBRARY_DEP_ONLY_TMPL = """ +cc_library( + name = "{name}", + deps = {deps}, + visibility = ["//visibility:public"] +) +""" + +_APT_CURSED_SYMLINK = """ +apt_cursed_symlink( + name = "{name}_cursed", + own_path = "{own_path}", + candidate_path = "{candidate_path}", + candidates = {candidates}, + out="{out}" +) +""" + +def _discover_contents(rctx, depends_on, target_name): + result = rctx.execute(["tar", "-tf", "data.tar.xz"]) + contents_raw = result.stdout.splitlines() + so_files = [] + a_files = [] + h_files = [] + hpp_files = [] + pc_files = [] + deps = [] + excluded_files = [] + + for dep in depends_on: + (suite, name, arch, version) = lockfile.parse_package_key(dep) + if not name.endswith("-dev"): + # TODO: + # This is probably not safe. + # What if a package has a dependency (with a .so file in it) + # but it's not a -dev package? + continue + deps.append( + "@%s//:%s" % (util.sanitize(dep), name.removesuffix("-dev")), + ) + + for line in contents_raw: + # Skip everything in man pages and examples + if line.startswith("/usr/share"): + continue + + # Skip directories + if line.endswith("/"): + continue + + if (line.endswith(".so") or line.find(".so.") > 5) and line.find("lib"): + so_files.append(line) + elif line.endswith(".a") and line.find("lib"): + a_files.append(line) + elif line.endswith(".pc") and line.find("pkgconfig"): + pc_files.append(line) + elif line.endswith(".h") and line.startswith("./usr/include"): + h_files.append(line) + elif line.endswith(".hpp") and line.startswith("./usr/include"): + hpp_files.append(line) + + build_file_content = "" + + # TODO: handle non-symlink pc files similarly to how we + # handle so symlinks + non_symlink_pc_file = None + pc_files_all_symlink = False + + if len(pc_files): + # TODO: use rctx.extract instead. 
+ r = rctx.execute( + ["tar", "-xvf", "data.tar.xz"] + pc_files + so_files, + ) + pc_files_all_symlink = True + for pc in pc_files: + if rctx.path(pc).exists: + non_symlink_pc_file = pc + pc_files_all_symlink = False + break + + # Package has a pkgconfig, use that as the source of truth. + if non_symlink_pc_file: + pc = parse_pc(rctx.read(non_symlink_pc_file)) + + ( + libname, + includedir, + libdir, + linkopts, + includes, + defines, + ) = _process_pcconfig(pc) + + static_lib = None + shared_lib = None + + # Look for a static archive + for ar in a_files: + if ar.endswith(libname + ".a"): + static_lib = '":%s"' % ar.removeprefix("./") + break + + # Look for a dynamic library + for so_lib in so_files: + if so_lib.endswith(libname + ".so"): + lib_path = so_lib.removeprefix("./") + path = rctx.path(lib_path) + + # Check for dangling symlinks and search in the transitive closure + if not path.exists: + candidate_path = rctx.execute(["readlink", path]).stdout.strip() + build_file_content += _APT_CURSED_SYMLINK.format( + name = target_name, + candidates = [ + "@%s//:cc_export" % util.sanitize(dep) + for dep in depends_on + ], + own_path = so_lib, + candidate_path = candidate_path, + out = so_lib.removeprefix("./"), + ) + excluded_files.append(so_lib) + + shared_lib = '":%s"' % so_lib.removeprefix("./") + break + + build_file_content += _CC_IMPORT_TMPL.format( + name = target_name, + hdrs = [ + ":" + h.removeprefix("./") + for h in h_files + hpp_files + ], + shared_lib = shared_lib, + static_lib = static_lib, + includes = [ + "external/../" + include + for include in includes + ], + linkopts = linkopts, + ) + + # There were some pc files but they were all symlinks + elif pc_files_all_symlink: + pass + + # Package has no pkgconfig, possibly a cmake based library at the + # standard /usr/include location and that's the only available + # information to turn the package into a cc_library target. 
+ + elif len(hpp_files): + build_file_content += _CC_LIBRARY_TMPL.format( + name = target_name, + hdrs = [ + ":" + h.removeprefix("./") + for h in h_files + hpp_files + ], + ) + + # Package has no header files, likely a denominator package like libboost-dev + # since it has dependencies + + elif len(depends_on): + build_file_content += _CC_LIBRARY_DEP_ONLY_TMPL.format( + name = target_name, + hdrs = [], + deps = deps, + ) + + pruned_outs = [] + if pc_files_all_symlink: + pruned_outs = [] + else: + pruned_outs = [ + sf + for sf in so_files + if sf not in excluded_files + ] + h_files + hpp_files + a_files + + return (build_file_content, pruned_outs) + +def _trim(str): + return str.rstrip(" ").lstrip(" ") + +def _process_pcconfig(pc): + (directives, variables) = pc + includedir = _trim(variables["includedir"]) + libdir = _trim(variables["libdir"]) + linkopts = [] + includes = [] + defines = [] + libname = None + if "Libs" in directives: + libs = _trim(directives["Libs"]).split(" ") + for arg in libs: + if arg.startswith("-l"): + libname = "lib" + arg.removeprefix("-l") + continue + if arg.startswith("-L"): + continue + linkopts.append(arg) + + # if "Libs.private" in directives: + # libs = _trim(directives["Libs.private"]).split(" ") + # linkopts.extend([arg for arg in libs if arg.startswith("-l")]) + + if "Cflags" in directives: + cflags = _trim(directives["Cflags"]).split(" ") + for flag in cflags: + if flag.startswith("-I"): + include = flag.removeprefix("-I") + includes.append(include) + + # If the include is a direct include, e.g. $includedir (/usr/include/hiredis) + # equals to -I/usr/include/hiredis then we need to add /usr/include into + # includes array to satisfy imports as `#include ` + if include == includedir: + includes.append(include.removesuffix("/" + directives["Name"])) + elif include.startswith(includedir): + includes.append(include.removesuffix("/" + directives["Name"])) + elif flag.startswith("-D"): + define = flag.removeprefix("-D") + 
defines.append(define) + + return (libname, includedir, libdir, linkopts, includes, defines) + +def _deb_import_impl(rctx): + rctx.download_and_extract( + url = rctx.attr.urls, + sha256 = rctx.attr.sha256, + ) + + # TODO: only do this if package is -dev or dependent of a -dev pkg. + cc_import_targets, so_files = _discover_contents( + rctx, + rctx.attr.depends_on, + rctx.attr.package_name.removesuffix("-dev"), ) + rctx.file("BUILD.bazel", _DEB_IMPORT_BUILD_TMPL.format( + mergedusr = rctx.attr.mergedusr, + depends_on = ["@" + util.sanitize(dep_key) for dep_key in rctx.attr.depends_on], + target_name = rctx.attr.target_name, + cc_import_targets = cc_import_targets, + outs = so_files, + )) + +deb_import = repository_rule( + implementation = _deb_import_impl, + attrs = { + "urls": attr.string_list(mandatory = True, allow_empty = False), + "sha256": attr.string(), + "depends_on": attr.string_list(), + "mergedusr": attr.bool(), + "target_name": attr.string(), + "package_name": attr.string(), + }, +) diff --git a/apt/private/lockfile.bzl b/apt/private/lockfile.bzl index 16e45efb..29d460b5 100644 --- a/apt/private/lockfile.bzl +++ b/apt/private/lockfile.bzl @@ -8,6 +8,13 @@ def _make_package_key(suite, name, version, arch): version, ) +def _parse_package_key(key): + rest = key[1:] + (suite, rest) = rest.split("/", 1) + (name, rest) = rest.split(":", 1) + (arch, version) = rest.split("=", 1) + return (suite, name, arch, version) + def _short_package_key(package): return "/%s/%s:%s" % ( package["Dist"], @@ -53,7 +60,7 @@ def _add_source(lock, suite, types, uris, components, architectures): "architectures": architectures, } -def _create(rctx, lock): +def _create(mctx, lock): return struct( has_package = lambda *args, **kwargs: _has_package(lock, *args, **kwargs), add_source = lambda *args, **kwargs: _add_source(lock, *args, **kwargs), @@ -62,25 +69,27 @@ def _create(rctx, lock): packages = lambda: lock.packages, sources = lambda: lock.sources, dependency_sets = lambda: 
lock.dependency_sets, - write = lambda out: rctx.file(out, _encode_compact(lock)), + facts = lambda: lock.facts, + write = lambda out: mctx.file(out, _encode_compact(lock)), as_json = lambda: _encode_compact(lock), ) -def _empty(rctx): +def _empty(mctx): lock = struct( version = 2, dependency_sets = dict(), packages = dict(), sources = dict(), + facts = dict(), ) - return _create(rctx, lock) + return _create(mctx, lock) def _encode_compact(lock): return json.encode_indent(lock) -def _from_json(rctx, content): +def _from_json(mctx, content): if not content: - return _empty(rctx) + return _empty(mctx) lock = json.decode(content) if lock["version"] != 2: @@ -91,12 +100,14 @@ def _from_json(rctx, content): dependency_sets = lock["dependency_sets"] if "dependency_sets" in lock else dict(), packages = lock["packages"] if "packages" in lock else dict(), sources = lock["sources"] if "sources" in lock else dict(), + facts = lock["facts"] if "facts" in lock else dict(), ) - return _create(rctx, lock) + return _create(mctx, lock) lockfile = struct( empty = _empty, from_json = _from_json, package_key = _package_key, short_package_key = _short_package_key, + parse_package_key = _parse_package_key, ) diff --git a/apt/private/package.BUILD.tmpl b/apt/private/package.BUILD.tmpl index 563c33d2..0e7f4876 100644 --- a/apt/private/package.BUILD.tmpl +++ b/apt/private/package.BUILD.tmpl @@ -16,4 +16,4 @@ filegroup( name = "{target_name}", srcs = {deps} + [":data"], visibility = ["//visibility:public"], -) \ No newline at end of file +) diff --git a/apt/private/pkgconfig.bzl b/apt/private/pkgconfig.bzl new file mode 100644 index 00000000..a83881b8 --- /dev/null +++ b/apt/private/pkgconfig.bzl @@ -0,0 +1,65 @@ +# Copyright thesayyn 2025 +# Taken from https://github.com/thesayyn/pkgconfig/blob/main/extensions.bzl +def _expand_value(value, variables): + # fast path + if value.find("$") == -1: + return value + + expanded_value = "" + key = "" + in_subs = False + + def assert_in_subs(): + if 
not in_subs: + fail("corrupted pc file") + + for c in value.elems(): + if c == "$": + in_subs = True + elif c == "{": + assert_in_subs() + elif c == "}": + assert_in_subs() + value_of_key = variables[key] + + # reset subs state + key = "" + in_subs = False + if not value_of_key: + fail("corrupted pc file") + expanded_value += value_of_key + elif in_subs: + key += c + else: + expanded_value += c + + return expanded_value + +def parse_pc(pc): + variables = {} + directives = {} + for l in pc.splitlines(): + if l.startswith("#"): + continue + if not l.strip(): + continue + if l.find(": ") != -1: + (k, v) = _split_once(l, ":") + directives[k] = _expand_value(v.removeprefix(" "), variables) + elif l.find("=") != -1: + (k, v) = _split_once(l, "=") + variables[k] = _expand_value(v, variables) + + return (directives, variables) + +def _split_once(l, sep): + values = l.split(sep, 1) + if len(values) < 2: + fail("corrupted pc config") + return (values[0], values[1]) + +def _parse_requires(re): + if not re: + return [] + deps = re.split(",") + return [dep.strip(" ") for dep in deps if dep.strip(" ")] diff --git a/apt/private/translate_dependency_set.bzl b/apt/private/translate_dependency_set.bzl index 34913ef6..4ac20cff 100644 --- a/apt/private/translate_dependency_set.bzl +++ b/apt/private/translate_dependency_set.bzl @@ -47,12 +47,6 @@ _ARCHITECTURES = {architectures} ] -# alias( -# name = "lock", -# actual = "@{target_name}_resolve//:lock", -# visibility = ["//visibility:public"], -# ) - # List of installed packages. For now it's private. _PACKAGES = {packages} @@ -96,6 +90,47 @@ flatten( ) """ +_PACKAGE_TEMPLATE = '''\ +"""Generated by rules_distroless. DO NOT EDIT.""" + +NO_MATCH_ERROR=""" +Package "{target_name}" is not available for the current target platform. + +Available Platforms: {available_platforms} + +- Set `--platforms` on the command line. 
+- Perform a transition to one of the available platforms +""" + +alias( + name = "data", + actual = select({data_targets}, no_match_error = NO_MATCH_ERROR), + visibility = ["//visibility:public"], +) + +alias( + name = "control", + actual = select({control_targets}, no_match_error = NO_MATCH_ERROR), + visibility = ["//visibility:public"], +) + +filegroup( + name = "{target_name}", + srcs = select({deps}, no_match_error = NO_MATCH_ERROR) + [":data"], + visibility = ["//visibility:public"], +) + +{extra} +''' + +_DEB_CC_IMPORT = """ +alias( + name = "{target_name}", + actual = select({selects}), + visibility = ["//visibility:public"], +) +""" + def _translate_dependency_set_impl(rctx): package_template = rctx.read(rctx.attr.package_template) lockf = lockfile.from_json(rctx, rctx.attr.lock_content) @@ -105,11 +140,22 @@ def _translate_dependency_set_impl(rctx): dependency_sets = lockf.dependency_sets() dependency_set = dependency_sets[rctx.attr.depset_name] + packages_to_architectures = {} + for architecture in dependency_set["sets"].keys(): for (short_key, version) in dependency_set["sets"][architecture].items(): package_key = short_key + "=" + version repo_name = util.sanitize(package_key) package = packages[package_key] + + packages_to_architectures.setdefault( + package["name"] + "=" + version, + struct( + name = package["name"], + architectures = {}, + ), + ).architectures[architecture] = package_key + rctx.file( "%s/%s/BUILD.bazel" % (package["name"], architecture), package_template.format( @@ -129,10 +175,50 @@ def _translate_dependency_set_impl(rctx): ), ) + for (_, info) in packages_to_architectures.items(): + package_name = info.name + + architectures = info.architectures.keys() + + extra = "" + if package_name.endswith("-dev"): + target_name = package_name.removesuffix("-dev") + extra = _DEB_CC_IMPORT.format( + target_name = target_name, + selects = starlark_codegen_utils.to_dict_attr({ + "//:linux_%s" % architecture: "@%s//:%s" % 
(util.sanitize(package_key), target_name) + for (architecture, package_key) in info.architectures.items() + }), + ) + + rctx.file( + "%s/BUILD.bazel" % package_name, + _PACKAGE_TEMPLATE.format( + target_name = package_name, + data_targets = starlark_codegen_utils.to_dict_attr({ + "//:linux_%s" % architecture: "//%s/%s:data" % (package_name, architecture) + for architecture in architectures + }), + control_targets = starlark_codegen_utils.to_dict_attr({ + "//:linux_%s" % architecture: "//%s/%s:control" % (package_name, architecture) + for architecture in architectures + }), + deps = starlark_codegen_utils.to_dict_list_attr({ + "//:linux_%s" % architecture: ["//%s/%s" % (package_name, architecture)] + for architecture in architectures + }), + extra = extra, + available_platforms = " ".join([ + "linux/" + arch + for arch in architectures + ]), + ), + ) + rctx.file("BUILD.bazel", _ROOT_BUILD_TMPL.format( target_name = util.get_repo_name(rctx.attr.name), packages = starlark_codegen_utils.to_dict_list_attr({}), - architectures = starlark_codegen_utils.to_list_attr([]), + architectures = starlark_codegen_utils.to_list_attr(dependency_set["sets"].keys()), )) translate_dependency_set = repository_rule( From 71e793e09a5bff7c6612fc958ddf52a72b977996 Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Thu, 2 Oct 2025 14:06:52 -0700 Subject: [PATCH 03/19] latest --- MODULE.bazel | 1 + apt/extensions.bzl | 118 ++++++++++++++++------------- apt/private/apt_deb_repository.bzl | 65 ++++++++++------ apt/private/apt_dep_resolver.bzl | 4 +- apt/private/lockfile.bzl | 12 +++ 5 files changed, 122 insertions(+), 78 deletions(-) diff --git a/MODULE.bazel b/MODULE.bazel index 51883bc0..c64b555c 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -5,6 +5,7 @@ module( compatibility_level = 1, ) +bazel_dep(name = "tar.bzl", version = "0.5.6") bazel_dep(name = "rules_cc", version = "0.2.8") bazel_dep(name = "platforms", version = "0.0.10") bazel_dep(name = "bazel_features", version = "1.20.0") diff 
--git a/apt/extensions.bzl b/apt/extensions.bzl index 109dac84..3be0a089 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -32,41 +32,47 @@ def _distroless_extension(mctx): root_direct_dev_deps = [] reproducible = False - for mod in mctx.modules: - lockf = lockfile.empty(mctx) - - # if mod.is_root: - - if len(mod.tags.lock): - lock = mod.tags.lock[0] - lockf = lockfile.from_json(mctx, mctx.read(lock.into)) + # As in mach 9 :) + glock = lockfile.merge(mctx, [ + lockfile.from_json(mctx, mctx.read(lock.into)) + for mod in mctx.modules + for lock in mod.tags.lock + ]) - deb_repo = deb_repository.new(mctx, lockf.facts()) - resolver = dependency_resolver.new(deb_repo) + repo = deb_repository.new(mctx, glock.facts()) + resolver = dependency_resolver.new(repo) + for mod in mctx.modules: + # TODO: also enforce that every module explicitly lists its sources_list + # otherwise they'll break if the sources_list that the module depends on + # magically disappears. for sl in mod.tags.sources_list: - continue uris = [uri.removeprefix("mirror+") for uri in sl.uris] architectures = sl.architectures for suite in sl.suites: - lockf.add_source( + glock.add_source( suite, uris = uris, types = sl.types, components = sl.components, architectures = architectures, ) - deb_repo.add_source( + + repo.add_source( (uris, suite, sl.components, architectures), ) - deb_repo.fetch_and_parse() + # Fetch all sources_list and parse them. 
+ # Unfortunately repository rules have no concept of threads + # so parsing has to happen sequentially + repo.fetch_and_parse() + + sources = glock.sources() + dependency_sets = glock.dependency_sets() - sources = lockf.sources() - dependency_sets = lockf.dependency_sets() + for mod in mctx.modules: for install in mod.tags.install: - continue dependency_set = dependency_sets.setdefault(install.dependency_set, { "sets": {}, }) @@ -106,15 +112,23 @@ def _distroless_extension(mctx): util.warning(mctx, warning) if len(unmet_dependencies): - util.warning(mctx, "Following dependencies could not be resolved for %s: %s" % (constraint["name"], ",".join([up[0] for up in unmet_dependencies]))) + util.warning( + mctx, + "Following dependencies could not be resolved for %s: %s" % (constraint["name"], ",".join([up[0] for up in unmet_dependencies])), + ) - lockf.add_package(package) + # TODO: + # Ensure following statements are true. + # 1- Package was resolved from a source that module listed explicitly. + # 2- Package resolution was skipped because some other module asked for this package. + # 3- 1) is enforced even if 2) is the case. 
+ glock.add_package(package) resolved_count += len(dependencies) + 1 for dep in dependencies: - lockf.add_package(dep) - lockf.add_package_dependency(package, dep) + glock.add_package(dep) + glock.add_package_dependency(package, dep) # Add it to dependency set arch_set = dependency_set["sets"].setdefault(arch, {}) @@ -131,40 +145,40 @@ def _distroless_extension(mctx): mctx.report_progress("Resolved %d packages for %s" % (resolved_count, arch)) - # Generate a hub repo for every dependency set - lock_content = lockf.as_json() - for depset_name in dependency_sets.keys(): - translate_dependency_set( - name = depset_name, - depset_name = depset_name, - lock_content = lock_content, - ) + # Generate a hub repo for every dependency set + lock_content = glock.as_json() + for depset_name in dependency_sets.keys(): + translate_dependency_set( + name = depset_name, + depset_name = depset_name, + lock_content = lock_content, + ) + + # Generate a repo per package which will be aliased by hub repo. + for (package_key, package) in glock.packages().items(): + deb_import( + name = util.sanitize(package_key), + target_name = util.sanitize(package_key), + urls = [ + uri + "/" + package["filename"] + for uri in sources[package["suite"]]["uris"] + ], + sha256 = package["sha256"], + mergedusr = False, + depends_on = package["depends_on"], + package_name = package["name"], + ) - # Generate a repo per package which will be aliased by hub repo. 
- for (package_key, package) in lockf.packages().items(): - # dependent_packages = None - # if package["name"].endswith("-dev") - # packages = lockf.packages() - # dependent_packages = json.encode([ - - # ]) - - deb_import( - name = util.sanitize(package_key), - target_name = util.sanitize(package_key), - urls = [ - uri + "/" + package["filename"] - for uri in sources[package["suite"]]["uris"] - ], - sha256 = package["sha256"], - mergedusr = False, - depends_on = package["depends_on"], - package_name = package["name"], - ) + for mod in mctx.modules: + if not mod.is_root: + continue - for lock in mod.tags.lock: + if len(mod.tags.lock) > 1: + fail("There can only be one apt.lock per module.") + elif len(mod.tags.lock) == 1: + lock = mod.tags.lock[0] lock_tmp = mctx.path("apt.lock.json") - lockf.write(lock_tmp) + glock.write(lock_tmp) lockf_wksp = mctx.path(lock.into) mctx.execute( ["cp", "-f", lock_tmp, lockf_wksp], diff --git a/apt/private/apt_deb_repository.bzl b/apt/private/apt_deb_repository.bzl index 8cc60640..bbf225a3 100644 --- a/apt/private/apt_deb_repository.bzl +++ b/apt/private/apt_deb_repository.bzl @@ -164,41 +164,58 @@ def _package(state, name, version, arch): def _fetch_and_parse_sources(state): mctx = state.mctx facts = state.facts - for source in state.sources: - (urls, dist, components, architectures) = source - for arch in architectures: - for comp in components: - # We assume that `url` does not contain a trailing forward slash when passing to - # functions below. If one is present, remove it. Some HTTP servers do not handle - # redirects properly when a path contains "//" - # (ie. https://mymirror.com/ubuntu//dists/noble/stable/... 
may return a 404 - # on misconfigured HTTP servers) - urls = [url.rstrip("/") for url in urls] - - fact_key = dist + "/" + comp + "/" + arch - fact_value = facts.get(fact_key, "") - - # TODO: make parallel - mctx.report_progress("Fetching package index: {}/{} for {}".format(dist, comp, arch)) - (output, url, integrity) = _fetch_package_index(mctx, urls, dist, comp, arch, fact_value) - - facts[fact_key] = integrity - - mctx.report_progress("Parsing package index: {}/{} for {}".format(dist, comp, arch)) - _parse_repository(state, mctx.read(output), urls, dist) + # TODO: make parallel + for source in state.sources.values(): + (urls, dist, component, architecture) = source + + # We assume that `url` does not contain a trailing forward slash when passing to + # functions below. If one is present, remove it. Some HTTP servers do not handle + # redirects properly when a path contains "//" + # (ie. https://mymirror.com/ubuntu//dists/noble/stable/... may return a 404 + # on misconfigured HTTP servers) + urls = [url.rstrip("/") for url in urls] + + fact_key = dist + "/" + component + "/" + architecture + fact_value = facts.get(fact_key, "") + + mctx.report_progress("Fetching package index: {}/{} for {}".format(dist, component, architecture)) + (output, url, integrity) = _fetch_package_index(mctx, urls, dist, component, architecture, fact_value) + + facts[fact_key] = integrity + + mctx.report_progress("Parsing package index: {}/{} for {}".format(dist, component, architecture)) + _parse_repository(state, mctx.read(output), urls, dist) + +def _add_source_if_not_present(state, source): + (urls, dist, components, architectures) = source + + for arch in architectures: + for comp in components: + keys = [ + "%".join((url, dist, comp, arch)) + for url in urls + ] + found = any([ + key in state.sources + for key in keys + ]) + if found: + continue + for key in keys: + state.sources[key] = (urls, dist, comp, arch) def _create(mctx, facts): state = struct( mctx = mctx, - sources = 
list(), + sources = dict(), packages = dict(), virtual_packages = dict(), facts = facts, ) return struct( - add_source = lambda source: state.sources.append(source), + add_source = lambda source: _add_source_if_not_present(state, source), fetch_and_parse = lambda: _fetch_and_parse_sources(state), package_versions = lambda **kwargs: _package_versions(state, **kwargs), virtual_packages = lambda **kwargs: _virtual_packages(state, **kwargs), diff --git a/apt/private/apt_dep_resolver.bzl b/apt/private/apt_dep_resolver.bzl index 5d6da4df..6961a6da 100644 --- a/apt/private/apt_dep_resolver.bzl +++ b/apt/private/apt_dep_resolver.bzl @@ -20,7 +20,7 @@ def _resolve_package(state, name, version, arch): warning = None if len(candidates) == 1: - return candidates[0] + return (candidates[0], warning) if len(candidates) > 1: for package in candidates: @@ -34,7 +34,7 @@ def _resolve_package(state, name, version, arch): # # In the case of required packages, these defaults are not specified. if "Priority" in package and package["Priority"] == "required": - return package + return (package, warning) # Otherwise, we can't disambiguate the virtual package providers so # choose none and warn. 
diff --git a/apt/private/lockfile.bzl b/apt/private/lockfile.bzl index 29d460b5..dba9ef33 100644 --- a/apt/private/lockfile.bzl +++ b/apt/private/lockfile.bzl @@ -104,10 +104,22 @@ def _from_json(mctx, content): ) return _create(mctx, lock) +def _merge(mctx, locks): + mlock = _empty(mctx) + packages = mlock.packages() + facts = mlock.facts() + for lock in locks: + for (key, pkg) in lock.packages().items(): + packages[key] = pkg + for (key, fact) in lock.facts().items(): + facts[key] = fact + return mlock + lockfile = struct( empty = _empty, from_json = _from_json, package_key = _package_key, short_package_key = _short_package_key, parse_package_key = _parse_package_key, + merge = _merge, ) From a1ffad1ea011048de95cb87bb0af6a60e3e81dac Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Tue, 21 Oct 2025 13:32:16 -0700 Subject: [PATCH 04/19] lts --- apt.lock.json | 409 ----------------------------- apt/extensions.bzl | 21 +- apt/private/apt_cursed_symlink.bzl | 3 +- apt/private/apt_deb_repository.bzl | 105 +++++++- apt/private/apt_dep_resolver.bzl | 23 +- apt/private/deb_cc_export.bzl | 105 ++++++-- apt/private/deb_import.bzl | 303 ++++++++++++--------- apt/private/lockfile.bzl | 12 + apt/private/pkgconfig.bzl | 58 +++- 9 files changed, 465 insertions(+), 574 deletions(-) delete mode 100755 apt.lock.json diff --git a/apt.lock.json b/apt.lock.json deleted file mode 100755 index 08a26a94..00000000 --- a/apt.lock.json +++ /dev/null @@ -1,409 +0,0 @@ -{ - "dependency_sets": { - "bookworm": { - "sets": { - "amd64": { - "/cloud-sdk/google-cloud-cli:amd64": "537.0.0-0", - "/noble/base-files:amd64": "13ubuntu7", - "/noble/libncurses6:amd64": "6.4+20240113-1ubuntu1", - "/noble/ncurses-base:all": "6.4+20240113-1ubuntu1", - "/noble/tzdata:all": "2024a-1ubuntu1" - }, - "arm64": { - "/bookworm/coreutils:arm64": "9.1-1" - }, - "i386": { - "/bookworm/libstdc++6:i386": "12.2.0-14" - } - } - } - }, - "packages": { - "/bookworm-security/libc6:arm64=2.36-9+deb12u4": { - "architecture": 
"arm64", - "depends_on": [], - "filename": "pool/updates/main/g/glibc/libc6_2.36-9+deb12u4_arm64.deb", - "name": "libc6", - "sha256": "71302ee1bf4374aa4df7bcb5f58155e56a03c282f4076cdd07cf5bdfbca21fab", - "size": 2321668, - "suite": "bookworm-security", - "version": "2.36-9+deb12u4" - }, - "/bookworm-security/libc6:i386=2.36-9+deb12u4": { - "architecture": "i386", - "depends_on": [], - "filename": "pool/updates/main/g/glibc/libc6_2.36-9+deb12u4_i386.deb", - "name": "libc6", - "sha256": "02a6f5c6548577cc77a25cc3fe1bdc85cebe553603f647a0315939b3fed99394", - "size": 2625992, - "suite": "bookworm-security", - "version": "2.36-9+deb12u4" - }, - "/bookworm/coreutils:arm64=9.1-1": { - "architecture": "arm64", - "depends_on": [ - "/bookworm/libselinux1:arm64=3.4-1+b6", - "/bookworm/libpcre2-8-0:arm64=10.42-1", - "/bookworm-security/libc6:arm64=2.36-9+deb12u4", - "/bookworm/libgcc-s1:arm64=12.2.0-14", - "/bookworm/gcc-12-base:arm64=12.2.0-14", - "/bookworm/libgmp10:arm64=2:6.2.1+dfsg1-1.1", - "/bookworm/libattr1:arm64=1:2.5.1-4", - "/bookworm/libacl1:arm64=2.3.1-3" - ], - "filename": "pool/main/c/coreutils/coreutils_9.1-1_arm64.deb", - "name": "coreutils", - "sha256": "ec8f090a14c684879dce251254d8d9ed0876d4480f750d5807ef04e5435e1c4d", - "size": 2815252, - "suite": "bookworm", - "version": "9.1-1" - }, - "/bookworm/gcc-12-base:arm64=12.2.0-14": { - "architecture": "arm64", - "depends_on": [], - "filename": "pool/main/g/gcc-12/gcc-12-base_12.2.0-14_arm64.deb", - "name": "gcc-12-base", - "sha256": "e1f2fb7212546c0e360af8df26303608f7b09e123ac9c96e15872d1ec1ce3275", - "size": 37504, - "suite": "bookworm", - "version": "12.2.0-14" - }, - "/bookworm/gcc-12-base:i386=12.2.0-14": { - "architecture": "i386", - "depends_on": [], - "filename": "pool/main/g/gcc-12/gcc-12-base_12.2.0-14_i386.deb", - "name": "gcc-12-base", - "sha256": "a68aa1dfa176765f7b36a570a8298ab18ab5b82272826fe2fdd25158315d0026", - "size": 37488, - "suite": "bookworm", - "version": "12.2.0-14" - }, - 
"/bookworm/libacl1:arm64=2.3.1-3": { - "architecture": "arm64", - "depends_on": [], - "filename": "pool/main/a/acl/libacl1_2.3.1-3_arm64.deb", - "name": "libacl1", - "sha256": "2b0eef11a2e271e7355adaf1f6cbf8d2e83835ae1b6cf15165d59b8289c08342", - "size": 30768, - "suite": "bookworm", - "version": "2.3.1-3" - }, - "/bookworm/libattr1:arm64=1:2.5.1-4": { - "architecture": "arm64", - "depends_on": [], - "filename": "pool/main/a/attr/libattr1_2.5.1-4_arm64.deb", - "name": "libattr1", - "sha256": "481e1c3fcad6773ba1c9d7f1de59e4fc80d95326c9b20192e13b7111013c932a", - "size": 21888, - "suite": "bookworm", - "version": "1:2.5.1-4" - }, - "/bookworm/libgcc-s1:arm64=12.2.0-14": { - "architecture": "arm64", - "depends_on": [], - "filename": "pool/main/g/gcc-12/libgcc-s1_12.2.0-14_arm64.deb", - "name": "libgcc-s1", - "sha256": "6fce2268d8f3152a4e84634f5a24133d3c62903b2f9b11b9c59235cbbc1b23a8", - "size": 34836, - "suite": "bookworm", - "version": "12.2.0-14" - }, - "/bookworm/libgcc-s1:i386=12.2.0-14": { - "architecture": "i386", - "depends_on": [], - "filename": "pool/main/g/gcc-12/libgcc-s1_12.2.0-14_i386.deb", - "name": "libgcc-s1", - "sha256": "c22f4115a4b66b56256a6138457fb7630a73bbaa1fc63639060527f7366f0cb1", - "size": 59544, - "suite": "bookworm", - "version": "12.2.0-14" - }, - "/bookworm/libgmp10:arm64=2:6.2.1+dfsg1-1.1": { - "architecture": "arm64", - "depends_on": [], - "filename": "pool/main/g/gmp/libgmp10_6.2.1+dfsg1-1.1_arm64.deb", - "name": "libgmp10", - "sha256": "9906387c1dd806518c915bd8616d072c741061d7fa26b222e52763456060b31a", - "size": 537920, - "suite": "bookworm", - "version": "2:6.2.1+dfsg1-1.1" - }, - "/bookworm/libpcre2-8-0:arm64=10.42-1": { - "architecture": "arm64", - "depends_on": [], - "filename": "pool/main/p/pcre2/libpcre2-8-0_10.42-1_arm64.deb", - "name": "libpcre2-8-0", - "sha256": "b2448d0a8a3db7fbeac231e7ef93811346c1fb5f96ccf6f631701d8a4eb39206", - "size": 230728, - "suite": "bookworm", - "version": "10.42-1" - }, - 
"/bookworm/libselinux1:arm64=3.4-1+b6": { - "architecture": "arm64", - "depends_on": [], - "filename": "pool/main/libs/libselinux/libselinux1_3.4-1+b6_arm64.deb", - "name": "libselinux1", - "sha256": "29201edf23ebae40844d6c289afdb9bba52f927d55096ed1b1cd37e040135edc", - "size": 68800, - "suite": "bookworm", - "version": "3.4-1+b6" - }, - "/bookworm/libstdc++6:i386=12.2.0-14": { - "architecture": "i386", - "depends_on": [ - "/bookworm/libgcc-s1:i386=12.2.0-14", - "/bookworm-security/libc6:i386=2.36-9+deb12u4", - "/bookworm/gcc-12-base:i386=12.2.0-14" - ], - "filename": "pool/main/g/gcc-12/libstdc++6_12.2.0-14_i386.deb", - "name": "libstdc++6", - "sha256": "65c7019980b204ac150faae399eac921574e0ef273f83c0485a21daae1c57551", - "size": 661288, - "suite": "bookworm", - "version": "12.2.0-14" - }, - "/cloud-sdk/google-cloud-cli:amd64=537.0.0-0": { - "architecture": "amd64", - "depends_on": [], - "filename": "pool/cloud-sdk/google-cloud-cli_537.0.0-0_amd64_536d1a83f33cd095096cd748bd9024e3.deb", - "name": "google-cloud-cli", - "sha256": "efdc2b82ea94f0dbe0944274743179db7008a08afb66e2ee14f72db60a25d73b", - "size": 124102410, - "suite": "cloud-sdk", - "version": "537.0.0-0" - }, - "/noble/base-files:amd64=13ubuntu7": { - "architecture": "amd64", - "depends_on": [ - "/noble/libcrypt1:amd64=1:4.4.36-4", - "/noble/libc6:amd64=2.39-0ubuntu2", - "/noble/libgcc-s1:amd64=14-20240221-2.1ubuntu1", - "/noble/gcc-14-base:amd64=14-20240221-2.1ubuntu1", - "/noble/mawk:amd64=1.3.4.20240123-1" - ], - "filename": "pool/main/b/base-files/base-files_13ubuntu7_amd64.deb", - "name": "base-files", - "sha256": "d2fe9680dea0b8f6d6d675eceaf2bf00da8d1b3da1604f0e3b47ee26866feadd", - "size": 74224, - "suite": "noble", - "version": "13ubuntu7" - }, - "/noble/debconf:all=1.5.86": { - "architecture": "all", - "depends_on": [], - "filename": "pool/main/d/debconf/debconf_1.5.86_all.deb", - "name": "debconf", - "sha256": "725da1e474ff8ce916e7954ed262273a02e4f74ee1f6cd342b19ff283617d91b", - "size": 123988, - 
"suite": "noble", - "version": "1.5.86" - }, - "/noble/gcc-14-base:amd64=14-20240221-2.1ubuntu1": { - "architecture": "amd64", - "depends_on": [], - "filename": "pool/main/g/gcc-14/gcc-14-base_14-20240221-2.1ubuntu1_amd64.deb", - "name": "gcc-14-base", - "sha256": "2e1ae2c2ccf2d1b6d09c657af1492a8b7a348e899f9ad25d4925b170571a0887", - "size": 46862, - "suite": "noble", - "version": "14-20240221-2.1ubuntu1" - }, - "/noble/libc6:amd64=2.39-0ubuntu2": { - "architecture": "amd64", - "depends_on": [], - "filename": "pool/main/g/glibc/libc6_2.39-0ubuntu2_amd64.deb", - "name": "libc6", - "sha256": "4bd128b75db38b7e9147c0333908e2c7fbc41631f284360f95118fe1c6c162f3", - "size": 3262066, - "suite": "noble", - "version": "2.39-0ubuntu2" - }, - "/noble/libcrypt1:amd64=1:4.4.36-4": { - "architecture": "amd64", - "depends_on": [], - "filename": "pool/main/libx/libxcrypt/libcrypt1_4.4.36-4_amd64.deb", - "name": "libcrypt1", - "sha256": "51ad101808e6a9d6b9c21bcf0b6f27c8ab34f6af53184fc6305f96770cc3a8d9", - "size": 95284, - "suite": "noble", - "version": "1:4.4.36-4" - }, - "/noble/libgcc-s1:amd64=14-20240221-2.1ubuntu1": { - "architecture": "amd64", - "depends_on": [], - "filename": "pool/main/g/gcc-14/libgcc-s1_14-20240221-2.1ubuntu1_amd64.deb", - "name": "libgcc-s1", - "sha256": "ffc195df7e897aaec468e8f62b08660cc711c7449113102491fdd6baa6901f6d", - "size": 78084, - "suite": "noble", - "version": "14-20240221-2.1ubuntu1" - }, - "/noble/libncurses6:amd64=6.4+20240113-1ubuntu1": { - "architecture": "amd64", - "depends_on": [ - "/noble/libc6:amd64=2.39-0ubuntu2", - "/noble/libgcc-s1:amd64=14-20240221-2.1ubuntu1", - "/noble/gcc-14-base:amd64=14-20240221-2.1ubuntu1", - "/noble/libtinfo6:amd64=6.4+20240113-1ubuntu1" - ], - "filename": "pool/main/n/ncurses/libncurses6_6.4+20240113-1ubuntu1_amd64.deb", - "name": "libncurses6", - "sha256": "b5669082396328597c62e51caeb2ee258015e92bd87f6670acee9f396a30b978", - "size": 111894, - "suite": "noble", - "version": "6.4+20240113-1ubuntu1" - }, - 
"/noble/libtinfo6:amd64=6.4+20240113-1ubuntu1": { - "architecture": "amd64", - "depends_on": [], - "filename": "pool/main/n/ncurses/libtinfo6_6.4+20240113-1ubuntu1_amd64.deb", - "name": "libtinfo6", - "sha256": "80378382ba4f672f8d5579cb953fc43edfe246eb96ee4d453af1ac3d7768c8aa", - "size": 108264, - "suite": "noble", - "version": "6.4+20240113-1ubuntu1" - }, - "/noble/mawk:amd64=1.3.4.20240123-1": { - "architecture": "amd64", - "depends_on": [], - "filename": "pool/main/m/mawk/mawk_1.3.4.20240123-1_amd64.deb", - "name": "mawk", - "sha256": "53512ca310cc01f4a462753a29dd7a1180f2e584941f9d8477c77802b1cff1f8", - "size": 127350, - "suite": "noble", - "version": "1.3.4.20240123-1" - }, - "/noble/ncurses-base:all=6.4+20240113-1ubuntu1": { - "architecture": "all", - "depends_on": [], - "filename": "pool/main/n/ncurses/ncurses-base_6.4+20240113-1ubuntu1_all.deb", - "name": "ncurses-base", - "sha256": "1ea2be0cadf1299e5ed2967269c01e1935ddf5a733a496893b4334994aea2755", - "size": 25426, - "suite": "noble", - "version": "6.4+20240113-1ubuntu1" - }, - "/noble/tzdata:all=2024a-1ubuntu1": { - "architecture": "all", - "depends_on": [ - "/noble/debconf:all=1.5.86" - ], - "filename": "pool/main/t/tzdata/tzdata_2024a-1ubuntu1_all.deb", - "name": "tzdata", - "sha256": "26cdb43f541d5b7d089d2c1cf7d50b4c5e630c79a6d4d6ce34e20dcace4f0d29", - "size": 273120, - "suite": "noble", - "version": "2024a-1ubuntu1" - } - }, - "sources": { - "bookworm": { - "architectures": [ - "amd64", - "arm64", - "i386" - ], - "components": [ - "main" - ], - "types": [ - "deb" - ], - "uris": [ - "https://snapshot.debian.org/archive/debian/20240210T223313Z" - ] - }, - "bookworm-security": { - "architectures": [ - "amd64", - "arm64", - "i386" - ], - "components": [ - "main" - ], - "types": [ - "deb" - ], - "uris": [ - "https://snapshot.debian.org/archive/debian-security/20240210T223313Z" - ] - }, - "bookworm-updates": { - "architectures": [ - "amd64", - "arm64", - "i386" - ], - "components": [ - "main" - ], - "types": 
[ - "deb" - ], - "uris": [ - "https://snapshot.debian.org/archive/debian/20240210T223313Z" - ] - }, - "cloud-sdk": { - "architectures": [ - "amd64" - ], - "components": [ - "main" - ], - "types": [ - "deb" - ], - "uris": [ - "https://packages.cloud.google.com/apt" - ] - }, - "noble": { - "architectures": [ - "amd64" - ], - "components": [ - "main" - ], - "types": [ - "deb" - ], - "uris": [ - "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", - "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z" - ] - }, - "noble-security": { - "architectures": [ - "amd64" - ], - "components": [ - "main" - ], - "types": [ - "deb" - ], - "uris": [ - "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", - "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z" - ] - }, - "noble-updates": { - "architectures": [ - "amd64" - ], - "components": [ - "main" - ], - "types": [ - "deb" - ], - "uris": [ - "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", - "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z" - ] - } - }, - "version": 2 -} \ No newline at end of file diff --git a/apt/extensions.bzl b/apt/extensions.bzl index 3be0a089..5ba68c10 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -93,7 +93,9 @@ def _distroless_extension(mctx): resolved_count = 0 mctx.report_progress("Resolving %s:%s" % (dep_constraint, arch)) - (package, dependencies, unmet_dependencies, warnings) = resolver.resolve_all( + + # TODO: Flattening approach of resolving dependencies has to change. + (package, dependencies, unmet_dependencies, direct_dependencies, warnings) = resolver.resolve_all( name = constraint["name"], version = constraint["version"], arch = arch, @@ -130,6 +132,14 @@ def _distroless_extension(mctx): glock.add_package(dep) glock.add_package_dependency(package, dep) + # Also populate direct dependencies of transitive dependencies + # This is needed because resolver flattens the transitive closure. 
+ # TODO: ditch transitive closure flattening and work around the + # recursive dependencies some other way. + if dep["Package"] in direct_dependencies: + for direct_dep in direct_dependencies[dep["Package"]]: + glock.add_package_direct_dependency(dep, direct_dep) + # Add it to dependency set arch_set = dependency_set["sets"].setdefault(arch, {}) arch_set[lockfile.short_package_key(package)] = package["Version"] @@ -156,6 +166,13 @@ def _distroless_extension(mctx): # Generate a repo per package which will be aliased by hub repo. for (package_key, package) in glock.packages().items(): + filemap = {} + for key in package["direct_depends_on"] + package["depends_on"]: + (suite, name, arch, version) = lockfile.parse_package_key(key) + filemap[name] = repo.filemap( + name = name, + arch = arch, + ) deb_import( name = util.sanitize(package_key), target_name = util.sanitize(package_key), @@ -166,6 +183,8 @@ def _distroless_extension(mctx): sha256 = package["sha256"], mergedusr = False, depends_on = package["depends_on"], + direct_depends_file_map = json.encode(filemap), + direct_depends_on = package["direct_depends_on"], package_name = package["name"], ) diff --git a/apt/private/apt_cursed_symlink.bzl b/apt/private/apt_cursed_symlink.bzl index 4f0d6926..c7a61a77 100644 --- a/apt/private/apt_cursed_symlink.bzl +++ b/apt/private/apt_cursed_symlink.bzl @@ -6,12 +6,13 @@ def _apt_cursed_symlink(ctx): found = None for file in ctx.files.candidates: + print(file.path) if file.path.endswith(candidate_full_path): found = file break if not found: - fail("Failed to find the candidate so library. file an issue.") + fail("Failed to find the candidate so library for {} in {}. 
file an issue.".format(ctx.attr.candidate_path, ctx.attr.candidates)) ctx.actions.symlink( output = ctx.outputs.out, diff --git a/apt/private/apt_deb_repository.bzl b/apt/private/apt_deb_repository.bzl index bbf225a3..bf666d60 100644 --- a/apt/private/apt_deb_repository.bzl +++ b/apt/private/apt_deb_repository.bzl @@ -77,6 +77,71 @@ def _fetch_package_index(mctx, urls, dist, comp, arch, integrity): return ("{}/Packages".format(target_triple), url, integrity) +def _fetch_contents(mctx, urls, dist, comp, arch, integrity): + target_triple = "{dist}/{comp}/{arch}".format(dist = dist, comp = comp, arch = arch) + + # See https://linux.die.net/man/1/xz , https://linux.die.net/man/1/gzip , and https://linux.die.net/man/1/bzip2 + # --keep -> keep the original file (Bazel might be still committing the output to the cache) + # --force -> overwrite the output if it exists + # --decompress -> decompress + # Order of these matter, we want to try the one that is most likely first. + supported_extensions = [ + (".gz", ["gzip", "--decompress", "--keep", "--force"]), + (".xz", ["xz", "--decompress", "--keep", "--force"]), + (".bz2", ["bzip2", "--decompress", "--keep", "--force"]), + ("", ["true"]), + ] + + failed_attempts = [] + + url = None + base_auth = _get_auth(mctx, urls) + for url in urls: + download = None + for (ext, cmd) in supported_extensions: + output = "{}/Contents{}".format(target_triple, ext) + dist_url = "{}/dists/{}/{}/Contents-{}{}".format(url, dist, comp, arch, ext) + auth = {} + if url in base_auth: + auth = {dist_url: base_auth[url]} + download = mctx.download( + url = dist_url, + output = output, + integrity = integrity, + allow_fail = True, + auth = auth, + ) + decompress_r = None + if download.success: + decompress_r = mctx.execute(cmd + [output]) + if decompress_r.return_code == 0: + integrity = download.integrity + break + + failed_attempts.append((dist_url, download, decompress_r)) + + if download.success: + break + + if len(failed_attempts) == 
len(supported_extensions) * len(urls): + attempt_messages = [] + for (failed_url, download, decompress) in failed_attempts: + reason = "unknown" + if not download.success: + reason = "Download failed. See warning above for details." + elif decompress.return_code != 0: + reason = "Decompression failed with non-zero exit code.\n\n{}\n{}".format(decompress.stderr, decompress.stdout) + + attempt_messages.append("""\n*) Failed '{}'\n\n{}""".format(failed_url, reason)) + + fail(""" +** Tried to download {} different package indices and all failed. + +{} + """.format(len(failed_attempts), "\n".join(attempt_messages))) + + return ("{}/Contents".format(target_triple), url, integrity) + def _parse_repository(state, contents, roots, dist): last_key = "" pkg = {} @@ -111,6 +176,17 @@ def _parse_repository(state, contents, roots, dist): last_key = "" pkg = {} +def _parse_contents(state, rcontents, arch): + contents = state.filemap.setdefault(arch, {}) + for line in rcontents.splitlines(): + last_empty_char = line.rfind(" ") + first_empty_char = line.find(" ") + filepath = line[:first_empty_char] + pkgs = line[last_empty_char + 1:].split(",") + for pkg in pkgs: + contents.setdefault(pkg[pkg.find("/") + 1:], []).append(filepath) + state.filemap[arch] = contents + def _add_package(state, package): util.set_dict( state.packages, @@ -161,6 +237,14 @@ def _package_versions(state, name, arch): def _package(state, name, version, arch): return util.get_dict(state.packages, keys = (arch, name, version)) +def _filemap(state, name, arch): + if arch not in state.filemap: + return None + all = state.filemap[arch] + if name not in all: + return None + return state.filemap[arch][name] + def _fetch_and_parse_sources(state): mctx = state.mctx facts = state.facts @@ -176,17 +260,26 @@ def _fetch_and_parse_sources(state): # on misconfigured HTTP servers) urls = [url.rstrip("/") for url in urls] - fact_key = dist + "/" + component + "/" + architecture - fact_value = facts.get(fact_key, "") + 
fact_key = dist + "/" + component + "/" + architecture + "/Packages" - mctx.report_progress("Fetching package index: {}/{} for {}".format(dist, component, architecture)) - (output, url, integrity) = _fetch_package_index(mctx, urls, dist, component, architecture, fact_value) + mctx.report_progress("fetching Package indices: {}/{} for {}".format(dist, component, architecture)) + (output, url, integrity) = _fetch_package_index(mctx, urls, dist, component, architecture, facts.get(fact_key, "")) facts[fact_key] = integrity - mctx.report_progress("Parsing package index: {}/{} for {}".format(dist, component, architecture)) + mctx.report_progress("parsing Package indices: {}/{} for {}".format(dist, component, architecture)) _parse_repository(state, mctx.read(output), urls, dist) + fact_key = dist + "/" + component + "/" + architecture + "/Contents" + + mctx.report_progress("fetching Contents: {}/{} for {}".format(dist, component, architecture)) + (output, url, integrity) = _fetch_contents(mctx, urls, dist, component, architecture, facts.get(fact_key, "")) + + facts[fact_key] = integrity + + mctx.report_progress("parsing Contents: {}/{} for {}".format(dist, component, architecture)) + _parse_contents(state, mctx.read(output), architecture) + def _add_source_if_not_present(state, source): (urls, dist, components, architectures) = source @@ -209,6 +302,7 @@ def _create(mctx, facts): state = struct( mctx = mctx, sources = dict(), + filemap = dict(), packages = dict(), virtual_packages = dict(), facts = facts, @@ -220,6 +314,7 @@ def _create(mctx, facts): package_versions = lambda **kwargs: _package_versions(state, **kwargs), virtual_packages = lambda **kwargs: _virtual_packages(state, **kwargs), package = lambda **kwargs: _package(state, **kwargs), + filemap = lambda **kwargs: _filemap(state, **kwargs), ) deb_repository = struct( diff --git a/apt/private/apt_dep_resolver.bzl b/apt/private/apt_dep_resolver.bzl index 6961a6da..ce36726f 100644 --- 
a/apt/private/apt_dep_resolver.bzl +++ b/apt/private/apt_dep_resolver.bzl @@ -77,34 +77,42 @@ _ITERATION_MAX_ = 2147483646 # certain conditions and package dependency groups. # TODO: Try to simplify it in the future. def _resolve_all(state, name, version, arch, include_transitive = True): - root_package = None unmet_dependencies = [] + root_package = None dependencies = [] + direct_dependencies = {} # state variables already_recursed = {} dependency_group = [] - stack = [(name, version, -1)] - + stack = [(name, version, -1, None)] warnings = [] + path = [] + for i in range(0, _ITERATION_MAX_ + 1): if not len(stack): break if i == _ITERATION_MAX_: fail("resolve_all exhausted") - (name, version, dependency_group_idx) = stack.pop() + (name, version, dependency_group_idx, requested_by) = stack.pop() # If this iteration is part of a dependency group, and the dependency group is already met, then skip this iteration. if dependency_group_idx > -1 and dependency_group[dependency_group_idx][0]: continue + path.append(name) + # TODO: only resolve in specified suites (package, warning) = _resolve_package(state, name, version, arch) if warning: warnings.append(warning) + if package and requested_by and package["Package"] != requested_by and package["Package"] not in already_recursed: + direct_dependencies.setdefault(requested_by, []).append(package) + + # Unmet optional dependency encountered # If this package is not found and is part of a dependency group, then just skip it. if not package and dependency_group_idx > -1: continue @@ -126,6 +134,7 @@ def _resolve_all(state, name, version, arch, include_transitive = True): # If we encountered package before in the transitive closure, skip it if key in already_recursed: + # fail(" -> ".join(path)) continue # Do not add dependency if it's a root package to avoid circular dependency. @@ -154,16 +163,16 @@ def _resolve_all(state, name, version, arch, include_transitive = True): # stack it means we need to push in reverse order. 
for gdep in reversed(dep): # TODO: arch - stack.append((gdep["name"], gdep["version"], new_dependency_group_idx)) + stack.append((gdep["name"], gdep["version"], new_dependency_group_idx, package["Package"])) else: # TODO: arch - stack.append((dep["name"], dep["version"], -1)) + stack.append((dep["name"], dep["version"], -1, package["Package"])) for (met, dep) in dependency_group: if not met: unmet_dependencies.append((dep, None)) - return (root_package, dependencies, unmet_dependencies, warnings) + return (root_package, dependencies, unmet_dependencies, direct_dependencies, warnings) def _create_resolution(repository): state = struct(repository = repository) diff --git a/apt/private/deb_cc_export.bzl b/apt/private/deb_cc_export.bzl index a397711f..b94e10d4 100644 --- a/apt/private/deb_cc_export.bzl +++ b/apt/private/deb_cc_export.bzl @@ -1,24 +1,87 @@ "normalization rules" -# buildifier: disable=function-docstring-args -def deb_cc_export(name, src, outs, **kwargs): - """Private. DO NOT USE.""" - if len(outs) == 0: - native.filegroup(name = name, srcs = [], **kwargs) - return - toolchains = ["@bsd_tar_toolchains//:resolved_toolchain"] - - cmd = """ -$(BSDTAR_BIN) -xf "$<" -C $(RULEDIR) {} \ -""".format( - " ".join(outs), - ) - native.genrule( - name = name, - srcs = [src], - outs = [out.removeprefix("./") for out in outs], - cmd = cmd, - toolchains = toolchains, - output_to_bindir = True, - **kwargs +TAR_TOOLCHAIN_TYPE = "@tar.bzl//tar/toolchain:type" + +def _apt_cursed_symlink(ctx): + bsdtar = ctx.toolchains[TAR_TOOLCHAIN_TYPE] + + for (i, symlink_out) in enumerate(ctx.outputs.symlink_outs): + ctx.actions.symlink( + output = symlink_out, + target_file = ctx.files.symlinks[i], + ) + + for (i, symlink_out) in enumerate(ctx.outputs.self_symlink_outs): + ctx.actions.symlink( + output = symlink_out, + target_file = ctx.outputs.outs[ctx.attr.self_symlink_output_indices[i]], + ) + + if len(ctx.outputs.outs): + fout = ctx.outputs.outs[0] + output = 
fout.path[:fout.path.find(fout.owner.repo_name) + len(fout.owner.repo_name)] + args = ctx.actions.args() + args.add("-xf") + args.add_all(ctx.files.srcs) + args.add("-C") + args.add(output) + args.add_all( + ctx.outputs.outs, + map_each = lambda src: src.short_path[len(src.owner.repo_name) + 4:], + allow_closure = True, + ) + ctx.actions.run( + executable = bsdtar.tarinfo.binary, + inputs = ctx.files.srcs, + outputs = ctx.outputs.outs, + arguments = [args], + mnemonic = "Unpack", + toolchain = TAR_TOOLCHAIN_TYPE, + ) + + return DefaultInfo( + files = depset( + ctx.outputs.outs + + ctx.outputs.symlink_outs + + ctx.outputs.self_symlink_outs, + ), ) + +deb_cc_export = rule( + implementation = _apt_cursed_symlink, + attrs = { + "srcs": attr.label_list(allow_files = True), + "symlinks": attr.label_list(allow_files = True), + "symlink_outs": attr.output_list(), + "self_symlink_outs": attr.output_list(), + "self_symlink_output_indices": attr.int_list(), + "deps": attr.label_list(allow_files = True), + "outs": attr.output_list(), + }, + toolchains = [ + TAR_TOOLCHAIN_TYPE, + ], +) + +# # buildifier: disable=function-docstring-args +# def deb_cc_export(name, src, outs, **kwargs): +# """Private. 
DO NOT USE.""" +# if len(outs) == 0: +# native.filegroup(name = name, srcs = [], **kwargs) +# return +# toolchains = ["@bsd_tar_toolchains//:resolved_toolchain"] + +# cmd = """ +# $(BSDTAR_BIN) -xf "$<" -C $(RULEDIR) {} \ +# """.format( +# " ".join(outs), +# ) +# native.genrule( +# name = name, +# srcs = [src], +# outs = [out.removeprefix("./") for out in outs], +# cmd = cmd, +# toolchains = toolchains, +# output_to_bindir = True, +# **kwargs +# ) diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index b32996ca..26a11fa8 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -1,7 +1,7 @@ "deb_import" load(":lockfile.bzl", "lockfile") -load(":pkgconfig.bzl", "parse_pc") +load(":pkgconfig.bzl", "parse_pc", "process_pcconfig") load(":util.bzl", "util") # BUILD.bazel template @@ -11,6 +11,7 @@ load("@rules_distroless//apt/private:deb_cc_export.bzl", "deb_cc_export") load("@rules_distroless//apt/private:apt_cursed_symlink.bzl", "apt_cursed_symlink") load("@rules_cc//cc/private/rules_impl:cc_import.bzl", "cc_import") load("@rules_cc//cc:cc_library.bzl", "cc_library") +load("@bazel_skylib//rules/directory:directory.bzl", "directory") deb_postfix( name = "data", @@ -35,11 +36,21 @@ filegroup( deb_cc_export( name = "cc_export", - src = glob(["data.tar*"])[0], + srcs = glob(["data.tar*"]), + symlinks = {symlinks}, + symlink_outs = {symlink_outs}, + self_symlink_outs = {self_symlink_outs}, + self_symlink_output_indices = {self_symlink_output_indices}, outs = {outs}, visibility = ["//visibility:public"] ) +directory( + name = "directory", + srcs = {symlink_outs} + {outs} + {self_symlink_outs}, + visibility = ["//visibility:public"] +) + {cc_import_targets} ''' @@ -59,7 +70,19 @@ _CC_LIBRARY_TMPL = """ cc_library( name = "{name}", hdrs = {hdrs}, - strip_include_prefix = "usr/include", + additional_linker_inputs = {additional_linker_inputs}, + strip_include_prefix = "{strip_include_prefix}", + visibility = ["//visibility:public"], +) +""" 
+ +_CC_LIBRARY_LIBC_TMPL = """ +cc_library( + name = "{name}", + hdrs = {hdrs}, + srcs = {srcs}, + includes = {includes}, + additional_compiler_inputs = {additional_compiler_inputs}, visibility = ["//visibility:public"], ) """ @@ -72,76 +95,132 @@ cc_library( ) """ -_APT_CURSED_SYMLINK = """ -apt_cursed_symlink( - name = "{name}_cursed", - own_path = "{own_path}", - candidate_path = "{candidate_path}", - candidates = {candidates}, - out="{out}" -) -""" +def resolve_symlink(target_path, relative_symlink): + # Split paths into components + target_parts = target_path.split("/") + symlink_parts = relative_symlink.split("/") + + # Remove the file name from target path to get the directory + target_dir_parts = target_parts[:-1] + + # Process the relative symlink + result_parts = target_dir_parts[:] + for part in symlink_parts: + if part == "..": + # Move up one directory by removing the last component + if result_parts: + result_parts.pop() + elif part == "." or part == "": + # Ignore current directory or empty components + continue + else: + # Append the component to the path + result_parts.append(part) + + # Join the parts back into a path + resolved_path = "/".join(result_parts) + return resolved_path -def _discover_contents(rctx, depends_on, target_name): - result = rctx.execute(["tar", "-tf", "data.tar.xz"]) +def _discover_contents(rctx, depends_on, direct_depends_on, direct_depends_file_map, target_name): + result = rctx.execute(["tar", "--exclude='./usr/share/**'", "--exclude='./**/'", "-tvf", "data.tar.xz"]) contents_raw = result.stdout.splitlines() so_files = [] a_files = [] h_files = [] hpp_files = [] + hpp_files_woext = [] pc_files = [] + symlinks = {} deps = [] excluded_files = [] - for dep in depends_on: - (suite, name, arch, version) = lockfile.parse_package_key(dep) - if not name.endswith("-dev"): - # TODO: - # This is probably not safe. - # What if a package has a dependency (with a .so file in it) - # but its a not -dev package? 
+ for line in contents_raw: + # Skip directories + if line.endswith("/"): continue - deps.append( - "@%s//:%s" % (util.sanitize(dep), name.removesuffix("-dev")), - ) - for line in contents_raw: + line = line[line.find(" ./") + 3:] + # Skip everything in man pages and examples - if line.startswith("/usr/share"): + if line.startswith("usr/share"): continue - # Skip directories - if line.endswith("/"): - continue + is_symlink_idx = line.find(" -> ") + if is_symlink_idx != -1: + symlink_target = line[is_symlink_idx + 4:] + line = line[:is_symlink_idx] + if line.endswith(".pc"): + continue + symlinks[line] = resolve_symlink(line, symlink_target).removeprefix("./") - if (line.endswith(".so") or line.find(".so.") > 5) and line.find("lib"): + if (line.endswith(".so") or line.find(".so.") != -1) and line.find("lib") != -1: so_files.append(line) elif line.endswith(".a") and line.find("lib"): a_files.append(line) elif line.endswith(".pc") and line.find("pkgconfig"): pc_files.append(line) - elif line.endswith(".h") and line.startswith("./usr/include"): + elif line.endswith(".h"): h_files.append(line) - elif line.endswith(".hpp") and line.startswith("./usr/include"): + elif line.endswith(".hpp"): hpp_files.append(line) + elif line.find("include/c++") != -1: + hpp_files_woext.append(line) + + # Resolve symlinks: + resolved_symlinks = list([None] * len(symlinks)) + symlink_targets = { + v: k + for (k, v) in symlinks.items() + } + osymlinks = symlinks + symlinks = { + k: None + for k in symlinks.keys() + } + solved_symlinks = 0 + for dep in direct_depends_on or depends_on: + (suite, name, arch, version) = lockfile.parse_package_key(dep) + filemap = direct_depends_file_map.get(name, []) or [] + for file in filemap: + if file in symlink_targets: + symlink_path = symlink_targets[file] + symlinks[symlink_path] = "@%s//:%s" % (util.sanitize(dep), file) + solved_symlinks += 1 + if solved_symlinks == len(symlink_targets): + break + + outs = [] + + for out in so_files + h_files + 
hpp_files + a_files + hpp_files_woext: + if out not in symlinks: + outs.append(out) + + self_symlinks = {} + for (i, file) in enumerate(outs): + if file in symlink_targets: + symlink_path = symlink_targets[file] + self_symlinks[symlink_path] = i + solved_symlinks += 1 + if solved_symlinks == len(symlink_targets): + break + + if solved_symlinks < len(symlink_targets): + util.warning(rctx, "some symlinks could not be solved for {}. \n{}".format(target_name, osymlinks)) build_file_content = "" # TODO: handle non symlink pc files similar to how we # handle so symlinks non_symlink_pc_file = None - pc_files_all_symlink = False if len(pc_files): # TODO: use rctx.extract instead. r = rctx.execute( - ["tar", "-xvf", "data.tar.xz"] + pc_files + so_files, + ["tar", "-xvf", "data.tar.xz"] + ["./" + pc for pc in pc_files], ) - pc_files_all_symlink = True for pc in pc_files: if rctx.path(pc).exists: non_symlink_pc_file = pc - pc_files_all_symlink = False break # Package has a pkgconfig, use that as the source of truth. 
@@ -155,7 +234,7 @@ def _discover_contents(rctx, depends_on, target_name): linkopts, includes, defines, - ) = _process_pcconfig(pc) + ) = process_pcconfig(pc) static_lib = None shared_lib = None @@ -163,131 +242,92 @@ def _discover_contents(rctx, depends_on, target_name): # Look for a static archive for ar in a_files: if ar.endswith(libname + ".a"): - static_lib = '":%s"' % ar.removeprefix("./") + static_lib = '":%s"' % ar break # Look for a dynamic library for so_lib in so_files: if so_lib.endswith(libname + ".so"): - lib_path = so_lib.removeprefix("./") + lib_path = so_lib path = rctx.path(lib_path) - - # Check for dangling symlinks and search in the transitive closure - if not path.exists: - candidate_path = rctx.execute(["readlink", path]).stdout.strip() - build_file_content += _APT_CURSED_SYMLINK.format( - name = target_name, - candidates = [ - "@%s//:cc_export" % util.sanitize(dep) - for dep in depends_on - ], - own_path = so_lib, - candidate_path = candidate_path, - out = so_lib.removeprefix("./"), - ) - excluded_files.append(so_lib) - - shared_lib = '":%s"' % so_lib.removeprefix("./") + shared_lib = '":%s"' % so_lib break build_file_content += _CC_IMPORT_TMPL.format( name = target_name, hdrs = [ - ":" + h.removeprefix("./") + ":" + h for h in h_files + hpp_files ], shared_lib = shared_lib, static_lib = static_lib, includes = [ - "external/../" + include + "external/.." + include for include in includes ], linkopts = linkopts, ) - # There were some pc files but they were all symlinks - elif pc_files_all_symlink: - pass - - # Package has no pkgconfig, possibly a cmake based library at the - # standard /usr/include location and that's the only available - # information to turn the package into a cc_library target. 
+ elif (len(hpp_files) or len(h_files)) and ((target_name.find("libc") != -1 or target_name.find("libstdc") != -1 or target_name.find("libgcc") != -1)): + build_file_content += _CC_LIBRARY_LIBC_TMPL.format( + name = target_name, + hdrs = [ + ":" + h + for h in h_files + hpp_files + ], + srcs = [ + # ":" + so + # for so in so_files + ], + additional_compiler_inputs = hpp_files_woext, + includes = [], + ) - elif len(hpp_files): + elif len(hpp_files) or len(h_files): build_file_content += _CC_LIBRARY_TMPL.format( name = target_name, hdrs = [ - ":" + h.removeprefix("./") + ":" + h for h in h_files + hpp_files ], + additional_linker_inputs = [], + strip_include_prefix = "usr/include", ) - # Package has no header files, likely a denominator package like liboost_dev + # Package has no header files, likely a denominator package like liboost-dev # since it has dependencies elif len(depends_on): + deps = [] + for dep in depends_on: + (suite, name, arch, version) = lockfile.parse_package_key(dep) + deps.append( + "@%s//:%s" % (util.sanitize(dep), name.removesuffix("-dev")), + ) + build_file_content += _CC_LIBRARY_DEP_ONLY_TMPL.format( name = target_name, - hdrs = [], deps = deps, ) - - pruned_outs = [] - if pc_files_all_symlink: - pruned_outs = [] + elif len(so_files): + build_file_content += _CC_LIBRARY_TMPL.format( + name = target_name, + additional_linker_inputs = [ + ":" + h + for h in so_files + ], + hdrs = [], + strip_include_prefix = "usr/include", + ) else: - pruned_outs = [ - sf - for sf in so_files - if sf not in excluded_files - ] + h_files + hpp_files + a_files - - return (build_file_content, pruned_outs) - -def _trim(str): - return str.rstrip(" ").lstrip(" ") - -def _process_pcconfig(pc): - (directives, variables) = pc - includedir = _trim(variables["includedir"]) - libdir = _trim(variables["libdir"]) - linkopts = [] - includes = [] - defines = [] - libname = None - if "Libs" in directives: - libs = _trim(directives["Libs"]).split(" ") - for arg in libs: - if 
arg.startswith("-l"): - libname = "lib" + arg.removeprefix("-l") - continue - if arg.startswith("-L"): - continue - linkopts.append(arg) - - # if "Libs.private" in directives: - # libs = _trim(directives["Libs.private"]).split(" ") - # linkopts.extend([arg for arg in libs if arg.startswith("-l")]) - - if "Cflags" in directives: - cflags = _trim(directives["Cflags"]).split(" ") - for flag in cflags: - if flag.startswith("-I"): - include = flag.removeprefix("-I") - includes.append(include) - - # If the include is direct include eg $includedir (/usr/include/hiredis) - # equals to -I/usr/include/hiredis then we need to add /usr/include into - # includes array to satify imports as `#include ` - if include == includedir: - includes.append(include.removesuffix("/" + directives["Name"])) - elif include.startswith(includedir): - includes.append(include.removesuffix("/" + directives["Name"])) - elif flag.startswith("-D"): - define = flag.removeprefix("-D") - defines.append(define) - - return (libname, includedir, libdir, linkopts, includes, defines) + build_file_content += _CC_LIBRARY_TMPL.format( + name = target_name, + additional_linker_inputs = [], + hdrs = [], + strip_include_prefix = "usr/include", + ) + + return (build_file_content, outs, symlinks, self_symlinks) def _deb_import_impl(rctx): rctx.download_and_extract( @@ -296,17 +336,24 @@ def _deb_import_impl(rctx): ) # TODO: only do this if package is -dev or dependent of a -dev pkg. 
- cc_import_targets, so_files = _discover_contents( + cc_import_targets, outs, symlinks, self_symlinks = _discover_contents( rctx, rctx.attr.depends_on, + rctx.attr.direct_depends_on, + json.decode(rctx.attr.direct_depends_file_map), rctx.attr.package_name.removesuffix("-dev"), ) + rctx.file("BUILD.bazel", _DEB_IMPORT_BUILD_TMPL.format( mergedusr = rctx.attr.mergedusr, depends_on = ["@" + util.sanitize(dep_key) for dep_key in rctx.attr.depends_on], target_name = rctx.attr.target_name, cc_import_targets = cc_import_targets, - outs = so_files, + outs = outs, + symlinks = [value for value in symlinks.values() if value], + symlink_outs = [k for (k, v) in symlinks.items() if v], + self_symlink_outs = self_symlinks.keys(), + self_symlink_output_indices = self_symlinks.values(), )) deb_import = repository_rule( @@ -315,6 +362,8 @@ deb_import = repository_rule( "urls": attr.string_list(mandatory = True, allow_empty = False), "sha256": attr.string(), "depends_on": attr.string_list(), + "direct_depends_on": attr.string_list(), + "direct_depends_file_map": attr.string(), "mergedusr": attr.bool(), "target_name": attr.string(), "package_name": attr.string(), diff --git a/apt/private/lockfile.bzl b/apt/private/lockfile.bzl index dba9ef33..cc215abf 100644 --- a/apt/private/lockfile.bzl +++ b/apt/private/lockfile.bzl @@ -36,8 +36,10 @@ def _add_package(lock, package): "sha256": package["SHA256"], "filename": package["Filename"], "suite": package["Dist"], + "section": package["Section"], "size": int(package["Size"]), "depends_on": [], + "direct_depends_on": [], } def _add_package_dependency(lock, package, dependency): @@ -49,6 +51,15 @@ def _add_package_dependency(lock, package, dependency): return lock.packages[k]["depends_on"].append(sk) +def _add_package_direct_dependency(lock, package, dependency): + k = _package_key(package) + if k not in lock.packages: + fail("illegal state: %s is not in the lockfile." 
% package["Package"]) + sk = _package_key(dependency) + if sk in lock.packages[k]["direct_depends_on"]: + return + lock.packages[k]["direct_depends_on"].append(sk) + def _has_package(lock, suite, name, version, arch): return _make_package_key(suite, name, version, arch) in lock.packages @@ -66,6 +77,7 @@ def _create(mctx, lock): add_source = lambda *args, **kwargs: _add_source(lock, *args, **kwargs), add_package = lambda *args, **kwargs: _add_package(lock, *args, **kwargs), add_package_dependency = lambda *args, **kwargs: _add_package_dependency(lock, *args, **kwargs), + add_package_direct_dependency = lambda *args, **kwargs: _add_package_direct_dependency(lock, *args, **kwargs), packages = lambda: lock.packages, sources = lambda: lock.sources, dependency_sets = lambda: lock.dependency_sets, diff --git a/apt/private/pkgconfig.bzl b/apt/private/pkgconfig.bzl index a83881b8..bf6bf399 100644 --- a/apt/private/pkgconfig.bzl +++ b/apt/private/pkgconfig.bzl @@ -20,13 +20,16 @@ def _expand_value(value, variables): assert_in_subs() elif c == "}": assert_in_subs() - value_of_key = variables[key] + if key not in variables: + # fail("corrupted pc file") + value_of_key = "" + else: + value_of_key = variables[key] # reset subs state key = "" in_subs = False - if not value_of_key: - fail("corrupted pc file") + expanded_value += value_of_key elif in_subs: key += c @@ -63,3 +66,52 @@ def _parse_requires(re): return [] deps = re.split(",") return [dep.strip(" ") for dep in deps if dep.strip(" ")] + +def _trim(str): + return str.rstrip(" ").lstrip(" ") + +def process_pcconfig(pc): + (directives, variables) = pc + includedir = "" + libdir = "" + if "includedir" in variables: + includedir = _trim(variables["includedir"]) + if "libdir" in variables: + libdir = _trim(variables["libdir"]) + linkopts = [] + includes = [] + defines = [] + libname = None + if "Libs" in directives: + libs = _trim(directives["Libs"]).split(" ") + for arg in libs: + if arg.startswith("-l"): + libname = "lib" + 
arg.removeprefix("-l") + continue + if arg.startswith("-L"): + continue + linkopts.append(arg) + + # if "Libs.private" in directives: + # libs = _trim(directives["Libs.private"]).split(" ") + # linkopts.extend([arg for arg in libs if arg.startswith("-l")]) + + if "Cflags" in directives: + cflags = _trim(directives["Cflags"]).split(" ") + for flag in cflags: + if flag.startswith("-I"): + include = flag.removeprefix("-I") + includes.append(include) + + # If the include is direct include eg $includedir (/usr/include/hiredis) + # equals to -I/usr/include/hiredis then we need to add /usr/include into + # includes array to satify imports as `#include ` + if include == includedir: + includes.append(include.removesuffix("/" + directives["Name"])) + elif include.startswith(includedir): + includes.append(include.removesuffix("/" + directives["Name"])) + elif flag.startswith("-D"): + define = flag.removeprefix("-D") + defines.append(define) + + return (libname, includedir, libdir, linkopts, includes, defines) From bb1570a6ab8e1495775928697ff1bd0212db1cd3 Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Thu, 23 Oct 2025 15:17:39 -0700 Subject: [PATCH 05/19] lts --- apt/extensions.bzl | 180 +++++++++------- apt/private/apt_dep_resolver.bzl | 35 +-- apt/private/deb_cc_export.bzl | 46 +--- apt/private/deb_import.bzl | 358 +++++++++++++++++++------------ apt/private/lockfile.bzl | 11 - apt/private/pkgconfig.bzl | 47 +++- 6 files changed, 399 insertions(+), 278 deletions(-) diff --git a/apt/extensions.bzl b/apt/extensions.bzl index 5ba68c10..1b368547 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -11,6 +11,8 @@ load("//apt/private:version_constraint.bzl", "version_constraint") # https://wiki.debian.org/SupportedArchitectures ALL_SUPPORTED_ARCHES = ["armel", "armhf", "arm64", "i386", "amd64", "mips64el", "ppc64el", "x390x"] +ITERATION_MAX = 2147483646 + def _parse_source(src): parts = src.split(" ") kind = parts.pop(0) @@ -71,89 +73,119 @@ def 
_distroless_extension(mctx): sources = glock.sources() dependency_sets = glock.dependency_sets() + resolution_queue = [] + already_resolved = {} + for mod in mctx.modules: for install in mod.tags.install: - dependency_set = dependency_sets.setdefault(install.dependency_set, { - "sets": {}, - }) for dep_constraint in install.packages: constraint = version_constraint.parse_dep(dep_constraint) - - architectures = [] - - if constraint["arch"]: - architectures = constraint["arch"] - else: - architectures = ["amd64"] - - for _ in range(len(ALL_SUPPORTED_ARCHES)): - if len(architectures) == 0: - break - arch = architectures.pop() - resolved_count = 0 - - mctx.report_progress("Resolving %s:%s" % (dep_constraint, arch)) - - # TODO: Flattening approach of resolving dependencies has to change. - (package, dependencies, unmet_dependencies, direct_dependencies, warnings) = resolver.resolve_all( + architectures = constraint["arch"] + if not architectures: + # For cases where architecture for the package is not specified we need + # to first find out which source contains the package. in order to do + # that we first need to resolve the package for amd64 architecture. + # Once the repository is found, then resolve the package for all the + # architectures the repository supports. + (package, warning) = resolver.resolve_package( name = constraint["name"], version = constraint["version"], - arch = arch, - include_transitive = install.include_transitive, + arch = "amd64", ) + if warning: + util.warning(mctx, warning) + # If the package is not found then add the package + # to the resolution_queue to let the resolver handle + # the error messages. if not package: - fail( - "\n\nUnable to locate package `%s` for %s. It may only exist for specific set of architectures. \n" % (dep_constraint, arch) + - " 1 - Ensure that the package is available for the specified architecture. \n" + - " 2 - Ensure that the specified version of the package is available for the specified architecture. 
\n" + - " 3 - Ensure that an apt.source_list added for the specified architecture.", - ) - - for warning in warnings: - util.warning(mctx, warning) + resolution_queue.append(( + install.dependency_set, + constraint["name"], + constraint["version"], + "amd64", + )) + continue + + source = sources[package["Dist"]] + architectures = source["architectures"] + + for arch in architectures: + resolution_queue.append(( + install.dependency_set, + constraint["name"], + constraint["version"], + arch, + )) + + for i in range(0, ITERATION_MAX + 1): + if not len(resolution_queue): + break + if i == ITERATION_MAX: + fail("apt.install exhausted, please file a bug") + + (dependency_set_name, name, version, arch) = resolution_queue.pop() + + mctx.report_progress("Resolving %s:%s" % (dep_constraint, arch)) + + # TODO: Flattening approach of resolving dependencies has to change. + (package, dependencies, unmet_dependencies, warnings) = resolver.resolve_all( + name = name, + version = version, + arch = arch, + include_transitive = install.include_transitive, + ) - if len(unmet_dependencies): - util.warning( - mctx, - "Following dependencies could not be resolved for %s: %s" % (constraint["name"], ",".join([up[0] for up in unmet_dependencies])), - ) - - # TODO: - # Ensure following statements are true. - # 1- Package was resolved from a source that module listed explicitly. - # 2- Package resolution was skipped because some other module asked for this package. - # 3- 1) is enforced even if 2) is the case. - glock.add_package(package) - - resolved_count += len(dependencies) + 1 - - for dep in dependencies: - glock.add_package(dep) - glock.add_package_dependency(package, dep) - - # Also populate direct dependencies of transitive dependencies - # This is needed because resolver flattens the transitive closure. - # TODO: ditch transitive closure flattening and work around the - # recursive dependencies some other way. 
- if dep["Package"] in direct_dependencies: - for direct_dep in direct_dependencies[dep["Package"]]: - glock.add_package_direct_dependency(dep, direct_dep) - - # Add it to dependency set - arch_set = dependency_set["sets"].setdefault(arch, {}) - arch_set[lockfile.short_package_key(package)] = package["Version"] + if not package: + fail( + "\n\nUnable to locate package `%s` for %s. It may only exist for specific set of architectures. \n" % (name, arch) + + " 1 - Ensure that the package is available for the specified architecture. \n" + + " 2 - Ensure that the specified version of the package is available for the specified architecture. \n" + + " 3 - Ensure that an apt.source_list added for the specified architecture.", + ) - # For cases where architecture for the package is not specified we need - # to first find out which source contains the package. in order to do - # that we first need to resolve the package for amd64 architecture. - # Once the repository is found, then resolve the package for all the - # architectures the repository supports. - if not constraint["arch"] and arch == "amd64": - source = sources[package["Dist"]] - architectures = [a for a in source["architectures"] if a != "amd64"] + for warning in warnings: + util.warning(mctx, warning) - mctx.report_progress("Resolved %d packages for %s" % (resolved_count, arch)) + if len(unmet_dependencies): + util.warning( + mctx, + "Following dependencies could not be resolved for %s: %s" % ( + name, + ",".join([up[0] for up in unmet_dependencies]), + ), + ) + + # TODO: + # Ensure following statements are true. + # 1- Package was resolved from a source that module listed explicitly. + # 2- Package resolution was skipped because some other module asked for this package. + # 3- 1) is enforced even if 2) is the case. 
+ glock.add_package(package) + + pkg_short_key = lockfile.short_package_key(package) + + already_resolved[pkg_short_key] = True + + for dep in dependencies: + glock.add_package(dep) + dep_key = lockfile.short_package_key(dep) + if dep_key not in already_resolved: + resolution_queue.append(( + None, + dep["Package"], + ("=", dep["Version"]), + arch, + )) + glock.add_package_dependency(package, dep) + + # Add it to dependency set + if dependency_set_name: + dependency_set = dependency_sets.setdefault(dependency_set_name, { + "sets": {}, + }) + arch_set = dependency_set["sets"].setdefault(arch, {}) + arch_set[pkg_short_key] = package["Version"] # Generate a hub repo for every dependency set lock_content = glock.as_json() @@ -167,12 +199,13 @@ def _distroless_extension(mctx): # Generate a repo per package which will be aliased by hub repo. for (package_key, package) in glock.packages().items(): filemap = {} - for key in package["direct_depends_on"] + package["depends_on"]: + for key in package["depends_on"]: (suite, name, arch, version) = lockfile.parse_package_key(key) filemap[name] = repo.filemap( name = name, arch = arch, ) + deb_import( name = util.sanitize(package_key), target_name = util.sanitize(package_key), @@ -183,8 +216,7 @@ def _distroless_extension(mctx): sha256 = package["sha256"], mergedusr = False, depends_on = package["depends_on"], - direct_depends_file_map = json.encode(filemap), - direct_depends_on = package["direct_depends_on"], + depends_file_map = json.encode(filemap), package_name = package["name"], ) diff --git a/apt/private/apt_dep_resolver.bzl b/apt/private/apt_dep_resolver.bzl index ce36726f..2593c77e 100644 --- a/apt/private/apt_dep_resolver.bzl +++ b/apt/private/apt_dep_resolver.bzl @@ -23,6 +23,7 @@ def _resolve_package(state, name, version, arch): return (candidates[0], warning) if len(candidates) > 1: + versions = {} for package in candidates: # Return 'required' packages immediately since it is implicit that # they should exist on a 
default debian install. @@ -35,13 +36,20 @@ def _resolve_package(state, name, version, arch): # In the case of required packages, these defaults are not specified. if "Priority" in package and package["Priority"] == "required": return (package, warning) + versions[package["Version"]] = package - # Otherwise, we can't disambiguate the virtual package providers so - # choose none and warn. - warning = "Multiple candidates for virtual package '{}': {}".format( - name, - ", ".join([package["Package"] for package in candidates]), - ) + sortedversions = version_lib.sort(versions.keys(), reverse = True) + + # First element in the versions list is the latest version. + selected_version = sortedversions[0] + return (versions[selected_version], warning) + + # # Otherwise, we can't disambiguate the virtual package providers so + # # choose none and warn. + # warning = "Multiple candidates for virtual package '{}': {}".format( + # name, + # ", ".join([package["Package"] + "" + package["Version"] for package in candidates]), + # ) # Get available versions of the package versions_by_arch = state.repository.package_versions(name = name, arch = arch) @@ -85,7 +93,7 @@ def _resolve_all(state, name, version, arch, include_transitive = True): # state variables already_recursed = {} dependency_group = [] - stack = [(name, version, -1, None)] + stack = [(name, version, -1)] warnings = [] path = [] @@ -96,7 +104,7 @@ def _resolve_all(state, name, version, arch, include_transitive = True): if i == _ITERATION_MAX_: fail("resolve_all exhausted") - (name, version, dependency_group_idx, requested_by) = stack.pop() + (name, version, dependency_group_idx) = stack.pop() # If this iteration is part of a dependency group, and the dependency group is already met, then skip this iteration. 
if dependency_group_idx > -1 and dependency_group[dependency_group_idx][0]: @@ -109,9 +117,6 @@ def _resolve_all(state, name, version, arch, include_transitive = True): if warning: warnings.append(warning) - if package and requested_by and package["Package"] != requested_by and package["Package"] not in already_recursed: - direct_dependencies.setdefault(requested_by, []).append(package) - # Unmet optional dependency encountered # If this package is not found and is part of a dependency group, then just skip it. if not package and dependency_group_idx > -1: @@ -140,7 +145,7 @@ def _resolve_all(state, name, version, arch, include_transitive = True): # Do not add dependency if it's a root package to avoid circular dependency. if i != 0 and key != root_package["Package"]: # Add it to the dependencies - already_recursed[key] = True + already_recursed[key] = package["Version"] dependencies.append(package) deps = [] @@ -163,16 +168,16 @@ def _resolve_all(state, name, version, arch, include_transitive = True): # stack it means we need to push in reverse order. 
for gdep in reversed(dep): # TODO: arch - stack.append((gdep["name"], gdep["version"], new_dependency_group_idx, package["Package"])) + stack.append((gdep["name"], gdep["version"], new_dependency_group_idx)) else: # TODO: arch - stack.append((dep["name"], dep["version"], -1, package["Package"])) + stack.append((dep["name"], dep["version"], -1)) for (met, dep) in dependency_group: if not met: unmet_dependencies.append((dep, None)) - return (root_package, dependencies, unmet_dependencies, direct_dependencies, warnings) + return (root_package, dependencies, unmet_dependencies, warnings) def _create_resolution(repository): state = struct(repository = repository) diff --git a/apt/private/deb_cc_export.bzl b/apt/private/deb_cc_export.bzl index b94e10d4..9be92bef 100644 --- a/apt/private/deb_cc_export.bzl +++ b/apt/private/deb_cc_export.bzl @@ -5,16 +5,12 @@ TAR_TOOLCHAIN_TYPE = "@tar.bzl//tar/toolchain:type" def _apt_cursed_symlink(ctx): bsdtar = ctx.toolchains[TAR_TOOLCHAIN_TYPE] - for (i, symlink_out) in enumerate(ctx.outputs.symlink_outs): + for (i, target) in ctx.attr.foreign_symlinks.items(): + i = int(i) ctx.actions.symlink( - output = symlink_out, - target_file = ctx.files.symlinks[i], - ) - - for (i, symlink_out) in enumerate(ctx.outputs.self_symlink_outs): - ctx.actions.symlink( - output = symlink_out, - target_file = ctx.outputs.outs[ctx.attr.self_symlink_output_indices[i]], + output = ctx.outputs.symlink_outs[i], + # grossly inefficient + target_file = target[DefaultInfo].files.to_list()[0], ) if len(ctx.outputs.outs): @@ -43,7 +39,7 @@ def _apt_cursed_symlink(ctx): files = depset( ctx.outputs.outs + ctx.outputs.symlink_outs + - ctx.outputs.self_symlink_outs, + ctx.files.foreign_symlinks, ), ) @@ -51,37 +47,13 @@ deb_cc_export = rule( implementation = _apt_cursed_symlink, attrs = { "srcs": attr.label_list(allow_files = True), - "symlinks": attr.label_list(allow_files = True), - "symlink_outs": attr.output_list(), - "self_symlink_outs": attr.output_list(), - 
"self_symlink_output_indices": attr.int_list(), "deps": attr.label_list(allow_files = True), + # mapping of symlink_outs indice to a foreign label + "foreign_symlinks": attr.string_keyed_label_dict(allow_files = True), + "symlink_outs": attr.output_list(), "outs": attr.output_list(), }, toolchains = [ TAR_TOOLCHAIN_TYPE, ], ) - -# # buildifier: disable=function-docstring-args -# def deb_cc_export(name, src, outs, **kwargs): -# """Private. DO NOT USE.""" -# if len(outs) == 0: -# native.filegroup(name = name, srcs = [], **kwargs) -# return -# toolchains = ["@bsd_tar_toolchains//:resolved_toolchain"] - -# cmd = """ -# $(BSDTAR_BIN) -xf "$<" -C $(RULEDIR) {} \ -# """.format( -# " ".join(outs), -# ) -# native.genrule( -# name = name, -# srcs = [src], -# outs = [out.removeprefix("./") for out in outs], -# cmd = cmd, -# toolchains = toolchains, -# output_to_bindir = True, -# **kwargs -# ) diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 26a11fa8..d7461cd5 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -1,7 +1,7 @@ "deb_import" load(":lockfile.bzl", "lockfile") -load(":pkgconfig.bzl", "parse_pc", "process_pcconfig") +load(":pkgconfig.bzl", "pkgconfig") load(":util.bzl", "util") # BUILD.bazel template @@ -37,17 +37,15 @@ filegroup( deb_cc_export( name = "cc_export", srcs = glob(["data.tar*"]), - symlinks = {symlinks}, + foreign_symlinks = {foreign_symlinks}, symlink_outs = {symlink_outs}, - self_symlink_outs = {self_symlink_outs}, - self_symlink_output_indices = {self_symlink_output_indices}, outs = {outs}, visibility = ["//visibility:public"] ) directory( name = "directory", - srcs = {symlink_outs} + {outs} + {self_symlink_outs}, + srcs = {symlink_outs} + {outs}, visibility = ["//visibility:public"] ) @@ -56,42 +54,95 @@ directory( _CC_IMPORT_TMPL = """ cc_import( - name = "{name}", + name = "{name}_imp_", hdrs = {hdrs}, + includes = {includes}, + shared_library = {shared_lib}, + static_library = {static_lib}, +) + 
+cc_library( + name = "{name}", + deps = [":{name}_imp_"], + additional_compiler_inputs = {additional_compiler_inputs}, + additional_linker_inputs = {additional_linker_inputs}, linkopts = {linkopts}, +) +""" + +_CC_IMPORT_SINGLE_TMPL = """ +cc_import( + name = "{name}_import", + hdrs = {hdrs}, includes = {includes}, shared_library = {shared_lib}, static_library = {static_lib}, +) + +cc_library( + name = "{name}_wodeps", + deps = [":{name}_import"], + additional_compiler_inputs = {additional_compiler_inputs}, + additional_linker_inputs = {additional_linker_inputs}, + linkopts = {linkopts}, + visibility = ["//visibility:public"], +) + + +cc_library( + name = "{name}", + deps = [":{name}_wodeps"] + {deps}, visibility = ["//visibility:public"], ) """ -_CC_LIBRARY_TMPL = """ +_CC_IMPORT_DENOMITATOR = """ +cc_library( + name = "{name}_wodeps", + deps = {targets}, + visibility = ["//visibility:public"], +) + cc_library( name = "{name}", - hdrs = {hdrs}, - additional_linker_inputs = {additional_linker_inputs}, - strip_include_prefix = "{strip_include_prefix}", + deps = [":{name}_wodeps"] + {deps}, visibility = ["//visibility:public"], ) """ _CC_LIBRARY_LIBC_TMPL = """ +alias( + name = "{name}_wodeps", + actual = ":{name}", + visibility = ["//visibility:public"] +) + cc_library( name = "{name}", hdrs = {hdrs}, - srcs = {srcs}, - includes = {includes}, additional_compiler_inputs = {additional_compiler_inputs}, + additional_linker_inputs = {additional_linker_inputs}, + includes = {includes}, visibility = ["//visibility:public"], ) """ -_CC_LIBRARY_DEP_ONLY_TMPL = """ +_CC_LIBRARY_TMPL = """ +cc_library( + name = "{name}_wodeps", + hdrs = {hdrs}, + srcs = {srcs}, + linkopts = {linkopts}, + additional_compiler_inputs = {additional_compiler_inputs}, + additional_linker_inputs = {additional_linker_inputs}, + strip_include_prefix = "{strip_include_prefix}", + visibility = ["//visibility:public"], +) + cc_library( name = "{name}", - deps = {deps}, - visibility = 
["//visibility:public"] + deps = [":{name}_wodeps"] + {deps}, + visibility = ["//visibility:public"], ) """ @@ -121,18 +172,18 @@ def resolve_symlink(target_path, relative_symlink): resolved_path = "/".join(result_parts) return resolved_path -def _discover_contents(rctx, depends_on, direct_depends_on, direct_depends_file_map, target_name): +def _discover_contents(rctx, depends_on, depends_file_map, target_name): result = rctx.execute(["tar", "--exclude='./usr/share/**'", "--exclude='./**/'", "-tvf", "data.tar.xz"]) contents_raw = result.stdout.splitlines() + so_files = [] a_files = [] h_files = [] hpp_files = [] hpp_files_woext = [] pc_files = [] + o_files = [] symlinks = {} - deps = [] - excluded_files = [] for line in contents_raw: # Skip directories @@ -146,14 +197,22 @@ def _discover_contents(rctx, depends_on, direct_depends_on, direct_depends_file_ continue is_symlink_idx = line.find(" -> ") + resolved_symlink = None if is_symlink_idx != -1: symlink_target = line[is_symlink_idx + 4:] line = line[:is_symlink_idx] if line.endswith(".pc"): continue - symlinks[line] = resolve_symlink(line, symlink_target).removeprefix("./") + + # An absolute symlink + if symlink_target.startswith("/"): + resolved_symlink = symlink_target.removeprefix("/") + else: + resolved_symlink = resolve_symlink(line, symlink_target).removeprefix("./") if (line.endswith(".so") or line.find(".so.") != -1) and line.find("lib") != -1: + if line.find("libthread_db") != -1: + continue so_files.append(line) elif line.endswith(".a") and line.find("lib"): a_files.append(line) @@ -165,169 +224,201 @@ def _discover_contents(rctx, depends_on, direct_depends_on, direct_depends_file_ hpp_files.append(line) elif line.find("include/c++") != -1: hpp_files_woext.append(line) + elif line.endswith(".o"): + o_files.append(line) + else: + continue + + if resolved_symlink: + symlinks[line] = resolved_symlink # Resolve symlinks: - resolved_symlinks = list([None] * len(symlinks)) - symlink_targets = { - v: k - for 
(k, v) in symlinks.items() - } - osymlinks = symlinks - symlinks = { - k: None - for k in symlinks.keys() - } - solved_symlinks = 0 - for dep in direct_depends_on or depends_on: + unresolved_symlinks = {} | symlinks + + # TODO: this is highly inefficient, change the filemapping to be + # file -> package instead of package -> files + for dep in depends_on: (suite, name, arch, version) = lockfile.parse_package_key(dep) - filemap = direct_depends_file_map.get(name, []) or [] + filemap = depends_file_map.get(name, []) or [] for file in filemap: - if file in symlink_targets: - symlink_path = symlink_targets[file] - symlinks[symlink_path] = "@%s//:%s" % (util.sanitize(dep), file) - solved_symlinks += 1 - if solved_symlinks == len(symlink_targets): + if len(unresolved_symlinks) == 0: + break + for (symlink, symlink_target) in unresolved_symlinks.items(): + if file == symlink_target: + unresolved_symlinks.pop(symlink) + symlinks[symlink] = "@%s//:%s" % (util.sanitize(dep), file) + + for file in so_files + h_files + hpp_files + a_files + hpp_files_woext: + for (symlink, symlink_target) in unresolved_symlinks.items(): + if file == symlink_target: + symlinks.pop(symlink) + unresolved_symlinks.pop(symlink) + if len(unresolved_symlinks) == 0: break + if len(unresolved_symlinks): + util.warning( + rctx, + "some symlinks could not be solved for {}. 
\nresolved: {}\nunresolved:{}".format( + target_name, + json.encode_indent(symlinks), + json.encode_indent(unresolved_symlinks), + ), + ) + outs = [] - for out in so_files + h_files + hpp_files + a_files + hpp_files_woext: + for out in so_files + h_files + hpp_files + a_files + hpp_files_woext + o_files: if out not in symlinks: outs.append(out) - self_symlinks = {} - for (i, file) in enumerate(outs): - if file in symlink_targets: - symlink_path = symlink_targets[file] - self_symlinks[symlink_path] = i - solved_symlinks += 1 - if solved_symlinks == len(symlink_targets): - break - - if solved_symlinks < len(symlink_targets): - util.warning(rctx, "some symlinks could not be solved for {}. \n{}".format(target_name, osymlinks)) - - build_file_content = "" - - # TODO: handle non symlink pc files similar to how we - # handle so symlinks - non_symlink_pc_file = None + deps = [] + for dep in depends_on: + (suite, name, arch, version) = lockfile.parse_package_key(dep) + deps.append( + "@%s//:%s_wodeps" % (util.sanitize(dep), name.removesuffix("-dev")), + ) + r_pc_files = [] if len(pc_files): # TODO: use rctx.extract instead. - r = rctx.execute( + rctx.execute( ["tar", "-xvf", "data.tar.xz"] + ["./" + pc for pc in pc_files], ) for pc in pc_files: if rctx.path(pc).exists: - non_symlink_pc_file = pc - break + r_pc_files.append(pc) + + build_file_content = "" + + rpaths = {} + for so in so_files + a_files: + rpath = so[:so.rfind("/")] + rpaths[rpath] = None # Package has a pkgconfig, use that as the source of truth. 
- if non_symlink_pc_file: - pc = parse_pc(rctx.read(non_symlink_pc_file)) - - ( - libname, - includedir, - libdir, - linkopts, - includes, - defines, - ) = process_pcconfig(pc) + if len(r_pc_files) == 1: + pkgc = pkgconfig(rctx, r_pc_files[0]) static_lib = None shared_lib = None # Look for a static archive for ar in a_files: - if ar.endswith(libname + ".a"): + if ar.endswith(pkgc.libname + ".a"): static_lib = '":%s"' % ar break # Look for a dynamic library for so_lib in so_files: - if so_lib.endswith(libname + ".so"): - lib_path = so_lib - path = rctx.path(lib_path) + if so_lib.endswith(pkgc.libname + ".so"): shared_lib = '":%s"' % so_lib break - build_file_content += _CC_IMPORT_TMPL.format( + build_file_content += _CC_IMPORT_SINGLE_TMPL.format( name = target_name, - hdrs = [ - ":" + h - for h in h_files + hpp_files - ], + hdrs = h_files + hpp_files, + additional_compiler_inputs = hpp_files_woext, + additional_linker_inputs = so_files + o_files + a_files, shared_lib = shared_lib, static_lib = static_lib, includes = [ "external/.." 
+ include - for include in includes - ], - linkopts = linkopts, - ) - - elif (len(hpp_files) or len(h_files)) and ((target_name.find("libc") != -1 or target_name.find("libstdc") != -1 or target_name.find("libgcc") != -1)): - build_file_content += _CC_LIBRARY_LIBC_TMPL.format( - name = target_name, - hdrs = [ - ":" + h - for h in h_files + hpp_files + for include in pkgc.includes ], - srcs = [ - # ":" + so - # for so in so_files + linkopts = pkgc.linkopts + [ + "-Wl,-rpath=/" + rp + for rp in rpaths + ] + [ + "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) + for lp in pkgc.link_paths ], - additional_compiler_inputs = hpp_files_woext, - includes = [], + deps = deps, ) + elif len(r_pc_files) > 1: + targets = [] + for pc_file in r_pc_files: + pkgc = pkgconfig(rctx, pc_file) - elif len(hpp_files) or len(h_files): - build_file_content += _CC_LIBRARY_TMPL.format( - name = target_name, - hdrs = [ - ":" + h - for h in h_files + hpp_files - ], - additional_linker_inputs = [], - strip_include_prefix = "usr/include", - ) + if not pkgc.libname or "_" + pkgc.libname in targets: + continue + + subtarget = "_" + pkgc.libname + + targets.append(subtarget) + + static_lib = None + shared_lib = None - # Package has no header files, likely a denominator package like liboost-dev - # since it has dependencies + # Look for a static archive + for ar in a_files: + if ar.endswith(pkgc.libname + ".a"): + static_lib = '":%s"' % ar + break + + # Look for a dynamic library + for so_lib in so_files: + if so_lib.endswith(pkgc.libname + ".so"): + shared_lib = '":%s"' % so_lib + break - elif len(depends_on): - deps = [] - for dep in depends_on: - (suite, name, arch, version) = lockfile.parse_package_key(dep) - deps.append( - "@%s//:%s" % (util.sanitize(dep), name.removesuffix("-dev")), + build_file_content += _CC_IMPORT_TMPL.format( + name = subtarget, + hdrs = h_files + hpp_files, + additional_compiler_inputs = hpp_files_woext, + additional_linker_inputs = so_files + o_files + a_files, + 
shared_lib = shared_lib, + static_lib = static_lib, + includes = [ + "external/.." + include + for include in pkgc.includes + ], + linkopts = pkgc.linkopts + [ + "-Wl,-rpath=/" + rp + for rp in rpaths + ] + [ + "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) + for lp in pkgc.link_paths + ], + deps = deps, ) - build_file_content += _CC_LIBRARY_DEP_ONLY_TMPL.format( + build_file_content += _CC_IMPORT_DENOMITATOR.format( name = target_name, + targets = targets, deps = deps, ) - elif len(so_files): - build_file_content += _CC_LIBRARY_TMPL.format( + + elif (len(hpp_files) or len(h_files)) and ((target_name.find("libc") != -1 or target_name.find("libstdc") != -1 or target_name.find("libgcc") != -1)): + build_file_content += _CC_LIBRARY_LIBC_TMPL.format( name = target_name, - additional_linker_inputs = [ - ":" + h - for h in so_files - ], - hdrs = [], - strip_include_prefix = "usr/include", + hdrs = h_files + hpp_files, + additional_compiler_inputs = hpp_files_woext, + additional_linker_inputs = so_files + a_files + o_files, + includes = [], ) else: build_file_content += _CC_LIBRARY_TMPL.format( name = target_name, - additional_linker_inputs = [], - hdrs = [], + hdrs = h_files + hpp_files, + deps = deps, + srcs = [], + additional_compiler_inputs = hpp_files_woext, + additional_linker_inputs = so_files + a_files + o_files, + linkopts = [ + "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, rpath) + for rpath in rpaths + ] + [ + "-Wl,-rpath=/" + rp + for rp in rpaths + ] + [ + "-Wl,-rpath-link=$(BINDIR)/external/{}/{}".format(rctx.attr.name, rpath) + for rp in rpaths + ], strip_include_prefix = "usr/include", ) - return (build_file_content, outs, symlinks, self_symlinks) + return (build_file_content, outs, symlinks) def _deb_import_impl(rctx): rctx.download_and_extract( @@ -336,24 +427,24 @@ def _deb_import_impl(rctx): ) # TODO: only do this if package is -dev or dependent of a -dev pkg. 
- cc_import_targets, outs, symlinks, self_symlinks = _discover_contents( + cc_import_targets, outs, symlinks = _discover_contents( rctx, rctx.attr.depends_on, - rctx.attr.direct_depends_on, - json.decode(rctx.attr.direct_depends_file_map), + json.decode(rctx.attr.depends_file_map), rctx.attr.package_name.removesuffix("-dev"), ) rctx.file("BUILD.bazel", _DEB_IMPORT_BUILD_TMPL.format( mergedusr = rctx.attr.mergedusr, - depends_on = ["@" + util.sanitize(dep_key) for dep_key in rctx.attr.depends_on], + depends_on = ["@" + util.sanitize(dep_key) + "//:data" for dep_key in rctx.attr.depends_on], target_name = rctx.attr.target_name, cc_import_targets = cc_import_targets, outs = outs, - symlinks = [value for value in symlinks.values() if value], - symlink_outs = [k for (k, v) in symlinks.items() if v], - self_symlink_outs = self_symlinks.keys(), - self_symlink_output_indices = self_symlinks.values(), + foreign_symlinks = { + str(i): symlink + for (i, symlink) in enumerate(symlinks.values()) + }, + symlink_outs = symlinks.keys(), )) deb_import = repository_rule( @@ -362,8 +453,7 @@ deb_import = repository_rule( "urls": attr.string_list(mandatory = True, allow_empty = False), "sha256": attr.string(), "depends_on": attr.string_list(), - "direct_depends_on": attr.string_list(), - "direct_depends_file_map": attr.string(), + "depends_file_map": attr.string(), "mergedusr": attr.bool(), "target_name": attr.string(), "package_name": attr.string(), diff --git a/apt/private/lockfile.bzl b/apt/private/lockfile.bzl index cc215abf..03b58e8b 100644 --- a/apt/private/lockfile.bzl +++ b/apt/private/lockfile.bzl @@ -39,7 +39,6 @@ def _add_package(lock, package): "section": package["Section"], "size": int(package["Size"]), "depends_on": [], - "direct_depends_on": [], } def _add_package_dependency(lock, package, dependency): @@ -51,15 +50,6 @@ def _add_package_dependency(lock, package, dependency): return lock.packages[k]["depends_on"].append(sk) -def _add_package_direct_dependency(lock, 
package, dependency): - k = _package_key(package) - if k not in lock.packages: - fail("illegal state: %s is not in the lockfile." % package["Package"]) - sk = _package_key(dependency) - if sk in lock.packages[k]["direct_depends_on"]: - return - lock.packages[k]["direct_depends_on"].append(sk) - def _has_package(lock, suite, name, version, arch): return _make_package_key(suite, name, version, arch) in lock.packages @@ -77,7 +67,6 @@ def _create(mctx, lock): add_source = lambda *args, **kwargs: _add_source(lock, *args, **kwargs), add_package = lambda *args, **kwargs: _add_package(lock, *args, **kwargs), add_package_dependency = lambda *args, **kwargs: _add_package_dependency(lock, *args, **kwargs), - add_package_direct_dependency = lambda *args, **kwargs: _add_package_direct_dependency(lock, *args, **kwargs), packages = lambda: lock.packages, sources = lambda: lock.sources, dependency_sets = lambda: lock.dependency_sets, diff --git a/apt/private/pkgconfig.bzl b/apt/private/pkgconfig.bzl index bf6bf399..2c4f0902 100644 --- a/apt/private/pkgconfig.bzl +++ b/apt/private/pkgconfig.bzl @@ -80,21 +80,41 @@ def process_pcconfig(pc): libdir = _trim(variables["libdir"]) linkopts = [] includes = [] + link_paths = [] defines = [] libname = None + + IGNORE = [ + "-licui18n", + "-licuuc", + "-licudata", + "-lz", + "-llzma", + ] + if "Libs" in directives: libs = _trim(directives["Libs"]).split(" ") for arg in libs: - if arg.startswith("-l"): - libname = "lib" + arg.removeprefix("-l") + if arg in IGNORE: continue if arg.startswith("-L"): + link_paths.append(arg.removeprefix("-L")) + linkopts.append("-Wl,-rpath=" + arg.removeprefix("-L")) + continue + elif arg.startswith("-l") and not libname: + libname = "lib" + arg.removeprefix("-l") + continue + if arg == "-licudata": continue linkopts.append(arg) - # if "Libs.private" in directives: - # libs = _trim(directives["Libs.private"]).split(" ") - # linkopts.extend([arg for arg in libs if arg.startswith("-l")]) + if "Libs.private" in 
directives: + libs = _trim(directives["Libs.private"]).split(" ") + for arg in libs: + if arg in IGNORE: + continue + if arg.startswith("-l"): + linkopts.append(arg) if "Cflags" in directives: cflags = _trim(directives["Cflags"]).split(" ") @@ -113,5 +133,18 @@ def process_pcconfig(pc): elif flag.startswith("-D"): define = flag.removeprefix("-D") defines.append(define) - - return (libname, includedir, libdir, linkopts, includes, defines) + return (libname, includedir, libdir, linkopts, link_paths, includes, defines) + +def pkgconfig(rctx, path): + pc = parse_pc(rctx.read(path)) + (libname, includedir, libdir, linkopts, link_paths, includes, defines) = process_pcconfig(pc) + + return struct( + libname = libname, + includedir = includedir, + libdir = libdir, + linkopts = linkopts, + link_paths = link_paths, + includes = includes, + defines = defines, + ) From 5dc1bf39d7b4fc25bdec0c2bfd911f366d76da17 Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Tue, 4 Nov 2025 14:00:01 -0800 Subject: [PATCH 06/19] fix symlinks --- apt/private/{deb_cc_export.bzl => deb_export.bzl} | 12 +++++++----- apt/private/deb_import.bzl | 6 +++--- 2 files changed, 10 insertions(+), 8 deletions(-) rename apt/private/{deb_cc_export.bzl => deb_export.bzl} (81%) diff --git a/apt/private/deb_cc_export.bzl b/apt/private/deb_export.bzl similarity index 81% rename from apt/private/deb_cc_export.bzl rename to apt/private/deb_export.bzl index 9be92bef..e381af49 100644 --- a/apt/private/deb_cc_export.bzl +++ b/apt/private/deb_export.bzl @@ -2,7 +2,7 @@ TAR_TOOLCHAIN_TYPE = "@tar.bzl//tar/toolchain:type" -def _apt_cursed_symlink(ctx): +def _deb_export_impl(ctx): bsdtar = ctx.toolchains[TAR_TOOLCHAIN_TYPE] for (i, target) in ctx.attr.foreign_symlinks.items(): @@ -28,7 +28,10 @@ def _apt_cursed_symlink(ctx): ) ctx.actions.run( executable = bsdtar.tarinfo.binary, - inputs = ctx.files.srcs, + # the archive may contain symlinks that point to symlinks that reference + # files from other packages, therefore 
symlink_outs must be present in the
+        # sandbox for Bazel to successfully track them.
+        inputs = ctx.files.srcs + ctx.outputs.symlink_outs,
         outputs = ctx.outputs.outs,
         arguments = [args],
         mnemonic = "Unpack",
@@ -43,11 +46,10 @@
         ),
     )
 
-deb_cc_export = rule(
-    implementation = _apt_cursed_symlink,
+deb_export = rule(
+    implementation = _deb_export_impl,
     attrs = {
         "srcs": attr.label_list(allow_files = True),
-        "deps": attr.label_list(allow_files = True),
         # mapping of symlink_outs indice to a foreign label
         "foreign_symlinks": attr.string_keyed_label_dict(allow_files = True),
         "symlink_outs": attr.output_list(),
diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl
index d7461cd5..6af7cf1c 100644
--- a/apt/private/deb_import.bzl
+++ b/apt/private/deb_import.bzl
@@ -7,7 +7,7 @@ load(":util.bzl", "util")
 # BUILD.bazel template
 _DEB_IMPORT_BUILD_TMPL = '''
 load("@rules_distroless//apt/private:deb_postfix.bzl", "deb_postfix")
-load("@rules_distroless//apt/private:deb_cc_export.bzl", "deb_cc_export")
+load("@rules_distroless//apt/private:deb_export.bzl", "deb_export")
 load("@rules_distroless//apt/private:apt_cursed_symlink.bzl", "apt_cursed_symlink")
 load("@rules_cc//cc/private/rules_impl:cc_import.bzl", "cc_import")
 load("@rules_cc//cc:cc_library.bzl", "cc_library")
@@ -34,8 +34,8 @@ filegroup(
 )
 
 
-deb_cc_export(
-    name = "cc_export",
+deb_export(
+    name = "export",
     srcs = glob(["data.tar*"]),
     foreign_symlinks = {foreign_symlinks},
     symlink_outs = {symlink_outs},

From 3a54a3163f7e488907a82b0c7201768ecf45789e Mon Sep 17 00:00:00 2001
From: Sahin Yort
Date: Wed, 5 Nov 2025 12:36:58 -0800
Subject: [PATCH 07/19] rm unused

---
 apt/private/apt_cursed_symlink.bzl | 33 ------------------------------
 1 file changed, 33 deletions(-)
 delete mode 100644 apt/private/apt_cursed_symlink.bzl

diff --git a/apt/private/apt_cursed_symlink.bzl b/apt/private/apt_cursed_symlink.bzl
deleted file mode 100644
index c7a61a77..00000000
--- 
a/apt/private/apt_cursed_symlink.bzl +++ /dev/null @@ -1,33 +0,0 @@ -def _apt_cursed_symlink(ctx): - own_path = ctx.attr.own_path.removeprefix(".") - own_dirname = own_path[:own_path.rfind("/") + 1] - candidate_full_path = own_dirname + ctx.attr.candidate_path - - found = None - - for file in ctx.files.candidates: - print(file.path) - if file.path.endswith(candidate_full_path): - found = file - break - - if not found: - fail("Failed to find the candidate so library for {} in {}. file an issue.".format(ctx.attr.candidate_path, ctx.attr.candidates)) - - ctx.actions.symlink( - output = ctx.outputs.out, - target_file = file, - ) - return DefaultInfo( - files = depset([ctx.outputs.out]), - ) - -apt_cursed_symlink = rule( - implementation = _apt_cursed_symlink, - attrs = { - "candidates": attr.label_list(), - "candidate_path": attr.string(), - "own_path": attr.string(), - "out": attr.output(), - }, -) From 4d2ee65636079624a7b6cac466829276b4d6eff9 Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Thu, 13 Nov 2025 17:00:03 -0800 Subject: [PATCH 08/19] fix --- apt/extensions.bzl | 2 +- apt/private/deb_import.bzl | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/apt/extensions.bzl b/apt/extensions.bzl index 1b368547..376ca4b2 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -34,7 +34,7 @@ def _distroless_extension(mctx): root_direct_dev_deps = [] reproducible = False - # As in mach 9 :) + # as-in-mach 9 glock = lockfile.merge(mctx, [ lockfile.from_json(mctx, mctx.read(lock.into)) for mod in mctx.modules diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 6af7cf1c..ac692ff2 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -8,7 +8,6 @@ load(":util.bzl", "util") _DEB_IMPORT_BUILD_TMPL = ''' load("@rules_distroless//apt/private:deb_postfix.bzl", "deb_postfix") load("@rules_distroless//apt/private:deb_export.bzl", "deb_export") -load("@rules_distroless//apt/private:apt_cursed_symlink.bzl", 
"apt_cursed_symlink") load("@rules_cc//cc/private/rules_impl:cc_import.bzl", "cc_import") load("@rules_cc//cc:cc_library.bzl", "cc_library") load("@bazel_skylib//rules/directory:directory.bzl", "directory") @@ -304,10 +303,11 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): shared_lib = None # Look for a static archive - for ar in a_files: - if ar.endswith(pkgc.libname + ".a"): - static_lib = '":%s"' % ar - break + # TODO: static linking is broken for now. + # for ar in a_files: + # if ar.endswith(pkgc.libname + ".a"): + # static_lib = '":%s"' % ar + # break # Look for a dynamic library for so_lib in so_files: From da90ecfe324fbc8f194e96d1acbc2bb937c9662a Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Thu, 20 Nov 2025 10:42:50 -0800 Subject: [PATCH 09/19] lts --- apt/private/deb_import.bzl | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index ac692ff2..49521773 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -9,9 +9,12 @@ _DEB_IMPORT_BUILD_TMPL = ''' load("@rules_distroless//apt/private:deb_postfix.bzl", "deb_postfix") load("@rules_distroless//apt/private:deb_export.bzl", "deb_export") load("@rules_cc//cc/private/rules_impl:cc_import.bzl", "cc_import") +# load("@rules_cc//cc/private:cc_common.bzl", "cc_common") load("@rules_cc//cc:cc_library.bzl", "cc_library") load("@bazel_skylib//rules/directory:directory.bzl", "directory") +# print(cc_common.solib_symlink_action) + deb_postfix( name = "data", srcs = glob(["data.tar*"]), @@ -56,6 +59,7 @@ cc_import( name = "{name}_imp_", hdrs = {hdrs}, includes = {includes}, + deps = {import_deps}, shared_library = {shared_lib}, static_library = {static_lib}, ) @@ -74,6 +78,7 @@ cc_import( name = "{name}_import", hdrs = {hdrs}, includes = {includes}, + deps = {import_deps}, shared_library = {shared_lib}, static_library = {static_lib}, ) @@ -126,11 +131,19 @@ 
cc_library( ) """ + +_CC_SHARED_LIB_TMPL = """ +cc_import( + name = "{name}", + shared_library = "{lib}", +) +""" + _CC_LIBRARY_TMPL = """ cc_library( name = "{name}_wodeps", hdrs = {hdrs}, - srcs = {srcs}, + deps = {import_deps}, linkopts = {linkopts}, additional_compiler_inputs = {additional_compiler_inputs}, additional_linker_inputs = {additional_linker_inputs}, @@ -247,10 +260,12 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): unresolved_symlinks.pop(symlink) symlinks[symlink] = "@%s//:%s" % (util.sanitize(dep), file) + # Resolve self symlinks + self_symlinks = {} for file in so_files + h_files + hpp_files + a_files + hpp_files_woext: for (symlink, symlink_target) in unresolved_symlinks.items(): if file == symlink_target: - symlinks.pop(symlink) + self_symlinks[symlink] = symlinks.pop(symlink) unresolved_symlinks.pop(symlink) if len(unresolved_symlinks) == 0: break @@ -295,6 +310,16 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): rpath = so[:so.rfind("/")] rpaths[rpath] = None + + extra_so_deps = [] + for so_file in so_files: + name = so_file.replace("/", "_S").lstrip() + build_file_content += _CC_SHARED_LIB_TMPL.format( + name = name, + lib = so_file + ) + extra_so_deps.append(name) + # Package has a pkgconfig, use that as the source of truth. 
if len(r_pc_files) == 1: pkgc = pkgconfig(rctx, r_pc_files[0]) @@ -313,6 +338,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): for so_lib in so_files: if so_lib.endswith(pkgc.libname + ".so"): shared_lib = '":%s"' % so_lib + print(shared_lib) break build_file_content += _CC_IMPORT_SINGLE_TMPL.format( @@ -333,6 +359,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) for lp in pkgc.link_paths ], + import_deps = extra_so_deps, deps = deps, ) elif len(r_pc_files) > 1: @@ -380,6 +407,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) for lp in pkgc.link_paths ], + import_deps = [], deps = deps, ) @@ -402,7 +430,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): name = target_name, hdrs = h_files + hpp_files, deps = deps, - srcs = [], + import_deps = extra_so_deps, additional_compiler_inputs = hpp_files_woext, additional_linker_inputs = so_files + a_files + o_files, linkopts = [ From 42bb96ff768cf6898883536439921f87b79ca79f Mon Sep 17 00:00:00 2001 From: thesayyn Date: Thu, 20 Nov 2025 10:50:50 -0800 Subject: [PATCH 10/19] example --- examples/cc_lib/BUILD.bazel | 0 examples/cc_lib/MODULE.bazel | 33 +++++++++++++++++++++++++++++++++ examples/cc_lib/main.c | 0 3 files changed, 33 insertions(+) create mode 100644 examples/cc_lib/BUILD.bazel create mode 100644 examples/cc_lib/MODULE.bazel create mode 100644 examples/cc_lib/main.c diff --git a/examples/cc_lib/BUILD.bazel b/examples/cc_lib/BUILD.bazel new file mode 100644 index 00000000..e69de29b diff --git a/examples/cc_lib/MODULE.bazel b/examples/cc_lib/MODULE.bazel new file mode 100644 index 00000000..785bd725 --- /dev/null +++ b/examples/cc_lib/MODULE.bazel @@ -0,0 +1,33 @@ +bazel_dep(name = "rules_distroless", version = "0.0.0") +local_path_override( + module_name = "rules_distroless", + path = "../..", 
+) + +apt = use_extension("@rules_distroless//apt:extensions.bzl", "apt") +apt.sources_list( + architectures = ["amd64"], + components = ["main"], + suites = [ + "bookworm", + "bookworm-updates", + ], + uris = ["https://snapshot.debian.org/archive/debian/20251001T023456Z"], +) +apt.sources_list( + architectures = ["amd64"], + components = ["main"], + suites = ["bookworm-security"], + uris = ["https://snapshot.debian.org/archive/debian-security/20251001T023456Z"], +) +apt.install( + dependency_set = "bookworm", + packages = [ + "libnl-3-dev", + "libnl-genl-3-dev", + "libnl-nf-3-dev", + "nlohmann-json3-dev", + ], + target_release = "bookworm", +) +use_repo(apt, "bookworm") diff --git a/examples/cc_lib/main.c b/examples/cc_lib/main.c new file mode 100644 index 00000000..e69de29b From 7b9f74ccfddfb6a1fa9df4dc2249a4e9ba0a389b Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Thu, 20 Nov 2025 11:50:18 -0800 Subject: [PATCH 11/19] lts --- apt/private/deb_import.bzl | 4 +-- examples/cc_lib/BUILD.bazel | 12 +++++++++ examples/cc_lib/MODULE.bazel | 9 +++++++ examples/cc_lib/test.c | 51 ++++++++++++++++++++++++++++++++++++ 4 files changed, 73 insertions(+), 3 deletions(-) create mode 100644 examples/cc_lib/test.c diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 49521773..1270cc58 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -131,7 +131,6 @@ cc_library( ) """ - _CC_SHARED_LIB_TMPL = """ cc_import( name = "{name}", @@ -310,13 +309,12 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): rpath = so[:so.rfind("/")] rpaths[rpath] = None - extra_so_deps = [] for so_file in so_files: name = so_file.replace("/", "_S").lstrip() build_file_content += _CC_SHARED_LIB_TMPL.format( name = name, - lib = so_file + lib = so_file, ) extra_so_deps.append(name) diff --git a/examples/cc_lib/BUILD.bazel b/examples/cc_lib/BUILD.bazel index e69de29b..69de1621 100644 --- a/examples/cc_lib/BUILD.bazel +++ 
b/examples/cc_lib/BUILD.bazel @@ -0,0 +1,12 @@ +load("@rules_cc//cc:cc_test.bzl", "cc_test") + +cc_test( + name = "test", + srcs = ["test.c"], + deps = [ + "@bookworm//libnl-3-dev:libnl-3", + "@bookworm//libnl-nf-3-dev:libnl-nf-3", + "@bookworm//libnl-genl-3-dev:libnl-genl-3", + # "@bookworm//nlohmann-json3-dev:nlohmann-json3" + ], +) diff --git a/examples/cc_lib/MODULE.bazel b/examples/cc_lib/MODULE.bazel index 785bd725..d1f2ca3e 100644 --- a/examples/cc_lib/MODULE.bazel +++ b/examples/cc_lib/MODULE.bazel @@ -1,9 +1,18 @@ +bazel_dep(name = "rules_cc", version = "0.2.8") bazel_dep(name = "rules_distroless", version = "0.0.0") local_path_override( module_name = "rules_distroless", path = "../..", ) +bazel_dep(name = "sonic-build-infra", version = "0.0.0") +local_path_override( + module_name = "sonic-build-infra", + path = "../../../sonic-build-infra" +) + +register_toolchains("@sonic-build-infra//toolchains/gcc:host_gcc_toolchain") + apt = use_extension("@rules_distroless//apt:extensions.bzl", "apt") apt.sources_list( architectures = ["amd64"], diff --git a/examples/cc_lib/test.c b/examples/cc_lib/test.c new file mode 100644 index 00000000..186c82de --- /dev/null +++ b/examples/cc_lib/test.c @@ -0,0 +1,51 @@ +#include +#include +#include // libnl-3 +#include // libnl-genl-3 +#include // libnl-genl-3 +#include // libnl-nf-3 +#include + +int main(void) +{ + struct nl_sock *sk_core = NULL; // from libnl-3 + struct nl_sock *sk_nf = NULL; // from libnl-nf-3 + int family_id; + + /* === libnl-3: basic Netlink socket === */ + sk_core = nl_socket_alloc(); + if (!sk_core) { + perror("nl_socket_alloc"); + return EXIT_FAILURE; + } + + if (nl_connect(sk_core, NETLINK_GENERIC) < 0) { + fprintf(stderr, "Failed to connect to Generic Netlink\n"); + goto cleanup; + } + + /* === libnl-genl-3: resolve a generic netlink family === */ + family_id = genl_ctrl_resolve(sk_core, "nl80211"); + if (family_id < 0) { + printf("nl80211 family not found (normal on systems without WiFi): %s\n", + 
nl_geterror(family_id)); + } else { + printf("Found nl80211 family ID = %d\n", family_id); + } + + /* === libnl-nf-3: create a netfilter socket (forces linking against libnl-nf-3) === */ + sk_nf = nfnl_connect(sk_core); // This symbol is ONLY in libnl-nf-3 + if (!sk_nf) { + fprintf(stderr, "nfnl_connect() failed — this proves libnl-nf-3 is linked correctly\n"); + } else { + printf("Successfully created Netfilter netlink socket (libnl-nf-3 is present)\n"); + // No need to actually use it — just having the pointer forces the linker to resolve it + } + + printf("All three libnl libraries (libnl-3, libnl-genl-3, libnl-nf-3) are present and linked!\n"); + +cleanup: + if (sk_core) nl_socket_free(sk_core); + /* sk_nf is just a pointer alias to sk_core in libnl-nf, no need to free twice */ + return EXIT_SUCCESS; +} From 36943fbe97e119b4f845d2588d9f20d978f74723 Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Tue, 25 Nov 2025 12:15:25 -0800 Subject: [PATCH 12/19] lts --- apt/extensions.bzl | 2 + apt/private/deb_import.bzl | 166 ++++++++++---------- apt/private/pkgconfig.bzl | 24 ++- examples/cc_lib/main.c | 0 examples/cc_lib/{ => subfolder}/BUILD.bazel | 1 + examples/cc_lib/subfolder/test.c | 67 ++++++++ examples/cc_lib/test.c | 51 ------ 7 files changed, 178 insertions(+), 133 deletions(-) delete mode 100644 examples/cc_lib/main.c rename examples/cc_lib/{ => subfolder}/BUILD.bazel (89%) create mode 100644 examples/cc_lib/subfolder/test.c delete mode 100644 examples/cc_lib/test.c diff --git a/apt/extensions.bzl b/apt/extensions.bzl index 376ca4b2..c47bf2cd 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -206,6 +206,8 @@ def _distroless_extension(mctx): arch = arch, ) + print(util.sanitize(package_key)) + deb_import( name = util.sanitize(package_key), target_name = util.sanitize(package_key), diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 1270cc58..ab79462c 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ 
-59,7 +59,6 @@ cc_import( name = "{name}_imp_", hdrs = {hdrs}, includes = {includes}, - deps = {import_deps}, shared_library = {shared_lib}, static_library = {static_lib}, ) @@ -67,6 +66,7 @@ cc_import( cc_library( name = "{name}", deps = [":{name}_imp_"], + data = {additional_linker_inputs}, additional_compiler_inputs = {additional_compiler_inputs}, additional_linker_inputs = {additional_linker_inputs}, linkopts = {linkopts}, @@ -78,7 +78,6 @@ cc_import( name = "{name}_import", hdrs = {hdrs}, includes = {includes}, - deps = {import_deps}, shared_library = {shared_lib}, static_library = {static_lib}, ) @@ -86,6 +85,7 @@ cc_import( cc_library( name = "{name}_wodeps", deps = [":{name}_import"], + data = {additional_linker_inputs}, additional_compiler_inputs = {additional_compiler_inputs}, additional_linker_inputs = {additional_linker_inputs}, linkopts = {linkopts}, @@ -142,7 +142,7 @@ _CC_LIBRARY_TMPL = """ cc_library( name = "{name}_wodeps", hdrs = {hdrs}, - deps = {import_deps}, + data = {additional_linker_inputs}, linkopts = {linkopts}, additional_compiler_inputs = {additional_compiler_inputs}, additional_linker_inputs = {additional_linker_inputs}, @@ -309,22 +309,21 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): rpath = so[:so.rfind("/")] rpaths[rpath] = None - extra_so_deps = [] - for so_file in so_files: - name = so_file.replace("/", "_S").lstrip() - build_file_content += _CC_SHARED_LIB_TMPL.format( - name = name, - lib = so_file, - ) - extra_so_deps.append(name) - # Package has a pkgconfig, use that as the source of truth. - if len(r_pc_files) == 1: - pkgc = pkgconfig(rctx, r_pc_files[0]) + if len(r_pc_files): + link_paths = [] + includes = [] + linkopts = [] static_lib = None shared_lib = None + for pc_file in r_pc_files: + pkgc = pkgconfig(rctx, pc_file) + includes += pkgc.includes + linkopts = pkgc.linkopts + link_paths += pkgc.link_paths + # Look for a static archive # TODO: static linking is broken for now. 
# for ar in a_files: @@ -333,87 +332,90 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): # break # Look for a dynamic library - for so_lib in so_files: - if so_lib.endswith(pkgc.libname + ".so"): - shared_lib = '":%s"' % so_lib - print(shared_lib) - break + # for so_lib in so_files: + # if so_lib.endswith(pkgc.libname + ".so"): + # shared_lib = '":%s"' % so_lib + # break build_file_content += _CC_IMPORT_SINGLE_TMPL.format( name = target_name, hdrs = h_files + hpp_files, additional_compiler_inputs = hpp_files_woext, additional_linker_inputs = so_files + o_files + a_files, - shared_lib = shared_lib, + shared_lib = '":%s"' % so_files[0], static_lib = static_lib, - includes = [ - "external/.." + include - for include in pkgc.includes - ], - linkopts = pkgc.linkopts + [ + includes = { + "external/.." + include: True + for include in includes + }.keys(), + linkopts = linkopts + [ "-Wl,-rpath=/" + rp for rp in rpaths + ] + [ + # Needed for cc_test binaries to locate its dependencies. 
+ "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rpath) + for rp in rpaths ] + [ "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) - for lp in pkgc.link_paths + for lp in link_paths ], - import_deps = extra_so_deps, deps = deps, ) - elif len(r_pc_files) > 1: - targets = [] - for pc_file in r_pc_files: - pkgc = pkgconfig(rctx, pc_file) - - if not pkgc.libname or "_" + pkgc.libname in targets: - continue - - subtarget = "_" + pkgc.libname - - targets.append(subtarget) - - static_lib = None - shared_lib = None - # Look for a static archive - for ar in a_files: - if ar.endswith(pkgc.libname + ".a"): - static_lib = '":%s"' % ar - break - - # Look for a dynamic library - for so_lib in so_files: - if so_lib.endswith(pkgc.libname + ".so"): - shared_lib = '":%s"' % so_lib - break - - build_file_content += _CC_IMPORT_TMPL.format( - name = subtarget, - hdrs = h_files + hpp_files, - additional_compiler_inputs = hpp_files_woext, - additional_linker_inputs = so_files + o_files + a_files, - shared_lib = shared_lib, - static_lib = static_lib, - includes = [ - "external/.." 
+ include - for include in pkgc.includes - ], - linkopts = pkgc.linkopts + [ - "-Wl,-rpath=/" + rp - for rp in rpaths - ] + [ - "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) - for lp in pkgc.link_paths - ], - import_deps = [], - deps = deps, - ) - - build_file_content += _CC_IMPORT_DENOMITATOR.format( - name = target_name, - targets = targets, - deps = deps, - ) + # elif len(r_pc_files) > 1: + # targets = [] + # for pc_file in r_pc_files: + # pkgc = pkgconfig(rctx, pc_file) + + # if not pkgc.libname or "_" + pkgc.libname in targets: + # continue + + # subtarget = "_" + pkgc.libname + + # targets.append(subtarget) + + # static_lib = None + # shared_lib = None + + # # Look for a static archive + # for ar in a_files: + # if ar.endswith(pkgc.libname + ".a"): + # static_lib = '":%s"' % ar + # break + + # # Look for a dynamic library + # for so_lib in so_files: + # if so_lib.endswith(pkgc.libname + ".so"): + # shared_lib = '":%s"' % so_lib + # break + + # build_file_content += _CC_IMPORT_TMPL.format( + # name = subtarget, + # hdrs = h_files + hpp_files, + # additional_compiler_inputs = hpp_files_woext, + # additional_linker_inputs = so_files + o_files + a_files, + # shared_lib = shared_lib, + # static_lib = static_lib, + # includes = [ + # "external/.." 
+ include + # for include in pkgc.includes + # ], + # linkopts = pkgc.linkopts + [ + # "-Wl,-rpath=/" + rp + # for rp in rpaths + # ] + [ + # "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) + # for lp in pkgc.link_paths + # ], + # import_deps = [], + # deps = deps, + # ) + + # build_file_content += _CC_IMPORT_DENOMITATOR.format( + # name = target_name, + # targets = targets, + # deps = deps, + # ) elif (len(hpp_files) or len(h_files)) and ((target_name.find("libc") != -1 or target_name.find("libstdc") != -1 or target_name.find("libgcc") != -1)): build_file_content += _CC_LIBRARY_LIBC_TMPL.format( @@ -428,7 +430,6 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): name = target_name, hdrs = h_files + hpp_files, deps = deps, - import_deps = extra_so_deps, additional_compiler_inputs = hpp_files_woext, additional_linker_inputs = so_files + a_files + o_files, linkopts = [ @@ -437,6 +438,9 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): ] + [ "-Wl,-rpath=/" + rp for rp in rpaths + ] + [ + "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rpath) + for rp in rpaths ] + [ "-Wl,-rpath-link=$(BINDIR)/external/{}/{}".format(rctx.attr.name, rpath) for rp in rpaths diff --git a/apt/private/pkgconfig.bzl b/apt/private/pkgconfig.bzl index 2c4f0902..64e4fa2a 100644 --- a/apt/private/pkgconfig.bzl +++ b/apt/private/pkgconfig.bzl @@ -92,13 +92,22 @@ def process_pcconfig(pc): "-llzma", ] + if directives["Name"] == "icu-uc": + print(pc) + if "Libs" in directives: libs = _trim(directives["Libs"]).split(" ") + for arg in libs: if arg in IGNORE: continue if arg.startswith("-L"): - link_paths.append(arg.removeprefix("-L")) + linkpath = arg.removeprefix("-L") + + # skip bare -L args + if not linkpath: + continue + link_paths.append(linkpath) linkopts.append("-Wl,-rpath=" + arg.removeprefix("-L")) continue elif arg.startswith("-l") and not libname: @@ -121,6 +130,10 @@ def process_pcconfig(pc): for flag in cflags: if 
flag.startswith("-I"): include = flag.removeprefix("-I") + + # skip bare -I arguments + if not include: + continue includes.append(include) # If the include is direct include eg $includedir (/usr/include/hiredis) @@ -133,6 +146,15 @@ def process_pcconfig(pc): elif flag.startswith("-D"): define = flag.removeprefix("-D") defines.append(define) + + if len(includes) == 0: + includes = [ + # Standard include path if the package does not specify includes + "/usr/include", + ] + if directives["Name"] == "icu-uc": + print(pc, libname) + return (libname, includedir, libdir, linkopts, link_paths, includes, defines) def pkgconfig(rctx, path): diff --git a/examples/cc_lib/main.c b/examples/cc_lib/main.c deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/cc_lib/BUILD.bazel b/examples/cc_lib/subfolder/BUILD.bazel similarity index 89% rename from examples/cc_lib/BUILD.bazel rename to examples/cc_lib/subfolder/BUILD.bazel index 69de1621..9d6f9ba1 100644 --- a/examples/cc_lib/BUILD.bazel +++ b/examples/cc_lib/subfolder/BUILD.bazel @@ -3,6 +3,7 @@ load("@rules_cc//cc:cc_test.bzl", "cc_test") cc_test( name = "test", srcs = ["test.c"], + copts = ["-Wno-int-conversion"], deps = [ "@bookworm//libnl-3-dev:libnl-3", "@bookworm//libnl-nf-3-dev:libnl-nf-3", diff --git a/examples/cc_lib/subfolder/test.c b/examples/cc_lib/subfolder/test.c new file mode 100644 index 00000000..cf709a6d --- /dev/null +++ b/examples/cc_lib/subfolder/test.c @@ -0,0 +1,67 @@ +#include +#include +#include +// #include // libnl-3 +// #include // libnl-genl-3 +// #include // libnl-genl-3 +// #include // libnl-nf-3 +// #include + +int main(void) +{ + char *buffer; + size_t size = 1024; // Initial buffer size + + // Allocate memory for the buffer + buffer = (char *)malloc(size); + if (buffer == NULL) { + perror("Failed to allocate memory"); + return 1; + } + if (getcwd(buffer, size) != NULL) { + printf("Current working directory: %s\n", buffer); + } else { + perror("Failed to get current working 
directory"); + } + return 1; +// struct nl_sock *sk_core = NULL; // from libnl-3 +// struct nl_sock *sk_nf = NULL; // from libnl-nf-3 +// int family_id; + +// /* === libnl-3: basic Netlink socket === */ +// sk_core = nl_socket_alloc(); +// if (!sk_core) { +// perror("nl_socket_alloc"); +// return EXIT_FAILURE; +// } + +// if (nl_connect(sk_core, NETLINK_GENERIC) < 0) { +// fprintf(stderr, "Failed to connect to Generic Netlink\n"); +// goto cleanup; +// } + +// /* === libnl-genl-3: resolve a generic netlink family === */ +// family_id = genl_ctrl_resolve(sk_core, "nl80211"); +// if (family_id < 0) { +// printf("nl80211 family not found (normal on systems without WiFi): %s\n", +// nl_geterror(family_id)); +// } else { +// printf("Found nl80211 family ID = %d\n", family_id); +// } + +// /* === libnl-nf-3: create a netfilter socket (forces linking against libnl-nf-3) === */ +// sk_nf = nfnl_connect(sk_core); // This symbol is ONLY in libnl-nf-3 +// if (!sk_nf) { +// fprintf(stderr, "nfnl_connect() failed — this proves libnl-nf-3 is linked correctly\n"); +// } else { +// printf("Successfully created Netfilter netlink socket (libnl-nf-3 is present)\n"); +// // No need to actually use it — just having the pointer forces the linker to resolve it +// } + +// printf("All three libnl libraries (libnl-3, libnl-genl-3, libnl-nf-3) are present and linked!\n"); + +// cleanup: +// if (sk_core) nl_socket_free(sk_core); +// /* sk_nf is just a pointer alias to sk_core in libnl-nf, no need to free twice */ +// return EXIT_SUCCESS; +} diff --git a/examples/cc_lib/test.c b/examples/cc_lib/test.c deleted file mode 100644 index 186c82de..00000000 --- a/examples/cc_lib/test.c +++ /dev/null @@ -1,51 +0,0 @@ -#include -#include -#include // libnl-3 -#include // libnl-genl-3 -#include // libnl-genl-3 -#include // libnl-nf-3 -#include - -int main(void) -{ - struct nl_sock *sk_core = NULL; // from libnl-3 - struct nl_sock *sk_nf = NULL; // from libnl-nf-3 - int family_id; - - /* === libnl-3: 
basic Netlink socket === */ - sk_core = nl_socket_alloc(); - if (!sk_core) { - perror("nl_socket_alloc"); - return EXIT_FAILURE; - } - - if (nl_connect(sk_core, NETLINK_GENERIC) < 0) { - fprintf(stderr, "Failed to connect to Generic Netlink\n"); - goto cleanup; - } - - /* === libnl-genl-3: resolve a generic netlink family === */ - family_id = genl_ctrl_resolve(sk_core, "nl80211"); - if (family_id < 0) { - printf("nl80211 family not found (normal on systems without WiFi): %s\n", - nl_geterror(family_id)); - } else { - printf("Found nl80211 family ID = %d\n", family_id); - } - - /* === libnl-nf-3: create a netfilter socket (forces linking against libnl-nf-3) === */ - sk_nf = nfnl_connect(sk_core); // This symbol is ONLY in libnl-nf-3 - if (!sk_nf) { - fprintf(stderr, "nfnl_connect() failed — this proves libnl-nf-3 is linked correctly\n"); - } else { - printf("Successfully created Netfilter netlink socket (libnl-nf-3 is present)\n"); - // No need to actually use it — just having the pointer forces the linker to resolve it - } - - printf("All three libnl libraries (libnl-3, libnl-genl-3, libnl-nf-3) are present and linked!\n"); - -cleanup: - if (sk_core) nl_socket_free(sk_core); - /* sk_nf is just a pointer alias to sk_core in libnl-nf, no need to free twice */ - return EXIT_SUCCESS; -} From 7c5845d826d65765a9d5cf2d5b53b649d9b6d9be Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Wed, 26 Nov 2025 17:46:48 -0800 Subject: [PATCH 13/19] lts --- apt/extensions.bzl | 2 - apt/private/deb_import.bzl | 175 +++++++++++++++---------------------- apt/private/pkgconfig.bzl | 6 -- 3 files changed, 69 insertions(+), 114 deletions(-) diff --git a/apt/extensions.bzl b/apt/extensions.bzl index c47bf2cd..376ca4b2 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -206,8 +206,6 @@ def _distroless_extension(mctx): arch = arch, ) - print(util.sanitize(package_key)) - deb_import( name = util.sanitize(package_key), target_name = util.sanitize(package_key), diff --git 
a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index ab79462c..e1fb8b01 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -9,12 +9,9 @@ _DEB_IMPORT_BUILD_TMPL = ''' load("@rules_distroless//apt/private:deb_postfix.bzl", "deb_postfix") load("@rules_distroless//apt/private:deb_export.bzl", "deb_export") load("@rules_cc//cc/private/rules_impl:cc_import.bzl", "cc_import") -# load("@rules_cc//cc/private:cc_common.bzl", "cc_common") load("@rules_cc//cc:cc_library.bzl", "cc_library") load("@bazel_skylib//rules/directory:directory.bzl", "directory") -# print(cc_common.solib_symlink_action) - deb_postfix( name = "data", srcs = glob(["data.tar*"]), @@ -56,21 +53,13 @@ directory( _CC_IMPORT_TMPL = """ cc_import( - name = "{name}_imp_", + name = "{name}", hdrs = {hdrs}, includes = {includes}, + linkopts = {linkopts}, shared_library = {shared_lib}, static_library = {static_lib}, ) - -cc_library( - name = "{name}", - deps = [":{name}_imp_"], - data = {additional_linker_inputs}, - additional_compiler_inputs = {additional_compiler_inputs}, - additional_linker_inputs = {additional_linker_inputs}, - linkopts = {linkopts}, -) """ _CC_IMPORT_SINGLE_TMPL = """ @@ -143,10 +132,11 @@ cc_library( name = "{name}_wodeps", hdrs = {hdrs}, data = {additional_linker_inputs}, + deps = {direct_deps}, linkopts = {linkopts}, additional_compiler_inputs = {additional_compiler_inputs}, additional_linker_inputs = {additional_linker_inputs}, - strip_include_prefix = "{strip_include_prefix}", + strip_include_prefix = {strip_include_prefix}, visibility = ["//visibility:public"], ) @@ -318,105 +308,68 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): static_lib = None shared_lib = None + import_targets = [] + for pc_file in r_pc_files: pkgc = pkgconfig(rctx, pc_file) includes += pkgc.includes linkopts = pkgc.linkopts link_paths += pkgc.link_paths - # Look for a static archive - # TODO: static linking is broken for now. 
- # for ar in a_files: - # if ar.endswith(pkgc.libname + ".a"): - # static_lib = '":%s"' % ar - # break + if not pkgc.libname or pkgc.libname + "_import" in import_targets: + continue + + subtarget = pkgc.libname + "_import" + import_targets.append(subtarget) + + # Look for a static archive + # for ar in a_files: + # if ar.endswith(pkgc.libname + ".a"): + # static_lib = '":%s"' % ar + # break - # Look for a dynamic library - # for so_lib in so_files: - # if so_lib.endswith(pkgc.libname + ".so"): - # shared_lib = '":%s"' % so_lib - # break + # Look for a dynamic library + for so_lib in so_files: + if pkgc.libname and so_lib.endswith(pkgc.libname + ".so"): + shared_lib = '":%s"' % so_lib + break + + build_file_content += _CC_IMPORT_TMPL.format( + name = subtarget, + shared_lib = shared_lib, + static_lib = static_lib, + hdrs = [], + includes = { + "external/.." + include: True + for include in includes + ["/usr/include", "/usr/include/x86_64-linux-gnu"] + }.keys(), + linkopts = pkgc.linkopts, + ) - build_file_content += _CC_IMPORT_SINGLE_TMPL.format( + build_file_content += _CC_LIBRARY_TMPL.format( name = target_name, hdrs = h_files + hpp_files, additional_compiler_inputs = hpp_files_woext, - additional_linker_inputs = so_files + o_files + a_files, - shared_lib = '":%s"' % so_files[0], - static_lib = static_lib, - includes = { - "external/.." + include: True - for include in includes + additional_linker_inputs = so_files + o_files, + linkopts = { + opt: True + for opt in [ + # Needed for cc_test binaries to locate its dependencies. + "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rpath) + for rp in rpaths + ] + [ + "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) + for lp in link_paths + ] + [ + "-Wl,-rpath=/" + rp + for rp in rpaths + ] }.keys(), - linkopts = linkopts + [ - "-Wl,-rpath=/" + rp - for rp in rpaths - ] + [ - # Needed for cc_test binaries to locate its dependencies. 
- "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rpath) - for rp in rpaths - ] + [ - "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) - for lp in link_paths - ], + direct_deps = import_targets, deps = deps, + strip_include_prefix = None, ) - # elif len(r_pc_files) > 1: - # targets = [] - # for pc_file in r_pc_files: - # pkgc = pkgconfig(rctx, pc_file) - - # if not pkgc.libname or "_" + pkgc.libname in targets: - # continue - - # subtarget = "_" + pkgc.libname - - # targets.append(subtarget) - - # static_lib = None - # shared_lib = None - - # # Look for a static archive - # for ar in a_files: - # if ar.endswith(pkgc.libname + ".a"): - # static_lib = '":%s"' % ar - # break - - # # Look for a dynamic library - # for so_lib in so_files: - # if so_lib.endswith(pkgc.libname + ".so"): - # shared_lib = '":%s"' % so_lib - # break - - # build_file_content += _CC_IMPORT_TMPL.format( - # name = subtarget, - # hdrs = h_files + hpp_files, - # additional_compiler_inputs = hpp_files_woext, - # additional_linker_inputs = so_files + o_files + a_files, - # shared_lib = shared_lib, - # static_lib = static_lib, - # includes = [ - # "external/.." 
+ include - # for include in pkgc.includes - # ], - # linkopts = pkgc.linkopts + [ - # "-Wl,-rpath=/" + rp - # for rp in rpaths - # ] + [ - # "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) - # for lp in pkgc.link_paths - # ], - # import_deps = [], - # deps = deps, - # ) - - # build_file_content += _CC_IMPORT_DENOMITATOR.format( - # name = target_name, - # targets = targets, - # deps = deps, - # ) - elif (len(hpp_files) or len(h_files)) and ((target_name.find("libc") != -1 or target_name.find("libstdc") != -1 or target_name.find("libgcc") != -1)): build_file_content += _CC_LIBRARY_LIBC_TMPL.format( name = target_name, @@ -426,26 +379,36 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): includes = [], ) else: + extra_linkopts = [] + if target_name == "libbsd0": + extra_linkopts = [ + "-Wl,--remap-inputs=/usr/lib/x86_64-linux-gnu/libbsd.so.0.11.7=$(BINDIR)/external/{}/usr/lib/x86_64-linux-gnu/libbsd.so.0.11.7".format(rctx.attr.name), + ] build_file_content += _CC_LIBRARY_TMPL.format( name = target_name, hdrs = h_files + hpp_files, deps = deps, additional_compiler_inputs = hpp_files_woext, - additional_linker_inputs = so_files + a_files + o_files, + additional_linker_inputs = so_files + o_files, linkopts = [ - "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, rpath) - for rpath in rpaths - ] + [ - "-Wl,-rpath=/" + rp + # Required for linker to find .so libraries + "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, rp) for rp in rpaths ] + [ - "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rpath) + # Required for bazel test binary to find its dependencies. + "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rp) for rp in rpaths ] + [ + # Required for ld to validate rpath entries "-Wl,-rpath-link=$(BINDIR)/external/{}/{}".format(rctx.attr.name, rpath) for rp in rpaths - ], - strip_include_prefix = "usr/include", + ] + [ + # Required for containers to find the dependencies at runtime. 
+ "-Wl,-rpath=/" + rp + for rp in rpaths + ] + extra_linkopts, + strip_include_prefix = '"usr/include"', + direct_deps = [], ) return (build_file_content, outs, symlinks) diff --git a/apt/private/pkgconfig.bzl b/apt/private/pkgconfig.bzl index 64e4fa2a..8565220c 100644 --- a/apt/private/pkgconfig.bzl +++ b/apt/private/pkgconfig.bzl @@ -92,12 +92,8 @@ def process_pcconfig(pc): "-llzma", ] - if directives["Name"] == "icu-uc": - print(pc) - if "Libs" in directives: libs = _trim(directives["Libs"]).split(" ") - for arg in libs: if arg in IGNORE: continue @@ -152,8 +148,6 @@ def process_pcconfig(pc): # Standard include path if the package does not specify includes "/usr/include", ] - if directives["Name"] == "icu-uc": - print(pc, libname) return (libname, includedir, libdir, linkopts, link_paths, includes, defines) From ad54d1f9412feb95e45e127bba5dfe45ceeaf3a8 Mon Sep 17 00:00:00 2001 From: Sahin Yort Date: Mon, 1 Dec 2025 15:35:29 -0800 Subject: [PATCH 14/19] so_library --- apt/private/deb_import.bzl | 106 ++++++++++++------------------------- apt/private/pkgconfig.bzl | 7 ++- apt/private/so_library.bzl | 66 +++++++++++++++++++++++ 3 files changed, 102 insertions(+), 77 deletions(-) create mode 100644 apt/private/so_library.bzl diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index e1fb8b01..a78af3ab 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -8,6 +8,7 @@ load(":util.bzl", "util") _DEB_IMPORT_BUILD_TMPL = ''' load("@rules_distroless//apt/private:deb_postfix.bzl", "deb_postfix") load("@rules_distroless//apt/private:deb_export.bzl", "deb_export") +load("@rules_distroless//apt/private:so_library.bzl", "so_library") load("@rules_cc//cc/private/rules_impl:cc_import.bzl", "cc_import") load("@rules_cc//cc:cc_library.bzl", "cc_library") load("@bazel_skylib//rules/directory:directory.bzl", "directory") @@ -51,58 +52,6 @@ directory( {cc_import_targets} ''' -_CC_IMPORT_TMPL = """ -cc_import( - name = "{name}", - hdrs = {hdrs}, 
- includes = {includes}, - linkopts = {linkopts}, - shared_library = {shared_lib}, - static_library = {static_lib}, -) -""" - -_CC_IMPORT_SINGLE_TMPL = """ -cc_import( - name = "{name}_import", - hdrs = {hdrs}, - includes = {includes}, - shared_library = {shared_lib}, - static_library = {static_lib}, -) - -cc_library( - name = "{name}_wodeps", - deps = [":{name}_import"], - data = {additional_linker_inputs}, - additional_compiler_inputs = {additional_compiler_inputs}, - additional_linker_inputs = {additional_linker_inputs}, - linkopts = {linkopts}, - visibility = ["//visibility:public"], -) - - -cc_library( - name = "{name}", - deps = [":{name}_wodeps"] + {deps}, - visibility = ["//visibility:public"], -) -""" - -_CC_IMPORT_DENOMITATOR = """ -cc_library( - name = "{name}_wodeps", - deps = {targets}, - visibility = ["//visibility:public"], -) - -cc_library( - name = "{name}", - deps = [":{name}_wodeps"] + {deps}, - visibility = ["//visibility:public"], -) -""" - _CC_LIBRARY_LIBC_TMPL = """ alias( name = "{name}_wodeps", @@ -120,10 +69,14 @@ cc_library( ) """ -_CC_SHARED_LIB_TMPL = """ +_CC_IMPORT_TMPL = """ cc_import( name = "{name}", - shared_library = "{lib}", + hdrs = {hdrs}, + includes = {includes}, + linkopts = {linkopts}, + shared_library = {shared_lib}, + static_library = {static_lib}, ) """ @@ -131,7 +84,6 @@ _CC_LIBRARY_TMPL = """ cc_library( name = "{name}_wodeps", hdrs = {hdrs}, - data = {additional_linker_inputs}, deps = {direct_deps}, linkopts = {linkopts}, additional_compiler_inputs = {additional_compiler_inputs}, @@ -239,7 +191,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): # TODO: this is highly inefficient, change the filemapping to be # file -> package instead of package -> files for dep in depends_on: - (suite, name, arch, version) = lockfile.parse_package_key(dep) + (suite, name, arch, _) = lockfile.parse_package_key(dep) filemap = depends_file_map.get(name, []) or [] for file in filemap: if len(unresolved_symlinks) 
== 0: @@ -282,7 +234,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): "@%s//:%s_wodeps" % (util.sanitize(dep), name.removesuffix("-dev")), ) - r_pc_files = [] + pkgconfigs = [] if len(pc_files): # TODO: use rctx.extract instead. rctx.execute( @@ -290,9 +242,14 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): ) for pc in pc_files: if rctx.path(pc).exists: - r_pc_files.append(pc) + pkgconfigs.append(pc) - build_file_content = "" + build_file_content = """ +so_library( + name = "_so_libs", + dynamic_libs = {} +) +""".format(so_files) rpaths = {} for so in so_files + a_files: @@ -300,20 +257,18 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): rpaths[rpath] = None # Package has a pkgconfig, use that as the source of truth. - if len(r_pc_files): + if len(pkgconfigs): link_paths = [] includes = [] - linkopts = [] static_lib = None shared_lib = None import_targets = [] - for pc_file in r_pc_files: + for pc_file in pkgconfigs: pkgc = pkgconfig(rctx, pc_file) includes += pkgc.includes - linkopts = pkgc.linkopts link_paths += pkgc.link_paths if not pkgc.libname or pkgc.libname + "_import" in import_targets: @@ -329,8 +284,9 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): # break # Look for a dynamic library + IGNORE = ["libfl"] for so_lib in so_files: - if pkgc.libname and so_lib.endswith(pkgc.libname + ".so"): + if pkgc.libname and pkgc.libname not in IGNORE and so_lib.endswith(pkgc.libname + ".so"): shared_lib = '":%s"' % so_lib break @@ -354,9 +310,13 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): linkopts = { opt: True for opt in [ - # Needed for cc_test binaries to locate its dependencies. - "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rpath) - for rp in rpaths + # # Needed for cc_test binaries to locate its dependencies. 
+ # "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rpath) + # for rp in rpaths + ] + [ + # Needed for cc_test binaries to locate its dependencies as a build tool + # "-Wl,-rpath=./external/{}/{}".format(rctx.attr.name, rpath) + # for rp in rpaths ] + [ "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, lp) for lp in link_paths @@ -365,7 +325,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): for rp in rpaths ] }.keys(), - direct_deps = import_targets, + direct_deps = import_targets + [":_so_libs"], deps = deps, strip_include_prefix = None, ) @@ -395,12 +355,12 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): "-L$(BINDIR)/external/{}/{}".format(rctx.attr.name, rp) for rp in rpaths ] + [ - # Required for bazel test binary to find its dependencies. - "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rp) - for rp in rpaths + # # Required for bazel test binary to find its dependencies. + # "-Wl,-rpath=../{}/{}".format(rctx.attr.name, rp) + # for rp in rpaths ] + [ # Required for ld to validate rpath entries - "-Wl,-rpath-link=$(BINDIR)/external/{}/{}".format(rctx.attr.name, rpath) + "-Wl,-rpath-link=$(BINDIR)/external/{}/{}".format(rctx.attr.name, rp) for rp in rpaths ] + [ # Required for containers to find the dependencies at runtime. 
@@ -408,7 +368,7 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): for rp in rpaths ] + extra_linkopts, strip_include_prefix = '"usr/include"', - direct_deps = [], + direct_deps = [":_so_libs"], ) return (build_file_content, outs, symlinks) diff --git a/apt/private/pkgconfig.bzl b/apt/private/pkgconfig.bzl index 8565220c..bd9e80f6 100644 --- a/apt/private/pkgconfig.bzl +++ b/apt/private/pkgconfig.bzl @@ -90,13 +90,12 @@ def process_pcconfig(pc): "-licudata", "-lz", "-llzma", + "-lfl", ] if "Libs" in directives: libs = _trim(directives["Libs"]).split(" ") for arg in libs: - if arg in IGNORE: - continue if arg.startswith("-L"): linkpath = arg.removeprefix("-L") @@ -109,7 +108,7 @@ def process_pcconfig(pc): elif arg.startswith("-l") and not libname: libname = "lib" + arg.removeprefix("-l") continue - if arg == "-licudata": + elif arg in IGNORE: continue linkopts.append(arg) @@ -118,7 +117,7 @@ def process_pcconfig(pc): for arg in libs: if arg in IGNORE: continue - if arg.startswith("-l"): + elif arg.startswith("-l"): linkopts.append(arg) if "Cflags" in directives: diff --git a/apt/private/so_library.bzl b/apt/private/so_library.bzl new file mode 100644 index 00000000..49c1a4ea --- /dev/null +++ b/apt/private/so_library.bzl @@ -0,0 +1,66 @@ +load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cpp_toolchain", "use_cc_toolchain") + +def _so_library_impl(ctx): + cc_toolchain = find_cpp_toolchain(ctx) + + feature_configuration = cc_common.configure_features( + ctx = ctx, + cc_toolchain = cc_toolchain, + language = "c++", + requested_features = ctx.features, + unsupported_features = ctx.disabled_features, + ) + + libraries = [] + + ifsos = {} + + for dyn_lib in ctx.files.dynamic_libs: + if dyn_lib.owner.package != ctx.label.package: + fail(".so libraries must reside in current package. 
%s != %s" % (dyn_lib.owner.package, ctx.label.package)) + short_path = dyn_lib.short_path + repo_relative_path = short_path[short_path.find(dyn_lib.owner.repo_name) + len(dyn_lib.owner.repo_name) + 1:] + ifso_name = repo_relative_path[:repo_relative_path.rfind("/")] + if ifso_name in ifsos: + ifso = ifsos[ifso_name] + else: + # TODO: this potentially wasterful, symlink all so libraries into a directory + # and create one ifso in the folder. + ifso = ctx.actions.declare_file(ifso_name + "/rpath.ifso") + ifsos[ifso_name] = ifso + ctx.actions.write(ifso, content = """ + /* GNU LD script + * Empty linker script for empty interface library */ + """) + lib = cc_common.create_library_to_link( + actions = ctx.actions, + cc_toolchain = cc_toolchain, + interface_library = ifso, + dynamic_library = dyn_lib, + feature_configuration = feature_configuration, + ) + libraries.append(lib) + + linker_input = cc_common.create_linker_input( + owner = ctx.label, + libraries = depset(libraries), + additional_inputs = depset([]), + user_link_flags = depset([]), + ) + + linking_context = cc_common.create_linking_context( + linker_inputs = depset([linker_input]), + ) + + return [ + CcInfo(linking_context = linking_context), + ] + +so_library = rule( + implementation = _so_library_impl, + attrs = { + "dynamic_libs": attr.label_list(allow_files = True), + }, + fragments = ["cpp"], + toolchains = use_cc_toolchain(), +) From 5d1c4e8e24bfb7362fd19813e554063e32da9c9d Mon Sep 17 00:00:00 2001 From: thesayyn Date: Mon, 12 Jan 2026 16:31:33 -0800 Subject: [PATCH 15/19] make it green --- .bazelignore | 1 + MODULE.bazel | 127 +----------------------- apt/BUILD.bazel | 19 +--- apt/apt.bzl | 140 --------------------------- apt/extensions.bzl | 4 +- apt/private/BUILD.bazel | 39 +++----- apt/private/apt_deb_repository.bzl | 2 +- apt/private/deb_export.bzl | 11 ++- apt/private/deb_import.bzl | 2 +- apt/tests/resolution/BUILD.bazel | 76 +-------------- apt/tests/resolution_test.bzl | 14 +-- 
examples/.bazelignore | 2 + examples/.bazelversion | 1 + examples/MODULE.bazel | 134 +++++++++++++++++++++++++ examples/WORKSPACE | 1 + examples/cacerts/BUILD.bazel | 4 +- examples/debian_snapshot/BUILD.bazel | 15 +-- examples/flatten/BUILD.bazel | 37 ++++--- examples/group/BUILD.bazel | 4 +- examples/home/BUILD.bazel | 4 +- examples/java_keystore/BUILD.bazel | 4 +- examples/locale/BUILD.bazel | 4 +- examples/os_release/BUILD.bazel | 4 +- examples/passwd/BUILD.bazel | 4 +- examples/statusd/BUILD.bazel | 4 +- examples/ubuntu_snapshot/BUILD.bazel | 102 ++----------------- 26 files changed, 225 insertions(+), 534 deletions(-) delete mode 100644 apt/apt.bzl create mode 100644 examples/.bazelignore create mode 100644 examples/.bazelversion create mode 100644 examples/MODULE.bazel create mode 100644 examples/WORKSPACE diff --git a/.bazelignore b/.bazelignore index 3bf32c9d..06d0e0f9 100644 --- a/.bazelignore +++ b/.bazelignore @@ -1,3 +1,4 @@ # nested modules docs/ e2e/ +examples/ diff --git a/MODULE.bazel b/MODULE.bazel index c64b555c..e89ff8e9 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -1,16 +1,13 @@ -"Bazel dependencies" - module( name = "rules_distroless", compatibility_level = 1, ) -bazel_dep(name = "tar.bzl", version = "0.5.6") -bazel_dep(name = "rules_cc", version = "0.2.8") -bazel_dep(name = "platforms", version = "0.0.10") +bazel_dep(name = "aspect_bazel_lib", version = "2.14.0") bazel_dep(name = "bazel_features", version = "1.20.0") bazel_dep(name = "bazel_skylib", version = "1.5.0") -bazel_dep(name = "aspect_bazel_lib", version = "2.14.0") +bazel_dep(name = "platforms", version = "0.0.10") +bazel_dep(name = "rules_cc", version = "0.2.8") bazel_dep(name = "rules_java", version = "8.8.0") bazel_dep(name = "rules_shell", version = "0.4.1") @@ -22,121 +19,3 @@ use_repo(bazel_lib_toolchains, "bsd_tar_toolchains") bazel_dep(name = "gazelle", version = "0.34.0", dev_dependency = True, repo_name = "bazel_gazelle") bazel_dep(name = "bazel_skylib_gazelle_plugin", 
version = "1.5.0", dev_dependency = True) bazel_dep(name = "buildifier_prebuilt", version = "8.0.1", dev_dependency = True) -bazel_dep(name = "rules_oci", version = "2.0.0", dev_dependency = True) -bazel_dep(name = "container_structure_test", version = "1.16.0", dev_dependency = True) - -http_archive = use_repo_rule("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") - -http_archive( - name = "example-bullseye-ca-certificates", - build_file_content = 'exports_files(["data.tar.xz", "control.tar.xz"])', - sha256 = "b2d488ad4d8d8adb3ba319fc9cb2cf9909fc42cb82ad239a26c570a2e749c389", - urls = ["https://snapshot.debian.org/archive/debian/20231106T210201Z/pool/main/c/ca-certificates/ca-certificates_20210119_all.deb"], -) - -http_archive( - name = "example-bullseye-libc-bin", - build_file_content = 'exports_files(["data.tar.xz"])', - sha256 = "8b048ab5c7e9f5b7444655541230e689631fd9855c384e8c4a802586d9bbc65a", - urls = ["https://snapshot.debian.org/archive/debian-security/20231106T230332Z/pool/updates/main/g/glibc/libc-bin_2.31-13+deb11u7_amd64.deb"], -) - -http_archive( - name = "example-bookworm-libc-bin", - build_file_content = 'exports_files(["data.tar.xz"])', - sha256 = "38c44247c5b3e864d6db2877edd9c9a0555fc4e23ae271b73d7f527802616df5", - urls = ["https://snapshot.debian.org/archive/debian-security/20231106T230332Z/pool/updates/main/g/glibc/libc-bin_2.36-9+deb12u3_armhf.deb"], -) - -apt = use_extension( - "@rules_distroless//apt:extensions.bzl", - "apt", - dev_dependency = True, -) -apt.sources_list( - architectures = ["amd64"], - components = ["main"], - suites = [ - "noble", - "noble-security", - "noble-updates", - ], - types = ["deb"], - uris = [ - "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", - "mirror+https://snapshot.ubuntu.com/ubuntu/20240301T030400Z", - ], - # TODO: signed_by -) -apt.sources_list( - architectures = ["amd64"], - components = ["main"], - suites = ["cloud-sdk"], - types = ["deb"], - uris = 
["https://packages.cloud.google.com/apt"], -) -apt.sources_list( - architectures = ["amd64"], - components = ["main"], - suites = ["cloud-sdk"], - types = ["deb"], - uris = ["https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/"], -) -apt.sources_list( - architectures = [ - "amd64", - "arm64", - "i386", - ], - components = ["main"], - suites = [ - "bookworm", - "bookworm-updates", - ], - types = ["deb"], - uris = ["https://snapshot.debian.org/archive/debian/20240210T223313Z"], -) -apt.sources_list( - architectures = [ - "amd64", - "arm64", - "i386", - ], - components = ["main"], - suites = ["bookworm-security"], - types = ["deb"], - uris = ["https://snapshot.debian.org/archive/debian-security/20240210T223313Z"], -) -apt.install( - # dependency_set isolates these installs into their own scope. - # installing onto default dependency_set only allowed in the - # root module - dependency_set = "bookworm", - packages = [ - "base-files", - "coreutils:arm64", - "google-cloud-cli:amd64", - "libncurses6", - "libstdc++6:i386", - "ncurses-base", - "tzdata", - ], - suites = [ - "bookworm", - "bookworm-updates", - "bookworm-security", - "cloud-sdk", - ], -) -use_repo(apt, "bookworm") - -# TODO: support this style of source_list -# -# apt.sources_list( -# sources = [ -# "deb [arch=amd64,arm64] https://snapshot.ubuntu.com/ubuntu/20240301T030400Z noble main", -# "deb [arch=amd64,arm64] https://snapshot.ubuntu.com/ubuntu/20240301T030400Z noble-security main", -# "deb [arch=amd64,arm64] https://snapshot.ubuntu.com/ubuntu/20240301T030400Z noble-updates main", -# ], -# ) -# diff --git a/apt/BUILD.bazel b/apt/BUILD.bazel index 9e3a6094..7729ac8f 100644 --- a/apt/BUILD.bazel +++ b/apt/BUILD.bazel @@ -1,7 +1,6 @@ load("@bazel_skylib//:bzl_library.bzl", "bzl_library") exports_files([ - "apt.bzl", "extensions.bzl", ]) @@ -15,25 +14,17 @@ bzl_library( ], ) -bzl_library( - name = "apt", - srcs = ["apt.bzl"], - visibility = ["//visibility:public"], - deps = [ - 
"//apt/private:deb_resolve", - "//apt/private:deb_translate_lock", - ], -) - bzl_library( name = "extensions", srcs = ["extensions.bzl"], visibility = ["//visibility:public"], deps = [ + "//apt/private:apt_deb_repository", + "//apt/private:apt_dep_resolver", "//apt/private:deb_import", - "//apt/private:deb_resolve", - "//apt/private:deb_translate_lock", "//apt/private:lockfile", - "@bazel_features//:features", + "//apt/private:translate_dependency_set", + "//apt/private:util", + "//apt/private:version_constraint", ], ) diff --git a/apt/apt.bzl b/apt/apt.bzl deleted file mode 100644 index 402ecf34..00000000 --- a/apt/apt.bzl +++ /dev/null @@ -1,140 +0,0 @@ -""" -`apt.install` macro - -This documentation provides an overview of the convenience `apt.install` -repository macro to create Debian repositories with packages "installed" in -them and available to use in Bazel. -""" - -load("//apt/private:deb_resolve.bzl", _deb_resolve = "deb_resolve") -load("//apt/private:deb_translate_lock.bzl", _deb_translate_lock = "deb_translate_lock") - -def _apt_install( - name, - manifest, - lock = None, - nolock = False, - package_template = None, - resolve_transitive = True): - """Repository macro to create Debian repositories. - - > [!WARNING] - > THIS IS A LEGACY MACRO. Use it only if you are still using `WORKSPACE`. - > Otherwise please use the [`apt` module extension](apt.md). - - Here's an example to create a Debian repo with `apt.install`: - - ```starlark - # WORKSPACE - - load("@rules_distroless//apt:apt.bzl", "apt") - - apt.install( - name = "bullseye", - # lock = "//examples/apt:bullseye.lock.json", - manifest = "//examples/apt:bullseye.yaml", - ) - - load("@bullseye//:packages.bzl", "bullseye_packages") - bullseye_packages() - ``` - - Note that, for the initial setup (or if we want to run without a lock) the - lockfile attribute can be omitted. 
All you need is a YAML - [manifest](/examples/debian_snapshot/bullseye.yaml): - ```yaml - version: 1 - - sources: - - channel: bullseye main - url: https://snapshot-cloudflare.debian.org/archive/debian/20240210T223313Z - - archs: - - amd64 - - packages: - - perl - ``` - - `apt.install` will parse the manifest and will fetch and install the - packages for the given architectures in the Bazel repo `@`. - - Each `/` has two targets that match the usual structure of a - Debian package: `data` and `control`. - - You can use the package like so: `@///:`. - - E.g. for the previous example, you could use `@bullseye//perl/amd64:data`. - - ### Lockfiles - - As mentioned, the macro can be used without a lock because the lock will be - generated internally on-demand. However, this comes with the cost of - performing a new package resolution on repository cache misses. - - The lockfile can be generated by running `bazel run @bullseye//:lock`. This - will generate a `.lock.json` file of the same name and in the same path as - the YAML `manifest` file. - - If you explicitly want to run without a lock and avoid the warning messages - set the `nolock` argument to `True`. - - ### Best Practice: use snapshot archive URLs - - While we strongly encourage users to check in the generated lockfile, it's - not always possible because Debian repositories are rolling by default. - Therefore, a lockfile generated today might not work later if the upstream - repository removes or publishes a new version of a package. - - To avoid this problems and increase the reproducibility it's recommended to - avoid using normal Debian mirrors and use snapshot archives instead. - - Snapshot archives provide a way to access Debian package mirrors at a point - in time. Basically, it's a "wayback machine" that allows access to (almost) - all past and current packages based on dates and version numbers. 
- - Debian has had snapshot archives for [10+ - years](https://lists.debian.org/debian-announce/2010/msg00002.html). Ubuntu - began providing a similar service recently and has packages available since - March 1st 2023. - - To use this services simply use a snapshot URL in the manifest. Here's two - examples showing how to do this for Debian and Ubuntu: - * [/examples/debian_snapshot](/examples/debian_snapshot) - * [/examples/ubuntu_snapshot](/examples/ubuntu_snapshot) - - For more infomation, please check https://snapshot.debian.org and/or - https://snapshot.ubuntu.com. - - Args: - name: name of the repository - manifest: label to a `manifest.yaml` - lock: label to a `lock.json` - nolock: bool, set to True if you explicitly want to run without a lock - and avoid the DEBUG messages. - package_template: (EXPERIMENTAL!) a template file for generated BUILD - files. Available template replacement keys are: - `{target_name}`, `{deps}`, `{urls}`, `{name}`, - `{arch}`, `{sha256}`, `{repo_name}` - resolve_transitive: whether dependencies of dependencies should be - resolved and added to the lockfile. - """ - _deb_resolve( - name = name + "_resolve", - manifest = manifest, - resolve_transitive = resolve_transitive, - ) - - if not lock and not nolock: - # buildifier: disable=print - print("\nNo lockfile was given, please run `bazel run @%s//:lock` to create the lockfile." % name) - - _deb_translate_lock( - name = name, - lock = lock if lock else "@" + name + "_resolve//:lock.json", - package_template = package_template, - ) - -apt = struct( - install = _apt_install, -) diff --git a/apt/extensions.bzl b/apt/extensions.bzl index 376ca4b2..4c74b5ae 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -259,7 +259,7 @@ apt.sources_list( apt.install( # dependency set isolates these installs into their own scope. 
dependency_set = "noble", - target_release = "noble", + suites = ["noble", "noble-security", "noble-updates"], packages = [ "ncurses-base", "libncurses6", @@ -342,7 +342,7 @@ install = tag_class( allow_empty = False, ), "dependency_set": attr.string(), - "target_release": attr.string(mandatory = True), + "suites": attr.string_list(), "include_transitive": attr.bool(default = True), }, ) diff --git a/apt/private/BUILD.bazel b/apt/private/BUILD.bazel index 3d745bd7..5dcd0b26 100644 --- a/apt/private/BUILD.bazel +++ b/apt/private/BUILD.bazel @@ -21,20 +21,6 @@ bzl_library( deps = ["//distroless/private:tar"], ) -bzl_library( - name = "deb_translate_lock", - srcs = ["deb_translate_lock.bzl"], - visibility = ["//apt:__subpackages__"], - deps = [ - ":lockfile", - ":starlark_codegen_utils", - "@bazel_skylib//lib:new_sets", - "@bazel_tools//tools/build_defs/repo:cache.bzl", - "@bazel_tools//tools/build_defs/repo:http.bzl", - "@bazel_tools//tools/build_defs/repo:utils.bzl", - ], -) - bzl_library( name = "lockfile", srcs = ["lockfile.bzl"], @@ -62,18 +48,6 @@ bzl_library( ], ) -bzl_library( - name = "deb_resolve", - srcs = ["deb_resolve.bzl"], - visibility = ["//apt:__subpackages__"], - deps = [ - ":apt_deb_repository", - ":apt_dep_resolver", - ":lockfile", - "@aspect_bazel_lib//lib:repo_utils", - ], -) - bzl_library( name = "version", srcs = ["version.bzl"], @@ -106,3 +80,16 @@ bzl_library( srcs = ["util.bzl"], visibility = ["//apt:__subpackages__"], ) + +bzl_library( + name = "translate_dependency_set", + srcs = ["translate_dependency_set.bzl"], + visibility = ["//apt:__subpackages__"], + deps = [ + ":lockfile", + ":starlark_codegen_utils", + ":util", + "//apt:defs", + "//distroless:defs", + ], +) diff --git a/apt/private/apt_deb_repository.bzl b/apt/private/apt_deb_repository.bzl index bf666d60..fadaf35d 100644 --- a/apt/private/apt_deb_repository.bzl +++ b/apt/private/apt_deb_repository.bzl @@ -336,7 +336,7 @@ def _create_test_only(): package_versions = lambda **kwargs: 
_package_versions(state, **kwargs), virtual_packages = lambda **kwargs: _virtual_packages(state, **kwargs), package = lambda **kwargs: _package(state, **kwargs), - parse_repository = lambda contents: _parse_repository(state, contents, "http://nowhere"), + parse_repository = lambda contents: _parse_repository(state, contents, "http://nowhere", "test"), packages = state.packages, reset = reset, ) diff --git a/apt/private/deb_export.bzl b/apt/private/deb_export.bzl index e381af49..d2051329 100644 --- a/apt/private/deb_export.bzl +++ b/apt/private/deb_export.bzl @@ -1,11 +1,14 @@ "normalization rules" -TAR_TOOLCHAIN_TYPE = "@tar.bzl//tar/toolchain:type" +load("@aspect_bazel_lib//lib:tar.bzl", tar = "tar_lib") + +TAR_TOOLCHAIN_TYPE = tar.toolchain_type def _deb_export_impl(ctx): bsdtar = ctx.toolchains[TAR_TOOLCHAIN_TYPE] - for (i, target) in ctx.attr.foreign_symlinks.items(): + # foreign_symlinks maps label -> index string (reversed for Bazel 7.0.0 compatibility) + for (target, i) in ctx.attr.foreign_symlinks.items(): i = int(i) ctx.actions.symlink( output = ctx.outputs.symlink_outs[i], @@ -50,8 +53,8 @@ deb_export = rule( implementation = _deb_export_impl, attrs = { "srcs": attr.label_list(allow_files = True), - # mapping of symlink_outs indice to a foreign label - "foreign_symlinks": attr.string_keyed_label_dict(allow_files = True), + # mapping of foreign label -> symlink_outs index (label_keyed for Bazel 7.0 compat) + "foreign_symlinks": attr.label_keyed_string_dict(allow_files = True), "symlink_outs": attr.output_list(), "outs": attr.output_list(), }, diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index a78af3ab..f3b3ae75 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -394,7 +394,7 @@ def _deb_import_impl(rctx): cc_import_targets = cc_import_targets, outs = outs, foreign_symlinks = { - str(i): symlink + symlink: str(i) for (i, symlink) in enumerate(symlinks.values()) }, symlink_outs = symlinks.keys(), diff --git 
a/apt/tests/resolution/BUILD.bazel b/apt/tests/resolution/BUILD.bazel index e81b4443..ec3ad4ca 100644 --- a/apt/tests/resolution/BUILD.bazel +++ b/apt/tests/resolution/BUILD.bazel @@ -1,73 +1,3 @@ -load("@aspect_bazel_lib//lib:jq.bzl", "jq") -load("@aspect_bazel_lib//lib:testing.bzl", "assert_contains") -load("@bazel_skylib//rules:build_test.bzl", "build_test") - -jq( - name = "pick_libuuid_version", - srcs = [ - "@resolution_test_resolve//:lockfile", - ], - args = ["-rj"], - filter = '.packages | map(select(.name == "libuuid1")) | .[0].version', -) - -assert_contains( - name = "test_libuuid_version", - actual = ":pick_libuuid_version", - expected = "2.38.1-5+deb12u1", -) - -jq( - name = "pick_libuuid_version_empty_lock", - srcs = [ - "@resolution_test_empty_lock_resolve//:lockfile", - ], - args = ["-rj"], - filter = '.packages | map(select(.name == "libuuid1")) | .[0].version', -) - -assert_contains( - name = "test_libuuid_version_empty_lock", - actual = ":pick_libuuid_version_empty_lock", - expected = "2.38.1-5+deb12u1", -) - -jq( - name = "pick_quake_arch", - srcs = [ - "@arch_all_test_resolve//:lockfile", - ], - args = ["-rj"], - filter = '.packages | map(select(.name == "quake")) | .[0].arch', -) - -assert_contains( - name = "test_quake_arch", - actual = ":pick_quake_arch", - expected = "all", -) - -jq( - name = "pick_quake_version", - srcs = [ - "@arch_all_test_resolve//:lockfile", - ], - args = ["-rj"], - filter = '.packages | map(select(.name == "quake")) | .[0].version', -) - -assert_contains( - name = "test_quake_version", - actual = ":pick_quake_version", - expected = "73", -) - -build_test( - name = "build_clang", - target_compatible_with = [ - "@platforms//os:linux", - ], - targets = [ - "@clang//clang", - ], -) +# Tests for the new bzlmod API are done via the bookworm dependency_set +# in the root MODULE.bazel. The old yaml-manifest based tests have been +# removed as part of the new API migration. 
diff --git a/apt/tests/resolution_test.bzl b/apt/tests/resolution_test.bzl index 7fd33021..ec18ec28 100644 --- a/apt/tests/resolution_test.bzl +++ b/apt/tests/resolution_test.bzl @@ -133,7 +133,7 @@ def _resolve_optionals_test(ctx): idx.add_package(package = "libc6-dev") idx.add_package(package = "eject", depends = "libc6-dev | libc-dev") - (root_package, dependencies, _) = idx.resolution.resolve_all( + (root_package, dependencies, _, _) = idx.resolution.resolve_all( name = "eject", version = ("=", _test_version), arch = _test_arch, @@ -157,7 +157,7 @@ def _resolve_architecture_specific_packages_test(ctx): idx.add_package(package = "glibc", architecture = "all", depends = "foo [i386], bar [amd64]") # bar for amd64 - (root_package, dependencies, _) = idx.resolution.resolve_all( + (root_package, dependencies, _, _) = idx.resolution.resolve_all( name = "glibc", version = ("=", _test_version), arch = "amd64", @@ -168,7 +168,7 @@ def _resolve_architecture_specific_packages_test(ctx): asserts.equals(env, 1, len(dependencies)) # foo for i386 - (root_package, dependencies, _) = idx.resolution.resolve_all( + (root_package, dependencies, _, _) = idx.resolution.resolve_all( name = "glibc", version = ("=", _test_version), arch = "i386", @@ -197,7 +197,7 @@ def _resolve_aliases(ctx): for package in with_packages: package(idx) - (root_package, dependencies, _) = idx.resolution.resolve_all( + (root_package, dependencies, _, _) = idx.resolution.resolve_all( name = "foo", version = ("=", _test_version), arch = "amd64", @@ -244,12 +244,12 @@ def _resolve_aliases(ctx): with_package(package = "bar-plus", provides = "bar (= 1.0)"), ], resolved_name = "bar-plus") - # Un-versioned does not match with multiple candidates + # Un-versioned with multiple candidates - picks the latest version check_resolves([ with_package(package = "foo", depends = "bar"), with_package(package = "bar-plus", provides = "bar"), with_package(package = "bar-plus2", provides = "bar"), - ], resolved_name = None) + 
], resolved_name = "bar-plus2") return unittest.end(env) @@ -266,7 +266,7 @@ def _resolve_circular_deps_test(ctx): idx.add_package(package = "ruby-rubygems", depends = "ruby3.1") idx.add_package(package = "ruby", depends = "libruby, ruby-rubygems") - (root_package, dependencies, _) = idx.resolution.resolve_all( + (root_package, dependencies, _, _) = idx.resolution.resolve_all( name = "ruby", version = "", arch = _test_arch, diff --git a/examples/.bazelignore b/examples/.bazelignore new file mode 100644 index 00000000..9633d322 --- /dev/null +++ b/examples/.bazelignore @@ -0,0 +1,2 @@ +# cc_lib is a standalone example with external dependencies +cc_lib/ diff --git a/examples/.bazelversion b/examples/.bazelversion new file mode 100644 index 00000000..66ce77b7 --- /dev/null +++ b/examples/.bazelversion @@ -0,0 +1 @@ +7.0.0 diff --git a/examples/MODULE.bazel b/examples/MODULE.bazel new file mode 100644 index 00000000..52314db3 --- /dev/null +++ b/examples/MODULE.bazel @@ -0,0 +1,134 @@ +"Examples module for rules_distroless" + +module( + name = "rules_distroless_examples", + version = "0.0.0", +) + +bazel_dep(name = "rules_distroless", version = "0.0.0") +local_path_override( + module_name = "rules_distroless", + path = "..", +) + +bazel_dep(name = "aspect_bazel_lib", version = "2.14.0") +bazel_dep(name = "bazel_skylib", version = "1.5.0") +bazel_dep(name = "rules_oci", version = "2.0.0") +bazel_dep(name = "rules_java", version = "8.8.0") +bazel_dep(name = "container_structure_test", version = "1.16.0") +bazel_dep(name = "platforms", version = "0.0.10") + +# Toolchains from aspect_bazel_lib +bazel_lib_toolchains = use_extension("@aspect_bazel_lib//lib:extensions.bzl", "toolchains") +use_repo(bazel_lib_toolchains, "bsd_tar_toolchains") +use_repo(bazel_lib_toolchains, "zstd_toolchains") + +# Test fixture packages +http_archive = use_repo_rule("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +http_archive( + name = "example-bullseye-ca-certificates", + 
build_file_content = 'exports_files(["data.tar.xz", "control.tar.xz"])', + sha256 = "b2d488ad4d8d8adb3ba319fc9cb2cf9909fc42cb82ad239a26c570a2e749c389", + urls = ["https://snapshot.debian.org/archive/debian/20231106T210201Z/pool/main/c/ca-certificates/ca-certificates_20210119_all.deb"], +) + +http_archive( + name = "example-bullseye-libc-bin", + build_file_content = 'exports_files(["data.tar.xz"])', + sha256 = "8b048ab5c7e9f5b7444655541230e689631fd9855c384e8c4a802586d9bbc65a", + urls = ["https://snapshot.debian.org/archive/debian-security/20231106T230332Z/pool/updates/main/g/glibc/libc-bin_2.31-13+deb11u7_amd64.deb"], +) + +http_archive( + name = "example-bookworm-libc-bin", + build_file_content = 'exports_files(["data.tar.xz"])', + sha256 = "38c44247c5b3e864d6db2877edd9c9a0555fc4e23ae271b73d7f527802616df5", + urls = ["https://snapshot.debian.org/archive/debian-security/20231106T230332Z/pool/updates/main/g/glibc/libc-bin_2.36-9+deb12u3_armhf.deb"], +) + +# APT extension for Debian/Ubuntu packages +apt = use_extension("@rules_distroless//apt:extensions.bzl", "apt") + +# Debian Bullseye sources (for debian_snapshot example) +# Using September 2024 snapshot +apt.sources_list( + architectures = [ + "amd64", + "arm64", + ], + components = ["main"], + suites = [ + "bullseye", + "bullseye-updates", + ], + types = ["deb"], + uris = ["https://snapshot.debian.org/archive/debian/20240901T024950Z"], +) +apt.sources_list( + architectures = [ + "amd64", + "arm64", + ], + components = ["main"], + suites = ["bullseye-security"], + types = ["deb"], + uris = ["https://snapshot.debian.org/archive/debian-security/20240901T024950Z"], +) + +# Ubuntu Noble sources (for ubuntu_snapshot example) +# NOTE: Commented out because snapshot URLs are returning 404 +# apt.sources_list( +# architectures = [ +# "amd64", +# "arm64", +# ], +# components = ["main"], +# suites = [ +# "noble", +# "noble-security", +# "noble-updates", +# ], +# types = ["deb"], +# uris = 
["https://snapshot.ubuntu.com/ubuntu/20240301T030400Z"], +# ) + +# Install packages for Debian Bullseye example +apt.install( + dependency_set = "bullseye", + packages = [ + "bash", + "ca-certificates", + "coreutils", + "dpkg", + "libncurses6", + "ncurses-base", + "tzdata", + ], + suites = [ + "bullseye", + "bullseye-updates", + "bullseye-security", + ], +) + +# Install packages for Ubuntu Noble example +# NOTE: Commented out because snapshot URLs are returning 404 +# apt.install( +# dependency_set = "noble", +# packages = [ +# "bash", +# "coreutils", +# "dpkg", +# "libncurses6", +# "ncurses-base", +# "tzdata", +# ], +# suites = [ +# "noble", +# "noble-security", +# "noble-updates", +# ], +# ) + +use_repo(apt, "bullseye") diff --git a/examples/WORKSPACE b/examples/WORKSPACE new file mode 100644 index 00000000..f9c14bd4 --- /dev/null +++ b/examples/WORKSPACE @@ -0,0 +1 @@ +# This file is intentionally empty - using bzlmod (MODULE.bazel) for dependencies. diff --git a/examples/cacerts/BUILD.bazel b/examples/cacerts/BUILD.bazel index 2eeadea2..175ff793 100644 --- a/examples/cacerts/BUILD.bazel +++ b/examples/cacerts/BUILD.bazel @@ -1,5 +1,5 @@ -load("//distroless:defs.bzl", "cacerts") -load("//distroless/tests:asserts.bzl", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "cacerts") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_mtree") cacerts( name = "cacerts", diff --git a/examples/debian_snapshot/BUILD.bazel b/examples/debian_snapshot/BUILD.bazel index 8c930703..86c2dde9 100644 --- a/examples/debian_snapshot/BUILD.bazel +++ b/examples/debian_snapshot/BUILD.bazel @@ -1,11 +1,8 @@ """ -NOTE: +Debian Bullseye example using the new bzlmod API. - This is the main test used in the e2e testing. 
- - PLEASE KEEP e2e/smoke/BUILD and examples/debian_snapshot/BUILD - IN-SYNC WITH EACH OTHER, AS WELL AS THE REST OF THE TEST FILES - (test_linux_ files and the bullseye YAML manifest) +This example demonstrates how to create a distroless container image +using packages from Debian Bullseye snapshot. """ load("@aspect_bazel_lib//lib:tar.bzl", "tar") @@ -80,9 +77,6 @@ oci_image( "SSL_CERT_FILE": "/etc/ssl/certs/ca-certificates.crt", }, os = "linux", - # NOTE: this is needed because, otherwise, bazel test //... fails, even - # when container_structure_test already has target_compatible_with. - # See 136 target_compatible_with = COMPATIBLE_WITH, tars = [ # This target contains all the installed packages. @@ -125,9 +119,6 @@ oci_load( repo_tags = [ "distroless/test:latest", ], - # NOTE: this is needed because, otherwise, bazel test //... fails, even - # when container_structure_test already has target_compatible_with. - # See 136 target_compatible_with = COMPATIBLE_WITH, ) diff --git a/examples/flatten/BUILD.bazel b/examples/flatten/BUILD.bazel index 3ceff4a7..fed2a5cb 100644 --- a/examples/flatten/BUILD.bazel +++ b/examples/flatten/BUILD.bazel @@ -1,6 +1,6 @@ load("@aspect_bazel_lib//lib:tar.bzl", "tar") -load("//distroless:defs.bzl", "flatten", "home", "passwd") -load("//distroless/tests:asserts.bzl", "assert_tar_listing", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "flatten", "home", "passwd") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_listing", "assert_tar_mtree") passwd( name = "passwd", @@ -54,12 +54,11 @@ assert_tar_mtree( #mtree ./etc time=0.0 mode=755 gid=0 uid=0 type=dir ./etc/passwd time=0.0 mode=644 gid=0 uid=0 type=file size=34 -./examples time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten/dir time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten/dir/changelog time=1672560000.0 mode=755 gid=0 uid=0 type=file 
size=0 -./examples/flatten/dir/sub time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten/dir/sub/content.txt time=1672560000.0 mode=755 gid=0 uid=0 type=file size=0 +./flatten time=1672560000.0 mode=755 gid=0 uid=0 type=dir +./flatten/dir time=1672560000.0 mode=755 gid=0 uid=0 type=dir +./flatten/dir/changelog time=1672560000.0 mode=755 gid=0 uid=0 type=file size=0 +./flatten/dir/sub time=1672560000.0 mode=755 gid=0 uid=0 type=dir +./flatten/dir/sub/content.txt time=1672560000.0 mode=755 gid=0 uid=0 type=file size=0 ./home/nonroot time=0.0 mode=700 gid=666 uid=666 type=dir ./root time=0.0 mode=700 gid=0 uid=0 type=dir """, @@ -99,12 +98,11 @@ assert_tar_mtree( actual = "flatten_dedup", expected = """\ #mtree -./examples time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten/dir time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten/dir/changelog time=1672560000.0 mode=755 gid=0 uid=0 type=file size=0 -./examples/flatten/dir/sub time=1672560000.0 mode=755 gid=0 uid=0 type=dir -./examples/flatten/dir/sub/content.txt time=1672560000.0 mode=755 gid=0 uid=0 type=file size=0 +./flatten time=1672560000.0 mode=755 gid=0 uid=0 type=dir +./flatten/dir time=1672560000.0 mode=755 gid=0 uid=0 type=dir +./flatten/dir/changelog time=1672560000.0 mode=755 gid=0 uid=0 type=file size=0 +./flatten/dir/sub time=1672560000.0 mode=755 gid=0 uid=0 type=dir +./flatten/dir/sub/content.txt time=1672560000.0 mode=755 gid=0 uid=0 type=file size=0 """, ) @@ -112,11 +110,10 @@ assert_tar_listing( name = "test_flatten_dedup_listing", actual = "flatten_dedup", expected = """\ -examples/ -examples/flatten/ -examples/flatten/dir/ -examples/flatten/dir/changelog -examples/flatten/dir/sub/ -examples/flatten/dir/sub/content.txt +flatten/ +flatten/dir/ +flatten/dir/changelog +flatten/dir/sub/ +flatten/dir/sub/content.txt """, ) diff --git a/examples/group/BUILD.bazel b/examples/group/BUILD.bazel 
index b5699265..59538bfa 100644 --- a/examples/group/BUILD.bazel +++ b/examples/group/BUILD.bazel @@ -1,6 +1,6 @@ load("@aspect_bazel_lib//lib:diff_test.bzl", "diff_test") -load("//distroless:defs.bzl", "group") -load("//distroless/tests:asserts.bzl", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "group") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_mtree") group( name = "group", diff --git a/examples/home/BUILD.bazel b/examples/home/BUILD.bazel index 354d72f4..e170a07b 100644 --- a/examples/home/BUILD.bazel +++ b/examples/home/BUILD.bazel @@ -1,5 +1,5 @@ -load("//distroless:defs.bzl", "home") -load("//distroless/tests:asserts.bzl", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "home") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_mtree") home( name = "home", diff --git a/examples/java_keystore/BUILD.bazel b/examples/java_keystore/BUILD.bazel index 97ce1b2a..82f58ad7 100644 --- a/examples/java_keystore/BUILD.bazel +++ b/examples/java_keystore/BUILD.bazel @@ -1,5 +1,5 @@ -load("//distroless:defs.bzl", "java_keystore") -load("//distroless/tests:asserts.bzl", "assert_jks_listing", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "java_keystore") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_jks_listing", "assert_tar_mtree") java_keystore( name = "java_keystore", diff --git a/examples/locale/BUILD.bazel b/examples/locale/BUILD.bazel index df316303..ce923c95 100644 --- a/examples/locale/BUILD.bazel +++ b/examples/locale/BUILD.bazel @@ -1,5 +1,5 @@ -load("//distroless:defs.bzl", "locale") -load("//distroless/tests:asserts.bzl", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "locale") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_mtree") EPOCH = 123 diff --git a/examples/os_release/BUILD.bazel b/examples/os_release/BUILD.bazel index f86793d7..18e91959 100644 --- a/examples/os_release/BUILD.bazel +++ 
b/examples/os_release/BUILD.bazel @@ -1,6 +1,6 @@ load("@aspect_bazel_lib//lib:diff_test.bzl", "diff_test") -load("//distroless:defs.bzl", "os_release") -load("//distroless/tests:asserts.bzl", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "os_release") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_mtree") os_release( name = "os_release", diff --git a/examples/passwd/BUILD.bazel b/examples/passwd/BUILD.bazel index a1bdc376..830a98c6 100644 --- a/examples/passwd/BUILD.bazel +++ b/examples/passwd/BUILD.bazel @@ -1,6 +1,6 @@ load("@aspect_bazel_lib//lib:diff_test.bzl", "diff_test") -load("//distroless:defs.bzl", "passwd") -load("//distroless/tests:asserts.bzl", "assert_tar_mtree") +load("@rules_distroless//distroless:defs.bzl", "passwd") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_mtree") passwd( name = "passwd", diff --git a/examples/statusd/BUILD.bazel b/examples/statusd/BUILD.bazel index 146f0b01..e7bab5c2 100644 --- a/examples/statusd/BUILD.bazel +++ b/examples/statusd/BUILD.bazel @@ -1,6 +1,6 @@ # buildifier: disable=bzl-visibility -load("//apt:defs.bzl", "dpkg_statusd") -load("//distroless/tests:asserts.bzl", "assert_tar_mtree") +load("@rules_distroless//apt:defs.bzl", "dpkg_statusd") +load("@rules_distroless//distroless/tests:asserts.bzl", "assert_tar_mtree") dpkg_statusd( name = "statusd", diff --git a/examples/ubuntu_snapshot/BUILD.bazel b/examples/ubuntu_snapshot/BUILD.bazel index ebc7c870..8197104a 100644 --- a/examples/ubuntu_snapshot/BUILD.bazel +++ b/examples/ubuntu_snapshot/BUILD.bazel @@ -1,94 +1,8 @@ -load("@aspect_bazel_lib//lib:tar.bzl", "tar") -load("@container_structure_test//:defs.bzl", "container_structure_test") -load("@rules_distroless//distroless:defs.bzl", "group", "passwd") -load("@rules_oci//oci:defs.bzl", "oci_image", "oci_load") - -COMPATIBLE_WITH = select({ - "@platforms//cpu:x86_64": ["@platforms//cpu:x86_64"], - "@platforms//cpu:arm64": ["@platforms//cpu:arm64"], -}) 
+ [ - "@platforms//os:linux", -] - -passwd( - name = "passwd", - entries = [ - { - "uid": 0, - "gid": 0, - "home": "/root", - "shell": "/bin/bash", - "username": "r00t", - }, - { - "uid": 100, - "gid": 65534, - "home": "/home/_apt", - "shell": "/usr/sbin/nologin", - "username": "_apt", - }, - ], -) - -group( - name = "group", - entries = [ - { - "name": "root", - "gid": 0, - }, - { - "name": "_apt", - "gid": 65534, - }, - ], -) - -tar( - name = "sh", - mtree = [ - # needed as dpkg assumes sh is installed in a typical debian installation. - "./bin/sh type=link link=/bin/bash", - ], -) - -oci_image( - name = "noble", - architecture = select({ - "@platforms//cpu:arm64": "arm64", - "@platforms//cpu:x86_64": "amd64", - }), - os = "linux", - # NOTE: this is needed because, otherwise, bazel test //... fails, even - # when container_structure_test already has target_compatible_with. - # See 136 - target_compatible_with = COMPATIBLE_WITH, - tars = [ - ":sh", - ":passwd", - ":group", - "@noble//:noble", - ], -) - -oci_load( - name = "tarball", - image = ":noble", - repo_tags = [ - "distroless/noble:latest", - ], - # NOTE: this is needed because, otherwise, bazel test //... fails, even - # when container_structure_test already has target_compatible_with. - # See 136 - target_compatible_with = COMPATIBLE_WITH, -) - -container_structure_test( - name = "test", - configs = select({ - "@platforms//cpu:arm64": ["test_linux_arm64.yaml"], - "@platforms//cpu:x86_64": ["test_linux_amd64.yaml"], - }), - image = ":noble", - target_compatible_with = COMPATIBLE_WITH, -) +# Ubuntu Noble example is temporarily disabled. +# +# The snapshot URLs at https://snapshot.ubuntu.com/ubuntu/20240301T030400Z +# are returning 404 errors. This example will need to be updated with +# working snapshot URLs when available. +# +# See MODULE.bazel for the commented out apt.sources_list and apt.install +# configuration that was used. 
From f18f6c678efbd84f40a838dd377f312bb4a74cea Mon Sep 17 00:00:00 2001 From: Borja Lorente Date: Thu, 15 Jan 2026 20:08:52 +0000 Subject: [PATCH 16/19] fix: Allow multiple indices in foreign_symlinks (#205) --- apt/private/deb_export.bzl | 20 +++++++++++++------- apt/private/deb_import.bzl | 26 ++++++++++++++++++++++---- 2 files changed, 35 insertions(+), 11 deletions(-) diff --git a/apt/private/deb_export.bzl b/apt/private/deb_export.bzl index d2051329..c9b7d5ce 100644 --- a/apt/private/deb_export.bzl +++ b/apt/private/deb_export.bzl @@ -7,14 +7,20 @@ TAR_TOOLCHAIN_TYPE = tar.toolchain_type def _deb_export_impl(ctx): bsdtar = ctx.toolchains[TAR_TOOLCHAIN_TYPE] + foreign_symlinks = { + symlink: json.decode(indices_json) + for (symlink, indices_json) in ctx.attr.foreign_symlinks.items() + } + # foreign_symlinks maps label -> index string (reversed for Bazel 7.0.0 compatibility) - for (target, i) in ctx.attr.foreign_symlinks.items(): - i = int(i) - ctx.actions.symlink( - output = ctx.outputs.symlink_outs[i], - # grossly inefficient - target_file = target[DefaultInfo].files.to_list()[0], - ) + for (target, indices_json) in ctx.attr.foreign_symlinks.items(): + indices = json.decode(indices_json) + for i in indices: + ctx.actions.symlink( + output = ctx.outputs.symlink_outs[i], + # grossly inefficient + target_file = target[DefaultInfo].files.to_list()[0], + ) if len(ctx.outputs.outs): fout = ctx.outputs.outs[0] diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index f3b3ae75..3abf6f52 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -183,23 +183,29 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): continue if resolved_symlink: + print("BL: _discover_contents::resolved_symlink(line={}, sl={})".format(line, resolved_symlink)) symlinks[line] = resolved_symlink # Resolve symlinks: unresolved_symlinks = {} | symlinks + print("BL: _discover_contents::depends_file_map({})".format(depends_file_map)) + # 
TODO: this is highly inefficient, change the filemapping to be # file -> package instead of package -> files for dep in depends_on: (suite, name, arch, _) = lockfile.parse_package_key(dep) filemap = depends_file_map.get(name, []) or [] + print("BL: _discover_contents::for_dep(dep={}, filemap={})".format(name, filemap)) for file in filemap: if len(unresolved_symlinks) == 0: break for (symlink, symlink_target) in unresolved_symlinks.items(): + print("BL: _discover_contents::depends_on::symlink(symlink={}, symlink_target={})".format(symlink, symlink_target)) if file == symlink_target: unresolved_symlinks.pop(symlink) symlinks[symlink] = "@%s//:%s" % (util.sanitize(dep), file) + # Resolve self symlinks self_symlinks = {} @@ -211,6 +217,8 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): if len(unresolved_symlinks) == 0: break + print("BL: _discover_contents::depends_on::after(\nsymlinks={}\nself_symlinks={}\nunresolved_symlinks={}\n)".format(symlinks, self_symlinks, unresolved_symlinks)) + if len(unresolved_symlinks): util.warning( rctx, @@ -387,16 +395,26 @@ def _deb_import_impl(rctx): rctx.attr.package_name.removesuffix("-dev"), ) + print("BL: symlinks for {} = {}".format(rctx.attr.target_name, symlinks)) + + foreign_symlinks = {} + for (i, symlink) in enumerate(symlinks.values()): + if symlink not in foreign_symlinks: + foreign_symlinks[symlink] = [] + foreign_symlinks[symlink].append(i) + + foreign_symlinks = { + symlink: json.encode(indices) + for (symlink, indices) in foreign_symlinks.items() + } + rctx.file("BUILD.bazel", _DEB_IMPORT_BUILD_TMPL.format( mergedusr = rctx.attr.mergedusr, depends_on = ["@" + util.sanitize(dep_key) + "//:data" for dep_key in rctx.attr.depends_on], target_name = rctx.attr.target_name, cc_import_targets = cc_import_targets, outs = outs, - foreign_symlinks = { - symlink: str(i) - for (i, symlink) in enumerate(symlinks.values()) - }, + foreign_symlinks = foreign_symlinks, symlink_outs = symlinks.keys(), )) From 
bcd46d774fdce3ab9985c67e2eafb83b1bb2b9ed Mon Sep 17 00:00:00 2001 From: Borja Lorente Date: Thu, 15 Jan 2026 23:04:35 +0000 Subject: [PATCH 17/19] cleanup: Remove log lines (#206) --- apt/private/deb_import.bzl | 9 --------- 1 file changed, 9 deletions(-) diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 3abf6f52..4338430f 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -183,25 +183,20 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): continue if resolved_symlink: - print("BL: _discover_contents::resolved_symlink(line={}, sl={})".format(line, resolved_symlink)) symlinks[line] = resolved_symlink # Resolve symlinks: unresolved_symlinks = {} | symlinks - print("BL: _discover_contents::depends_file_map({})".format(depends_file_map)) - # TODO: this is highly inefficient, change the filemapping to be # file -> package instead of package -> files for dep in depends_on: (suite, name, arch, _) = lockfile.parse_package_key(dep) filemap = depends_file_map.get(name, []) or [] - print("BL: _discover_contents::for_dep(dep={}, filemap={})".format(name, filemap)) for file in filemap: if len(unresolved_symlinks) == 0: break for (symlink, symlink_target) in unresolved_symlinks.items(): - print("BL: _discover_contents::depends_on::symlink(symlink={}, symlink_target={})".format(symlink, symlink_target)) if file == symlink_target: unresolved_symlinks.pop(symlink) symlinks[symlink] = "@%s//:%s" % (util.sanitize(dep), file) @@ -217,8 +212,6 @@ def _discover_contents(rctx, depends_on, depends_file_map, target_name): if len(unresolved_symlinks) == 0: break - print("BL: _discover_contents::depends_on::after(\nsymlinks={}\nself_symlinks={}\nunresolved_symlinks={}\n)".format(symlinks, self_symlinks, unresolved_symlinks)) - if len(unresolved_symlinks): util.warning( rctx, @@ -395,8 +388,6 @@ def _deb_import_impl(rctx): rctx.attr.package_name.removesuffix("-dev"), ) - print("BL: symlinks for {} = 
{}".format(rctx.attr.target_name, symlinks)) - foreign_symlinks = {} for (i, symlink) in enumerate(symlinks.values()): if symlink not in foreign_symlinks: From e1b9677115e9b6d428fa765e183c81e22f4e2fa1 Mon Sep 17 00:00:00 2001 From: thesayyn Date: Tue, 20 Jan 2026 12:08:28 -0800 Subject: [PATCH 18/19] suites --- apt/extensions.bzl | 17 ++- apt/private/apt_deb_repository.bzl | 31 +++-- apt/private/apt_dep_resolver.bzl | 17 ++- apt/tests/resolution_test.bzl | 211 ++++++++++++++++++++++++++++- 4 files changed, 252 insertions(+), 24 deletions(-) diff --git a/apt/extensions.bzl b/apt/extensions.bzl index 4c74b5ae..8fd64d75 100644 --- a/apt/extensions.bzl +++ b/apt/extensions.bzl @@ -91,6 +91,7 @@ def _distroless_extension(mctx): name = constraint["name"], version = constraint["version"], arch = "amd64", + suites = install.suites, ) if warning: util.warning(mctx, warning) @@ -104,6 +105,7 @@ def _distroless_extension(mctx): constraint["name"], constraint["version"], "amd64", + install.suites, )) continue @@ -116,6 +118,7 @@ def _distroless_extension(mctx): constraint["name"], constraint["version"], arch, + install.suites, )) for i in range(0, ITERATION_MAX + 1): @@ -124,24 +127,27 @@ def _distroless_extension(mctx): if i == ITERATION_MAX: fail("apt.install exhausted, please file a bug") - (dependency_set_name, name, version, arch) = resolution_queue.pop() + (dependency_set_name, name, version, arch, suites) = resolution_queue.pop() - mctx.report_progress("Resolving %s:%s" % (dep_constraint, arch)) + mctx.report_progress("Resolving %s:%s" % (name, arch)) # TODO: Flattening approach of resolving dependencies has to change. 
(package, dependencies, unmet_dependencies, warnings) = resolver.resolve_all( name = name, version = version, arch = arch, - include_transitive = install.include_transitive, + include_transitive = True, + suites = suites, ) if not package: + suite_msg = " in suite(s) [%s]" % ", ".join(suites) if suites else "" fail( - "\n\nUnable to locate package `%s` for %s. It may only exist for specific set of architectures. \n" % (name, arch) + + "\n\nUnable to locate package `%s` for %s%s. It may only exist for specific set of architectures or suites. \n" % (name, arch, suite_msg) + " 1 - Ensure that the package is available for the specified architecture. \n" + " 2 - Ensure that the specified version of the package is available for the specified architecture. \n" + - " 3 - Ensure that an apt.source_list added for the specified architecture.", + " 3 - Ensure that an apt.sources_list is added for the specified architecture.\n" + + " 4 - If using suite constraints, ensure the package exists in the specified suite(s).", ) for warning in warnings: @@ -176,6 +182,7 @@ def _distroless_extension(mctx): dep["Package"], ("=", dep["Version"]), arch, + suites, )) glock.add_package_dependency(package, dep) diff --git a/apt/private/apt_deb_repository.bzl b/apt/private/apt_deb_repository.bzl index fadaf35d..d6b689db 100644 --- a/apt/private/apt_deb_repository.bzl +++ b/apt/private/apt_deb_repository.bzl @@ -228,14 +228,27 @@ def _add_package(state, package): (package["Architecture"], virtual["name"]), ) -def _virtual_packages(state, name, arch): - return util.get_dict(state.virtual_packages, [arch, name], []) - -def _package_versions(state, name, arch): - return util.get_dict(state.packages, [arch, name], {}).keys() - -def _package(state, name, version, arch): - return util.get_dict(state.packages, keys = (arch, name, version)) +def _virtual_packages(state, name, arch, suites = None): + all_providers = util.get_dict(state.virtual_packages, [arch, name], []) + if not suites: + return 
all_providers + return [(pkg, v) for (pkg, v) in all_providers if pkg["Dist"] in suites] + +def _package_versions(state, name, arch, suites = None): + all_packages = util.get_dict(state.packages, [arch, name], {}) + if not suites: + return all_packages.keys() + return [v for v, pkg in all_packages.items() if pkg["Dist"] in suites] + +def _package(state, name, version, arch, suites = None): + if not version: + return None + package = util.get_dict(state.packages, keys = (arch, name, version)) + if not package: + return None + if suites and package["Dist"] not in suites: + return None + return package def _filemap(state, name, arch): if arch not in state.filemap: @@ -336,7 +349,7 @@ def _create_test_only(): package_versions = lambda **kwargs: _package_versions(state, **kwargs), virtual_packages = lambda **kwargs: _virtual_packages(state, **kwargs), package = lambda **kwargs: _package(state, **kwargs), - parse_repository = lambda contents: _parse_repository(state, contents, "http://nowhere", "test"), + parse_repository = lambda contents, dist = "test": _parse_repository(state, contents, "http://nowhere", dist), packages = state.packages, reset = reset, ) diff --git a/apt/private/apt_dep_resolver.bzl b/apt/private/apt_dep_resolver.bzl index 2593c77e..9948d4da 100644 --- a/apt/private/apt_dep_resolver.bzl +++ b/apt/private/apt_dep_resolver.bzl @@ -3,9 +3,9 @@ load(":version.bzl", version_lib = "version") load(":version_constraint.bzl", "version_constraint") -def _resolve_package(state, name, version, arch): +def _resolve_package(state, name, version, arch, suites = None): # First check if the constraint is satisfied by a virtual package - virtual_packages = state.repository.virtual_packages(name = name, arch = arch) + virtual_packages = state.repository.virtual_packages(name = name, arch = arch, suites = suites) candidates = [ package @@ -52,8 +52,8 @@ def _resolve_package(state, name, version, arch): # ) # Get available versions of the package - versions_by_arch = 
state.repository.package_versions(name = name, arch = arch) - versions_by_any_arch = state.repository.package_versions(name = name, arch = "all") + versions_by_arch = state.repository.package_versions(name = name, arch = arch, suites = suites) + versions_by_any_arch = state.repository.package_versions(name = name, arch = "all", suites = suites) # Order packages by highest to lowest versions = version_lib.sort(versions_by_arch + versions_by_any_arch, reverse = True) @@ -73,9 +73,9 @@ def _resolve_package(state, name, version, arch): # First element in the versions list is the latest version. selected_version = versions[0] - package = state.repository.package(name = name, version = selected_version, arch = arch) + package = state.repository.package(name = name, version = selected_version, arch = arch, suites = suites) if not package: - package = state.repository.package(name = name, version = selected_version, arch = "all") + package = state.repository.package(name = name, version = selected_version, arch = "all", suites = suites) return (package, warning) @@ -84,7 +84,7 @@ _ITERATION_MAX_ = 2147483646 # For future: unfortunately this function uses a few state variables to track # certain conditions and package dependency groups. # TODO: Try to simplify it in the future. 
-def _resolve_all(state, name, version, arch, include_transitive = True): +def _resolve_all(state, name, version, arch, include_transitive = True, suites = None): unmet_dependencies = [] root_package = None dependencies = [] @@ -112,8 +112,7 @@ def _resolve_all(state, name, version, arch, include_transitive = True): path.append(name) - # TODO: only resolve in specified suites - (package, warning) = _resolve_package(state, name, version, arch) + (package, warning) = _resolve_package(state, name, version, arch, suites = suites) if warning: warnings.append(warning) diff --git a/apt/tests/resolution_test.bzl b/apt/tests/resolution_test.bzl index ec18ec28..1cb835f2 100644 --- a/apt/tests/resolution_test.bzl +++ b/apt/tests/resolution_test.bzl @@ -115,12 +115,14 @@ def _make_index(): def _add_package(idx, **kwargs): kwargs["architecture"] = kwargs.get("architecture", _test_arch) kwargs["version"] = kwargs.get("version", _test_version) + dist = kwargs.pop("dist", "test") r = "\n".join(["{}: {}".format(item[0].title(), item[1]) for item in kwargs.items()]) - idx.parse_repository(r) + idx.parse_repository(r, dist = dist) return struct( add_package = lambda **kwargs: _add_package(idx, **kwargs), resolution = resolution, + idx = idx, reset = lambda: idx.reset(), ) @@ -280,6 +282,208 @@ def _resolve_circular_deps_test(ctx): resolve_circular_deps_test = unittest.make(_resolve_circular_deps_test) +def _resolve_suite_constraint_test(ctx): + env = unittest.begin(ctx) + + idx = _make_index() + + # Add same package name in different suites with different versions + idx.add_package(package = "curl", version = "7.68.0", dist = "noble") + idx.add_package(package = "curl", version = "7.88.0", dist = "jammy") + + # Without suite constraint - should get latest version (7.88.0 from jammy) + (package, _) = idx.resolution.resolve_package( + name = "curl", + version = None, + arch = _test_arch, + ) + asserts.equals(env, "7.88.0", package["Version"]) + asserts.equals(env, "jammy", 
package["Dist"]) + + # With suite constraint for noble - should get 7.68.0 + (package, _) = idx.resolution.resolve_package( + name = "curl", + version = None, + arch = _test_arch, + suites = ["noble"], + ) + asserts.equals(env, "7.68.0", package["Version"]) + asserts.equals(env, "noble", package["Dist"]) + + # With suite constraint for jammy - should get 7.88.0 + (package, _) = idx.resolution.resolve_package( + name = "curl", + version = None, + arch = _test_arch, + suites = ["jammy"], + ) + asserts.equals(env, "7.88.0", package["Version"]) + asserts.equals(env, "jammy", package["Dist"]) + + return unittest.end(env) + +resolve_suite_constraint_test = unittest.make(_resolve_suite_constraint_test) + +def _resolve_suite_constraint_not_found_test(ctx): + env = unittest.begin(ctx) + + idx = _make_index() + + # Add package only in noble suite + idx.add_package(package = "noble-only-pkg", version = "1.0", dist = "noble") + + # Without suite constraint - should find it + (package, _) = idx.resolution.resolve_package( + name = "noble-only-pkg", + version = None, + arch = _test_arch, + ) + asserts.equals(env, "noble-only-pkg", package["Package"]) + + # With suite constraint for jammy - should NOT find it + (package, _) = idx.resolution.resolve_package( + name = "noble-only-pkg", + version = None, + arch = _test_arch, + suites = ["jammy"], + ) + asserts.equals(env, None, package) + + return unittest.end(env) + +resolve_suite_constraint_not_found_test = unittest.make(_resolve_suite_constraint_not_found_test) + +def _resolve_suite_constraint_transitive_test(ctx): + env = unittest.begin(ctx) + + idx = _make_index() + + # Add packages in noble suite + idx.add_package(package = "libssl", version = "1.0", dist = "noble") + idx.add_package(package = "curl", version = "7.68.0", depends = "libssl", dist = "noble") + + # Add packages in jammy suite + idx.add_package(package = "libssl", version = "2.0", dist = "jammy") + idx.add_package(package = "curl", version = "7.88.0", depends = 
"libssl", dist = "jammy") + + # Resolve curl with noble suite constraint - should get noble's libssl + (root, deps, _, _) = idx.resolution.resolve_all( + name = "curl", + version = None, + arch = _test_arch, + suites = ["noble"], + ) + asserts.equals(env, "curl", root["Package"]) + asserts.equals(env, "7.68.0", root["Version"]) + asserts.equals(env, "noble", root["Dist"]) + asserts.equals(env, 1, len(deps)) + asserts.equals(env, "libssl", deps[0]["Package"]) + asserts.equals(env, "1.0", deps[0]["Version"]) + asserts.equals(env, "noble", deps[0]["Dist"]) + + # Resolve curl with jammy suite constraint - should get jammy's libssl + (root, deps, _, _) = idx.resolution.resolve_all( + name = "curl", + version = None, + arch = _test_arch, + suites = ["jammy"], + ) + asserts.equals(env, "curl", root["Package"]) + asserts.equals(env, "7.88.0", root["Version"]) + asserts.equals(env, "jammy", root["Dist"]) + asserts.equals(env, 1, len(deps)) + asserts.equals(env, "libssl", deps[0]["Package"]) + asserts.equals(env, "2.0", deps[0]["Version"]) + asserts.equals(env, "jammy", deps[0]["Dist"]) + + return unittest.end(env) + +resolve_suite_constraint_transitive_test = unittest.make(_resolve_suite_constraint_transitive_test) + +def _resolve_suite_constraint_multiple_suites_test(ctx): + env = unittest.begin(ctx) + + idx = _make_index() + + # Add packages across multiple suites + idx.add_package(package = "base-pkg", version = "1.0", dist = "noble") + idx.add_package(package = "security-pkg", version = "1.1", dist = "noble-security") + idx.add_package(package = "updates-pkg", version = "1.2", dist = "noble-updates") + + # Resolve with multiple suite constraint - should find packages in any of the suites + (package, _) = idx.resolution.resolve_package( + name = "base-pkg", + version = None, + arch = _test_arch, + suites = ["noble", "noble-security", "noble-updates"], + ) + asserts.equals(env, "base-pkg", package["Package"]) + asserts.equals(env, "noble", package["Dist"]) + + (package, 
_) = idx.resolution.resolve_package( + name = "security-pkg", + version = None, + arch = _test_arch, + suites = ["noble", "noble-security", "noble-updates"], + ) + asserts.equals(env, "security-pkg", package["Package"]) + asserts.equals(env, "noble-security", package["Dist"]) + + # Package not in any of the specified suites + idx.add_package(package = "other-pkg", version = "1.0", dist = "jammy") + (package, _) = idx.resolution.resolve_package( + name = "other-pkg", + version = None, + arch = _test_arch, + suites = ["noble", "noble-security", "noble-updates"], + ) + asserts.equals(env, None, package) + + return unittest.end(env) + +resolve_suite_constraint_multiple_suites_test = unittest.make(_resolve_suite_constraint_multiple_suites_test) + +def _resolve_virtual_package_suite_constraint_test(ctx): + env = unittest.begin(ctx) + + idx = _make_index() + + # Add virtual package providers in different suites + idx.add_package(package = "mawk", version = "1.0", provides = "awk", dist = "noble") + idx.add_package(package = "gawk", version = "2.0", provides = "awk", dist = "jammy") + + # Without suite constraint - should pick one (gawk has higher version) + (package, _) = idx.resolution.resolve_package( + name = "awk", + version = None, + arch = _test_arch, + ) + asserts.equals(env, "gawk", package["Package"]) + + # With noble suite constraint - should get mawk + (package, _) = idx.resolution.resolve_package( + name = "awk", + version = None, + arch = _test_arch, + suites = ["noble"], + ) + asserts.equals(env, "mawk", package["Package"]) + asserts.equals(env, "noble", package["Dist"]) + + # With jammy suite constraint - should get gawk + (package, _) = idx.resolution.resolve_package( + name = "awk", + version = None, + arch = _test_arch, + suites = ["jammy"], + ) + asserts.equals(env, "gawk", package["Package"]) + asserts.equals(env, "jammy", package["Dist"]) + + return unittest.end(env) + +resolve_virtual_package_suite_constraint_test = 
unittest.make(_resolve_virtual_package_suite_constraint_test) + _TEST_SUITE_PREFIX = "package_resolution/" def resolution_tests(): @@ -325,3 +529,8 @@ def resolution_tests(): resolve_architecture_specific_packages_test(name = _TEST_SUITE_PREFIX + "resolve_architectures_specific") resolve_aliases_test(name = _TEST_SUITE_PREFIX + "resolve_aliases") resolve_circular_deps_test(name = _TEST_SUITE_PREFIX + "parse_circular") + resolve_suite_constraint_test(name = _TEST_SUITE_PREFIX + "resolve_suite_constraint") + resolve_suite_constraint_not_found_test(name = _TEST_SUITE_PREFIX + "resolve_suite_constraint_not_found") + resolve_suite_constraint_transitive_test(name = _TEST_SUITE_PREFIX + "resolve_suite_constraint_transitive") + resolve_suite_constraint_multiple_suites_test(name = _TEST_SUITE_PREFIX + "resolve_suite_constraint_multiple_suites") + resolve_virtual_package_suite_constraint_test(name = _TEST_SUITE_PREFIX + "resolve_virtual_package_suite_constraint") From 439c697aff6ef01b8796faa70a21b5b09c7091bc Mon Sep 17 00:00:00 2001 From: Borja Lorente Date: Fri, 30 Jan 2026 16:56:28 +0000 Subject: [PATCH 19/19] fix: Allow multiple cc_imports per pkgconfig file (#208) Some packages, like `krb5-multidev`, include several .so files to link against in their pkg-config files: ``` prefix=/usr exec_prefix=${prefix} libdir=${prefix}/lib/x86_64-linux-gnu/mit-krb5 includedir=${prefix}/include/mit-krb5 defccname=FILE:/tmp/krb5cc_%{uid} defktname=FILE:/etc/krb5.keytab defcktname=FILE:/etc/krb5/user/%{euid}/client.keytab Name: mit-krb5 Description: An implementation of Kerberos network authentication Version: 1.20.1 Cflags: -isystem ${includedir} Libs: -L${libdir} -lkrb5 -lk5crypto -lcom_err Libs.private: -lkrb5support ``` So, instead of assuming that we have one cc_library per pkgconfig file, we just collect all the declarations and create one `cc_import` target for each. Some design considerations: ### Does this violate the private-ness of `Libs.private`? 
All `cc_imports` we create have private visibility already, so I don't think it does. ### Why not depend on other debian packages exporting those libraries? Since those files are already in the package (https://packages.debian.org/sid/amd64/krb5-multidev/filelist), I assume the intention is to link against the `.so` files distributed in the `krb-multidev` package, and not to pull other deb packages. This could be false, of course, if a user has already installed those packages and for some reason the linker finds their SO files first, but I think that's a bug in the definition of the krb5-multidev package, not this implementation. Plus, it has the added benefit that, if the debian package in question doesn't include an appropriate .so file, we just don't create the cc_import. --- apt/private/deb_import.bzl | 58 ++++++++++++++++++++------------------ apt/private/pkgconfig.bzl | 16 ++++++----- 2 files changed, 40 insertions(+), 34 deletions(-) diff --git a/apt/private/deb_import.bzl b/apt/private/deb_import.bzl index 4338430f..ad267363 100644 --- a/apt/private/deb_import.bzl +++ b/apt/private/deb_import.bzl @@ -272,36 +272,40 @@ so_library( includes += pkgc.includes link_paths += pkgc.link_paths - if not pkgc.libname or pkgc.libname + "_import" in import_targets: + if len(pkgc.libnames) == 0: continue - subtarget = pkgc.libname + "_import" - import_targets.append(subtarget) - - # Look for a static archive - # for ar in a_files: - # if ar.endswith(pkgc.libname + ".a"): - # static_lib = '":%s"' % ar - # break - - # Look for a dynamic library - IGNORE = ["libfl"] - for so_lib in so_files: - if pkgc.libname and pkgc.libname not in IGNORE and so_lib.endswith(pkgc.libname + ".so"): - shared_lib = '":%s"' % so_lib - break + for libname in pkgc.libnames: + if libname + "_import" in import_targets: + continue - build_file_content += _CC_IMPORT_TMPL.format( - name = subtarget, - shared_lib = shared_lib, - static_lib = static_lib, - hdrs = [], - includes = { - "external/.." 
+ include: True - for include in includes + ["/usr/include", "/usr/include/x86_64-linux-gnu"] - }.keys(), - linkopts = pkgc.linkopts, - ) + subtarget = libname + "_import" + import_targets.append(subtarget) + + # Look for a static archive + # for ar in a_files: + # if ar.endswith(pkgc.libname + ".a"): + # static_lib = '":%s"' % ar + # break + + # Look for a dynamic library + IGNORE = ["libfl"] + for so_lib in so_files: + if libname and libname not in IGNORE and so_lib.endswith(libname + ".so"): + shared_lib = '":%s"' % so_lib + break + + build_file_content += _CC_IMPORT_TMPL.format( + name = subtarget, + shared_lib = shared_lib, + static_lib = static_lib, + hdrs = [], + includes = { + "external/.." + include: True + for include in includes + ["/usr/include", "/usr/include/x86_64-linux-gnu"] + }.keys(), + linkopts = pkgc.linkopts, + ) build_file_content += _CC_LIBRARY_TMPL.format( name = target_name, diff --git a/apt/private/pkgconfig.bzl b/apt/private/pkgconfig.bzl index bd9e80f6..78f90672 100644 --- a/apt/private/pkgconfig.bzl +++ b/apt/private/pkgconfig.bzl @@ -82,7 +82,7 @@ def process_pcconfig(pc): includes = [] link_paths = [] defines = [] - libname = None + libnames = [] IGNORE = [ "-licui18n", @@ -105,8 +105,8 @@ def process_pcconfig(pc): link_paths.append(linkpath) linkopts.append("-Wl,-rpath=" + arg.removeprefix("-L")) continue - elif arg.startswith("-l") and not libname: - libname = "lib" + arg.removeprefix("-l") + elif arg.startswith("-l"): + libnames.append("lib" + arg.removeprefix("-l")) continue elif arg in IGNORE: continue @@ -118,7 +118,9 @@ def process_pcconfig(pc): if arg in IGNORE: continue elif arg.startswith("-l"): - linkopts.append(arg) + # The cc_imports we create based on these names are private already, + # so we don't need to do anything special for `Libs.private`. 
+ libnames.append("lib" + arg.removeprefix("-l")) if "Cflags" in directives: cflags = _trim(directives["Cflags"]).split(" ") @@ -148,14 +150,14 @@ def process_pcconfig(pc): "/usr/include", ] - return (libname, includedir, libdir, linkopts, link_paths, includes, defines) + return (libnames, includedir, libdir, linkopts, link_paths, includes, defines) def pkgconfig(rctx, path): pc = parse_pc(rctx.read(path)) - (libname, includedir, libdir, linkopts, link_paths, includes, defines) = process_pcconfig(pc) + (libnames, includedir, libdir, linkopts, link_paths, includes, defines) = process_pcconfig(pc) return struct( - libname = libname, + libnames = libnames, includedir = includedir, libdir = libdir, linkopts = linkopts,