diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 657b9c39ebc..0f13bedff1b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -222,7 +222,7 @@ jobs: instructions: |- go build -o "$BIN_PATH" -ldflags="$LD_FLAGS" -tags netcgo -trimpath -buildvcs=false - build-docker: + build-docker-light: name: Docker light ${{ matrix.arch }} build needs: - set-product-version @@ -251,3 +251,31 @@ jobs: dev_tags: | docker.io/hashicorppreview/${{ env.REPO_NAME }}:${{ env.version }} docker.io/hashicorppreview/${{ env.REPO_NAME }}:${{ env.version }}-${{ github.sha }} + + build-docker-full: + name: Docker full ${{ matrix.arch }} build + needs: + - set-product-version + - build-linux + runs-on: ubuntu-latest + strategy: + matrix: + arch: [ "arm", "arm64", "386", "amd64" ] + env: + version: ${{ needs.set-product-version.outputs.product-version }} + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - name: Docker Build (Action) + uses: hashicorp/actions-docker-build@v1 + with: + version: ${{ env.version }} + target: release-full + arch: ${{ matrix.arch }} + tags: | + docker.io/hashicorp/${{ env.REPO_NAME }}:full + docker.io/hashicorp/${{ env.REPO_NAME }}:full-${{ env.version }} + public.ecr.aws/hashicorp/${{ env.REPO_NAME }}:full + public.ecr.aws/hashicorp/${{ env.REPO_NAME }}:full-${{ env.version }} + dev_tags: | + docker.io/hashicorppreview/${{ env.REPO_NAME }}:full-${{ env.version }} + docker.io/hashicorppreview/${{ env.REPO_NAME }}:full-${{ env.version }}-${{ github.sha }} diff --git a/.go-version b/.go-version index 0bd54efd316..8909929f6e7 100644 --- a/.go-version +++ b/.go-version @@ -1 +1 @@ -1.20.4 +1.20.7 diff --git a/.release/docker/README.md b/.release/docker/README.md index 4fdf9272958..f733efbeaf9 100644 --- a/.release/docker/README.md +++ b/.release/docker/README.md @@ -1,37 +1,46 @@ # Packer Docker Container -The root of this repository contains the officially supported HashiCorp 
Dockerfile to build the hashicorp/packer docker image. The `dev` docker image should be built for local dev and testing, while the production docker image, `release`, is built in CI and makes use of CI-built binaries. The `official` docker image is built using the official binaries from releases.hashicorp.com. +The root of this repository contains the officially supported HashiCorp Dockerfile to build the hashicorp/packer docker image. The `dev` docker image should be built for local dev and testing, while the production docker image, `release`, is built in CI and makes use of CI-built binaries. The `light` and `full` docker images are built using the official binaries from releases.hashicorp.com. ## Build -See the Makefile targets in the root of this repository for building Packer images in either -development or release modes: - - - `make docker-dev` - - `make docker-official` - - `make docker-multiarch-official` - - `make docker` +Refer to the Makefile of this repository, especially the `docker` and `docker-dev` targets to build a local version of the dev image based on the sources available. ### Usage This repository automatically builds containers for using the -[`packer`](https://packer.io) command line program. It contains two distinct -varieties of build, an `official` version, aka `light`, which just contains the binary. -It also contains a `dev` version, aka `full`, which compiles the binary from source +[`packer`](https://developer.hashicorp.com/packer) command line program. It contains three distinct +varieties of build: a `light` version, which just contains the binary, +a `full` build, which contains the Packer binary with pre-installed plugins, +and a `dev` version, which compiles the binary from source inside the container before exposing it for use. 
-##### `official` +##### `light` -The `official` version of this container will copy the current stable version of +The `light` version of this container will copy the current stable version of the binary, taken from releases.hashicorp.com, into the container. It will also -set it for use as the default entrypoint. This will be the best option for most uses, -especially if you are just looking to run the binary from a container. This image -is tagged as both `official` and `light`. +set it for use as the default entrypoint. This will be the best option for most uses, +especially if you are just looking to run the binary from a container. The `latest` tag on DockerHub also points to this version. You can use this version with the following: ```shell -docker run hashicorp/packer:official +docker run hashicorp/packer:light +``` + +##### `full` + +The `full` version of the container builds upon `light` and pre-installs +the plugins officially maintained by HashiCorp. + +You can use this version with the following: +```shell +docker run hashicorp/packer:full +``` + +You can view the list of pre-installed plugins with the following: +```shell +docker run hashicorp/packer:full plugins installed ``` ##### `dev` @@ -41,7 +50,7 @@ the current ref of this [repository](https://github.com/hashicorp/packer). Using official `golang` image](https://hub.docker.com/_/golang/) as a base, this container will copy the source from the current branch, build the binary, and expose it for running. Because all build artifacts are included, it should be quite a bit larger than -the `official` image. This version of the container is most useful for development or +the `light` image. This version of the container is most useful for development or debugging. 
You can use this version with the following: @@ -64,8 +73,8 @@ docker run \ ~> **Note**: packer init is available from Packer v1.7.0 and later -The command will mount the working directory (`pwd`) to `workspace`, which is the working directory (`-w`) inside the container. -Any plugin installed with `packer init` will be installed under the directory specified under the `PACKER_PLUGIN_PATH` environment variable. `PACKER_PLUGIN_PATH` must be set to a path inside the volume mount so that plugins can become available at `packer build`. +The command will mount the working directory (`pwd`) to `workspace`, which is the working directory (`-w`) inside the container. +Any plugin installed with `packer init` will be installed under the directory specified under the `PACKER_PLUGIN_PATH` environment variable. `PACKER_PLUGIN_PATH` must be set to a path inside the volume mount so that plugins can become available at `packer build`. Running `packer build` ```shell diff --git a/CHANGELOG.md b/CHANGELOG.md index 83f1f035501..64b819a3139 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,126 @@ -## 1.9.0 (Unreleased) +## 1.9.5 (Upcoming) + +## 1.9.4 (August 18, 2023) + +### BUG FIXES: + +* core: When invoking Packer with the CHECKPOINT_DISABLE environment variable the telemetry + reporter is left uninitialized in order to disable telemetry reporting. + Any method calls on the nil reporter is expected to check if the reporter is active or in + NOOP mode. The SetBundledUsage function, introduced in Packer 1.9.2, failed to perform a nil + check before attempting to modify an attribute, causing Packer to fail when telemetry is + disabled. This release fixes this issue by introducing such a check. + +## 1.9.3 (August 17, 2023) + +### NOTES: +* **New Docker Image**: As part of the bundled plugin removal effort, a new + Docker target called `release-full` has been added to the Packer release + artifacts. 
The release-full image includes Packer and all the official + plugins pre-installed in its environment. This image is being offered as an + alternative option for those users who may still be relying on the plugin + versions bundled into the Packer binary. + [GH-12532](https://github.com/hashicorp/packer/pull/12532) + +### IMPROVEMENTS: +* core/docs: Clarify the expected usage of the `packer init` command for HCL2 + template builds.[GH-12535](https://github.com/hashicorp/packer/pull/12535) +* core/hcp: Add support for project-level service principals. A user connecting + with a project level service principals must provide a valid HCP_PROJECT_ID + in order to connect. + [GH-12520](https://github.com/hashicorp/packer/pull/12520) + [GH-12576](https://github.com/hashicorp/packer/pull/12576) +* core: A new Docker image `packer:release-full` has been added for all + supported architectures. The release-full image includes Packer and all the + official plugins pre-installed in its environment. + [GH-12532](https://github.com/hashicorp/packer/pull/12532) +* core: Add enhanced support to Packer telemetry for bundle plugins usage. + [GH-12536](https://github.com/hashicorp/packer/pull/12536) + +### BUG FIXES: + +* core: Bump golang.org/x/net to v0.13.0 to address CVE GO-2023-1988. Packer + itself is not vulnerable to the CVE as we don't render web pages, but + security checks do point it as an issue that needs to be addressed. + [GH-12561](https://github.com/hashicorp/packer/pull/12561) +* core: Fix custom plugin loading in current working directory regression. + [GH-12544](https://github.com/hashicorp/packer/pull/12544) + + +## 1.9.2 (July 19, 2023) ### NOTES: + +* Vendored plugins within Packer have not been updated. Plugin releases occur on + a regular basis to address issues and feature requests. + Please note that in an upcoming version of Packer, we will remove the last + bundled plugins from Packer. 
Users are encouraged to use `packer init` for HCL2 templates or + `packer plugins install` with legacy JSON templates for installing external + plugins. + +* Packer will now warn when using bundled plugins. This feature will be removed in + a future version of the tool, so this warning is meant to bring awareness of the + upcoming change, and help users update their templates. + [GH-12495](https://github.com/hashicorp/packer/pull/12495) + +### BUG FIXES: + +* Fixed a bug with how Packer was discovering plugins: in order to load + plugins, Packer would recursively scan all the known directories in which + we could have installed plugins. This caused unnecessary directory + walks and slowdowns upon invocation. Packer will now only check + for nested plugins within the directories used by commands such as `packer + init`, or `packer plugins install`, or as defined in PACKER_PLUGIN_PATH. + Refer to + [Packer's plugin directory documentation](https://developer.hashicorp.com/packer/docs/configure#packer-s-plugin-directory) + for details on how loading works. + [GH-12414](https://github.com/hashicorp/packer/pull/12414) + +* The `packer init` subcommand now bundles all the missing installed plugins into one + condensed warning, as opposed to one warning per missing plugin. + [GH-12506](https://github.com/hashicorp/packer/pull/12506) + +### PLUGINS: + +* packer-plugin-parallels: The Parallels plugin has been handed over to the Parallels + team. New releases for this plugin are available at + https://github.com/parallels/packer-plugin-parallels. This plugin is no longer + being bundled in the Packer binary release. Existing references to the + plugin will continue to work but users are advised to update the + `required_plugins` block to use the new plugin source address. 
+ [GH-12476](https://github.com/hashicorp/packer/pull/12476) + ``` + required_plugins { + parallels = { + source = "github.com/parallels/parallels" + version = "~> 1" + } + } + ``` + +### IMPROVEMENTS: + +* The `hcl2_upgrade` sub-command will now add `required_plugins` to the template + generated from JSON for [official plugins](https://developer.hashicorp.com/packer/plugins#tiers-and-namespaces). + [GH-12504](https://github.com/hashicorp/packer/pull/12504) + +## 1.9.1 (June 1, 2023) + +### BUG FIXES: + +* On May 16th 2023, HCP introduced multi-project support to the platform. + In order to use multiple projects in your organization, you will need to update Packer + to version 1.9.1 or above. Starting with 1.9.1, you may specify a project ID to push + builds to with the `HCP_PROJECT_ID` environment variable. If no project ID is specified, + Packer will pick the project with the oldest creation date. Older versions of Packer are + incompatible with multi-project support on HCP, and builds will fail for HCP + organizations with multiple projects on versions before 1.9.1. + [GH-12453](https://github.com/hashicorp/packer/pull/12453) + +## 1.9.0 (May 31, 2023) + +### NOTES: + * **Breaking Change**: Iteration fingerprints used to be computed from the Git SHA of the repository where the template is located when running packer build. This changes with this release, and now fingerprints are automatically generated @@ -12,20 +132,60 @@ these builds will work exactly as they did before. [GH-12172](https://github.com/hashicorp/packer/pull/12172) +* **Breaking Change**: Community-maintained plugins bundled with the Packer binary have been removed. + These external plugin components are released independently of Packer core and can be installed + directly by the user. Users relying on the external plugin components listed below should refer + to the `packer plugins` sub-command and, if using HCL2, a `required_plugins` block to define a + list of plugins for building a template. 
+ ### PLUGINS + * Remove provisioner plugins for Chef, Converge, Puppet, Salt, and Inspec as vendored plugins. These plugins have been previously archived and not updated in release since being archived. These plugins can be installed using `packer init` or with the Packer plugins sub-command `packer plugins install github.com/hashicorp/chef`. [GH-12374](https://github.com/hashicorp/packer/pull/12374) +* The following community plugins won't be bundled with Packer anymore: + + * [Alicloud](https://github.com/hashicorp/packer-plugin-alicloud) + * [CloudStack](https://github.com/hashicorp/packer-plugin-cloudstack) + * [HCloud](https://github.com/hashicorp/packer-plugin-hcloud) + * [HyperOne](https://github.com/hashicorp/packer-plugin-hyperone) + * [Hyper-V](https://github.com/hashicorp/packer-plugin-hyperv) + * [JDCloud](https://github.com/hashicorp/packer-plugin-jdcloud) + * [LXC](https://github.com/hashicorp/packer-plugin-lxc) + * [LXD](https://github.com/hashicorp/packer-plugin-lxd) + * [NCloud](https://github.com/hashicorp/packer-plugin-ncloud) + * [OpenStack](https://github.com/hashicorp/packer-plugin-openstack) + * [Proxmox](https://github.com/hashicorp/packer-plugin-proxmox) + * [TencentCloud](https://github.com/hashicorp/packer-plugin-tencentcloud) + * [Triton](https://github.com/hashicorp/packer-plugin-triton) + * [Yandex](https://github.com/hashicorp/packer-plugin-yandex) + + [GH-12436](https://github.com/hashicorp/packer/pull/12436) + +Users relying on these external plugin components should refer to the `packer plugins` sub-command and, +if using HCL2, a `required_plugins` block to define a list of plugins to use for building a template. + ### IMPROVEMENTS: * core/hcp: Now, fingerprints used by HCP Packer are randomly generated ULIDs instead of a Git SHA, and a new one is always generated, unless one is - already specified in the environment. + specified in the environment. 
[GH-12172](https://github.com/hashicorp/packer/pull/12172) +### BUG FIXES: + +* Fix LDFLAGS for release pipelines: Between Packer 1.8.5 and Packer 1.8.7, changes + to the LDFLAGS in use for building the binaries for Packer had mistakenly + removed some compilation flags, leading to the final binaries not being stripped. + This change raised the size of the built binaries by as much as 45%. + In this release, we fixed the LDFLAGS during compilation, yielding leaner binaries. + +* Bumped gopsutil to v3. This fixes a macOS intermittent crash reported by the community + [GH-12430](https://github.com/hashicorp/packer/issues/12430) + ## 1.8.7 (May 4, 2023) ### NOTES: diff --git a/Dockerfile b/Dockerfile index 75acdcd3450..1b5ae54e942 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,13 +2,13 @@ # SPDX-License-Identifier: MPL-2.0 # ======================================================================== -# +# # This Dockerfile contains multiple targets. # Use 'docker build --target= .' to build one. # e.g. `docker build --target=release-light .` # -# All non-dev targets have a PRODUCT_VERSION argument that must be provided -# via --build-arg=PRODUCT_VERSION= when building. +# All non-dev targets have a PRODUCT_VERSION argument that must be provided +# via --build-arg=PRODUCT_VERSION= when building. # e.g. --build-arg PRODUCT_VERSION=1.11.2 # # For local dev and testing purposes, please build and use the `dev` docker image. @@ -27,59 +27,8 @@ COPY bin/packer /bin/packer ENTRYPOINT ["/bin/packer"] - -# Official docker image that includes binaries from releases.hashicorp.com. -# This downloads the release from releases.hashicorp.com and therefore requires that -# the release is published before building the Docker image. -FROM docker.mirror.hashicorp.services/alpine:latest as official - -# This is the release of Packer to pull in. 
-ARG PRODUCT_VERSION - -LABEL name="Packer" \ - maintainer="HashiCorp Packer Team " \ - vendor="HashiCorp" \ - version=$PRODUCT_VERSION \ - release=$PRODUCT_VERSION \ - summary="Packer is a tool for creating identical machine images for multiple platforms from a single source configuration." \ - description="Packer is a tool for creating identical machine images for multiple platforms from a single source configuration. Please submit issues to https://github.com/hashicorp/packer/issues" - -# This is the location of the releases. -ENV HASHICORP_RELEASES=https://releases.hashicorp.com - -RUN set -eux && \ - apk add --no-cache git bash wget openssl gnupg xorriso && \ - gpg --keyserver keyserver.ubuntu.com --recv-keys C874011F0AB405110D02105534365D9472D7468F && \ - mkdir -p /tmp/build && \ - cd /tmp/build && \ - apkArch="$(apk --print-arch)" && \ - case "${apkArch}" in \ - aarch64) packerArch='arm64' ;; \ - armhf) packerArch='arm' ;; \ - x86) packerArch='386' ;; \ - x86_64) packerArch='amd64' ;; \ - *) echo >&2 "error: unsupported architecture: ${apkArch} (see ${HASHICORP_RELEASES}/packer/${PRODUCT_VERSION}/)" && exit 1 ;; \ - esac && \ - wget ${HASHICORP_RELEASES}/packer/${PRODUCT_VERSION}/packer_${PRODUCT_VERSION}_linux_${packerArch}.zip && \ - wget ${HASHICORP_RELEASES}/packer/${PRODUCT_VERSION}/packer_${PRODUCT_VERSION}_SHA256SUMS && \ - wget ${HASHICORP_RELEASES}/packer/${PRODUCT_VERSION}/packer_${PRODUCT_VERSION}_SHA256SUMS.sig && \ - gpg --batch --verify packer_${PRODUCT_VERSION}_SHA256SUMS.sig packer_${PRODUCT_VERSION}_SHA256SUMS && \ - grep packer_${PRODUCT_VERSION}_linux_${packerArch}.zip packer_${PRODUCT_VERSION}_SHA256SUMS | sha256sum -c && \ - unzip -d /tmp/build packer_${PRODUCT_VERSION}_linux_${packerArch}.zip && \ - cp /tmp/build/packer /bin/packer && \ - cd /tmp && \ - rm -rf /tmp/build && \ - gpgconf --kill all && \ - apk del gnupg openssl && \ - rm -rf /root/.gnupg && \ - # Tiny smoke test to ensure the binary we downloaded runs - packer version - 
-ENTRYPOINT ["/bin/packer"] - - # Light docker image which can be used to run the binary from a container. -# This image builds from the locally generated binary in ./bin/, and from CI-built binaries within CI. +# This image builds from the locally generated binary in ./bin/, and from CI-built binaries within CI. # To generate the local binary, run `make dev`. # This image is published to DockerHub under the `light`, `light-$VERSION`, and `latest` tags. FROM docker.mirror.hashicorp.services/alpine:latest as release-light @@ -104,6 +53,24 @@ COPY dist/$TARGETOS/$TARGETARCH/$BIN_NAME /bin/ ENTRYPOINT ["/bin/packer"] +# Full docker image which can be used to run the binary from a container. +# This image is essentially the same as the `release-light` one, but embeds +# the official plugins in it. +FROM release-light as release-full + +# Install the latest version of the official plugins +RUN /bin/packer plugins install "github.com/hashicorp/amazon" && \ + /bin/packer plugins install "github.com/hashicorp/ansible" && \ + /bin/packer plugins install "github.com/hashicorp/azure" && \ + /bin/packer plugins install "github.com/hashicorp/docker" && \ + /bin/packer plugins install "github.com/hashicorp/googlecompute" && \ + /bin/packer plugins install "github.com/hashicorp/qemu" && \ + /bin/packer plugins install "github.com/hashicorp/vagrant" && \ + /bin/packer plugins install "github.com/hashicorp/virtualbox" && \ + /bin/packer plugins install "github.com/hashicorp/vmware" && \ + /bin/packer plugins install "github.com/hashicorp/vsphere" + +ENTRYPOINT ["/bin/packer"] # Set default target to 'dev'. FROM dev diff --git a/Makefile b/Makefile index ad6cfd3a28a..ed46303684f 100644 --- a/Makefile +++ b/Makefile @@ -12,8 +12,6 @@ GOOS=$(shell go env GOOS) GOARCH=$(shell go env GOARCH) GOPATH=$(shell go env GOPATH) -EXECUTABLE_FILES=$(shell find . 
-type f -executable | egrep -v '^\./(website/[vendor|tmp]|vendor/|\.git|bin/|scripts/|pkg/)' | egrep -v '.*(\.sh|\.bats|\.git)' | egrep -v './provisioner/(ansible|inspec)/test-fixtures/exit1') - # Get the git commit GIT_DIRTY=$(shell test -n "`git status --porcelain`" && echo "+CHANGES" || true) GIT_COMMIT=$(shell git rev-parse --short HEAD) @@ -73,41 +71,22 @@ dev: ## Build and install a development build # Docker build variables and targets REGISTRY_NAME?=docker.io/hashicorp IMAGE_NAME=packer -VERSION?=1.7.10 -IMAGE_TAG=$(REGISTRY_NAME)/$(IMAGE_NAME):$(VERSION) -IMAGE_TAG_DEV=$(REGISTRY_NAME)/$(IMAGE_NAME):latest-$(shell git rev-parse --short HEAD) - -docker: docker-official -docker-light: docker-official - -# Builds from the releases.hashicorp.com official binary -docker-official: - docker build \ - --tag $(IMAGE_TAG) \ - --tag hashicorp/packer:latest \ - --target=official \ - --build-arg VERSION=$(VERSION) \ - . +IMAGE_TAG_DEV=$(REGISTRY_NAME)/$(IMAGE_NAME):latest-$(GIT_COMMIT) -# Builds multiarch from the releases.hashicorp.com official binary -docker-multiarch-official: - docker buildx build \ - --tag $(IMAGE_TAG) \ - --tag hashicorp/packer:latest \ - --target=official \ - --build-arg VERSION=$(VERSION) \ - --platform linux/amd64,linux/arm64 \ - . +docker: docker-dev # Builds from the locally generated binary in ./bin/ # To generate the local binary, run `make dev` -docker-dev: export GOOS=linux -docker-dev: export GOARCH=amd64 -docker-dev: dev +docker-dev: + @GOOS=linux \ + GOARCH=amd64 \ + CGO_ENABLED=0 \ + go build -ldflags '$(GOLDFLAGS)' -o bin/packer . @docker build \ --tag $(IMAGE_TAG_DEV) \ --target=dev \ . + @rm -f bin/packer # Clean up the Linux/amd64 binary to avoid conficts on other OS/archs lint: install-lint-deps ## Lint Go code @if [ ! 
-z $(PKG_NAME) ]; then \ @@ -133,15 +112,6 @@ fmt-check: fmt ## Check go code formatting exit 1; \ fi -mode-check: ## Check that only certain files are executable - @echo "==> Checking that only certain files are executable..." - @if [ ! -z "$(EXECUTABLE_FILES)" ]; then \ - echo "These files should not be executable or they must be white listed in the Makefile:"; \ - echo "$(EXECUTABLE_FILES)" | xargs -n1; \ - exit 1; \ - else \ - echo "Check passed."; \ - fi fmt-docs: @find ./website/pages/docs -name "*.md" -exec pandoc --wrap auto --columns 79 --atx-headers -s -f "markdown_github+yaml_metadata_block" -t "markdown_github+yaml_metadata_block" {} -o {} \; @@ -166,7 +136,7 @@ generate-check: generate ## Check go code generation is on par exit 1; \ fi -test: mode-check vet ## Run unit tests +test: vet ## Run unit tests @go test -count $(COUNT) $(TEST) $(TESTARGS) -timeout=3m # acctest runs provisioners acceptance tests @@ -178,7 +148,7 @@ testacc: # install-build-deps generate ## Run acceptance tests @echo "WARN: Acceptance tests will take a long time to run and may cost money. Ctrl-C if you want to cancel." PACKER_ACC=1 go test -count $(COUNT) -v $(TEST) $(TESTARGS) -timeout=120m -testrace: mode-check vet ## Test with race detection enabled +testrace: vet ## Test with race detection enabled @go test -count $(COUNT) -race $(TEST) $(TESTARGS) -timeout=3m -p=8 # Runs code coverage and open a html page with report diff --git a/README.md b/README.md index af75e1ed95b..c6a6703d72a 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ key points. First, [download a pre-built Packer binary](https://www.packer.io/downloads.html) for your operating system or [compile Packer -yourself](https://github.com/hashicorp/packer/blob/master/.github/CONTRIBUTING.md#setting-up-go-to-work-on-packer). +yourself](https://github.com/hashicorp/packer/blob/main/.github/CONTRIBUTING.md#setting-up-go). 
After Packer is installed, create your first template, which tells Packer what platforms to build images for and how you want to build them. In our diff --git a/acctest/plugin/bundled_plugin_test.go b/acctest/plugin/bundled_plugin_test.go new file mode 100644 index 00000000000..9f21dfac626 --- /dev/null +++ b/acctest/plugin/bundled_plugin_test.go @@ -0,0 +1,184 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package plugin + +import ( + _ "embed" + "errors" + "fmt" + "os" + "os/exec" + "strings" + "testing" + + "github.com/hashicorp/go-multierror" + amazonacc "github.com/hashicorp/packer-plugin-amazon/builder/ebs/acceptance" + "github.com/hashicorp/packer-plugin-sdk/acctest" + "github.com/hashicorp/packer/hcl2template/addrs" +) + +//go:embed test-fixtures/basic_amazon_bundled.pkr.hcl +var basicAmazonBundledEbsTemplate string + +func TestAccBuildBundledPlugins(t *testing.T) { + plugin := addrs.Plugin{ + Hostname: "github.com", + Namespace: "hashicorp", + Type: "amazon", + } + testCase := &acctest.PluginTestCase{ + Name: "amazon-ebs_bundled_test", + Setup: func() error { + return cleanupPluginInstallation(plugin) + }, + Teardown: func() error { + helper := amazonacc.AMIHelper{ + Region: "us-east-1", + Name: "packer-plugin-bundled-amazon-ebs-test", + } + return helper.CleanUpAmi() + }, + Template: basicAmazonBundledEbsTemplate, + Type: "amazon-ebs", + Init: false, + Check: func(buildCommand *exec.Cmd, logfile string) error { + if buildCommand.ProcessState != nil { + if buildCommand.ProcessState.ExitCode() != 0 { + return fmt.Errorf("Bad exit code. 
Logfile: %s", logfile) + } + } + + rawLogs, err := os.ReadFile(logfile) + if err != nil { + return fmt.Errorf("failed to read logs: %s", err) + } + + var errs error + + logs := string(rawLogs) + + if !strings.Contains(logs, "Warning: Bundled plugins used") { + errs = multierror.Append(errs, errors.New("expected warning about bundled plugins used, did not find it")) + } + + if !strings.Contains(logs, "Then run 'packer init' to manage installation of the plugins") { + errs = multierror.Append(errs, errors.New("expected suggestion about packer init in logs, did not find it.")) + } + + return errs + }, + } + + acctest.TestPlugin(t, testCase) +} + +//go:embed test-fixtures/basic_amazon_with_required_plugins.pkr.hcl +var basicAmazonRequiredPluginEbsTemplate string + +func TestAccBuildBundledPluginsWithRequiredPlugins(t *testing.T) { + plugin := addrs.Plugin{ + Hostname: "github.com", + Namespace: "hashicorp", + Type: "amazon", + } + testCase := &acctest.PluginTestCase{ + Name: "amazon-ebs_with_required_plugins_test", + Setup: func() error { + return cleanupPluginInstallation(plugin) + }, + Teardown: func() error { + helper := amazonacc.AMIHelper{ + Region: "us-east-1", + Name: "packer-plugin-required-plugin-amazon-ebs-test", + } + return helper.CleanUpAmi() + }, + Template: basicAmazonRequiredPluginEbsTemplate, + Type: "amazon-ebs", + Init: false, + Check: func(buildCommand *exec.Cmd, logfile string) error { + if buildCommand.ProcessState != nil { + if buildCommand.ProcessState.ExitCode() != 1 { + return fmt.Errorf("Bad exit code. 
Logfile: %s", logfile) + } + } + + rawLogs, err := os.ReadFile(logfile) + if err != nil { + return fmt.Errorf("failed to read logs: %s", err) + } + + var errs error + + logs := string(rawLogs) + + if strings.Contains(logs, "Warning: Bundled plugins used") { + errs = multierror.Append(errs, errors.New("did not expect warning about bundled plugins used")) + } + + if !strings.Contains(logs, "Missing plugins") { + errs = multierror.Append(errs, errors.New("expected error about plugins required and not installed, did not find it")) + } + + return errs + }, + } + + acctest.TestPlugin(t, testCase) +} + +//go:embed test-fixtures/basic_amazon_bundled.json +var basicAmazonBundledEbsTemplateJSON string + +func TestAccBuildBundledPluginsJSON(t *testing.T) { + plugin := addrs.Plugin{ + Hostname: "github.com", + Namespace: "hashicorp", + Type: "amazon", + } + testCase := &acctest.PluginTestCase{ + Name: "amazon-ebs_bundled_test_json", + Setup: func() error { + return cleanupPluginInstallation(plugin) + }, + Teardown: func() error { + helper := amazonacc.AMIHelper{ + Region: "us-east-1", + Name: "packer-plugin-bundled-amazon-ebs-test-json", + } + return helper.CleanUpAmi() + }, + Template: basicAmazonBundledEbsTemplateJSON, + Type: "amazon-ebs", + Init: false, + Check: func(buildCommand *exec.Cmd, logfile string) error { + if buildCommand.ProcessState != nil { + if buildCommand.ProcessState.ExitCode() != 0 { + return fmt.Errorf("Bad exit code. 
Logfile: %s", logfile) + } + } + + rawLogs, err := os.ReadFile(logfile) + if err != nil { + return fmt.Errorf("failed to read logs: %s", err) + } + + var errs error + + logs := string(rawLogs) + + if !strings.Contains(logs, "Warning: Bundled plugins used") { + errs = multierror.Append(errs, errors.New("expected warning about bundled plugins, did not find it.")) + } + + if !strings.Contains(logs, "plugins with the 'packer plugins install' command") { + errs = multierror.Append(errs, errors.New("expected suggestion about packer plugins install in logs, did not find it.")) + } + + return errs + }, + } + + acctest.TestPlugin(t, testCase) +} diff --git a/acctest/plugin/component_acc_test.go b/acctest/plugin/component_acc_test.go index ff77adf636f..e24e4c01492 100644 --- a/acctest/plugin/component_acc_test.go +++ b/acctest/plugin/component_acc_test.go @@ -8,7 +8,7 @@ package plugin import ( _ "embed" "fmt" - "io/ioutil" + "io" "os" "os/exec" "testing" @@ -54,7 +54,7 @@ func TestAccInitAndBuildBasicAmazonAmiDatasource(t *testing.T) { } defer logs.Close() - logsBytes, err := ioutil.ReadAll(logs) + logsBytes, err := io.ReadAll(logs) if err != nil { return fmt.Errorf("Unable to read %s", logfile) } diff --git a/acctest/plugin/plugin_acc_test.go b/acctest/plugin/plugin_acc_test.go index 9e69afe3774..b83063f15f8 100644 --- a/acctest/plugin/plugin_acc_test.go +++ b/acctest/plugin/plugin_acc_test.go @@ -8,7 +8,7 @@ package plugin import ( _ "embed" "fmt" - "io/ioutil" + "io" "os" "os/exec" "path/filepath" @@ -58,7 +58,7 @@ func TestAccInitAndBuildBasicAmazonEbs(t *testing.T) { } defer logs.Close() - logsBytes, err := ioutil.ReadAll(logs) + logsBytes, err := io.ReadAll(logs) if err != nil { return fmt.Errorf("Unable to read %s", logfile) } diff --git a/acctest/plugin/test-fixtures/basic_amazon_bundled.json b/acctest/plugin/test-fixtures/basic_amazon_bundled.json new file mode 100644 index 00000000000..f674cd130d1 --- /dev/null +++ 
b/acctest/plugin/test-fixtures/basic_amazon_bundled.json @@ -0,0 +1,10 @@ +{ + "builders": [{ + "type": "amazon-ebs", + "region": "us-east-1", + "instance_type": "m3.medium", + "source_ami": "ami-76b2a71e", + "ssh_username": "ubuntu", + "ami_name": "packer-plugin-bundled-amazon-ebs-test-json" + }] +} diff --git a/acctest/plugin/test-fixtures/basic_amazon_bundled.pkr.hcl b/acctest/plugin/test-fixtures/basic_amazon_bundled.pkr.hcl new file mode 100644 index 00000000000..c17a09df04f --- /dev/null +++ b/acctest/plugin/test-fixtures/basic_amazon_bundled.pkr.hcl @@ -0,0 +1,11 @@ +source "amazon-ebs" "basic-test" { + region = "us-east-1" + instance_type = "m3.medium" + source_ami = "ami-76b2a71e" + ssh_username = "ubuntu" + ami_name = "packer-plugin-bundled-amazon-ebs-test" +} + +build { + sources = ["source.amazon-ebs.basic-test"] +} diff --git a/acctest/plugin/test-fixtures/basic_amazon_with_required_plugins.pkr.hcl b/acctest/plugin/test-fixtures/basic_amazon_with_required_plugins.pkr.hcl new file mode 100644 index 00000000000..a30f85fa49e --- /dev/null +++ b/acctest/plugin/test-fixtures/basic_amazon_with_required_plugins.pkr.hcl @@ -0,0 +1,20 @@ +packer { + required_plugins { + amazon = { + source = "github.com/hashicorp/amazon", + version = "~> 1" + } + } +} + +source "amazon-ebs" "basic-test" { + region = "us-east-1" + instance_type = "m3.medium" + source_ami = "ami-76b2a71e" + ssh_username = "ubuntu" + ami_name = "packer-plugin-external-amazon-ebs-test" +} + +build { + sources = ["source.amazon-ebs.basic-test"] +} diff --git a/acctest/testing.go b/acctest/testing.go index e716ec97b39..e24b64324f5 100644 --- a/acctest/testing.go +++ b/acctest/testing.go @@ -6,7 +6,7 @@ package acctest import ( "context" "fmt" - "io/ioutil" + "io" "log" "os" "strings" @@ -182,8 +182,8 @@ func Test(t TestT, c TestCase) { log.Printf("[DEBUG] Running 'test' build") ui := &packersdk.BasicUi{ Reader: os.Stdin, - Writer: ioutil.Discard, - ErrorWriter: ioutil.Discard, + Writer: io.Discard, + 
ErrorWriter: io.Discard, PB: &packersdk.NoopProgressTracker{}, } artifacts, err := build.Run(context.Background(), ui) diff --git a/builder/file/builder.go b/builder/file/builder.go index e6df9b54008..1e8d7d08fc6 100644 --- a/builder/file/builder.go +++ b/builder/file/builder.go @@ -12,7 +12,6 @@ import ( "context" "fmt" "io" - "io/ioutil" "os" "path/filepath" @@ -77,7 +76,7 @@ func (b *Builder) Run(ctx context.Context, ui packersdk.Ui, hook packersdk.Hook) } else { // We're going to write Contents; if it's empty we'll just create an // empty file. - err := ioutil.WriteFile(b.config.Target, []byte(b.config.Content), 0600) + err := os.WriteFile(b.config.Target, []byte(b.config.Content), 0600) if err != nil { return nil, err } diff --git a/builder/file/builder_test.go b/builder/file/builder_test.go index dfa87277e2b..ede2e032470 100644 --- a/builder/file/builder_test.go +++ b/builder/file/builder_test.go @@ -5,7 +5,7 @@ package file import ( "fmt" - "io/ioutil" + "os" "testing" packersdk "github.com/hashicorp/packer-plugin-sdk/packer" @@ -33,7 +33,7 @@ func TestBuilderFileAcc_copy(t *testing.T) { } func checkContent(artifacts []packersdk.Artifact) error { - content, err := ioutil.ReadFile("contentTest.txt") + content, err := os.ReadFile("contentTest.txt") if err != nil { return err } @@ -45,7 +45,7 @@ func checkContent(artifacts []packersdk.Artifact) error { } func checkCopy(artifacts []packersdk.Artifact) error { - content, err := ioutil.ReadFile("copyTest.txt") + content, err := os.ReadFile("copyTest.txt") if err != nil { return err } diff --git a/builder/null/artifact_export.go b/builder/null/artifact_export.go index ae6e46f2756..f9ec6d825cd 100644 --- a/builder/null/artifact_export.go +++ b/builder/null/artifact_export.go @@ -4,8 +4,6 @@ package null import ( - "fmt" - registryimage "github.com/hashicorp/packer-plugin-sdk/packer/registry/image" ) @@ -26,7 +24,7 @@ func (*NullArtifact) Id() string { } func (a *NullArtifact) String() string { - return 
fmt.Sprintf("Did not export anything. This is the null builder") + return "Did not export anything. This is the null builder" } func (a *NullArtifact) State(name string) interface{} { diff --git a/cmd/ssh-keygen/main.go b/cmd/ssh-keygen/main.go index a15797ea811..5a759b165f5 100644 --- a/cmd/ssh-keygen/main.go +++ b/cmd/ssh-keygen/main.go @@ -5,7 +5,6 @@ package main import ( "flag" - "io/ioutil" "log" "os" "os/user" @@ -74,12 +73,12 @@ func main() { log.Fatalf("%s already exists.", cla.Filename) } log.Printf("Saving private key to %s", cla.Filename) - if err := ioutil.WriteFile(cla.Filename, keypair.Private, 0600); err != nil { + if err := os.WriteFile(cla.Filename, keypair.Private, 0600); err != nil { log.Fatal(err) } publicFilename := cla.Filename + ".pub" log.Printf("Saving public key to %s", publicFilename) - if err := ioutil.WriteFile(publicFilename, keypair.Public, 0644); err != nil { + if err := os.WriteFile(publicFilename, keypair.Public, 0644); err != nil { log.Fatal(err) } } diff --git a/command/build.go b/command/build.go index b707fe22d6f..842938adcb9 100644 --- a/command/build.go +++ b/command/build.go @@ -87,7 +87,15 @@ func (c *BuildCommand) RunContext(buildCtx context.Context, cla *BuildArgs) int return ret } - diags := packerStarter.Initialize(packer.InitializeOptions{}) + diags := packerStarter.DetectPluginBinaries() + ret = writeDiags(c.Ui, nil, diags) + if ret != 0 { + return ret + } + + diags = packerStarter.Initialize(packer.InitializeOptions{}) + bundledDiags := c.DetectBundledPlugins(packerStarter) + diags = append(bundledDiags, diags...) 
ret = writeDiags(c.Ui, nil, diags) if ret != 0 { return ret diff --git a/command/build_parallel_test.go b/command/build_parallel_test.go index 959134fd539..09e994e9e5d 100644 --- a/command/build_parallel_test.go +++ b/command/build_parallel_test.go @@ -6,7 +6,6 @@ package command import ( "bytes" "context" - "fmt" "path/filepath" "sync" "testing" @@ -101,7 +100,7 @@ func TestBuildParallel_1(t *testing.T) { } args := []string{ - fmt.Sprintf("-parallel-builds=10"), + "-parallel-builds=10", filepath.Join(testFixture("parallel"), "1lock-5wg.json"), } @@ -130,7 +129,7 @@ func TestBuildParallel_2(t *testing.T) { } args := []string{ - fmt.Sprintf("-parallel-builds=3"), + "-parallel-builds=3", filepath.Join(testFixture("parallel"), "2lock-4wg.json"), } @@ -159,7 +158,7 @@ func TestBuildParallel_Timeout(t *testing.T) { } args := []string{ - fmt.Sprintf("-parallel-builds=3"), + "-parallel-builds=3", filepath.Join(testFixture("parallel"), "2lock-timeout.json"), } diff --git a/command/build_test.go b/command/build_test.go index 8dac9b8e0aa..f6b8aa06f8b 100644 --- a/command/build_test.go +++ b/command/build_test.go @@ -5,7 +5,6 @@ package command import ( "fmt" - "io/ioutil" "math" "os" "path/filepath" @@ -1009,9 +1008,9 @@ func (fc fileCheck) verify(t *testing.T, dir string) { } } for file, expectedContent := range fc.expectedContent { - content, err := ioutil.ReadFile(filepath.Join(dir, file)) + content, err := os.ReadFile(filepath.Join(dir, file)) if err != nil { - t.Fatalf("ioutil.ReadFile: %v", err) + t.Fatalf("os.ReadFile: %v", err) } if diff := cmp.Diff(expectedContent, string(content)); diff != "" { t.Errorf("content of %s differs: %s", file, diff) diff --git a/command/command_test.go b/command/command_test.go index 4666a4109b8..870cd3a0e20 100644 --- a/command/command_test.go +++ b/command/command_test.go @@ -5,7 +5,7 @@ package command import ( "bytes" - "io/ioutil" + "os" "path/filepath" "testing" @@ -27,7 +27,7 @@ func fatalCommand(t *testing.T, m Meta) { func 
testFixtureContent(n ...string) string { path := filepath.Join(append([]string{fixturesDir}, n...)...) - b, err := ioutil.ReadFile(path) + b, err := os.ReadFile(path) if err != nil { panic(err) } diff --git a/command/fmt_test.go b/command/fmt_test.go index b642cdef5ad..516fb6acca9 100644 --- a/command/fmt_test.go +++ b/command/fmt_test.go @@ -6,7 +6,6 @@ package command import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "strings" @@ -128,7 +127,7 @@ func TestFmt_Recursive(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tempDirectory := mustString(ioutil.TempDir(testDir, "test-dir-*")) + tempDirectory := mustString(os.MkdirTemp(testDir, "test-dir-*")) defer os.RemoveAll(tempDirectory) createFiles(tempDirectory, tt.alreadyPresentContent) diff --git a/command/hcl2_upgrade.go b/command/hcl2_upgrade.go index adba96d7f87..08b7d2ad745 100644 --- a/command/hcl2_upgrade.go +++ b/command/hcl2_upgrade.go @@ -797,16 +797,93 @@ type PackerParser struct { } func (p *PackerParser) Parse(tpl *template.Template) error { + reqPlugins, err := p.generateRequiredPluginsBlock(tpl) + if err != nil { + return err + } + + if tpl.MinVersion == "" && reqPlugins == nil { + return nil + } + + fileContent := hclwrite.NewEmptyFile() + body := fileContent.Body() + packerBody := body.AppendNewBlock("packer", nil).Body() + if tpl.MinVersion != "" { - fileContent := hclwrite.NewEmptyFile() - body := fileContent.Body() - packerBody := body.AppendNewBlock("packer", nil).Body() packerBody.SetAttributeValue("required_version", cty.StringVal(fmt.Sprintf(">= %s", tpl.MinVersion))) - p.out = fileContent.Bytes() } + + if reqPlugins != nil { + packerBody.AppendBlock(reqPlugins) + } + + p.out = fileContent.Bytes() + return nil } +func gatherPluginsFromTemplate(tpl *template.Template) []string { + plugins := map[string]struct{}{} + + for _, b := range tpl.Builders { + for prefix, plugin := range knownPluginPrefixes { + if strings.HasPrefix(b.Type, prefix) { + 
plugins[plugin] = struct{}{} + } + } + } + + for _, p := range tpl.Provisioners { + for prefix, plugin := range knownPluginPrefixes { + if strings.HasPrefix(p.Type, prefix) { + plugins[plugin] = struct{}{} + } + } + } + + for _, pps := range tpl.PostProcessors { + for _, pp := range pps { + for prefix, plugin := range knownPluginPrefixes { + if strings.HasPrefix(pp.Type, prefix) { + plugins[plugin] = struct{}{} + } + } + } + } + + if len(plugins) == 0 { + return nil + } + + retPlugins := make([]string, 0, len(plugins)) + for plugin := range plugins { + retPlugins = append(retPlugins, plugin) + } + + sort.Strings(retPlugins) + + return retPlugins +} + +func (p *PackerParser) generateRequiredPluginsBlock(tpl *template.Template) (*hclwrite.Block, error) { + plugins := gatherPluginsFromTemplate(tpl) + if len(plugins) == 0 { + return nil, nil + } + + reqPlugins := hclwrite.NewBlock("required_plugins", nil) + for _, plugin := range plugins { + pluginBlock := cty.ObjectVal(map[string]cty.Value{ + "source": cty.StringVal(plugin), + "version": cty.StringVal("~> 1"), + }) + reqPlugins.Body().SetAttributeValue(strings.Replace(plugin, "github.com/hashicorp/", "", 1), pluginBlock) + } + + return reqPlugins, nil +} + func (p *PackerParser) Write(out *bytes.Buffer) { if len(p.out) > 0 { if p.WithAnnotations { diff --git a/command/hcl2_upgrade_test.go b/command/hcl2_upgrade_test.go index d2f92b6b690..353fe4c980d 100644 --- a/command/hcl2_upgrade_test.go +++ b/command/hcl2_upgrade_test.go @@ -4,7 +4,6 @@ package command import ( - "io/ioutil" "os" "path/filepath" "testing" @@ -38,6 +37,7 @@ func Test_hcl2_upgrade(t *testing.T) { {folder: "complete-variables-with-template-engine", flags: []string{}}, {folder: "undeclared-variables", flags: []string{}, exitCode: 0}, {folder: "varfile-with-no-variables-block", flags: []string{}, exitCode: 0}, + {folder: "bundled-plugin-used", flags: []string{}, exitCode: 0}, } for _, tc := range tc { @@ -63,8 +63,8 @@ func Test_hcl2_upgrade(t 
*testing.T) { if tc.exitEarly { return } - expected := string(mustBytes(ioutil.ReadFile(expectedPath))) - actual := string(mustBytes(ioutil.ReadFile(outputPath))) + expected := string(mustBytes(os.ReadFile(expectedPath))) + actual := string(mustBytes(os.ReadFile(outputPath))) if diff := cmp.Diff(expected, actual); diff != "" { t.Fatalf("unexpected output: %s", diff) diff --git a/command/init.go b/command/init.go index 761627134d6..28b5f1fbbb1 100644 --- a/command/init.go +++ b/command/init.go @@ -66,6 +66,14 @@ func (c *InitCommand) RunContext(buildCtx context.Context, cla *InitArgs) int { return ret } + if len(reqs) == 0 { + c.Ui.Message(` +No plugins requirement found, make sure you reference a Packer config +containing a packer.required_plugins block. See +https://www.packer.io/docs/templates/hcl_templates/blocks/packer +for more info.`) + } + opts := plugingetter.ListInstallationsOptions{ FromFolders: c.Meta.CoreConfig.Components.PluginConfig.KnownPluginFolders, BinaryInstallationOptions: plugingetter.BinaryInstallationOptions{ @@ -124,55 +132,13 @@ func (c *InitCommand) RunContext(buildCtx context.Context, cla *InitArgs) int { Getters: getters, }) if err != nil { - if pluginRequirement.Implicit { - msg := fmt.Sprintf(` -Warning! At least one component used in your config file(s) has moved out of -Packer into the %q plugin. -For that reason, Packer init tried to install the latest version of the %s -plugin. 
Unfortunately, this failed : -%s`, - pluginRequirement.Identifier, - pluginRequirement.Identifier.Type, - err) - c.Ui.Say(msg) - } else { - c.Ui.Error(fmt.Sprintf("Failed getting the %q plugin:", pluginRequirement.Identifier)) - c.Ui.Error(err.Error()) - ret = 1 - } + c.Ui.Error(fmt.Sprintf("Failed getting the %q plugin:", pluginRequirement.Identifier)) + c.Ui.Error(err.Error()) + ret = 1 } if newInstall != nil { - if pluginRequirement.Implicit { - msg := fmt.Sprintf("Installed implicitly required plugin %s %s in %q", pluginRequirement.Identifier, newInstall.Version, newInstall.BinaryPath) - ui.Say(msg) - - warn := fmt.Sprintf(` -Warning, at least one component used in your config file(s) has moved out of -Packer into the %[2]q plugin and is now being implicitly required. -For more details on implicitly required plugins see https://packer.io/docs/commands/init#implicit-required-plugin - -To avoid any backward incompatible changes with your -config file you may want to lock the plugin version by pasting the following to your config: - -packer { - required_plugins { - %[1]s = { - source = "%[2]s" - version = "~> %[3]s" - } - } -} -`, - pluginRequirement.Identifier.Type, - pluginRequirement.Identifier, - newInstall.Version, - ) - ui.Error(warn) - continue - } msg := fmt.Sprintf("Installed plugin %s %s in %q", pluginRequirement.Identifier, newInstall.Version, newInstall.BinaryPath) ui.Say(msg) - } } return ret @@ -180,7 +146,7 @@ packer { func (*InitCommand) Help() string { helpText := ` -Usage: packer init [options] [config.pkr.hcl|folder/] +Usage: packer init [options] TEMPLATE Install all the missing plugins required in a Packer config. Note that Packer does not have a state. diff --git a/command/init_test.go b/command/init_test.go index 7f35d3f4cc5..cfa1abb47ae 100644 --- a/command/init_test.go +++ b/command/init_test.go @@ -1,6 +1,8 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 +//go:build amd64 && (darwin || windows || linux) + package command import ( @@ -11,6 +13,7 @@ import ( "path/filepath" "runtime" "sort" + "strings" "testing" "github.com/google/go-cmp/cmp" @@ -412,3 +415,55 @@ func (opts initTestGoGetPlugin) fn(t *testing.T, _ testCaseInit) { t.Fatalf("get: %v", err) } } + +// TestInitCmd aims to test the init command, with output validation +func TestInitCmd(t *testing.T) { + tests := []struct { + name string + args []string + expectedCode int + outputCheck func(string, string) error + }{ + { + name: "Ensure init warns on template without required_plugin blocks", + args: []string{ + testFixture("hcl", "build-var-in-pp.pkr.hcl"), + }, + expectedCode: 0, + outputCheck: func(stdout, stderr string) error { + if !strings.Contains(stdout, "No plugins requirement found") { + return fmt.Errorf("command should warn about plugin requirements not found, but did not") + } + return nil + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := &InitCommand{ + Meta: TestMetaFile(t), + } + + exitCode := c.Run(tt.args) + if exitCode != tt.expectedCode { + t.Errorf("process exit code mismatch: expected %d, got %d", + tt.expectedCode, + exitCode) + } + + out, stderr := GetStdoutAndErrFromTestMeta(t, c.Meta) + err := tt.outputCheck(out, stderr) + if err != nil { + if len(out) != 0 { + t.Logf("command stdout: %q", out) + } + + if len(stderr) != 0 { + t.Logf("command stderr: %q", stderr) + } + t.Error(err.Error()) + } + }) + } +} diff --git a/command/meta.go b/command/meta.go index 99123fd8ec7..fb3d9efb2a7 100644 --- a/command/meta.go +++ b/command/meta.go @@ -9,7 +9,9 @@ import ( "fmt" "io" "os" + "strings" + "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" packersdk "github.com/hashicorp/packer-plugin-sdk/packer" "github.com/hashicorp/packer-plugin-sdk/template" @@ -179,3 +181,178 @@ func (m *Meta) GetConfigFromJSON(cla *MetaArgs) (packer.Handler, int) { } return 
core, ret } + +func (m *Meta) DetectBundledPlugins(handler packer.Handler) hcl.Diagnostics { + var plugins []string + + switch h := handler.(type) { + case *packer.Core: + plugins = m.detectBundledPluginsJSON(h) + case *hcl2template.PackerConfig: + plugins = m.detectBundledPluginsHCL2(handler.(*hcl2template.PackerConfig)) + } + + if len(plugins) == 0 { + return nil + } + + packer.CheckpointReporter.SetBundledUsage() + + buf := &strings.Builder{} + buf.WriteString("This template relies on the use of plugins bundled into the Packer binary.\n") + buf.WriteString("The practice of bundling external plugins into Packer will be removed in an upcoming version.\n\n") + switch h := handler.(type) { + case *packer.Core: + buf.WriteString("To remove this warning and ensure builds keep working you can install these external plugins with the 'packer plugins install' command\n\n") + + for _, plugin := range plugins { + fmt.Fprintf(buf, "* packer plugins install %s\n", plugin) + } + + buf.WriteString("\nAlternatively, if you upgrade your templates to HCL2, you can use 'packer init' with a 'required_plugins' block to automatically install external plugins.\n\n") + fmt.Fprintf(buf, "You can try HCL2 by running 'packer hcl2_upgrade %s'", h.Template.Path) + case *hcl2template.PackerConfig: + buf.WriteString("To remove this warning, add the following section to your template:\n") + buf.WriteString(m.fixRequiredPlugins(h)) + buf.WriteString("\nThen run 'packer init' to manage installation of the plugins") + } + + return hcl.Diagnostics{ + &hcl.Diagnostic{ + Severity: hcl.DiagWarning, + Summary: "Bundled plugins used", + Detail: buf.String(), + }, + } +} + +func (m *Meta) detectBundledPluginsJSON(core *packer.Core) []string { + bundledPlugins := map[string]struct{}{} + + tmpl := core.Template + if tmpl == nil { + panic("No template parsed. 
This is a Packer bug which should be reported, please open an issue on the project's issue tracker.") + } + + for _, b := range tmpl.Builders { + builderType := fmt.Sprintf("packer-builder-%s", b.Type) + if bundledStatus[builderType] { + bundledPlugins[builderType] = struct{}{} + } + } + + for _, p := range tmpl.Provisioners { + provisionerType := fmt.Sprintf("packer-provisioner-%s", p.Type) + if bundledStatus[provisionerType] { + bundledPlugins[provisionerType] = struct{}{} + } + } + + for _, pps := range tmpl.PostProcessors { + for _, pp := range pps { + postProcessorType := fmt.Sprintf("packer-post-processor-%s", pp.Type) + if bundledStatus[postProcessorType] { + bundledPlugins[postProcessorType] = struct{}{} + } + } + } + + return compileBundledPluginList(bundledPlugins) +} + +var knownPluginPrefixes = map[string]string{ + "amazon": "github.com/hashicorp/amazon", + "ansible": "github.com/hashicorp/ansible", + "azure": "github.com/hashicorp/azure", + "docker": "github.com/hashicorp/docker", + "googlecompute": "github.com/hashicorp/googlecompute", + "qemu": "github.com/hashicorp/qemu", + "vagrant": "github.com/hashicorp/vagrant", + "vmware": "github.com/hashicorp/vmware", + "vsphere": "github.com/hashicorp/vsphere", +} + +func (m *Meta) fixRequiredPlugins(config *hcl2template.PackerConfig) string { + plugins := map[string]struct{}{} + + for _, b := range config.Builds { + for _, b := range b.Sources { + for prefix, plugin := range knownPluginPrefixes { + if strings.HasPrefix(b.Type, prefix) { + plugins[plugin] = struct{}{} + } + } + } + + for _, p := range b.ProvisionerBlocks { + for prefix, plugin := range knownPluginPrefixes { + if strings.HasPrefix(p.PType, prefix) { + plugins[plugin] = struct{}{} + } + } + } + + for _, pps := range b.PostProcessorsLists { + for _, pp := range pps { + for prefix, plugin := range knownPluginPrefixes { + if strings.HasPrefix(pp.PType, prefix) { + plugins[plugin] = struct{}{} + } + } + } + } + } + + for _, ds := range 
config.Datasources { + for prefix, plugin := range knownPluginPrefixes { + if strings.HasPrefix(ds.Type, prefix) { + plugins[plugin] = struct{}{} + } + } + } + + retPlugins := make([]string, 0, len(plugins)) + for plugin := range plugins { + retPlugins = append(retPlugins, plugin) + } + + return generateRequiredPluginsBlock(retPlugins) +} + +func (m *Meta) detectBundledPluginsHCL2(config *hcl2template.PackerConfig) []string { + bundledPlugins := map[string]struct{}{} + + for _, b := range config.Builds { + for _, src := range b.Sources { + builderType := fmt.Sprintf("packer-builder-%s", src.Type) + if bundledStatus[builderType] { + bundledPlugins[builderType] = struct{}{} + } + } + + for _, p := range b.ProvisionerBlocks { + provisionerType := fmt.Sprintf("packer-provisioner-%s", p.PType) + if bundledStatus[provisionerType] { + bundledPlugins[provisionerType] = struct{}{} + } + } + + for _, pps := range b.PostProcessorsLists { + for _, pp := range pps { + postProcessorType := fmt.Sprintf("packer-post-processor-%s", pp.PType) + if bundledStatus[postProcessorType] { + bundledPlugins[postProcessorType] = struct{}{} + } + } + } + } + + for _, ds := range config.Datasources { + dsType := fmt.Sprintf("packer-datasource-%s", ds.Type) + if bundledStatus[dsType] { + bundledPlugins[dsType] = struct{}{} + } + } + + return compileBundledPluginList(bundledPlugins) +} diff --git a/command/plugins_install.go b/command/plugins_install.go index d40bb46bd80..31fe7ec665d 100644 --- a/command/plugins_install.go +++ b/command/plugins_install.go @@ -77,7 +77,6 @@ func (c *PluginsInstallCommand) RunContext(buildCtx context.Context, args []stri // a plugin requirement that matches them all pluginRequirement := plugingetter.Requirement{ Identifier: plugin, - Implicit: false, } if len(args) > 1 { diff --git a/command/plugins_install_test.go b/command/plugins_install_test.go index 577d8b3f776..7f34cb5dddd 100644 --- a/command/plugins_install_test.go +++ b/command/plugins_install_test.go @@ 
-1,6 +1,8 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 +//go:build amd64 && (darwin || windows || linux) + package command import ( diff --git a/command/plugins_installed.go b/command/plugins_installed.go index 58b4381a985..bc6175a61a5 100644 --- a/command/plugins_installed.go +++ b/command/plugins_installed.go @@ -64,7 +64,6 @@ func (c *PluginsInstalledCommand) RunContext(buildCtx context.Context) int { Accessor: "", VersionConstraints: nil, Identifier: nil, - Implicit: false, } installations, err := allPlugins.ListInstallations(opts) diff --git a/command/plugins_remove.go b/command/plugins_remove.go index 32d77749c08..592f7cf1716 100644 --- a/command/plugins_remove.go +++ b/command/plugins_remove.go @@ -74,7 +74,6 @@ func (c *PluginsRemoveCommand) RunContext(buildCtx context.Context, args []strin // a plugin requirement that matches them all pluginRequirement := plugingetter.Requirement{ Identifier: plugin, - Implicit: false, } if len(args) > 1 { diff --git a/command/plugins_remove_test.go b/command/plugins_remove_test.go index 41104db7146..83b2463bc02 100644 --- a/command/plugins_remove_test.go +++ b/command/plugins_remove_test.go @@ -1,6 +1,8 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 +//go:build amd64 && (darwin || windows || linux) + package command import ( diff --git a/command/test-fixtures/hcl2_upgrade/ami_test/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/ami_test/expected.pkr.hcl index 72ccb20383c..8a72f3551b0 100644 --- a/command/test-fixtures/hcl2_upgrade/ami_test/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/ami_test/expected.pkr.hcl @@ -1,3 +1,11 @@ +packer { + required_plugins { + amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" + } + } +} source "amazon-ebs" "autogenerated_1" { run_tags = { diff --git a/command/test-fixtures/hcl2_upgrade/aws-access-config/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/aws-access-config/expected.pkr.hcl index 9ccbe9dc2fc..8c34e4735b3 100644 --- a/command/test-fixtures/hcl2_upgrade/aws-access-config/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/aws-access-config/expected.pkr.hcl @@ -1,5 +1,11 @@ packer { required_version = ">= 1.6.0" + required_plugins { + amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" + } + } } variable "aws_access_key" { diff --git a/command/test-fixtures/hcl2_upgrade/azure_shg/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/azure_shg/expected.pkr.hcl index 089ce80a781..15d5a4ad33e 100644 --- a/command/test-fixtures/hcl2_upgrade/azure_shg/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/azure_shg/expected.pkr.hcl @@ -1,3 +1,11 @@ +packer { + required_plugins { + azure = { + source = "github.com/hashicorp/azure" + version = "~> 1" + } + } +} source "azure-arm" "autogenerated_1" { shared_image_gallery { diff --git a/command/test-fixtures/hcl2_upgrade/bundled-plugin-used/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/bundled-plugin-used/expected.pkr.hcl new file mode 100644 index 00000000000..cce175b01bd --- /dev/null +++ b/command/test-fixtures/hcl2_upgrade/bundled-plugin-used/expected.pkr.hcl @@ -0,0 +1,29 @@ +packer { + required_plugins { + 
amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" + } + ansible = { + source = "github.com/hashicorp/ansible" + version = "~> 1" + } + googlecompute = { + source = "github.com/hashicorp/googlecompute" + version = "~> 1" + } + } +} + +source "amazon-ebs" "autogenerated_1" { +} + +build { + sources = ["source.amazon-ebs.autogenerated_1"] + + provisioner "ansible-local" { + } + + post-processor "googlecompute-import" { + } +} diff --git a/command/test-fixtures/hcl2_upgrade/bundled-plugin-used/input.json b/command/test-fixtures/hcl2_upgrade/bundled-plugin-used/input.json new file mode 100644 index 00000000000..917c7105e5a --- /dev/null +++ b/command/test-fixtures/hcl2_upgrade/bundled-plugin-used/input.json @@ -0,0 +1,11 @@ +{ + "builders": [{ + "type": "amazon-ebs" + }], + "provisioners": [{ + "type": "ansible-local" + }], + "post-processors": [{ + "type": "googlecompute-import" + }] +} diff --git a/command/test-fixtures/hcl2_upgrade/complete/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/complete/expected.pkr.hcl index bcc669b6438..a6800c67210 100644 --- a/command/test-fixtures/hcl2_upgrade/complete/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/complete/expected.pkr.hcl @@ -14,6 +14,12 @@ # See https://www.packer.io/docs/templates/hcl_templates/blocks/packer for more info packer { required_version = ">= 1.6.0" + required_plugins { + amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" + } + } } # All generated input variables will be of 'string' type as this is how Packer JSON diff --git a/command/test-fixtures/hcl2_upgrade/minimal/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/minimal/expected.pkr.hcl index 2802e8e18b0..316293c39e7 100644 --- a/command/test-fixtures/hcl2_upgrade/minimal/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/minimal/expected.pkr.hcl @@ -14,6 +14,12 @@ # See https://www.packer.io/docs/templates/hcl_templates/blocks/packer for more info packer { required_version = ">= 
1.6.0" + required_plugins { + amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" + } + } } # All generated input variables will be of 'string' type as this is how Packer JSON diff --git a/command/test-fixtures/hcl2_upgrade/source-name/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/source-name/expected.pkr.hcl index b5adf8ef55b..c4145f7d145 100644 --- a/command/test-fixtures/hcl2_upgrade/source-name/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/source-name/expected.pkr.hcl @@ -14,6 +14,12 @@ # See https://www.packer.io/docs/templates/hcl_templates/blocks/packer for more info packer { required_version = ">= 1.6.0" + required_plugins { + amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" + } + } } # All generated input variables will be of 'string' type as this is how Packer JSON diff --git a/command/test-fixtures/hcl2_upgrade/vsphere_linux_options_and_network_interface/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/vsphere_linux_options_and_network_interface/expected.pkr.hcl index a160d0c9eed..16c3c21ca24 100644 --- a/command/test-fixtures/hcl2_upgrade/vsphere_linux_options_and_network_interface/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/vsphere_linux_options_and_network_interface/expected.pkr.hcl @@ -1,3 +1,11 @@ +packer { + required_plugins { + vsphere = { + source = "github.com/hashicorp/vsphere" + version = "~> 1" + } + } +} source "vsphere-clone" "autogenerated_1" { RAM_reserve_all = false diff --git a/command/test-fixtures/hcl2_upgrade/without-annotations/expected.pkr.hcl b/command/test-fixtures/hcl2_upgrade/without-annotations/expected.pkr.hcl index e332c21e08f..414ebc55a96 100644 --- a/command/test-fixtures/hcl2_upgrade/without-annotations/expected.pkr.hcl +++ b/command/test-fixtures/hcl2_upgrade/without-annotations/expected.pkr.hcl @@ -1,5 +1,11 @@ packer { required_version = ">= 1.6.0" + required_plugins { + amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" 
+ } + } } variable "aws_access_key" { diff --git a/command/utils_test.go b/command/utils_test.go index 426ea653eea..0d9817aeac2 100644 --- a/command/utils_test.go +++ b/command/utils_test.go @@ -4,7 +4,6 @@ package command import ( - "io/ioutil" "log" "os" "path/filepath" @@ -23,7 +22,7 @@ func createFiles(dir string, content map[string]string) { if err := os.MkdirAll(filepath.Dir(contentPath), 0777); err != nil { panic(err) } - if err := ioutil.WriteFile(contentPath, []byte(content), 0666); err != nil { + if err := os.WriteFile(contentPath, []byte(content), 0666); err != nil { panic(err) } log.Printf("created tmp file: %s", contentPath) @@ -39,7 +38,7 @@ func (c *configDirSingleton) dir(key string) string { if v, exists := c.dirs[key]; exists { return v } - c.dirs[key] = mustString(ioutil.TempDir("", "pkr-test-cfg-dir-"+key)) + c.dirs[key] = mustString(os.MkdirTemp("", "pkr-test-cfg-dir-"+key)) return c.dirs[key] } diff --git a/command/validate.go b/command/validate.go index 1bc4cc1d7eb..2b7e2107b0d 100644 --- a/command/validate.go +++ b/command/validate.go @@ -65,9 +65,17 @@ func (c *ValidateCommand) RunContext(ctx context.Context, cla *ValidateArgs) int return 0 } - diags := packerStarter.Initialize(packer.InitializeOptions{ + diags := packerStarter.DetectPluginBinaries() + ret = writeDiags(c.Ui, nil, diags) + if ret != 0 { + return ret + } + + diags = packerStarter.Initialize(packer.InitializeOptions{ SkipDatasourcesExecution: !cla.EvaluateDatasources, }) + bundledDiags := c.DetectBundledPlugins(packerStarter) + diags = append(bundledDiags, diags...) 
ret = writeDiags(c.Ui, nil, diags) if ret != 0 { return ret diff --git a/command/vendored_plugins.go b/command/vendored_plugins.go index 79a8af171c6..d6e5f836338 100644 --- a/command/vendored_plugins.go +++ b/command/vendored_plugins.go @@ -4,6 +4,10 @@ package command import ( + "fmt" + "log" + "strings" + packersdk "github.com/hashicorp/packer-plugin-sdk/packer" // Previously core-bundled components, split into their own plugins but @@ -32,8 +36,6 @@ import ( googlecomputebuilder "github.com/hashicorp/packer-plugin-googlecompute/builder/googlecompute" googlecomputeexportpostprocessor "github.com/hashicorp/packer-plugin-googlecompute/post-processor/googlecompute-export" googlecomputeimportpostprocessor "github.com/hashicorp/packer-plugin-googlecompute/post-processor/googlecompute-import" - parallelsisobuilder "github.com/hashicorp/packer-plugin-parallels/builder/parallels/iso" - parallelspvmbuilder "github.com/hashicorp/packer-plugin-parallels/builder/parallels/pvm" qemubuilder "github.com/hashicorp/packer-plugin-qemu/builder/qemu" vagrantbuilder "github.com/hashicorp/packer-plugin-vagrant/builder/vagrant" vagrantpostprocessor "github.com/hashicorp/packer-plugin-vagrant/post-processor/vagrant" @@ -69,8 +71,6 @@ var VendoredBuilders = map[string]packersdk.Builder{ "azure-dtl": new(azuredtlbuilder.Builder), "docker": new(dockerbuilder.Builder), "googlecompute": new(googlecomputebuilder.Builder), - "parallels-iso": new(parallelsisobuilder.Builder), - "parallels-pvm": new(parallelspvmbuilder.Builder), "qemu": new(qemubuilder.Builder), "vagrant": new(vagrantbuilder.Builder), "vsphere-clone": new(vsphereclonebuilder.Builder), @@ -106,6 +106,170 @@ var VendoredPostProcessors = map[string]packersdk.PostProcessor{ "vsphere": new(vspherepostprocessor.PostProcessor), } +// bundledStatus is used to know if one of the bundled components is loaded from +// an external plugin, or from the bundled plugins. 
+// +// We keep track of this to produce a warning if a user relies on one +// such plugin, as they will be removed in a later version of Packer. +var bundledStatus = map[string]bool{ + "packer-builder-amazon-ebs": false, + "packer-builder-amazon-chroot": false, + "packer-builder-amazon-ebssurrogate": false, + "packer-builder-amazon-ebsvolume": false, + "packer-builder-amazon-instance": false, + "packer-post-processor-amazon-import": false, + "packer-datasource-amazon-ami": false, + "packer-datasource-amazon-secretsmanager": false, + + "packer-provisioner-ansible": false, + "packer-provisioner-ansible-local": false, + + "packer-provisioner-azure-dtlartifact": false, + "packer-builder-azure-arm": false, + "packer-builder-azure-chroot": false, + "packer-builder-azure-dtl": false, + + "packer-builder-docker": false, + "packer-post-processor-docker-import": false, + "packer-post-processor-docker-push": false, + "packer-post-processor-docker-save": false, + "packer-post-processor-docker-tag": false, + + "packer-builder-googlecompute": false, + "packer-post-processor-googlecompute-export": false, + "packer-post-processor-googlecompute-import": false, + + "packer-builder-qemu": false, + + "packer-builder-vagrant": false, + "packer-post-processor-vagrant": false, + "packer-post-processor-vagrant-cloud": false, + + "packer-builder-virtualbox-iso": false, + "packer-builder-virtualbox-ovf": false, + "packer-builder-virtualbox-vm": false, + + "packer-builder-vmware-iso": false, + "packer-builder-vmware-vmx": false, + + "packer-builder-vsphere-clone": false, + "packer-builder-vsphere-iso": false, + "packer-post-processor-vsphere-template": false, + "packer-post-processor-vsphere": false, +} + +// TrackBundledPlugin marks a component as loaded from Packer's bundled plugins +// instead of from an externally loaded plugin. 
+// +// NOTE: `pluginName' must be in the format `packer--' +func TrackBundledPlugin(pluginName string) { + _, exists := bundledStatus[pluginName] + if !exists { + return + } + + bundledStatus[pluginName] = true +} + +var componentPluginMap = map[string]string{ + "packer-builder-amazon-ebs": "github.com/hashicorp/amazon", + "packer-builder-amazon-chroot": "github.com/hashicorp/amazon", + "packer-builder-amazon-ebssurrogate": "github.com/hashicorp/amazon", + "packer-builder-amazon-ebsvolume": "github.com/hashicorp/amazon", + "packer-builder-amazon-instance": "github.com/hashicorp/amazon", + "packer-post-processor-amazon-import": "github.com/hashicorp/amazon", + "packer-datasource-amazon-ami": "github.com/hashicorp/amazon", + "packer-datasource-amazon-secretsmanager": "github.com/hashicorp/amazon", + + "packer-provisioner-ansible": "github.com/hashicorp/ansible", + "packer-provisioner-ansible-local": "github.com/hashicorp/ansible", + + "packer-provisioner-azure-dtlartifact": "github.com/hashicorp/azure", + "packer-builder-azure-arm": "github.com/hashicorp/azure", + "packer-builder-azure-chroot": "github.com/hashicorp/azure", + "packer-builder-azure-dtl": "github.com/hashicorp/azure", + + "packer-builder-docker": "github.com/hashicorp/docker", + "packer-post-processor-docker-import": "github.com/hashicorp/docker", + "packer-post-processor-docker-push": "github.com/hashicorp/docker", + "packer-post-processor-docker-save": "github.com/hashicorp/docker", + "packer-post-processor-docker-tag": "github.com/hashicorp/docker", + + "packer-builder-googlecompute": "github.com/hashicorp/googlecompute", + "packer-post-processor-googlecompute-export": "github.com/hashicorp/googlecompute", + "packer-post-processor-googlecompute-import": "github.com/hashicorp/googlecompute", + + "packer-builder-qemu": "github.com/hashicorp/qemu", + + "packer-builder-vagrant": "github.com/hashicorp/vagrant", + "packer-post-processor-vagrant": "github.com/hashicorp/vagrant", + 
"packer-post-processor-vagrant-cloud": "github.com/hashicorp/vagrant", + + "packer-builder-virtualbox-iso": "github.com/hashicorp/virtualbox", + "packer-builder-virtualbox-ovf": "github.com/hashicorp/virtualbox", + "packer-builder-virtualbox-vm": "github.com/hashicorp/virtualbox", + + "packer-builder-vmware-iso": "github.com/hashicorp/vmware", + "packer-builder-vmware-vmx": "github.com/hashicorp/vmware", + + "packer-builder-vsphere-clone": "github.com/hashicorp/vsphere", + "packer-builder-vsphere-iso": "github.com/hashicorp/vsphere", + "packer-post-processor-vsphere-template": "github.com/hashicorp/vsphere", + "packer-post-processor-vsphere": "github.com/hashicorp/vsphere", +} + +// compileBundledPluginList returns a list of plugins to import in a config +// +// This only works on bundled plugins and serves as a way to inform users that +// they should not rely on a bundled plugin anymore, but give them recommendations +// on how to manage those plugins instead. +func compileBundledPluginList(componentMap map[string]struct{}) []string { + plugins := map[string]struct{}{} + for component := range componentMap { + plugin, ok := componentPluginMap[component] + if !ok { + log.Printf("Unknown bundled plugin component: %q", component) + continue + } + + plugins[plugin] = struct{}{} + } + + pluginList := make([]string, 0, len(plugins)) + for plugin := range plugins { + pluginList = append(pluginList, plugin) + } + + return pluginList +} + +func generateRequiredPluginsBlock(plugins []string) string { + if len(plugins) == 0 { + return "" + } + + buf := &strings.Builder{} + buf.WriteString(` +packer { + required_plugins {`) + + for _, plugin := range plugins { + pluginName := strings.Replace(plugin, "github.com/hashicorp/", "", 1) + fmt.Fprintf(buf, ` + %s = { + source = %q + version = "~> 1" + }`, pluginName, plugin) + } + + buf.WriteString(` + } +} +`) + + return buf.String() +} + // Upon init lets load up any plugins that were vendored manually into the default // set of 
plugins. func init() { diff --git a/config.go b/config.go index 6b59b98fe63..af679dd8e0b 100644 --- a/config.go +++ b/config.go @@ -153,6 +153,7 @@ func (c *config) discoverInternalComponents() error { for builder := range command.Builders { builder := builder if !c.Plugins.Builders.Has(builder) { + command.TrackBundledPlugin(fmt.Sprintf("packer-builder-%s", builder)) bin := fmt.Sprintf("%s%splugin%spacker-builder-%s", packerPath, PACKERSPACE, PACKERSPACE, builder) c.Plugins.Builders.Set(builder, func() (packersdk.Builder, error) { @@ -164,6 +165,7 @@ func (c *config) discoverInternalComponents() error { for provisioner := range command.Provisioners { provisioner := provisioner if !c.Plugins.Provisioners.Has(provisioner) { + command.TrackBundledPlugin(fmt.Sprintf("packer-provisioner-%s", provisioner)) bin := fmt.Sprintf("%s%splugin%spacker-provisioner-%s", packerPath, PACKERSPACE, PACKERSPACE, provisioner) c.Plugins.Provisioners.Set(provisioner, func() (packersdk.Provisioner, error) { @@ -175,6 +177,7 @@ func (c *config) discoverInternalComponents() error { for postProcessor := range command.PostProcessors { postProcessor := postProcessor if !c.Plugins.PostProcessors.Has(postProcessor) { + command.TrackBundledPlugin(fmt.Sprintf("packer-post-processor-%s", postProcessor)) bin := fmt.Sprintf("%s%splugin%spacker-post-processor-%s", packerPath, PACKERSPACE, PACKERSPACE, postProcessor) c.Plugins.PostProcessors.Set(postProcessor, func() (packersdk.PostProcessor, error) { @@ -186,6 +189,7 @@ func (c *config) discoverInternalComponents() error { for dataSource := range command.Datasources { dataSource := dataSource if !c.Plugins.DataSources.Has(dataSource) { + command.TrackBundledPlugin(fmt.Sprintf("packer-datasource-%s", dataSource)) bin := fmt.Sprintf("%s%splugin%spacker-datasource-%s", packerPath, PACKERSPACE, PACKERSPACE, dataSource) c.Plugins.DataSources.Set(dataSource, func() (packersdk.Datasource, error) { diff --git a/config_test.go b/config_test.go index 
772fea4fe3b..e305a379428 100644 --- a/config_test.go +++ b/config_test.go @@ -6,7 +6,6 @@ package main import ( "encoding/json" "fmt" - "io/ioutil" "os" "path/filepath" "reflect" @@ -122,7 +121,7 @@ func TestLoadExternalComponentsFromConfig_onlyProvisioner(t *testing.T) { func TestLoadSingleComponent(t *testing.T) { // .exe will work everyone for testing purpose, but mostly here to help Window's test runs. - tmpFile, err := ioutil.TempFile(".", "packer-builder-*.exe") + tmpFile, err := os.CreateTemp(".", "packer-builder-*.exe") if err != nil { t.Fatalf("failed to create test file with error: %s", err) } @@ -160,7 +159,7 @@ func TestLoadSingleComponent(t *testing.T) { } func generateFakePlugins(dirname string, pluginNames []string) (string, []string, func(), error) { - dir, err := ioutil.TempDir("", dirname) + dir, err := os.MkdirTemp("", dirname) if err != nil { return "", nil, nil, fmt.Errorf("failed to create temporary test directory: %v", err) } diff --git a/datasource/http/data.go b/datasource/http/data.go index ec7a1b3e55c..7790baf15f1 100644 --- a/datasource/http/data.go +++ b/datasource/http/data.go @@ -8,7 +8,7 @@ package http import ( "context" "fmt" - "io/ioutil" + "io" "mime" "net/http" "regexp" @@ -137,7 +137,7 @@ func (d *Datasource) Execute() (cty.Value, error) { fmt.Println("If the content is binary data, Packer may not properly handle the contents of the response.") } - bytes, err := ioutil.ReadAll(resp.Body) + bytes, err := io.ReadAll(resp.Body) // TODO: How to make test case for this? 
if err != nil { fmt.Println("Error processing response body of call") diff --git a/datasource/http/data_acc_test.go b/datasource/http/data_acc_test.go index ffad0219048..8b49b97c6ba 100644 --- a/datasource/http/data_acc_test.go +++ b/datasource/http/data_acc_test.go @@ -6,7 +6,7 @@ package http import ( _ "embed" "fmt" - "io/ioutil" + "io" "os" "os/exec" "regexp" @@ -84,7 +84,7 @@ func TestHttpDataSource(t *testing.T) { } defer logs.Close() - logsBytes, err := ioutil.ReadAll(logs) + logsBytes, err := io.ReadAll(logs) if err != nil { return fmt.Errorf("Unable to read %s", logfile) } diff --git a/go.mod b/go.mod index bc42114765a..f0b2dcbaa54 100644 --- a/go.mod +++ b/go.mod @@ -38,38 +38,39 @@ require ( github.com/mitchellh/panicwrap v1.0.0 github.com/mitchellh/prefixedio v0.0.0-20151214002211-6e6954073784 github.com/packer-community/winrmcp v0.0.0-20180921211025-c76d91c1e7db // indirect - github.com/pierrec/lz4 v2.6.1+incompatible + github.com/pierrec/lz4 v2.6.1+incompatible // indirect github.com/pkg/sftp v1.13.2 // indirect github.com/posener/complete v1.2.3 github.com/stretchr/testify v1.8.2 github.com/ulikunitz/xz v0.5.10 github.com/zclconf/go-cty v1.10.0 github.com/zclconf/go-cty-yaml v1.0.1 - golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d // indirect + golang.org/x/crypto v0.11.0 // indirect golang.org/x/mod v0.8.0 - golang.org/x/net v0.8.0 + golang.org/x/net v0.13.0 golang.org/x/oauth2 v0.1.0 golang.org/x/sync v0.1.0 - golang.org/x/sys v0.7.0 // indirect - golang.org/x/term v0.6.0 // indirect - golang.org/x/text v0.8.0 // indirect + golang.org/x/sys v0.10.0 // indirect + golang.org/x/term v0.10.0 // indirect + golang.org/x/text v0.11.0 // indirect golang.org/x/tools v0.6.0 google.golang.org/api v0.101.0 // indirect google.golang.org/grpc v1.50.1 ) require ( + github.com/go-openapi/strfmt v0.21.3 github.com/hashicorp/packer-plugin-ansible v1.0.3 github.com/hashicorp/packer-plugin-azure v1.4.0 github.com/hashicorp/packer-plugin-docker v1.0.8 
github.com/hashicorp/packer-plugin-googlecompute v1.1.0 - github.com/hashicorp/packer-plugin-parallels v1.0.3 github.com/hashicorp/packer-plugin-qemu v1.0.9 github.com/hashicorp/packer-plugin-vagrant v1.0.3 github.com/hashicorp/packer-plugin-virtualbox v1.0.4 github.com/hashicorp/packer-plugin-vmware v1.0.7 github.com/hashicorp/packer-plugin-vsphere v1.1.1 github.com/oklog/ulid v1.3.1 + github.com/pierrec/lz4/v4 v4.1.18 github.com/shirou/gopsutil/v3 v3.23.4 ) @@ -127,7 +128,6 @@ require ( github.com/go-openapi/jsonreference v0.20.0 // indirect github.com/go-openapi/loads v0.21.2 // indirect github.com/go-openapi/spec v0.20.8 // indirect - github.com/go-openapi/strfmt v0.21.3 // indirect github.com/go-openapi/swag v0.22.3 // indirect github.com/go-openapi/validate v0.22.1 // indirect github.com/gofrs/uuid v4.0.0+incompatible // indirect diff --git a/go.sum b/go.sum index bd6a68ceadb..34f7c200dc5 100644 --- a/go.sum +++ b/go.sum @@ -457,8 +457,6 @@ github.com/hashicorp/packer-plugin-docker v1.0.8 h1:UWPG/pl+1RFsaNQVhEuowCeOZuES github.com/hashicorp/packer-plugin-docker v1.0.8/go.mod h1:4U3gHULbUw3okSqqZgQZD5ptyJKs0S7LfOOt2U3V4Jk= github.com/hashicorp/packer-plugin-googlecompute v1.1.0 h1:/cSZCJuRV6osaSa1uOy8cpN+c/uiCbrSsZ8vyNC0slk= github.com/hashicorp/packer-plugin-googlecompute v1.1.0/go.mod h1:k7MhKwEDw9ASP3a1y1syKJFZiZ8pO4oH40HvVgGHzUE= -github.com/hashicorp/packer-plugin-parallels v1.0.3 h1:smypphUCEj3arCdlvbNtZvGC1ujsUSRtN1MBvZHt/w8= -github.com/hashicorp/packer-plugin-parallels v1.0.3/go.mod h1:Q842nvosVmP5FnYozk8ZZj93HGIan19jHTxGeteBCb0= github.com/hashicorp/packer-plugin-qemu v1.0.9 h1:1YKBBzBULYUBWtpAZJTbaLjjZPAdQ63okkpTqMBTnzM= github.com/hashicorp/packer-plugin-qemu v1.0.9/go.mod h1:BpWIpVpOoPFV9Ppmzq4DP/S0QNoh1R+7DUCqxHdXc+Y= github.com/hashicorp/packer-plugin-sdk v0.4.0 h1:UyLYe0y02D9wkOQ3FeeZWyFg2+mx2vLuWRGUL5xt50I= @@ -645,6 +643,8 @@ github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi github.com/pierrec/lz4 
v2.5.2+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM= github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= +github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -795,8 +795,9 @@ golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= +golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4= @@ -844,8 +845,8 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b golang.org/x/net 
v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ= -golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.13.0 h1:Nvo8UFsZ8X3BhAC9699Z1j7XQ3rsZnUUm7jfBEk1ueY= +golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.1.0 h1:isLCZuhj4v+tYv7eskaN4v/TM+A1begWWgyVJDdl1+Y= golang.org/x/oauth2 v0.1.0/go.mod h1:G9FE4dLTsbXUu90h/Pf85g4w1D+SSAgR+q46nJZ8M4A= @@ -905,21 +906,22 @@ golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= +golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.6.0 h1:clScbb1cHjoCkyRbWwBEUZ5H/tIFu5TAXIqaZD0Gcjw= -golang.org/x/term 
v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= +golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68= -golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= +golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 h1:vVKdlvoWBphwdxWKrFZEuM0kGgGLxUOYcY4U/2Vjg44= diff --git a/hcl2template/formatter.go b/hcl2template/formatter.go index 7e1cab56aa3..171d919b89a 100644 --- a/hcl2template/formatter.go +++ b/hcl2template/formatter.go @@ -7,7 +7,6 @@ import ( "bytes" "fmt" "io" - "io/ioutil" "os" "os/exec" "path/filepath" @@ -76,7 +75,7 @@ func (f *HCL2Formatter) Format(path string) (int, hcl.Diagnostics) { return f.formatFile(path, diags, bytesModified) } - fileInfos, err := ioutil.ReadDir(path) + fileInfos, err := os.ReadDir(path) if err != nil { diag := &hcl.Diagnostic{ Severity: hcl.DiagError, @@ -129,7 +128,7 @@ func (f *HCL2Formatter) processFile(filename string) ([]byte, error) { } } - inSrc, err := ioutil.ReadAll(in) + 
inSrc, err := io.ReadAll(in) if err != nil { return nil, fmt.Errorf("failed to read %s: %s", filename, err) } @@ -158,7 +157,7 @@ func (f *HCL2Formatter) processFile(filename string) ([]byte, error) { if filename == "-" { _, _ = f.Output.Write(outSrc) } else { - if err := ioutil.WriteFile(filename, outSrc, 0644); err != nil { + if err := os.WriteFile(filename, outSrc, 0644); err != nil { return nil, err } } @@ -178,14 +177,14 @@ func (f *HCL2Formatter) processFile(filename string) ([]byte, error) { // bytesDiff returns the unified diff of b1 and b2 // Shamelessly copied from Terraform's fmt command. func bytesDiff(b1, b2 []byte, path string) (data []byte, err error) { - f1, err := ioutil.TempFile("", "") + f1, err := os.CreateTemp("", "") if err != nil { return } defer os.Remove(f1.Name()) defer f1.Close() - f2, err := ioutil.TempFile("", "") + f2, err := os.CreateTemp("", "") if err != nil { return } diff --git a/hcl2template/formatter_test.go b/hcl2template/formatter_test.go index 42051c0a8d3..07fc88c5a0f 100644 --- a/hcl2template/formatter_test.go +++ b/hcl2template/formatter_test.go @@ -5,7 +5,6 @@ package hcl2template import ( "bytes" - "io/ioutil" "os" "os/exec" "strings" @@ -48,12 +47,12 @@ func TestHCL2Formatter_Format_Write(t *testing.T) { f.Output = &buf f.Write = true - unformattedData, err := ioutil.ReadFile("testdata/format/unformatted.pkr.hcl") + unformattedData, err := os.ReadFile("testdata/format/unformatted.pkr.hcl") if err != nil { t.Fatalf("failed to open the unformatted fixture %s", err) } - tf, err := ioutil.TempFile("", "*.pkr.hcl") + tf, err := os.CreateTemp("", "*.pkr.hcl") if err != nil { t.Fatalf("failed to create tempfile for test %s", err) } @@ -68,12 +67,12 @@ func TestHCL2Formatter_Format_Write(t *testing.T) { } //lets re-read the tempfile which should now be formatted - data, err := ioutil.ReadFile(tf.Name()) + data, err := os.ReadFile(tf.Name()) if err != nil { t.Fatalf("failed to open the newly formatted fixture %s", err) } - 
formattedData, err := ioutil.ReadFile("testdata/format/formatted.pkr.hcl") + formattedData, err := os.ReadFile("testdata/format/formatted.pkr.hcl") if err != nil { t.Fatalf("failed to open the formatted fixture %s", err) } diff --git a/hcl2template/parser.go b/hcl2template/parser.go index 2246131fa88..f32df189242 100644 --- a/hcl2template/parser.go +++ b/hcl2template/parser.go @@ -163,28 +163,14 @@ func (p *Parser) Parse(filename string, varFiles []string, argVars map[string]st return cfg, diags } - // Decode required_plugins blocks and create implicit required_plugins - // blocks. Implicit required_plugins blocks happen when a builder or another - // plugin cannot be found, for example if one uses : - // source "amazon-ebs" "example" { ... } - // And no `amazon-ebs` builder can be found. This will then be the - // equivalent of having : - // packer { - // required_plugins { - // amazon = { - // version = "latest" - // source = "github.com/hashicorp/amazon" - // } - // } + // Decode required_plugins blocks. + // // Note: using `latest` ( or actually an empty string ) in a config file // does not work and packer will ask you to pick a version { for _, file := range files { diags = append(diags, cfg.decodeRequiredPluginsBlock(file)...) } - for _, file := range files { - diags = append(diags, cfg.decodeImplicitRequiredPluginsBlocks(file)...) - } } // Decode variable blocks so that they are available later on. Here locals @@ -308,19 +294,7 @@ func filterVarsFromLogs(inputOrLocal Variables) { } func (cfg *PackerConfig) Initialize(opts packer.InitializeOptions) hcl.Diagnostics { - var diags hcl.Diagnostics - - // enable packer to start plugins requested in required_plugins. - moreDiags := cfg.detectPluginBinaries() - diags = append(diags, moreDiags...) - if moreDiags.HasErrors() { - return diags - } - - moreDiags = cfg.InputVariables.ValidateValues() - diags = append(diags, moreDiags...) 
- moreDiags = cfg.LocalVariables.ValidateValues() - diags = append(diags, moreDiags...) + diags := cfg.InputVariables.ValidateValues() diags = append(diags, cfg.evaluateDatasources(opts.SkipDatasourcesExecution)...) diags = append(diags, checkForDuplicateLocalDefinition(cfg.LocalBlocks)...) diags = append(diags, cfg.evaluateLocalVariables(cfg.LocalBlocks)...) diff --git a/hcl2template/plugin.go b/hcl2template/plugin.go index bd1080b836b..ac109ee7911 100644 --- a/hcl2template/plugin.go +++ b/hcl2template/plugin.go @@ -8,6 +8,7 @@ import ( "fmt" "log" "runtime" + "strings" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/packer-plugin-sdk/didyoumean" @@ -44,7 +45,6 @@ func (cfg *PackerConfig) PluginRequirements() (plugingetter.Requirements, hcl.Di Accessor: name, Identifier: block.Type, VersionConstraints: block.Requirement.Required, - Implicit: block.PluginDependencyReason == PluginDependencyImplicit, }) uniq[name] = block } @@ -54,7 +54,7 @@ func (cfg *PackerConfig) PluginRequirements() (plugingetter.Requirements, hcl.Di return reqs, diags } -func (cfg *PackerConfig) detectPluginBinaries() hcl.Diagnostics { +func (cfg *PackerConfig) DetectPluginBinaries() hcl.Diagnostics { opts := plugingetter.ListInstallationsOptions{ FromFolders: cfg.parser.PluginConfig.KnownPluginFolders, BinaryInstallationOptions: plugingetter.BinaryInstallationOptions{ @@ -77,6 +77,8 @@ func (cfg *PackerConfig) detectPluginBinaries() hcl.Diagnostics { return diags } + uninstalledPlugins := map[string]string{} + for _, pluginRequirement := range pluginReqs { sortedInstalls, err := pluginRequirement.ListInstallations(opts) if err != nil { @@ -88,11 +90,7 @@ func (cfg *PackerConfig) detectPluginBinaries() hcl.Diagnostics { continue } if len(sortedInstalls) == 0 { - diags = append(diags, &hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: fmt.Sprintf("no plugin installed for %s %v", pluginRequirement.Identifier, pluginRequirement.VersionConstraints.String()), - Detail: "Did you run packer 
init for this project ?", - }) + uninstalledPlugins[pluginRequirement.Identifier.String()] = pluginRequirement.VersionConstraints.String() continue } log.Printf("[TRACE] Found the following %q installations: %v", pluginRequirement.Identifier, sortedInstalls) @@ -108,6 +106,20 @@ func (cfg *PackerConfig) detectPluginBinaries() hcl.Diagnostics { } } + if len(uninstalledPlugins) > 0 { + detailMessage := &strings.Builder{} + detailMessage.WriteString("The following plugins are required, but not installed:\n\n") + for pluginName, pluginVersion := range uninstalledPlugins { + fmt.Fprintf(detailMessage, "* %s %s\n", pluginName, pluginVersion) + } + detailMessage.WriteString("\nDid you run packer init for this project ?") + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Missing plugins", + Detail: detailMessage.String(), + }) + } + return diags } diff --git a/hcl2template/types.datasource.go b/hcl2template/types.datasource.go index 9ae5e995ecc..68df0a695e8 100644 --- a/hcl2template/types.datasource.go +++ b/hcl2template/types.datasource.go @@ -10,7 +10,6 @@ import ( "github.com/hashicorp/hcl/v2/hclsyntax" packersdk "github.com/hashicorp/packer-plugin-sdk/packer" hcl2shim "github.com/hashicorp/packer/hcl2template/shim" - "github.com/hashicorp/packer/packer" "github.com/zclconf/go-cty/cty" ) @@ -45,7 +44,7 @@ func (ds *Datasources) Values() (map[string]cty.Value, hcl.Diagnostics) { for ref, datasource := range *ds { if datasource.value == (cty.Value{}) { diags = append(diags, &hcl.Diagnostic{ - Summary: fmt.Sprintf("empty value"), + Summary: "empty value", Subject: &datasource.block.DefRange, Severity: hcl.DiagError, }) @@ -65,23 +64,25 @@ func (ds *Datasources) Values() (map[string]cty.Value, hcl.Diagnostics) { return res, diags } -func (cfg *PackerConfig) startDatasource(dataSourceStore packer.DatasourceStore, ref DatasourceRef, secondaryEvaluation bool) (packersdk.Datasource, hcl.Diagnostics) { +func (cfg *PackerConfig) startDatasource(ds 
DatasourceBlock) (packersdk.Datasource, hcl.Diagnostics) { var diags hcl.Diagnostics - block := cfg.Datasources[ref].block + block := ds.block + + dataSourceStore := cfg.parser.PluginConfig.DataSources if dataSourceStore == nil { diags = append(diags, &hcl.Diagnostic{ - Summary: "Unknown " + dataSourceLabel + " type " + ref.Type, + Summary: "Unknown " + dataSourceLabel + " type " + ds.Type, Subject: block.LabelRanges[0].Ptr(), - Detail: fmt.Sprintf("packer does not currently know any data source."), + Detail: "packer does not currently know any data source.", Severity: hcl.DiagError, }) return nil, diags } - if !dataSourceStore.Has(ref.Type) { + if !dataSourceStore.Has(ds.Type) { diags = append(diags, &hcl.Diagnostic{ - Summary: "Unknown " + dataSourceLabel + " type " + ref.Type, + Summary: "Unknown " + dataSourceLabel + " type " + ds.Type, Subject: block.LabelRanges[0].Ptr(), Detail: fmt.Sprintf("known data sources: %v", dataSourceStore.List()), Severity: hcl.DiagError, @@ -89,7 +90,7 @@ func (cfg *PackerConfig) startDatasource(dataSourceStore packer.DatasourceStore, return nil, diags } - datasource, err := dataSourceStore.Start(ref.Type) + datasource, err := dataSourceStore.Start(ds.Type) if err != nil { diags = append(diags, &hcl.Diagnostic{ Summary: err.Error(), @@ -99,7 +100,7 @@ func (cfg *PackerConfig) startDatasource(dataSourceStore packer.DatasourceStore, } if datasource == nil { diags = append(diags, &hcl.Diagnostic{ - Summary: fmt.Sprintf("failed to start datasource plugin %q.%q", ref.Type, ref.Name), + Summary: fmt.Sprintf("failed to start datasource plugin %q.%q", ds.Type, ds.Name), Subject: &block.DefRange, Severity: hcl.DiagError, }) diff --git a/hcl2template/types.packer_config.go b/hcl2template/types.packer_config.go index 84d1fe8de7d..79c17825478 100644 --- a/hcl2template/types.packer_config.go +++ b/hcl2template/types.packer_config.go @@ -5,7 +5,6 @@ package hcl2template import ( "fmt" - "log" "sort" "strings" @@ -123,8 +122,6 @@ func (cfg 
*PackerConfig) EvalContext(ctx BlockContext, variables map[string]cty. iterID, ok := cfg.HCPVars["iterationID"] if ok { - log.Printf("iterationID set: %q", iterID) - ectx.Variables[packerAccessor] = cty.ObjectVal(map[string]cty.Value{ "version": cty.StringVal(cfg.CorePackerVersionString), "iterationID": iterID, @@ -313,76 +310,26 @@ func (cfg *PackerConfig) evaluateDatasources(skipExecution bool) hcl.Diagnostics // source in any of its input expressions. If so, skip evaluating it for // now, and add it to a list of datasources to evaluate again, later, // with the datasources in its context. - // This is essentially creating a very primitive DAG just for data - // source interdependencies. - block := ds.block - body := block.Body - attrs, _ := body.JustAttributes() - - skipFirstEval := false - for _, attr := range attrs { - vars := attr.Expr.Variables() - for _, v := range vars { - // check whether the variable is a data source - if v.RootName() == "data" { - // construct, backwards, the data source type and name we - // need to evaluate before this one can be evaluated. - dependsOn := DatasourceRef{ - Type: v[1].(hcl.TraverseAttr).Name, - Name: v[2].(hcl.TraverseAttr).Name, - } - log.Printf("The data source %#v depends on datasource %#v", ref, dependsOn) - if dependencies[ref] != nil { - dependencies[ref] = append(dependencies[ref], dependsOn) - } else { - dependencies[ref] = []DatasourceRef{dependsOn} - } - skipFirstEval = true - } + dependencies[ref] = []DatasourceRef{} + + // Note: when looking at the expressions, we only need to care about + // attributes, as HCL2 expressions are not allowed in a block's labels. + vars := GetVarsByType(ds.block, "data") + for _, v := range vars { + // construct, backwards, the data source type and name we + // need to evaluate before this one can be evaluated. 
+ dependsOn := DatasourceRef{ + Type: v[1].(hcl.TraverseAttr).Name, + Name: v[2].(hcl.TraverseAttr).Name, } + dependencies[ref] = append(dependencies[ref], dependsOn) } - - // Now we have a list of data sources that depend on other data sources. - // Don't evaluate these; only evaluate data sources that we didn't - // mark as having dependencies. - if skipFirstEval { - continue - } - - datasource, startDiags := cfg.startDatasource(cfg.parser.PluginConfig.DataSources, ref, false) - diags = append(diags, startDiags...) - if diags.HasErrors() { - continue - } - - if skipExecution { - placeholderValue := cty.UnknownVal(hcldec.ImpliedType(datasource.OutputSpec())) - ds.value = placeholderValue - cfg.Datasources[ref] = ds - continue - } - - dsOpts, _ := decodeHCL2Spec(body, cfg.EvalContext(DatasourceContext, nil), datasource) - sp := packer.CheckpointReporter.AddSpan(ref.Type, "datasource", dsOpts) - realValue, err := datasource.Execute() - sp.End(err) - if err != nil { - diags = append(diags, &hcl.Diagnostic{ - Summary: err.Error(), - Subject: &cfg.Datasources[ref].block.DefRange, - Severity: hcl.DiagError, - }) - continue - } - - ds.value = realValue - cfg.Datasources[ref] = ds } // Now that most of our data sources have been started and executed, we can // try to execute the ones that depend on other data sources. for ref := range dependencies { - _, moreDiags, _ := cfg.recursivelyEvaluateDatasources(ref, dependencies, skipExecution, 0) + _, moreDiags := cfg.recursivelyEvaluateDatasources(ref, dependencies, skipExecution, 0) // Deduplicate diagnostics to prevent recursion messes. 
cleanedDiags := map[string]*hcl.Diagnostic{} for _, diag := range moreDiags { @@ -397,10 +344,9 @@ func (cfg *PackerConfig) evaluateDatasources(skipExecution bool) hcl.Diagnostics return diags } -func (cfg *PackerConfig) recursivelyEvaluateDatasources(ref DatasourceRef, dependencies map[DatasourceRef][]DatasourceRef, skipExecution bool, depth int) (map[DatasourceRef][]DatasourceRef, hcl.Diagnostics, bool) { +func (cfg *PackerConfig) recursivelyEvaluateDatasources(ref DatasourceRef, dependencies map[DatasourceRef][]DatasourceRef, skipExecution bool, depth int) (map[DatasourceRef][]DatasourceRef, hcl.Diagnostics) { var diags hcl.Diagnostics var moreDiags hcl.Diagnostics - shouldContinue := true if depth > 10 { // Add a comment about recursion. @@ -411,8 +357,9 @@ func (cfg *PackerConfig) recursivelyEvaluateDatasources(ref DatasourceRef, depen "sources. Either your data source depends on more than ten " + "other data sources, or your data sources have a cyclic " + "dependency. Please simplify your config to continue. ", + Subject: &(cfg.Datasources[ref]).block.DefRange, }) - return dependencies, diags, false + return dependencies, diags } ds := cfg.Datasources[ref] @@ -423,28 +370,28 @@ func (cfg *PackerConfig) recursivelyEvaluateDatasources(ref DatasourceRef, depen // If this dependency is not in the map, it means we've already // launched and executed this datasource. Otherwise, it means // we still need to run it. RECURSION TIME!! - dependencies, moreDiags, shouldContinue = cfg.recursivelyEvaluateDatasources(dep, dependencies, skipExecution, depth) + dependencies, moreDiags = cfg.recursivelyEvaluateDatasources(dep, dependencies, skipExecution, depth) diags = append(diags, moreDiags...) if moreDiags.HasErrors() { diags = append(diags, moreDiags...) - return dependencies, diags, shouldContinue + return dependencies, diags } } } // If we've gotten here, then it means ref doesn't seem to have any further // dependencies we need to evaluate first. 
Evaluate it, with the cfg's full // data source context. - datasource, startDiags := cfg.startDatasource(cfg.parser.PluginConfig.DataSources, ref, true) + datasource, startDiags := cfg.startDatasource(ds) if startDiags.HasErrors() { diags = append(diags, startDiags...) - return dependencies, diags, shouldContinue + return dependencies, diags } if skipExecution { placeholderValue := cty.UnknownVal(hcldec.ImpliedType(datasource.OutputSpec())) ds.value = placeholderValue cfg.Datasources[ref] = ds - return dependencies, diags, shouldContinue + return dependencies, diags } opts, _ := decodeHCL2Spec(ds.block.Body, cfg.EvalContext(DatasourceContext, nil), datasource) @@ -457,14 +404,14 @@ func (cfg *PackerConfig) recursivelyEvaluateDatasources(ref DatasourceRef, depen Subject: &cfg.Datasources[ref].block.DefRange, Severity: hcl.DiagError, }) - return dependencies, diags, shouldContinue + return dependencies, diags } ds.value = realValue cfg.Datasources[ref] = ds // remove ref from the dependencies map. delete(dependencies, ref) - return dependencies, diags, shouldContinue + return dependencies, diags } // getCoreBuildProvisioners takes a list of provisioner block, starts according diff --git a/hcl2template/types.required_plugins.go b/hcl2template/types.required_plugins.go index 01c090428f8..179434c29f0 100644 --- a/hcl2template/types.required_plugins.go +++ b/hcl2template/types.required_plugins.go @@ -9,7 +9,6 @@ import ( "github.com/hashicorp/go-version" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/packer/hcl2template/addrs" - "github.com/hashicorp/packer/packer" "github.com/zclconf/go-cty/cty" ) @@ -44,127 +43,6 @@ func (cfg *PackerConfig) decodeRequiredPluginsBlock(f *hcl.File) hcl.Diagnostics return diags } -func (cfg *PackerConfig) decodeImplicitRequiredPluginsBlocks(f *hcl.File) hcl.Diagnostics { - // when a plugin is used but not available it should be 'implicitly - // required'. Here we read common configuration blocks to try to guess - // plugin usages. 
- - // decodeRequiredPluginsBlock needs to be called before - // decodeImplicitRequiredPluginsBlocks; otherwise all required plugins will - // be implicitly required too. - - var diags hcl.Diagnostics - - content, moreDiags := f.Body.Content(configSchema) - diags = append(diags, moreDiags...) - - for _, block := range content.Blocks { - - switch block.Type { - case sourceLabel: - diags = append(diags, cfg.decodeImplicitRequiredPluginsBlock(Builder, block)...) - case dataSourceLabel: - diags = append(diags, cfg.decodeImplicitRequiredPluginsBlock(Datasource, block)...) - case buildLabel: - content, _, moreDiags := block.Body.PartialContent(buildSchema) - diags = append(diags, moreDiags...) - for _, block := range content.Blocks { - - switch block.Type { - case buildProvisionerLabel: - diags = append(diags, cfg.decodeImplicitRequiredPluginsBlock(Provisioner, block)...) - case buildPostProcessorLabel: - diags = append(diags, cfg.decodeImplicitRequiredPluginsBlock(PostProcessor, block)...) - case buildPostProcessorsLabel: - content, _, moreDiags := block.Body.PartialContent(postProcessorsSchema) - diags = append(diags, moreDiags...) - for _, block := range content.Blocks { - - switch block.Type { - case buildPostProcessorLabel: - diags = append(diags, cfg.decodeImplicitRequiredPluginsBlock(PostProcessor, block)...) - } - } - } - } - - } - } - return diags -} - -func (cfg *PackerConfig) decodeImplicitRequiredPluginsBlock(k ComponentKind, block *hcl.Block) hcl.Diagnostics { - if len(block.Labels) == 0 { - // malformed block ? Let's not panic :) - return nil - } - // Currently all block types are `type "component-kind" ["name"] {` - // this makes this simple. 
- componentName := block.Labels[0] - - store := map[ComponentKind]packer.BasicStore{ - Builder: cfg.parser.PluginConfig.Builders, - PostProcessor: cfg.parser.PluginConfig.PostProcessors, - Provisioner: cfg.parser.PluginConfig.Provisioners, - Datasource: cfg.parser.PluginConfig.DataSources, - }[k] - if store.Has(componentName) { - // If any core or pre-loaded plugin defines the `happycloud-uploader` - // pp, skip. This happens for core and manually installed plugins, as - // they will be listed in the PluginConfig before parsing any HCL. - return nil - } - - redirect := map[ComponentKind]map[string]string{ - Builder: cfg.parser.PluginConfig.BuilderRedirects, - PostProcessor: cfg.parser.PluginConfig.PostProcessorRedirects, - Provisioner: cfg.parser.PluginConfig.ProvisionerRedirects, - Datasource: cfg.parser.PluginConfig.DatasourceRedirects, - }[k][componentName] - - if redirect == "" { - // no known redirect for this component - return nil - } - - redirectAddr, diags := addrs.ParsePluginSourceString(redirect) - if diags.HasErrors() { - // This should never happen, since the map is manually filled. - return diags - } - - for _, req := range cfg.Packer.RequiredPlugins { - if _, found := req.RequiredPlugins[redirectAddr.Type]; found { - // This could happen if a plugin was forked. For example, I forked - // the github.com/hashicorp/happycloud plugin into - // github.com/azr/happycloud that is required in my config file; and - // am using the `happycloud-uploader` pp component from it. In that - // case - and to avoid miss-requires - we won't implicitly import - // any other `happycloud` plugin. 
- return nil - } - } - - cfg.implicitlyRequirePlugin(redirectAddr) - return nil -} - -func (cfg *PackerConfig) implicitlyRequirePlugin(plugin *addrs.Plugin) { - cfg.Packer.RequiredPlugins = append(cfg.Packer.RequiredPlugins, &RequiredPlugins{ - RequiredPlugins: map[string]*RequiredPlugin{ - plugin.Type: { - Name: plugin.Type, - Source: plugin.String(), - Type: plugin, - Requirement: VersionConstraint{ - Required: nil, // means latest - }, - PluginDependencyReason: PluginDependencyImplicit, - }, - }, - }) -} - // RequiredPlugin represents a declaration of a dependency on a particular // Plugin version or source. type RequiredPlugin struct { @@ -177,24 +55,8 @@ type RequiredPlugin struct { Type *addrs.Plugin Requirement VersionConstraint DeclRange hcl.Range - PluginDependencyReason } -// PluginDependencyReason is an enumeration of reasons why a dependency might be -// present. -type PluginDependencyReason int - -const ( - // PluginDependencyExplicit means that there is an explicit - // "required_plugin" block in the configuration. - PluginDependencyExplicit PluginDependencyReason = iota - - // PluginDependencyImplicit means that there is no explicit - // "required_plugin" block but there is at least one resource that uses this - // plugin. 
- PluginDependencyImplicit -) - type RequiredPlugins struct { RequiredPlugins map[string]*RequiredPlugin DeclRange hcl.Range diff --git a/hcl2template/types.required_plugins_test.go b/hcl2template/types.required_plugins_test.go index 3a1fabd4bf7..7d832c4788f 100644 --- a/hcl2template/types.required_plugins_test.go +++ b/hcl2template/types.required_plugins_test.go @@ -46,7 +46,6 @@ func TestPackerConfig_required_plugin_parse(t *testing.T) { Requirement: VersionConstraint{ Required: mustVersionConstraints(version.NewConstraint("~> v1.2.3")), }, - PluginDependencyReason: PluginDependencyExplicit, }, }}, }, @@ -77,19 +76,13 @@ func TestPackerConfig_required_plugin_parse(t *testing.T) { Requirement: VersionConstraint{ Required: mustVersionConstraints(version.NewConstraint("~> v1.2.3")), }, - PluginDependencyReason: PluginDependencyExplicit, }, }}, }, }, }}, {"required_plugin_forked", PackerConfig{ - parser: getBasicParser(func(p *Parser) { - p.PluginConfig.BuilderRedirects = map[string]string{ - "amazon-chroot": "github.com/hashicorp/amazon", - } - }, - )}, ` + parser: getBasicParser(func(p *Parser) {})}, ` packer { required_plugins { amazon = { @@ -114,19 +107,13 @@ func TestPackerConfig_required_plugin_parse(t *testing.T) { Requirement: VersionConstraint{ Required: mustVersionConstraints(version.NewConstraint("~> v1.2.3")), }, - PluginDependencyReason: PluginDependencyExplicit, }, }}, }, }, }}, {"missing-required-plugin-for-pre-defined-builder", PackerConfig{ - parser: getBasicParser(func(p *Parser) { - p.PluginConfig.BuilderRedirects = map[string]string{ - "amazon-ebs": "github.com/hashicorp/amazon", - } - }, - )}, + parser: getBasicParser(func(p *Parser) {})}, ` packer { }`, ` @@ -143,202 +130,6 @@ func TestPackerConfig_required_plugin_parse(t *testing.T) { RequiredPlugins: nil, }, }}, - {"missing-required-plugin-for-builder", PackerConfig{ - parser: getBasicParser(func(p *Parser) { - p.PluginConfig.BuilderRedirects = map[string]string{ - "amazon-chroot": 
"github.com/hashicorp/amazon", - } - }, - )}, - ` - packer { - }`, ` - source "amazon-chroot" "example" { - } - `, - false, - PackerConfig{ - Packer: struct { - VersionConstraints []VersionConstraint - RequiredPlugins []*RequiredPlugins - }{ - RequiredPlugins: []*RequiredPlugins{ - {RequiredPlugins: map[string]*RequiredPlugin{ - "amazon": { - Name: "amazon", - Source: "github.com/hashicorp/amazon", - Type: &addrs.Plugin{Hostname: "github.com", Namespace: "hashicorp", Type: "amazon"}, - Requirement: VersionConstraint{ - Required: nil, - }, - PluginDependencyReason: PluginDependencyImplicit, - }, - }}, - }, - }, - }}, - {"missing-required-plugin-for-provisioner", PackerConfig{ - parser: getBasicParser(func(p *Parser) { - p.PluginConfig.ProvisionerRedirects = map[string]string{ - "ansible-local": "github.com/ansible/ansible", - } - }, - )}, - ` - packer { - }`, ` - build { - provisioner "ansible-local" {} - } - `, - false, - PackerConfig{ - Packer: struct { - VersionConstraints []VersionConstraint - RequiredPlugins []*RequiredPlugins - }{ - RequiredPlugins: []*RequiredPlugins{ - {RequiredPlugins: map[string]*RequiredPlugin{ - "ansible": { - Name: "ansible", - Source: "github.com/ansible/ansible", - Type: &addrs.Plugin{Hostname: "github.com", Namespace: "ansible", Type: "ansible"}, - Requirement: VersionConstraint{ - Required: nil, - }, - PluginDependencyReason: PluginDependencyImplicit, - }, - }}, - }, - }, - }}, - {"missing-required-plugin-for-post-processor", PackerConfig{ - parser: getBasicParser(func(p *Parser) { - p.PluginConfig.PostProcessorRedirects = map[string]string{ - "docker-push": "github.com/hashicorp/docker", - } - }, - )}, - ` - packer { - }`, ` - build { - post-processor "docker-push" {} - } - `, - false, - PackerConfig{ - Packer: struct { - VersionConstraints []VersionConstraint - RequiredPlugins []*RequiredPlugins - }{ - RequiredPlugins: []*RequiredPlugins{ - {RequiredPlugins: map[string]*RequiredPlugin{ - "docker": { - Name: "docker", - Source: 
"github.com/hashicorp/docker", - Type: &addrs.Plugin{Hostname: "github.com", Namespace: "hashicorp", Type: "docker"}, - Requirement: VersionConstraint{ - Required: nil, - }, - PluginDependencyReason: PluginDependencyImplicit, - }, - }}, - }, - }, - }}, - {"missing-required-plugin-for-nested-post-processor", PackerConfig{ - parser: getBasicParser(func(p *Parser) { - p.PluginConfig.PostProcessorRedirects = map[string]string{ - "docker-push": "github.com/hashicorp/docker", - } - }, - )}, - ` - packer { - }`, ` - build { - post-processors { - post-processor "docker-push" { - } - } - } - `, - false, - PackerConfig{ - Packer: struct { - VersionConstraints []VersionConstraint - RequiredPlugins []*RequiredPlugins - }{ - RequiredPlugins: []*RequiredPlugins{ - {RequiredPlugins: map[string]*RequiredPlugin{ - "docker": { - Name: "docker", - Source: "github.com/hashicorp/docker", - Type: &addrs.Plugin{Hostname: "github.com", Namespace: "hashicorp", Type: "docker"}, - Requirement: VersionConstraint{ - Required: nil, - }, - PluginDependencyReason: PluginDependencyImplicit, - }, - }}, - }, - }, - }}, - - {"required-plugin-renamed", PackerConfig{ - parser: getBasicParser(func(p *Parser) { - p.PluginConfig.BuilderRedirects = map[string]string{ - "amazon-chroot": "github.com/hashicorp/amazon", - } - }, - )}, - ` - packer { - required_plugins { - amazon-v1 = { - source = "github.com/hashicorp/amazon" - version = "~> v1.0" - } - } - }`, ` - source "amazon-v1-chroot" "example" { - } - source "amazon-chroot" "example" { - } - `, - false, - PackerConfig{ - Packer: struct { - VersionConstraints []VersionConstraint - RequiredPlugins []*RequiredPlugins - }{ - RequiredPlugins: []*RequiredPlugins{ - {RequiredPlugins: map[string]*RequiredPlugin{ - "amazon-v1": { - Name: "amazon-v1", - Source: "github.com/hashicorp/amazon", - Type: &addrs.Plugin{Hostname: "github.com", Namespace: "hashicorp", Type: "amazon"}, - Requirement: VersionConstraint{ - Required: 
mustVersionConstraints(version.NewConstraint("~> v1.0")), - }, - PluginDependencyReason: PluginDependencyExplicit, - }, - }}, - {RequiredPlugins: map[string]*RequiredPlugin{ - "amazon": { - Name: "amazon", - Source: "github.com/hashicorp/amazon", - Type: &addrs.Plugin{Hostname: "github.com", Namespace: "hashicorp", Type: "amazon"}, - Requirement: VersionConstraint{ - Required: nil, - }, - PluginDependencyReason: PluginDependencyImplicit, - }, - }}, - }, - }, - }}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -351,13 +142,10 @@ func TestPackerConfig_required_plugin_parse(t *testing.T) { t.Fatal(diags) } - rest, diags := cfg.parser.ParseHCL([]byte(tt.restOfTemplate), "rest.pkr.hcl") + _, diags = cfg.parser.ParseHCL([]byte(tt.restOfTemplate), "rest.pkr.hcl") if len(diags) > 0 { t.Fatal(diags) } - if gotDiags := cfg.decodeImplicitRequiredPluginsBlocks(rest); (len(gotDiags) > 0) != tt.wantDiags { - t.Fatal(gotDiags) - } if diff := cmp.Diff(tt.wantConfig, cfg, cmpOpts...); diff != "" { t.Errorf("PackerConfig.inferImplicitRequiredPluginFromBlocks() unexpected PackerConfig: %v", diff) } diff --git a/hcl2template/utils.go b/hcl2template/utils.go index b74093b23fe..d3d0cb29447 100644 --- a/hcl2template/utils.go +++ b/hcl2template/utils.go @@ -5,13 +5,13 @@ package hcl2template import ( "fmt" - "io/ioutil" "os" "path/filepath" "strings" "github.com/gobwas/glob" "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/packer/hcl2template/repl" hcl2shim "github.com/hashicorp/packer/hcl2template/shim" "github.com/zclconf/go-cty/cty" @@ -74,7 +74,7 @@ func GetHCL2Files(filename, hclSuffix, jsonSuffix string) (hclFiles, jsonFiles [ return nil, nil, diags } - fileInfos, err := ioutil.ReadDir(filename) + fileInfos, err := os.ReadDir(filename) if err != nil { diag := &hcl.Diagnostic{ Severity: hcl.DiagError, @@ -187,3 +187,48 @@ func ConvertPluginConfigValueToHCLValue(v interface{}) (cty.Value, error) { } return buildValue, 
nil } + +// GetVarsByType walks through a hcl body, and gathers all the Traversals that +// have a root type matching one of the specified top-level labels. +// +// This will only work on finite, expanded, HCL bodies. +func GetVarsByType(block *hcl.Block, topLevelLabels ...string) []hcl.Traversal { + var travs []hcl.Traversal + + switch body := block.Body.(type) { + case *hclsyntax.Body: + travs = getVarsByTypeForHCLSyntaxBody(body) + default: + attrs, _ := body.JustAttributes() + for _, attr := range attrs { + travs = append(travs, attr.Expr.Variables()...) + } + } + + var rets []hcl.Traversal + for _, t := range travs { + varRootname := t.RootName() + for _, lbl := range topLevelLabels { + if varRootname == lbl { + rets = append(rets, t) + break + } + } + } + + return rets +} + +func getVarsByTypeForHCLSyntaxBody(body *hclsyntax.Body) []hcl.Traversal { + var rets []hcl.Traversal + + for _, attr := range body.Attributes { + rets = append(rets, attr.Expr.Variables()...) + } + + for _, block := range body.Blocks { + rets = append(rets, getVarsByTypeForHCLSyntaxBody(block.Body)...) 
+ } + + return rets +} diff --git a/internal/hcp/api/client.go b/internal/hcp/api/client.go index a379af61b6f..f28999d57b6 100644 --- a/internal/hcp/api/client.go +++ b/internal/hcp/api/client.go @@ -6,10 +6,16 @@ package api import ( "fmt" + "log" + "net/http" + "os" + "time" + "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/client/packer_service" packerSvc "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/client/packer_service" organizationSvc "github.com/hashicorp/hcp-sdk-go/clients/cloud-resource-manager/preview/2019-12-10/client/organization_service" projectSvc "github.com/hashicorp/hcp-sdk-go/clients/cloud-resource-manager/preview/2019-12-10/client/project_service" + "github.com/hashicorp/hcp-sdk-go/clients/cloud-resource-manager/preview/2019-12-10/models" rmmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-resource-manager/preview/2019-12-10/models" "github.com/hashicorp/hcp-sdk-go/httpclient" "github.com/hashicorp/packer/internal/hcp/env" @@ -40,32 +46,58 @@ func NewClient() (*Client, error) { } } - cl, err := httpclient.New(httpclient.Config{ + hcpClientCfg := httpclient.Config{ SourceChannel: fmt.Sprintf("packer/%s", version.PackerVersion.FormattedVersion()), - }) - if err != nil { + } + if err := hcpClientCfg.Canonicalize(); err != nil { return nil, &ClientError{ StatusCode: InvalidClientConfig, Err: err, } } + cl, err := httpclient.New(hcpClientCfg) + if err != nil { + return nil, &ClientError{ + StatusCode: InvalidClientConfig, + Err: err, + } + } client := &Client{ Packer: packerSvc.New(cl, nil), Organization: organizationSvc.New(cl, nil), Project: projectSvc.New(cl, nil), } + // A client.Config.hcpConfig is set when calling Canonicalize on basic HCP httpclient, as on line 52. + // If a user sets HCP_* env. variables they will be loaded into the client via the SDK and used for any client calls. + // For HCP_ORGANIZATION_ID and HCP_PROJECT_ID if they are both set via env. 
variables the call to hcpClientCfg.Connicalize() + // will automatically loaded them using the FromEnv configOption. + // + // If both values are set we should have all that we need to continue so we can returned the configured client. + if hcpClientCfg.Profile().OrganizationID != "" && hcpClientCfg.Profile().ProjectID != "" { + client.OrganizationID = hcpClientCfg.Profile().OrganizationID + client.ProjectID = hcpClientCfg.Profile().ProjectID - if err := client.loadOrganizationID(); err != nil { - return nil, &ClientError{ - StatusCode: InvalidClientConfig, - Err: err, + return client, nil + } + + if client.OrganizationID == "" { + err := client.loadOrganizationID() + if err != nil { + return nil, &ClientError{ + StatusCode: InvalidClientConfig, + Err: err, + } } } - if err := client.loadProjectID(); err != nil { - return nil, &ClientError{ - StatusCode: InvalidClientConfig, - Err: err, + + if client.ProjectID == "" { + err := client.loadProjectID() + if err != nil { + return nil, &ClientError{ + StatusCode: InvalidClientConfig, + Err: err, + } } } @@ -73,6 +105,10 @@ func NewClient() (*Client, error) { } func (c *Client) loadOrganizationID() error { + if env.HasOrganizationID() { + c.OrganizationID = os.Getenv(env.HCPOrganizationID) + return nil + } // Get the organization ID. listOrgParams := organizationSvc.NewOrganizationServiceListParams() listOrgResp, err := c.Organization.OrganizationServiceList(listOrgParams, nil) @@ -88,18 +124,81 @@ func (c *Client) loadOrganizationID() error { } func (c *Client) loadProjectID() error { + if env.HasProjectID() { + c.ProjectID = os.Getenv(env.HCPProjectID) + err := c.ValidateRegistryForProject() + if err != nil { + return fmt.Errorf("project validation for id %q responded in error: %v", c.ProjectID, err) + } + return nil + } // Get the project using the organization ID. 
listProjParams := projectSvc.NewProjectServiceListParams() listProjParams.ScopeID = &c.OrganizationID scopeType := string(rmmodels.HashicorpCloudResourcemanagerResourceIDResourceTypeORGANIZATION) listProjParams.ScopeType = &scopeType listProjResp, err := c.Project.ProjectServiceList(listProjParams, nil) + if err != nil { - return fmt.Errorf("unable to fetch project id: %v", err) + //For permission errors, our service principal may not have the ability + // to see all projects for an Org; this is the case for project-level service principals. + serviceErr, ok := err.(*projectSvc.ProjectServiceListDefault) + if !ok { + return fmt.Errorf("unable to fetch project list: %v", err) + } + if serviceErr.Code() == http.StatusForbidden { + return fmt.Errorf("unable to fetch project\n\n"+ + "If the provided credentials are tied to a specific project try setting the %s environment variable to one you want to use.", env.HCPProjectID) + } } + if len(listProjResp.Payload.Projects) > 1 { - return fmt.Errorf("this version of Packer does not support multiple projects") + log.Printf("[WARNING] Multiple HCP projects found, will pick the oldest one by default\n"+ + "To specify which project to use, set the %s environment variable to the one you want to use.", env.HCPProjectID) + } + + proj, err := getOldestProject(listProjResp.Payload.Projects) + if err != nil { + return err } - c.ProjectID = listProjResp.Payload.Projects[0].ID + c.ProjectID = proj.ID return nil } + +// getOldestProject retrieves the oldest project from a list based on its created_at time. 
+func getOldestProject(projects []*models.HashicorpCloudResourcemanagerProject) (*models.HashicorpCloudResourcemanagerProject, error) { + if len(projects) == 0 { + return nil, fmt.Errorf("no project found") + } + + oldestTime := time.Now() + var oldestProj *models.HashicorpCloudResourcemanagerProject + for _, proj := range projects { + projTime := time.Time(proj.CreatedAt) + if projTime.Before(oldestTime) { + oldestProj = proj + oldestTime = projTime + } + } + return oldestProj, nil +} + +// ValidateRegistryForProject validates that there is an active registry associated to the configured organization and project ids. +// A successful validation will result in a nil response. All other response represent an invalid registry error request or a registry not found error. +func (client *Client) ValidateRegistryForProject() error { + params := packer_service.NewPackerServiceGetRegistryParams() + params.LocationOrganizationID = client.OrganizationID + params.LocationProjectID = client.ProjectID + + resp, err := client.Packer.PackerServiceGetRegistry(params, nil) + if err != nil { + return err + } + + if resp.GetPayload().Registry == nil { + return fmt.Errorf("No active HCP Packer registry was found for the organization %q and project %q", client.OrganizationID, client.ProjectID) + } + + return nil + +} diff --git a/internal/hcp/api/client_test.go b/internal/hcp/api/client_test.go new file mode 100644 index 00000000000..16d321beb77 --- /dev/null +++ b/internal/hcp/api/client_test.go @@ -0,0 +1,83 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package api + +import ( + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/hashicorp/hcp-sdk-go/clients/cloud-resource-manager/preview/2019-12-10/models" +) + +func TestGetOldestProject(t *testing.T) { + testcases := []struct { + Name string + ProjectList []*models.HashicorpCloudResourcemanagerProject + ExpectProjectID string + ExpectErr bool + }{ + { + "Only one project, project exists, success", + []*models.HashicorpCloudResourcemanagerProject{ + { + ID: "test-project-exists", + }, + }, + "test-project-exists", + false, + }, + { + "Multiple projects, pick the oldest", + []*models.HashicorpCloudResourcemanagerProject{ + { + ID: "test-project-exists", + CreatedAt: strfmt.DateTime(time.Date(2023, 1, 1, 1, 0, 0, 0, time.UTC)), + }, + { + ID: "test-oldest-project", + CreatedAt: strfmt.DateTime(time.Date(2022, 1, 1, 1, 0, 0, 0, time.UTC)), + }, + }, + "test-oldest-project", + false, + }, + { + "Multiple projects, different order, pick the oldest", + []*models.HashicorpCloudResourcemanagerProject{ + { + ID: "test-oldest-project", + CreatedAt: strfmt.DateTime(time.Date(2022, 1, 1, 1, 0, 0, 0, time.UTC)), + }, + { + ID: "test-project-exists", + CreatedAt: strfmt.DateTime(time.Date(2023, 1, 1, 1, 0, 0, 0, time.UTC)), + }, + }, + "test-oldest-project", + false, + }, + { + "No projects, should error", + []*models.HashicorpCloudResourcemanagerProject{}, + "", + true, + }, + } + + for _, tt := range testcases { + t.Run(tt.Name, func(t *testing.T) { + proj, err := getOldestProject(tt.ProjectList) + if (err != nil) != tt.ExpectErr { + t.Errorf("test findProjectByID, expected %t, got %t", + tt.ExpectErr, + err != nil) + } + + if proj != nil && proj.ID != tt.ExpectProjectID { + t.Errorf("expected to select project %q, got %q", tt.ExpectProjectID, proj.ID) + } + }) + } +} diff --git a/internal/hcp/env/env.go b/internal/hcp/env/env.go index c07d9cac478..0186be1671f 100644 --- a/internal/hcp/env/env.go +++ 
b/internal/hcp/env/env.go @@ -9,6 +9,14 @@ import ( "strings" ) +func HasProjectID() bool { + return hasEnvVar(HCPProjectID) +} + +func HasOrganizationID() bool { + return hasEnvVar(HCPOrganizationID) +} + func HasClientID() bool { return hasEnvVar(HCPClientID) } diff --git a/internal/hcp/env/variables.go b/internal/hcp/env/variables.go index 0fc654494b6..8c74b600d47 100644 --- a/internal/hcp/env/variables.go +++ b/internal/hcp/env/variables.go @@ -6,6 +6,8 @@ package env const ( HCPClientID = "HCP_CLIENT_ID" HCPClientSecret = "HCP_CLIENT_SECRET" + HCPProjectID = "HCP_PROJECT_ID" + HCPOrganizationID = "HCP_ORGANIZATION_ID" HCPPackerRegistry = "HCP_PACKER_REGISTRY" HCPPackerBucket = "HCP_PACKER_BUCKET_NAME" HCPPackerBuildFingerprint = "HCP_PACKER_BUILD_FINGERPRINT" diff --git a/main.go b/main.go index 728f56860af..59af0a7b580 100644 --- a/main.go +++ b/main.go @@ -9,7 +9,6 @@ package main import ( "fmt" "io" - "io/ioutil" "log" "math/rand" "os" @@ -65,13 +64,13 @@ func realMain() int { return 1 } if logWriter == nil { - logWriter = ioutil.Discard + logWriter = io.Discard } packersdk.LogSecretFilter.SetOutput(logWriter) // Disable logging here - log.SetOutput(ioutil.Discard) + log.SetOutput(io.Discard) // We always send logs to a temporary file that we use in case // there is a panic. Otherwise, we delete it. 
@@ -325,72 +324,6 @@ func loadConfig() (*config, error) { PluginMinPort: 10000, PluginMaxPort: 25000, KnownPluginFolders: packer.PluginFolders("."), - - // BuilderRedirects - BuilderRedirects: map[string]string{ - - //"amazon-chroot": "github.com/hashicorp/amazon", - //"amazon-ebs": "github.com/hashicorp/amazon", - //"amazon-ebssurrogate": "github.com/hashicorp/amazon", - //"amazon-ebsvolume": "github.com/hashicorp/amazon", - //"amazon-instance": "github.com/hashicorp/amazon", - - //"azure-arm": "github.com/hashicorp/azure", - //"azure-chroot": "github.com/hashicorp/azure", - //"azure-dtl": "github.com/hashicorp/azure", - - //"docker": "github.com/hashicorp/docker", - - //"exoscale": "github.com/exoscale/exoscale", - - //"googlecompute": "github.com/hashicorp/googlecompute", - - //"parallels-iso": "github.com/hashicorp/parallels", - //"parallels-pvm": "github.com/hashicorp/parallels", - - //"qemu": "github.com/hashicorp/qemu", - - //"vagrant": "github.com/hashicorp/vagrant", - - //"virtualbox-iso": "github.com/hashicorp/virtualbox", - //"virtualbox-ovf": "github.com/hashicorp/virtualbox", - //"virtualbox-vm": "github.com/hashicorp/virtualbox", - - //"vmware-iso": "github.com/hashicorp/vmware", - //"vmware-vmx": "github.com/hashicorp/vmware", - - //"vsphere-iso": "github.com/hashicorp/vsphere", - //"vsphere-clone": "github.com/hashicorp/vsphere", - }, - DatasourceRedirects: map[string]string{ - //"amazon-ami": "github.com/hashicorp/amazon", - //"amazon-secretsmanager": "github.com/hashicorp/amazon", - }, - ProvisionerRedirects: map[string]string{ - //"ansible": "github.com/hashicorp/ansible", - //"ansible-local": "github.com/hashicorp/ansible", - - //"azure-dtlartifact": "github.com/hashicorp/azure", - }, - PostProcessorRedirects: map[string]string{ - //"amazon-import": "github.com/hashicorp/amazon", - - //"docker-import": "github.com/hashicorp/docker", - //"docker-push": "github.com/hashicorp/docker", - //"docker-save": "github.com/hashicorp/docker", - 
//"docker-tag": "github.com/hashicorp/docker", - - //"googlecompute-export": "github.com/hashicorp/googlecompute", - //"googlecompute-import": "github.com/hashicorp/googlecompute", - - //"exoscale-import": "github.com/exoscale/exoscale", - - //"vagrant": "github.com/hashicorp/vagrant", - //"vagrant-cloud": "github.com/hashicorp/vagrant", - - //"vsphere": "github.com/hashicorp/vsphere", - //"vsphere-template": "github.com/hashicorp/vsphere", - }, } if err := config.Plugins.Discover(); err != nil { return nil, err diff --git a/packer/client_test.go b/packer/client_test.go index f9a1483ff87..8656537e269 100644 --- a/packer/client_test.go +++ b/packer/client_test.go @@ -5,7 +5,6 @@ package packer import ( "bytes" - "io/ioutil" "os" "strings" "testing" @@ -102,7 +101,7 @@ func TestClient_Stderr(t *testing.T) { func TestClient_Stdin(t *testing.T) { // Overwrite stdin for this test with a temporary file - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("err: %s", err) } diff --git a/packer/core.go b/packer/core.go index 4d62cce6bc3..b711af080bc 100644 --- a/packer/core.go +++ b/packer/core.go @@ -132,6 +132,12 @@ func NewCore(c *CoreConfig) *Core { return core } +// DetectPluginBinaries is used to load required plugins from the template, +// since it is unsupported in JSON, this is essentially a no-op. 
+func (c *Core) DetectPluginBinaries() hcl.Diagnostics { + return nil +} + func (c *Core) Initialize(_ InitializeOptions) hcl.Diagnostics { err := c.initialize() if err != nil { diff --git a/packer/plugin-getter/github/getter.go b/packer/plugin-getter/github/getter.go index c094a33e332..eb2aea38d2c 100644 --- a/packer/plugin-getter/github/getter.go +++ b/packer/plugin-getter/github/getter.go @@ -11,7 +11,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "log" "net/http" "os" @@ -71,7 +70,7 @@ func transformChecksumStream() func(in io.ReadCloser) (io.ReadCloser, error) { } } _, _ = buffer.WriteString("]") - return ioutil.NopCloser(buffer), nil + return io.NopCloser(buffer), nil } } @@ -103,7 +102,7 @@ func transformVersionStream(in io.ReadCloser) (io.ReadCloser, error) { return nil, err } - return ioutil.NopCloser(buf), nil + return io.NopCloser(buf), nil } // HostSpecificTokenAuthTransport makes sure the http roundtripper only sets an diff --git a/packer/plugin-getter/plugins.go b/packer/plugin-getter/plugins.go index 09acf7052c8..84b5d8da1da 100644 --- a/packer/plugin-getter/plugins.go +++ b/packer/plugin-getter/plugins.go @@ -9,7 +9,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "log" "os" "path/filepath" @@ -43,9 +42,6 @@ type Requirement struct { // VersionConstraints as defined by user. Empty ( to be avoided ) means // highest found version. VersionConstraints version.Constraints - - // was this require implicitly guessed ? 
- Implicit bool } type BinaryInstallationOptions struct { @@ -717,7 +713,7 @@ func (pr *Requirement) InstallLatest(opts InstallOptions) (*Installation, error) log.Printf("[WARNING] %v, ignoring", err) } - if err := ioutil.WriteFile(outputFileName+checksum.Checksummer.FileExt(), []byte(hex.EncodeToString(cs)), 0555); err != nil { + if err := os.WriteFile(outputFileName+checksum.Checksummer.FileExt(), []byte(hex.EncodeToString(cs)), 0555); err != nil { err := fmt.Errorf("failed to write local binary checksum file: %s", err) errs = multierror.Append(errs, err) log.Printf("[WARNING] %v, ignoring", err) diff --git a/packer/plugin-getter/plugins_test.go b/packer/plugin-getter/plugins_test.go index 3105fb1e667..03e34251ecd 100644 --- a/packer/plugin-getter/plugins_test.go +++ b/packer/plugin-getter/plugins_test.go @@ -10,7 +10,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "log" "os" "path/filepath" @@ -754,7 +753,7 @@ func (g *mockPluginGetter) Get(what string, options GetOptions) (io.ReadCloser, panic(err) } }() - return ioutil.NopCloser(read), nil + return io.NopCloser(read), nil } func zipFile(content map[string]string) io.ReadCloser { @@ -778,7 +777,7 @@ func zipFile(content map[string]string) io.ReadCloser { if err != nil { panic(err) } - return ioutil.NopCloser(buff) + return io.NopCloser(buff) } var _ Getter = &mockPluginGetter{} diff --git a/packer/plugin.go b/packer/plugin.go index 1b64482a89b..caf05df0e4a 100644 --- a/packer/plugin.go +++ b/packer/plugin.go @@ -16,11 +16,15 @@ import ( "strings" packersdk "github.com/hashicorp/packer-plugin-sdk/packer" - "github.com/hashicorp/packer-plugin-sdk/pathing" pluginsdk "github.com/hashicorp/packer-plugin-sdk/plugin" plugingetter "github.com/hashicorp/packer/packer/plugin-getter" ) +var defaultChecksummer = plugingetter.Checksummer{ + Type: "sha256", + Hash: sha256.New(), +} + // PluginConfig helps load and use packer plugins type PluginConfig struct { KnownPluginFolders []string @@ -30,23 +34,6 @@ type 
PluginConfig struct { Provisioners ProvisionerSet PostProcessors PostProcessorSet DataSources DatasourceSet - - // Redirects are only set when a plugin was completely moved out; they allow - // telling where a plugin has moved by checking if a known component of this - // plugin is used. For example implicitly require the - // github.com/hashicorp/amazon plugin if it was moved out and the - // "amazon-ebs" plugin is used, but not found. - // - // Redirects will be bypassed if the redirected components are already found - // in their corresponding sets (Builders, Provisioners, PostProcessors, - // DataSources). That is, for example, if you manually put a single - // component plugin in the plugins folder. - // - // Example BuilderRedirects: "amazon-ebs" => "github.com/hashicorp/amazon" - BuilderRedirects map[string]string - DatasourceRedirects map[string]string - ProvisionerRedirects map[string]string - PostProcessorRedirects map[string]string } // PACKERSPACE is used to represent the spaces that separate args for a command @@ -80,54 +67,47 @@ func (c *PluginConfig) Discover() error { return nil } - // TODO: use KnownPluginFolders here. TODO probably after JSON is deprecated - // so that we can keep the current behavior just the way it is. - - // Next, look in the same directory as the executable. - exePath, err := os.Executable() - if err != nil { - log.Printf("[ERR] Error loading exe directory: %s", err) - } else { - if err := c.discoverExternalComponents(filepath.Dir(exePath)); err != nil { - return err - } + if len(c.KnownPluginFolders) == 0 { + //PluginFolders should match the call in github.com/hahicorp/packer/main.go#loadConfig + c.KnownPluginFolders = PluginFolders(".") } - // Next, look in the default plugins directory inside the configdir/.packer.d/plugins. 
- dir, err := pathing.ConfigDir() - if err != nil { - log.Printf("[ERR] Error loading config directory: %s", err) - } else { - if err := c.discoverExternalComponents(filepath.Join(dir, "plugins")); err != nil { + // TODO after JSON is deprecated remove support for legacy component plugins. + for _, knownFolder := range c.KnownPluginFolders { + if err := c.discoverLegacyMonoComponents(knownFolder); err != nil { return err } } - // Next, look in the CWD. - if err := c.discoverExternalComponents("."); err != nil { + // Pick last folder as it's the one with the highest priority + // This is the same logic used when installing plugins via Packer's plugin installation commands. + pluginInstallationPath := c.KnownPluginFolders[len(c.KnownPluginFolders)-1] + if err := c.discoverInstalledComponents(pluginInstallationPath); err != nil { return err } - // Check whether there is a custom Plugin directory defined. This gets - // absolute preference. - if packerPluginPath := os.Getenv("PACKER_PLUGIN_PATH"); packerPluginPath != "" { - sep := ":" - if runtime.GOOS == "windows" { - // on windows, PATH is semicolon-separated - sep = ";" + // Manually installed plugins take precedence over all. Duplicate plugins installed + // prior to the packer plugins install command should be removed by user to avoid overrides. 
+ for _, knownFolder := range c.KnownPluginFolders { + pluginPaths, err := c.discoverSingle(filepath.Join(knownFolder, "packer-plugin-*")) + if err != nil { + return err } - plugPaths := strings.Split(packerPluginPath, sep) - for _, plugPath := range plugPaths { - if err := c.discoverExternalComponents(plugPath); err != nil { + for pluginName, pluginPath := range pluginPaths { + // Test pluginPath points to an executable + if _, err := exec.LookPath(pluginPath); err != nil { + log.Printf("[WARN] %q is not executable; skipping", pluginPath) + continue + } + if err := c.DiscoverMultiPlugin(pluginName, pluginPath); err != nil { return err } } } - return nil } -func (c *PluginConfig) discoverExternalComponents(path string) error { +func (c *PluginConfig) discoverLegacyMonoComponents(path string) error { var err error log.Printf("[TRACE] discovering plugins in %s", path) @@ -206,66 +186,6 @@ func (c *PluginConfig) discoverExternalComponents(path string) error { log.Printf("using external datasource %v", externallyUsed) } - //Check for installed plugins using the `packer plugins install` command - binInstallOpts := plugingetter.BinaryInstallationOptions{ - OS: runtime.GOOS, - ARCH: runtime.GOARCH, - APIVersionMajor: pluginsdk.APIVersionMajor, - APIVersionMinor: pluginsdk.APIVersionMinor, - Checksummers: []plugingetter.Checksummer{ - {Type: "sha256", Hash: sha256.New()}, - }, - } - - if runtime.GOOS == "windows" { - binInstallOpts.Ext = ".exe" - } - - pluginPaths, err = c.discoverSingle(filepath.Join(path, "*", "*", "*", fmt.Sprintf("packer-plugin-*%s", binInstallOpts.FilenameSuffix()))) - if err != nil { - return err - } - - for pluginName, pluginPath := range pluginPaths { - var checksumOk bool - for _, checksummer := range binInstallOpts.Checksummers { - cs, err := checksummer.GetCacheChecksumOfFile(pluginPath) - if err != nil { - log.Printf("[TRACE] GetChecksumOfFile(%q) failed: %v", pluginPath, err) - continue - } - - if err := checksummer.ChecksumFile(cs, 
pluginPath); err != nil { - log.Printf("[TRACE] ChecksumFile(%q) failed: %v", pluginPath, err) - continue - } - checksumOk = true - break - } - - if !checksumOk { - log.Printf("[TRACE] No checksum found for %q ignoring possibly unsafe binary", path) - continue - } - - if err := c.DiscoverMultiPlugin(pluginName, pluginPath); err != nil { - return err - } - } - - // Manually installed plugins take precedence over all. Duplicate plugins installed - // prior to the packer plugins install command should be removed by user to avoid overrides. - pluginPaths, err = c.discoverSingle(filepath.Join(path, "packer-plugin-*")) - if err != nil { - return err - } - - for pluginName, pluginPath := range pluginPaths { - if err := c.DiscoverMultiPlugin(pluginName, pluginPath); err != nil { - return err - } - } - return nil } @@ -291,9 +211,17 @@ func (c *PluginConfig) discoverSingle(glob string) (map[string]string, error) { // We could do a full PATHEXT parse, but this is probably good enough. if runtime.GOOS == "windows" && strings.ToLower(filepath.Ext(file)) != ".exe" { log.Printf( - "[DEBUG] Ignoring plugin match %s, no exe extension", + "[TRACE] Ignoring plugin match %s, no exe extension", + match) + continue + } + + if strings.Contains(strings.ToUpper(file), defaultChecksummer.FileExt()) { + log.Printf( + "[TRACE] Ignoring plugin match %s, which looks to be a checksum file", match) continue + } // If the filename has a ".", trim up to there @@ -307,8 +235,12 @@ func (c *PluginConfig) discoverSingle(glob string) (map[string]string, error) { // After the split the plugin name is "baz". 
pluginName = strings.SplitN(pluginName, "_", 2)[0] - log.Printf("[DEBUG] Discovered plugin: %s = %s", pluginName, match) - res[pluginName] = match + log.Printf("[INFO] Discovered potential plugin: %s = %s", pluginName, match) + pluginPath, err := filepath.Abs(match) + if err != nil { + pluginPath = match + } + res[pluginName] = pluginPath } return res, nil @@ -426,9 +358,9 @@ func (c *PluginConfig) Client(path string, args ...string) *PluginClient { } if strings.Contains(originalPath, PACKERSPACE) { - log.Printf("[TRACE] Starting internal plugin %s", args[len(args)-1]) + log.Printf("[INFO] Starting internal plugin %s", args[len(args)-1]) } else { - log.Printf("[TRACE] Starting external plugin %s %s", path, strings.Join(args, " ")) + log.Printf("[INFO] Starting external plugin %s %s", path, strings.Join(args, " ")) } var config PluginClientConfig config.Cmd = exec.Command(path, args...) @@ -437,3 +369,57 @@ func (c *PluginConfig) Client(path string, args ...string) *PluginClient { config.MaxPort = c.PluginMaxPort return NewClient(&config) } + +// discoverInstalledComponents scans the provided path for plugins installed by running packer plugins install or packer init. +// Valid plugins contain a matching system binary and valid checksum file. 
+func (c *PluginConfig) discoverInstalledComponents(path string) error { + //Check for installed plugins using the `packer plugins install` command + binInstallOpts := plugingetter.BinaryInstallationOptions{ + OS: runtime.GOOS, + ARCH: runtime.GOARCH, + APIVersionMajor: pluginsdk.APIVersionMajor, + APIVersionMinor: pluginsdk.APIVersionMinor, + Checksummers: []plugingetter.Checksummer{ + defaultChecksummer, + }, + } + + if runtime.GOOS == "windows" { + binInstallOpts.Ext = ".exe" + } + + pluginPath := filepath.Join(path, "*", "*", "*", fmt.Sprintf("packer-plugin-*%s", binInstallOpts.FilenameSuffix())) + pluginPaths, err := c.discoverSingle(pluginPath) + if err != nil { + return err + } + + for pluginName, pluginPath := range pluginPaths { + var checksumOk bool + for _, checksummer := range binInstallOpts.Checksummers { + cs, err := checksummer.GetCacheChecksumOfFile(pluginPath) + if err != nil { + log.Printf("[TRACE] GetChecksumOfFile(%q) failed: %v", pluginPath, err) + continue + } + + if err := checksummer.ChecksumFile(cs, pluginPath); err != nil { + log.Printf("[TRACE] ChecksumFile(%q) failed: %v", pluginPath, err) + continue + } + checksumOk = true + break + } + + if !checksumOk { + log.Printf("[WARN] No checksum found for %q ignoring possibly unsafe binary", path) + continue + } + + if err := c.DiscoverMultiPlugin(pluginName, pluginPath); err != nil { + return err + } + } + + return nil +} diff --git a/packer/plugin_client.go b/packer/plugin_client.go index f1d127497be..bcd71890043 100644 --- a/packer/plugin_client.go +++ b/packer/plugin_client.go @@ -8,7 +8,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "log" "net" "os" @@ -114,7 +113,7 @@ func NewClient(config *PluginClientConfig) (c *PluginClient) { } if config.Stderr == nil { - config.Stderr = ioutil.Discard + config.Stderr = io.Discard } c = &PluginClient{config: config} diff --git a/packer/plugin_discover_test.go b/packer/plugin_discover_test.go index 91e8ba6b318..fec52607d83 100644 --- 
a/packer/plugin_discover_test.go +++ b/packer/plugin_discover_test.go @@ -154,6 +154,233 @@ func TestDiscoverDatasource(t *testing.T) { } } +func TestMultiPlugin_describe(t *testing.T) { + createMockPlugins(t, mockPlugins) + pluginDir := os.Getenv("PACKER_PLUGIN_PATH") + defer os.RemoveAll(pluginDir) + c := PluginConfig{} + err := c.Discover() + if err != nil { + t.Fatalf("error discovering plugins; %s", err.Error()) + } + + for mockPluginName, plugin := range mockPlugins { + for mockBuilderName := range plugin.Builders { + expectedBuilderName := mockPluginName + "-" + mockBuilderName + + if !c.Builders.Has(expectedBuilderName) { + t.Fatalf("expected to find builder %q", expectedBuilderName) + } + } + for mockProvisionerName := range plugin.Provisioners { + expectedProvisionerName := mockPluginName + "-" + mockProvisionerName + if !c.Provisioners.Has(expectedProvisionerName) { + t.Fatalf("expected to find builder %q", expectedProvisionerName) + } + } + for mockPostProcessorName := range plugin.PostProcessors { + expectedPostProcessorName := mockPluginName + "-" + mockPostProcessorName + if !c.PostProcessors.Has(expectedPostProcessorName) { + t.Fatalf("expected to find post-processor %q", expectedPostProcessorName) + } + } + for mockDatasourceName := range plugin.Datasources { + expectedDatasourceName := mockPluginName + "-" + mockDatasourceName + if !c.DataSources.Has(expectedDatasourceName) { + t.Fatalf("expected to find datasource %q", expectedDatasourceName) + } + } + } +} + +func TestMultiPlugin_describe_installed(t *testing.T) { + createMockInstalledPlugins(t, mockInstalledPlugins, createMockChecksumFile) + pluginDir := os.Getenv("PACKER_PLUGIN_PATH") + defer os.RemoveAll(pluginDir) + + c := PluginConfig{} + err := c.Discover() + if err != nil { + t.Fatalf("error discovering plugins; %s", err.Error()) + } + + for mockPluginName, plugin := range mockInstalledPlugins { + mockPluginName = strings.Split(mockPluginName, "_")[0] + for mockBuilderName := range 
plugin.Builders { + expectedBuilderName := mockPluginName + "-" + mockBuilderName + if !c.Builders.Has(expectedBuilderName) { + t.Fatalf("expected to find builder %q", expectedBuilderName) + } + } + for mockProvisionerName := range plugin.Provisioners { + expectedProvisionerName := mockPluginName + "-" + mockProvisionerName + if !c.Provisioners.Has(expectedProvisionerName) { + t.Fatalf("expected to find builder %q", expectedProvisionerName) + } + } + for mockPostProcessorName := range plugin.PostProcessors { + expectedPostProcessorName := mockPluginName + "-" + mockPostProcessorName + if !c.PostProcessors.Has(expectedPostProcessorName) { + t.Fatalf("expected to find post-processor %q", expectedPostProcessorName) + } + } + for mockDatasourceName := range plugin.Datasources { + expectedDatasourceName := mockPluginName + "-" + mockDatasourceName + if !c.DataSources.Has(expectedDatasourceName) { + t.Fatalf("expected to find datasource %q", expectedDatasourceName) + } + } + } +} + +func TestMultiPlugin_describe_installed_for_invalid(t *testing.T) { + tc := []struct { + desc string + installedPluginsMock map[string]pluginsdk.Set + createMockFn func(*testing.T, map[string]pluginsdk.Set) + }{ + { + desc: "Incorrectly named plugins", + installedPluginsMock: invalidInstalledPluginsMock, + createMockFn: func(t *testing.T, mocks map[string]pluginsdk.Set) { + createMockInstalledPlugins(t, mocks, createMockChecksumFile) + }, + }, + { + desc: "Plugins missing checksums", + installedPluginsMock: mockInstalledPlugins, + createMockFn: func(t *testing.T, mocks map[string]pluginsdk.Set) { + createMockInstalledPlugins(t, mocks) + }, + }, + } + + for _, tt := range tc { + t.Run(tt.desc, func(t *testing.T) { + tt.createMockFn(t, tt.installedPluginsMock) + pluginDir := os.Getenv("PACKER_PLUGIN_PATH") + defer os.RemoveAll(pluginDir) + + c := PluginConfig{} + err := c.Discover() + if err != nil { + t.Fatalf("error discovering plugins; %s", err.Error()) + } + if c.Builders.Has("feather") { + 
t.Fatalf("expected to not find builder %q", "feather") + } + for mockPluginName, plugin := range tt.installedPluginsMock { + mockPluginName = strings.Split(mockPluginName, "_")[0] + for mockBuilderName := range plugin.Builders { + expectedBuilderName := mockPluginName + "-" + mockBuilderName + if c.Builders.Has(expectedBuilderName) { + t.Fatalf("expected to not find builder %q", expectedBuilderName) + } + } + for mockProvisionerName := range plugin.Provisioners { + expectedProvisionerName := mockPluginName + "-" + mockProvisionerName + if c.Provisioners.Has(expectedProvisionerName) { + t.Fatalf("expected to not find builder %q", expectedProvisionerName) + } + } + for mockPostProcessorName := range plugin.PostProcessors { + expectedPostProcessorName := mockPluginName + "-" + mockPostProcessorName + if c.PostProcessors.Has(expectedPostProcessorName) { + t.Fatalf("expected to not find post-processor %q", expectedPostProcessorName) + } + } + for mockDatasourceName := range plugin.Datasources { + expectedDatasourceName := mockPluginName + "-" + mockDatasourceName + if c.DataSources.Has(expectedDatasourceName) { + t.Fatalf("expected to not find datasource %q", expectedDatasourceName) + } + } + } + }) + } +} + +func TestMultiPlugin_defaultName(t *testing.T) { + createMockPlugins(t, defaultNameMock) + pluginDir := os.Getenv("PACKER_PLUGIN_PATH") + defer os.RemoveAll(pluginDir) + + c := PluginConfig{} + err := c.Discover() + if err != nil { + t.Fatalf("error discovering plugins; %s ; mocks are %#v", err.Error(), defaultNameMock) + } + + expectedBuilderNames := []string{"foo-bar", "foo-baz", "foo"} + for _, mockBuilderName := range expectedBuilderNames { + if !c.Builders.Has(mockBuilderName) { + t.Fatalf("expected to find builder %q; builders is %#v", mockBuilderName, c.Builders) + } + } +} + +// no T.Parallel using os.Chdir +func TestMultiPlugin_CWD(t *testing.T) { + createMockPlugins(t, defaultNameMock) + pluginDir := os.Getenv("PACKER_PLUGIN_PATH") + defer 
os.RemoveAll(pluginDir) + // Unset PACKER_PLUGIN_PATH to test CWD loading + os.Unsetenv("PACKER_PLUGIN_PATH") + if err := os.Chdir(pluginDir); err != nil { + t.Fatalf("failed to change directory to test loading from CWD: %s", err) + } + c := PluginConfig{} + err := c.Discover() + if err != nil { + t.Fatalf("error discovering plugins; %s ; mocks are %#v", err.Error(), defaultNameMock) + } + expectedBuilderNames := []string{"foo-bar", "foo-baz", "foo"} + for _, mockBuilderName := range expectedBuilderNames { + if !c.Builders.Has(mockBuilderName) { + t.Fatalf("expected to find builder %q; builders is %#v", mockBuilderName, c.Builders) + } + } +} + +func TestMultiPlugin_IgnoreChecksumFile(t *testing.T) { + createMockPlugins(t, defaultNameMock) + pluginDir := os.Getenv("PACKER_PLUGIN_PATH") + defer os.RemoveAll(pluginDir) + + csFile, err := generateMockChecksumFile(filepath.Join(pluginDir, "packer-plugin-foo")) + if err != nil { + t.Fatal(err.Error()) + } + // Copy plugin contents into checksum file to validate that it is not only skipped but that it never gets loaded + if err := os.Rename(filepath.Join(pluginDir, "packer-plugin-foo"), csFile); err != nil { + t.Fatalf("failed to rename plugin bin file to checkfum file needed for test: %s", err) + } + + c := PluginConfig{} + err = c.Discover() + if err != nil { + t.Fatalf("error discovering plugins; %s ; mocks are %#v", err.Error(), defaultNameMock) + } + expectedBuilderNames := []string{"foo-bar", "foo-baz", "foo"} + for _, mockBuilderName := range expectedBuilderNames { + if c.Builders.Has(mockBuilderName) { + t.Fatalf("expected to not find builder %q; builders is %#v", mockBuilderName, c.Builders) + } + } +} + +func TestMultiPlugin_defaultName_each_plugin_type(t *testing.T) { + createMockPlugins(t, doubleDefaultMock) + pluginDir := os.Getenv("PACKER_PLUGIN_PATH") + defer os.RemoveAll(pluginDir) + + c := PluginConfig{} + err := c.Discover() + if err != nil { + t.Fatal("Should not have error because 
pluginsdk.DEFAULT_NAME is used twice but only once per plugin type.") + } +} + func generateFakePlugins(dirname string, pluginNames []string) (string, []string, func(), error) { dir, err := os.MkdirTemp("", dirname) if err != nil { @@ -296,6 +523,15 @@ func createMockPlugins(t *testing.T, plugins map[string]pluginsdk.Set) { } func createMockChecksumFile(t testing.TB, filePath string) { + t.Helper() + cs, err := generateMockChecksumFile(filePath) + if err != nil { + t.Fatalf(err.Error()) + } + t.Logf("created fake plugin checksum file %s", cs) +} + +func generateMockChecksumFile(filePath string) (string, error) { cs := plugingetter.Checksummer{ Type: "sha256", Hash: sha256.New(), @@ -303,19 +539,20 @@ func createMockChecksumFile(t testing.TB, filePath string) { f, err := os.Open(filePath) if err != nil { - t.Fatalf("failed to open fake plugin binary: %v", err) + return "", fmt.Errorf("failed to open fake plugin binary: %v", err) } defer f.Close() sum, err := cs.Sum(f) if err != nil { - t.Fatalf("failed to checksum fake plugin binary: %v", err) + return "", fmt.Errorf("failed to checksum fake plugin binary: %v", err) } - t.Logf("creating fake plugin checksum file %s with contents %x", filePath+cs.FileExt(), string(sum)) - if err := os.WriteFile(filePath+cs.FileExt(), []byte(fmt.Sprintf("%x", sum)), os.ModePerm); err != nil { - t.Fatalf("failed to write checksum fake plugin binary: %v", err) + sumfile := filePath + cs.FileExt() + if err := os.WriteFile(sumfile, []byte(fmt.Sprintf("%x", sum)), os.ModePerm); err != nil { + return "", fmt.Errorf("failed to write checksum fake plugin binary: %v", err) } + return sumfile, nil } func createMockInstalledPlugins(t *testing.T, plugins map[string]pluginsdk.Set, opts ...func(tb testing.TB, filePath string)) { @@ -461,180 +698,3 @@ var ( }, } ) - -func Test_multiplugin_describe(t *testing.T) { - createMockPlugins(t, mockPlugins) - pluginDir := os.Getenv("PACKER_PLUGIN_PATH") - defer os.RemoveAll(pluginDir) - c := PluginConfig{} - 
err := c.Discover() - if err != nil { - t.Fatalf("error discovering plugins; %s", err.Error()) - } - - for mockPluginName, plugin := range mockPlugins { - for mockBuilderName := range plugin.Builders { - expectedBuilderName := mockPluginName + "-" + mockBuilderName - - if !c.Builders.Has(expectedBuilderName) { - t.Fatalf("expected to find builder %q", expectedBuilderName) - } - } - for mockProvisionerName := range plugin.Provisioners { - expectedProvisionerName := mockPluginName + "-" + mockProvisionerName - if !c.Provisioners.Has(expectedProvisionerName) { - t.Fatalf("expected to find builder %q", expectedProvisionerName) - } - } - for mockPostProcessorName := range plugin.PostProcessors { - expectedPostProcessorName := mockPluginName + "-" + mockPostProcessorName - if !c.PostProcessors.Has(expectedPostProcessorName) { - t.Fatalf("expected to find post-processor %q", expectedPostProcessorName) - } - } - for mockDatasourceName := range plugin.Datasources { - expectedDatasourceName := mockPluginName + "-" + mockDatasourceName - if !c.DataSources.Has(expectedDatasourceName) { - t.Fatalf("expected to find datasource %q", expectedDatasourceName) - } - } - } -} - -func Test_multiplugin_describe_installed(t *testing.T) { - createMockInstalledPlugins(t, mockInstalledPlugins, createMockChecksumFile) - pluginDir := os.Getenv("PACKER_PLUGIN_PATH") - defer os.RemoveAll(pluginDir) - - c := PluginConfig{} - err := c.Discover() - if err != nil { - t.Fatalf("error discovering plugins; %s", err.Error()) - } - - for mockPluginName, plugin := range mockInstalledPlugins { - mockPluginName = strings.Split(mockPluginName, "_")[0] - for mockBuilderName := range plugin.Builders { - expectedBuilderName := mockPluginName + "-" + mockBuilderName - if !c.Builders.Has(expectedBuilderName) { - t.Fatalf("expected to find builder %q", expectedBuilderName) - } - } - for mockProvisionerName := range plugin.Provisioners { - expectedProvisionerName := mockPluginName + "-" + mockProvisionerName - if 
!c.Provisioners.Has(expectedProvisionerName) { - t.Fatalf("expected to find builder %q", expectedProvisionerName) - } - } - for mockPostProcessorName := range plugin.PostProcessors { - expectedPostProcessorName := mockPluginName + "-" + mockPostProcessorName - if !c.PostProcessors.Has(expectedPostProcessorName) { - t.Fatalf("expected to find post-processor %q", expectedPostProcessorName) - } - } - for mockDatasourceName := range plugin.Datasources { - expectedDatasourceName := mockPluginName + "-" + mockDatasourceName - if !c.DataSources.Has(expectedDatasourceName) { - t.Fatalf("expected to find datasource %q", expectedDatasourceName) - } - } - } -} - -func Test_multiplugin_describe_installed_for_invalid(t *testing.T) { - tc := []struct { - desc string - installedPluginsMock map[string]pluginsdk.Set - createMockFn func(*testing.T, map[string]pluginsdk.Set) - }{ - { - desc: "Incorrectly named plugins", - installedPluginsMock: invalidInstalledPluginsMock, - createMockFn: func(t *testing.T, mocks map[string]pluginsdk.Set) { - createMockInstalledPlugins(t, mocks, createMockChecksumFile) - }, - }, - { - desc: "Plugins missing checksums", - installedPluginsMock: mockInstalledPlugins, - createMockFn: func(t *testing.T, mocks map[string]pluginsdk.Set) { - createMockInstalledPlugins(t, mocks) - }, - }, - } - - for _, tt := range tc { - t.Run(tt.desc, func(t *testing.T) { - tt.createMockFn(t, tt.installedPluginsMock) - pluginDir := os.Getenv("PACKER_PLUGIN_PATH") - defer os.RemoveAll(pluginDir) - - c := PluginConfig{} - err := c.Discover() - if err != nil { - t.Fatalf("error discovering plugins; %s", err.Error()) - } - if c.Builders.Has("feather") { - t.Fatalf("expected to not find builder %q", "feather") - } - for mockPluginName, plugin := range tt.installedPluginsMock { - mockPluginName = strings.Split(mockPluginName, "_")[0] - for mockBuilderName := range plugin.Builders { - expectedBuilderName := mockPluginName + "-" + mockBuilderName - if 
c.Builders.Has(expectedBuilderName) { - t.Fatalf("expected to not find builder %q", expectedBuilderName) - } - } - for mockProvisionerName := range plugin.Provisioners { - expectedProvisionerName := mockPluginName + "-" + mockProvisionerName - if c.Provisioners.Has(expectedProvisionerName) { - t.Fatalf("expected to not find builder %q", expectedProvisionerName) - } - } - for mockPostProcessorName := range plugin.PostProcessors { - expectedPostProcessorName := mockPluginName + "-" + mockPostProcessorName - if c.PostProcessors.Has(expectedPostProcessorName) { - t.Fatalf("expected to not find post-processor %q", expectedPostProcessorName) - } - } - for mockDatasourceName := range plugin.Datasources { - expectedDatasourceName := mockPluginName + "-" + mockDatasourceName - if c.DataSources.Has(expectedDatasourceName) { - t.Fatalf("expected to not find datasource %q", expectedDatasourceName) - } - } - } - }) - } -} - -func Test_multiplugin_defaultName(t *testing.T) { - createMockPlugins(t, defaultNameMock) - pluginDir := os.Getenv("PACKER_PLUGIN_PATH") - defer os.RemoveAll(pluginDir) - - c := PluginConfig{} - err := c.Discover() - if err != nil { - t.Fatalf("error discovering plugins; %s ; mocks are %#v", err.Error(), defaultNameMock) - } - - expectedBuilderNames := []string{"foo-bar", "foo-baz", "foo"} - for _, mockBuilderName := range expectedBuilderNames { - if !c.Builders.Has(mockBuilderName) { - t.Fatalf("expected to find builder %q; builders is %#v", mockBuilderName, c.Builders) - } - } -} - -func Test_only_one_multiplugin_defaultName_each_plugin_type(t *testing.T) { - createMockPlugins(t, doubleDefaultMock) - pluginDir := os.Getenv("PACKER_PLUGIN_PATH") - defer os.RemoveAll(pluginDir) - - c := PluginConfig{} - err := c.Discover() - if err != nil { - t.Fatal("Should not have error because pluginsdk.DEFAULT_NAME is used twice but only once per plugin type.") - } -} diff --git a/packer/plugin_folders.go b/packer/plugin_folders.go index 07e74d19121..52d63aac4cc 100644 
--- a/packer/plugin_folders.go +++ b/packer/plugin_folders.go @@ -16,10 +16,15 @@ import ( func PluginFolders(dirs ...string) []string { res := []string{} + if packerPluginPath := os.Getenv("PACKER_PLUGIN_PATH"); packerPluginPath != "" { + res = append(res, strings.Split(packerPluginPath, string(os.PathListSeparator))...) + return res + } + if path, err := os.Executable(); err != nil { log.Printf("[ERR] Error finding executable: %v", err) } else { - res = append(res, path) + res = append(res, filepath.Dir(path)) } res = append(res, dirs...) @@ -30,9 +35,5 @@ func PluginFolders(dirs ...string) []string { res = append(res, filepath.Join(cd, "plugins")) } - if packerPluginPath := os.Getenv("PACKER_PLUGIN_PATH"); packerPluginPath != "" { - res = append(res, strings.Split(packerPluginPath, string(os.PathListSeparator))...) - } - return res } diff --git a/packer/progressbar_test.go b/packer/progressbar_test.go index a50049cf908..4d4c3f22e66 100644 --- a/packer/progressbar_test.go +++ b/packer/progressbar_test.go @@ -5,7 +5,7 @@ package packer import ( "bytes" - "io/ioutil" + "io" "testing" "golang.org/x/sync/errgroup" @@ -16,10 +16,10 @@ import ( func TestProgressTracking_open_close(t *testing.T) { var bar *UiProgressBar - tracker := bar.TrackProgress("1,", 1, 42, ioutil.NopCloser(nil)) + tracker := bar.TrackProgress("1,", 1, 42, io.NopCloser(nil)) tracker.Close() - tracker = bar.TrackProgress("2,", 1, 42, ioutil.NopCloser(nil)) + tracker = bar.TrackProgress("2,", 1, 42, io.NopCloser(nil)) tracker.Close() } @@ -29,7 +29,7 @@ func TestProgressTracking_multi_open_close(t *testing.T) { for i := 0; i < 100; i++ { g.Go(func() error { - tracker := bar.TrackProgress("file,", 1, 42, ioutil.NopCloser(nil)) + tracker := bar.TrackProgress("file,", 1, 42, io.NopCloser(nil)) return tracker.Close() }) } @@ -46,7 +46,7 @@ func TestProgressTracking_races(t *testing.T) { g.Go(func() error { txt := []byte("foobarbaz dolores") b := bytes.NewReader(txt) - tracker := 
bar.TrackProgress("file,", 1, 42, ioutil.NopCloser(b)) + tracker := bar.TrackProgress("file,", 1, 42, io.NopCloser(b)) for i := 0; i < 42; i++ { tracker.Read([]byte("i")) diff --git a/packer/run_interfaces.go b/packer/run_interfaces.go index 5bf1151529c..253998c20a4 100644 --- a/packer/run_interfaces.go +++ b/packer/run_interfaces.go @@ -40,6 +40,12 @@ type InitializeOptions struct { SkipDatasourcesExecution bool } +type PluginBinaryDetector interface { + // DetectPluginBinaries is used only for HCL2 templates, and loads required + // plugins if specified. + DetectPluginBinaries() hcl.Diagnostics +} + // The Handler handles all Packer things. This interface reflects the Packer // commands, ex: init, console ( evaluate ), fix config, inspect config, etc. To // run a build we will start the builds and then the core of Packer handles @@ -53,6 +59,7 @@ type Handler interface { BuildGetter ConfigFixer ConfigInspector + PluginBinaryDetector } //go:generate enumer -type FixConfigMode diff --git a/packer/telemetry.go b/packer/telemetry.go index d4e20696649..1245ce5229a 100644 --- a/packer/telemetry.go +++ b/packer/telemetry.go @@ -26,7 +26,7 @@ const ( JSONTemplate PackerTemplateType = "JSON" ) -const TelemetryVersion string = "beta/packer/6" +const TelemetryVersion string = "beta/packer/7" const TelemetryPanicVersion string = "beta/packer_panic/4" var CheckpointReporter *CheckpointTelemetry @@ -37,6 +37,7 @@ type PackerReport struct { Error string `json:"error"` Command string `json:"command"` TemplateType PackerTemplateType `json:"template_type"` + UseBundled bool `json:"use_bundled"` } type CheckpointTelemetry struct { @@ -44,6 +45,7 @@ type CheckpointTelemetry struct { signatureFile string startTime time.Time templateType PackerTemplateType + useBundled bool } func NewCheckpointReporter(disableSignature bool) *CheckpointTelemetry { @@ -128,6 +130,14 @@ func (c *CheckpointTelemetry) SetTemplateType(t PackerTemplateType) { c.templateType = t } +// SetBundledUsage marks 
the template as using bundled plugins +func (c *CheckpointTelemetry) SetBundledUsage() { + if c == nil { + return + } + c.useBundled = true +} + func (c *CheckpointTelemetry) Finalize(command string, errCode int, err error) error { if c == nil { return nil @@ -145,6 +155,7 @@ func (c *CheckpointTelemetry) Finalize(command string, errCode int, err error) e extra.Error = err.Error() } + extra.UseBundled = c.useBundled extra.TemplateType = c.templateType params.Payload = extra // b, _ := json.MarshalIndent(params, "", " ") diff --git a/packer/telemetry_test.go b/packer/telemetry_test.go index 4d1dee3e7f5..a45db2bcb3e 100644 --- a/packer/telemetry_test.go +++ b/packer/telemetry_test.go @@ -4,6 +4,7 @@ package packer import ( + "errors" "testing" "github.com/stretchr/testify/assert" @@ -33,3 +34,24 @@ func TestFlattenConfigKeys_nested(t *testing.T) { "Input didn't flatten correctly.", ) } + +func TestCheckpointTelemetry(t *testing.T) { + defer func() { + if r := recover(); r != nil { + t.Error("a noop CheckpointTelemetry should not to panic but it did\n", r) + } + }() + + // A null CheckpointTelemetry obtained in Packer when the CHECKPOINT_DISABLE env var is set results in a NOOP reporter + // The null reporter can be executable as a configured reporter but does not report any telemetry data. 
+ var c *CheckpointTelemetry + c.SetTemplateType(HCL2Template) + c.SetBundledUsage() + c.AddSpan("mockprovisioner", "provisioner", nil) + if err := c.ReportPanic("Bogus Panic"); err != nil { + t.Errorf("calling ReportPanic on a nil checkpoint reporter should not error") + } + if err := c.Finalize("test", 1, errors.New("Bogus Error")); err != nil { + t.Errorf("calling Finalize on a nil checkpoint reporter should not error") + } +} diff --git a/packer/testing.go b/packer/testing.go index 0b2449593c2..2f01e18aafc 100644 --- a/packer/testing.go +++ b/packer/testing.go @@ -5,7 +5,7 @@ package packer import ( "bytes" - "io/ioutil" + "io" "testing" packersdk "github.com/hashicorp/packer-plugin-sdk/packer" @@ -40,8 +40,8 @@ func TestUi(t *testing.T) packersdk.Ui { var buf bytes.Buffer return &packersdk.BasicUi{ Reader: &buf, - Writer: ioutil.Discard, - ErrorWriter: ioutil.Discard, + Writer: io.Discard, + ErrorWriter: io.Discard, } } diff --git a/post-processor/checksum/post-processor_test.go b/post-processor/checksum/post-processor_test.go index 97ab201f7f8..eff52c929ea 100644 --- a/post-processor/checksum/post-processor_test.go +++ b/post-processor/checksum/post-processor_test.go @@ -7,7 +7,7 @@ import ( "bytes" "context" "fmt" - "io/ioutil" + "io" "os" "strings" "testing" @@ -38,7 +38,7 @@ func TestChecksumSHA1(t *testing.T) { if err != nil { t.Errorf("Unable to read checksum file: %s", err) } - if buf, _ := ioutil.ReadAll(f); !bytes.Equal(buf, []byte("d3486ae9136e7856bc42212385ea797094475802\tpackage.txt\n")) { + if buf, _ := io.ReadAll(f); !bytes.Equal(buf, []byte("d3486ae9136e7856bc42212385ea797094475802\tpackage.txt\n")) { t.Errorf("Failed to compute checksum: %s\n%s", buf, "d3486ae9136e7856bc42212385ea797094475802 package.txt") } diff --git a/post-processor/compress/benchmark.go b/post-processor/compress/benchmark.go index f853c85104f..cdbeb733f29 100644 --- a/post-processor/compress/benchmark.go +++ b/post-processor/compress/benchmark.go @@ -18,7 +18,7 @@ import ( 
"github.com/biogo/hts/bgzf" "github.com/klauspost/pgzip" - "github.com/pierrec/lz4" + "github.com/pierrec/lz4/v4" "github.com/ulikunitz/xz" ) @@ -190,7 +190,7 @@ func (c *Compressor) BenchmarkPGZIPReader(b *testing.B) { func (c *Compressor) BenchmarkLZ4Writer(b *testing.B) { cw := lz4.NewWriter(c.w) // cw.Header.HighCompression = true - cw.Header.NoChecksum = true + cw.Apply(lz4.ChecksumOption(false)) b.ResetTimer() _, err := io.Copy(cw, c.r) diff --git a/post-processor/compress/post-processor.go b/post-processor/compress/post-processor.go index 9d2a32e57d5..bf0cb1041d2 100644 --- a/post-processor/compress/post-processor.go +++ b/post-processor/compress/post-processor.go @@ -24,7 +24,7 @@ import ( "github.com/hashicorp/packer-plugin-sdk/template/config" "github.com/hashicorp/packer-plugin-sdk/template/interpolate" "github.com/klauspost/pgzip" - "github.com/pierrec/lz4" + "github.com/pierrec/lz4/v4" "github.com/ulikunitz/xz" ) @@ -333,8 +333,27 @@ func makeBZIP2Writer(output io.Writer, compressionLevel int) (io.WriteCloser, er func makeLZ4Writer(output io.WriteCloser, compressionLevel int) (io.WriteCloser, error) { lzwriter := lz4.NewWriter(output) - if compressionLevel > 0 { - lzwriter.Header.CompressionLevel = compressionLevel + if compressionLevel < 0 { + return lzwriter, nil + } + levels := map[int]lz4.CompressionLevel{ + 0: lz4.Fast, + 1: lz4.Level1, + 2: lz4.Level2, + 3: lz4.Level3, + 4: lz4.Level4, + 5: lz4.Level5, + 6: lz4.Level6, + 7: lz4.Level7, + 8: lz4.Level8, + 9: lz4.Level9, + } + level, ok := levels[compressionLevel] + if !ok { + return nil, ErrInvalidCompressionLevel + } + if err := lzwriter.Apply(lz4.CompressionLevelOption(level)); err != nil { + return nil, err } return lzwriter, nil } diff --git a/post-processor/compress/post-processor_test.go b/post-processor/compress/post-processor_test.go index f91a1178f2f..04aea52b1e5 100644 --- a/post-processor/compress/post-processor_test.go +++ b/post-processor/compress/post-processor_test.go @@ -9,7 +9,7 
@@ import ( "compress/gzip" "context" "fmt" - "io/ioutil" + "io" "os" "strings" "testing" @@ -18,7 +18,7 @@ import ( packersdk "github.com/hashicorp/packer-plugin-sdk/packer" "github.com/hashicorp/packer-plugin-sdk/template" "github.com/hashicorp/packer/builder/file" - "github.com/pierrec/lz4" + "github.com/pierrec/lz4/v4" ) func TestDetectFilename(t *testing.T) { @@ -84,7 +84,7 @@ func TestCompressOptions(t *testing.T) { filename := "package.gz" archive, _ := os.Open(filename) gzipReader, _ := gzip.NewReader(archive) - data, _ := ioutil.ReadAll(gzipReader) + data, _ := io.ReadAll(gzipReader) if string(data) != expectedFileContents { t.Errorf("Expected:\n%s\nFound:\n%s\n", expectedFileContents, data) @@ -115,7 +115,7 @@ func TestCompressInterpolation(t *testing.T) { } gzipReader, _ := gzip.NewReader(archive) - data, _ := ioutil.ReadAll(gzipReader) + data, _ := io.ReadAll(gzipReader) if string(data) != expectedFileContents { t.Errorf("Expected:\n%s\nFound:\n%s\n", expectedFileContents, data) @@ -194,7 +194,7 @@ func TestArchive(t *testing.T) { if err != nil { return nil, err } - return ioutil.ReadAll(bzipReader) + return io.ReadAll(bzipReader) }, "zip": func(archive *os.File) ([]byte, error) { fi, _ := archive.Stat() @@ -206,7 +206,7 @@ func TestArchive(t *testing.T) { if err != nil { return nil, err } - return ioutil.ReadAll(ctt) + return io.ReadAll(ctt) }, "tar": func(archive *os.File) ([]byte, error) { tarReader := tar.NewReader(archive) @@ -214,7 +214,7 @@ func TestArchive(t *testing.T) { if err != nil { return nil, err } - return ioutil.ReadAll(tarReader) + return io.ReadAll(tarReader) }, "tar.gz": func(archive *os.File) ([]byte, error) { gzipReader, err := gzip.NewReader(archive) @@ -226,15 +226,15 @@ func TestArchive(t *testing.T) { if err != nil { return nil, err } - return ioutil.ReadAll(tarReader) + return io.ReadAll(tarReader) }, "gz": func(archive *os.File) ([]byte, error) { gzipReader, _ := gzip.NewReader(archive) - return ioutil.ReadAll(gzipReader) + 
return io.ReadAll(gzipReader) }, "lz4": func(archive *os.File) ([]byte, error) { lz4Reader := lz4.NewReader(archive) - return ioutil.ReadAll(lz4Reader) + return io.ReadAll(lz4Reader) }, } diff --git a/post-processor/manifest/post-processor.go b/post-processor/manifest/post-processor.go index f911f734b5c..3063c51bdfe 100644 --- a/post-processor/manifest/post-processor.go +++ b/post-processor/manifest/post-processor.go @@ -10,7 +10,6 @@ import ( "context" "encoding/json" "fmt" - "io/ioutil" "log" "os" "path/filepath" @@ -143,7 +142,7 @@ func (p *PostProcessor) PostProcess(ctx context.Context, ui packersdk.Ui, source // Read the current manifest file from disk contents := []byte{} - if contents, err = ioutil.ReadFile(p.config.OutputPath); err != nil && !os.IsNotExist(err) { + if contents, err = os.ReadFile(p.config.OutputPath); err != nil && !os.IsNotExist(err) { return source, true, true, fmt.Errorf("Unable to open %s for reading: %s", p.config.OutputPath, err) } @@ -167,7 +166,7 @@ func (p *PostProcessor) PostProcess(ctx context.Context, ui packersdk.Ui, source // Write JSON to disk if out, err := json.MarshalIndent(manifestFile, "", " "); err == nil { - if err = ioutil.WriteFile(p.config.OutputPath, out, 0664); err != nil { + if err = os.WriteFile(p.config.OutputPath, out, 0664); err != nil { return source, true, true, fmt.Errorf("Unable to write %s: %s", p.config.OutputPath, err) } } else { diff --git a/post-processor/shell-local/post-processor_test.go b/post-processor/shell-local/post-processor_test.go index eb8481772d1..44d29f3cb2a 100644 --- a/post-processor/shell-local/post-processor_test.go +++ b/post-processor/shell-local/post-processor_test.go @@ -4,7 +4,6 @@ package shell_local import ( - "io/ioutil" "os" "runtime" "testing" @@ -95,7 +94,7 @@ func TestPostProcessorPrepare_Script(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } 
@@ -163,7 +162,7 @@ func TestPostProcessorPrepare_ScriptAndInline(t *testing.T) { } // Test with both - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -182,7 +181,7 @@ func TestPostProcessorPrepare_ScriptAndScripts(t *testing.T) { raws := testConfig() // Test with both - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -208,7 +207,7 @@ func TestPostProcessorPrepare_Scripts(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } diff --git a/provisioner/breakpoint/provisioner.go b/provisioner/breakpoint/provisioner.go index fc833fb47bc..893dc2d79c9 100644 --- a/provisioner/breakpoint/provisioner.go +++ b/provisioner/breakpoint/provisioner.go @@ -67,8 +67,7 @@ func (p *Provisioner) Provision(ctx context.Context, ui packersdk.Ui, comm packe ui.Say("Pausing at breakpoint provisioner.") } - message := fmt.Sprintf( - "Press enter to continue.") + message := "Press enter to continue." 
var g errgroup.Group result := make(chan string, 1) diff --git a/provisioner/file/provisioner_test.go b/provisioner/file/provisioner_test.go index e3ecd7294f7..8f54634b3f1 100644 --- a/provisioner/file/provisioner_test.go +++ b/provisioner/file/provisioner_test.go @@ -6,7 +6,6 @@ package file import ( "bytes" "context" - "io/ioutil" "os" "path/filepath" "regexp" @@ -62,7 +61,7 @@ func TestProvisionerPrepare_InvalidSource(t *testing.T) { func TestProvisionerPrepare_ValidSource(t *testing.T) { var p Provisioner - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -107,7 +106,7 @@ func TestProvisionerPrepare_EmptyDestination(t *testing.T) { func TestProvisionerProvision_SendsFile(t *testing.T) { var p Provisioner - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -195,7 +194,7 @@ func TestProvisionerProvision_SendsContent(t *testing.T) { func TestProvisionerProvision_SendsFileMultipleFiles(t *testing.T) { var p Provisioner - tf1, err := ioutil.TempFile("", "packer") + tf1, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -205,7 +204,7 @@ func TestProvisionerProvision_SendsFileMultipleFiles(t *testing.T) { t.Fatalf("error writing tempfile: %s", err) } - tf2, err := ioutil.TempFile("", "packer") + tf2, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -248,13 +247,13 @@ func TestProvisionerProvision_SendsFileMultipleDirs(t *testing.T) { var p Provisioner // Prepare the first directory - td1, err := ioutil.TempDir("", "packerdir") + td1, err := os.MkdirTemp("", "packerdir") if err != nil { t.Fatalf("error temp folder 1: %s", err) } defer os.Remove(td1) - tf1, err := ioutil.TempFile(td1, "packer") + tf1, err := os.CreateTemp(td1, "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -264,13 +263,13 @@ 
func TestProvisionerProvision_SendsFileMultipleDirs(t *testing.T) { } // Prepare the second directory - td2, err := ioutil.TempDir("", "packerdir") + td2, err := os.MkdirTemp("", "packerdir") if err != nil { t.Fatalf("error temp folder 1: %s", err) } defer os.Remove(td2) - tf2, err := ioutil.TempFile(td2, "packer") + tf2, err := os.CreateTemp(td2, "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -317,7 +316,7 @@ func TestProvisionerProvision_SendsFileMultipleDirs(t *testing.T) { func TestProvisionerProvision_DownloadsMultipleFilesToFolder(t *testing.T) { var p Provisioner - tf1, err := ioutil.TempFile("", "packer") + tf1, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -327,7 +326,7 @@ func TestProvisionerProvision_DownloadsMultipleFilesToFolder(t *testing.T) { t.Fatalf("error writing tempfile: %s", err) } - tf2, err := ioutil.TempFile("", "packer") + tf2, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -388,7 +387,7 @@ func TestProvisionerProvision_DownloadsMultipleFilesToFolder(t *testing.T) { func TestProvisionerProvision_SendsFileMultipleFilesToFolder(t *testing.T) { var p Provisioner - tf1, err := ioutil.TempFile("", "packer") + tf1, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -398,7 +397,7 @@ func TestProvisionerProvision_SendsFileMultipleFilesToFolder(t *testing.T) { t.Fatalf("error writing tempfile: %s", err) } - tf2, err := ioutil.TempFile("", "packer") + tf2, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -454,12 +453,12 @@ func TestProvisionDownloadMkdirAll(t *testing.T) { {"path/to/dir"}, {"path/to/dir/"}, } - tmpDir, err := ioutil.TempDir("", "packer-file") + tmpDir, err := os.MkdirTemp("", "packer-file") if err != nil { t.Fatalf("error tempdir: %s", err) } defer os.RemoveAll(tmpDir) - tf, err := ioutil.TempFile(tmpDir, "packer") + tf, err := 
os.CreateTemp(tmpDir, "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } diff --git a/provisioner/powershell/provisioner_acc_test.go b/provisioner/powershell/provisioner_acc_test.go index 379b14f9b8f..efe2a2789d5 100644 --- a/provisioner/powershell/provisioner_acc_test.go +++ b/provisioner/powershell/provisioner_acc_test.go @@ -5,7 +5,7 @@ package powershell_test import ( "fmt" - "io/ioutil" + "io" "os" "os/exec" "path/filepath" @@ -35,7 +35,7 @@ func LoadProvisionerFragment(templateFragmentPath string) (string, error) { } defer fragmentFile.Close() - fragmentString, err := ioutil.ReadAll(fragmentFile) + fragmentString, err := io.ReadAll(fragmentFile) if err != nil { return "", fmt.Errorf("Unable to read %s", fragmentAbsPath) } diff --git a/provisioner/powershell/provisioner_test.go b/provisioner/powershell/provisioner_test.go index d94a56fa23b..dd9f610feb6 100644 --- a/provisioner/powershell/provisioner_test.go +++ b/provisioner/powershell/provisioner_test.go @@ -7,7 +7,6 @@ import ( "bytes" "context" "fmt" - "io/ioutil" "os" "regexp" "strings" @@ -35,7 +34,7 @@ func TestProvisionerPrepare_extractScript(t *testing.T) { } // File contents should contain 2 lines concatenated by newlines: foo\nbar - readFile, err := ioutil.ReadFile(file) + readFile, err := os.ReadFile(file) expectedContents := "foo\nbar\n" if err != nil { t.Fatalf("Should not be error: %s", err) @@ -187,7 +186,7 @@ func TestProvisionerPrepare_Script(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -214,7 +213,7 @@ func TestProvisionerPrepare_ScriptAndInline(t *testing.T) { } // Test with both - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -234,7 +233,7 @@ func TestProvisionerPrepare_ScriptAndScripts(t *testing.T) { config := testConfig() // Test with both - tf, 
err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -261,7 +260,7 @@ func TestProvisionerPrepare_Scripts(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -512,7 +511,7 @@ func TestProvisionerProvision_Inline(t *testing.T) { } func TestProvisionerProvision_Scripts(t *testing.T) { - tempFile, _ := ioutil.TempFile("", "packer") + tempFile, _ := os.CreateTemp("", "packer") defer os.Remove(tempFile.Name()) defer tempFile.Close() @@ -542,7 +541,7 @@ func TestProvisionerProvision_Scripts(t *testing.T) { } func TestProvisionerProvision_ScriptsWithEnvVars(t *testing.T) { - tempFile, _ := ioutil.TempFile("", "packer") + tempFile, _ := os.CreateTemp("", "packer") ui := testUi() defer os.Remove(tempFile.Name()) defer tempFile.Close() @@ -579,7 +578,7 @@ func TestProvisionerProvision_ScriptsWithEnvVars(t *testing.T) { } func TestProvisionerProvision_SkipClean(t *testing.T) { - tempFile, _ := ioutil.TempFile("", "packer") + tempFile, _ := os.CreateTemp("", "packer") defer func() { tempFile.Close() os.Remove(tempFile.Name()) diff --git a/provisioner/shell-local/provisioner_acc_test.go b/provisioner/shell-local/provisioner_acc_test.go index f53990d0d2f..ad027ec7de2 100644 --- a/provisioner/shell-local/provisioner_acc_test.go +++ b/provisioner/shell-local/provisioner_acc_test.go @@ -5,7 +5,7 @@ package shell_test import ( "fmt" - "io/ioutil" + "io" "os" "os/exec" "path/filepath" @@ -31,7 +31,7 @@ func loadFile(templateFragmentPath string) (string, error) { } defer fragmentFile.Close() - fragmentString, err := ioutil.ReadAll(fragmentFile) + fragmentString, err := io.ReadAll(fragmentFile) if err != nil { return "", fmt.Errorf("Unable to read %s", fragmentAbsPath) } diff --git a/provisioner/shell/provisioner_acc_test.go b/provisioner/shell/provisioner_acc_test.go index 
8ffc58b8d14..ca7a32d8282 100644 --- a/provisioner/shell/provisioner_acc_test.go +++ b/provisioner/shell/provisioner_acc_test.go @@ -5,7 +5,7 @@ package shell_test import ( "fmt" - "io/ioutil" + "io" "os" "os/exec" "path/filepath" @@ -31,7 +31,7 @@ func loadFile(templateFragmentPath string) (string, error) { } defer fragmentFile.Close() - fragmentString, err := ioutil.ReadAll(fragmentFile) + fragmentString, err := io.ReadAll(fragmentFile) if err != nil { return "", fmt.Errorf("Unable to read %s", fragmentAbsPath) } diff --git a/provisioner/shell/provisioner_test.go b/provisioner/shell/provisioner_test.go index 73a4ac248f5..3e52593b4ca 100644 --- a/provisioner/shell/provisioner_test.go +++ b/provisioner/shell/provisioner_test.go @@ -4,7 +4,6 @@ package shell import ( - "io/ioutil" "os" "regexp" "strings" @@ -123,7 +122,7 @@ func TestProvisionerPrepare_Script(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -149,7 +148,7 @@ func TestProvisionerPrepare_ScriptAndInline(t *testing.T) { } // Test with both - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -168,7 +167,7 @@ func TestProvisionerPrepare_ScriptAndScripts(t *testing.T) { config := testConfig() // Test with both - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -194,7 +193,7 @@ func TestProvisionerPrepare_Scripts(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } diff --git a/provisioner/windows-shell/provisioner_test.go b/provisioner/windows-shell/provisioner_test.go index e01af9e7b06..ae97cc20334 100644 --- a/provisioner/windows-shell/provisioner_test.go +++ 
b/provisioner/windows-shell/provisioner_test.go @@ -6,7 +6,6 @@ package shell import ( "bytes" "context" - "io/ioutil" "log" "os" "strings" @@ -37,7 +36,7 @@ func TestProvisionerPrepare_extractScript(t *testing.T) { } // File contents should contain 2 lines concatenated by newlines: foo\nbar - readFile, err := ioutil.ReadFile(file) + readFile, err := os.ReadFile(file) if err != nil { t.Fatalf("Should not be error: %s", err) } @@ -102,7 +101,7 @@ func TestProvisionerPrepare_Script(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -129,7 +128,7 @@ func TestProvisionerPrepare_ScriptAndInline(t *testing.T) { } // Test with both - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -149,7 +148,7 @@ func TestProvisionerPrepare_ScriptAndScripts(t *testing.T) { config := testConfig() // Test with both - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -176,7 +175,7 @@ func TestProvisionerPrepare_Scripts(t *testing.T) { } // Test with a good one - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -325,7 +324,7 @@ func TestProvisionerProvision_Inline(t *testing.T) { } func TestProvisionerProvision_Scripts(t *testing.T) { - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } @@ -357,7 +356,7 @@ func TestProvisionerProvision_Scripts(t *testing.T) { } func TestProvisionerProvision_ScriptsWithEnvVars(t *testing.T) { - tf, err := ioutil.TempFile("", "packer") + tf, err := os.CreateTemp("", "packer") if err != nil { t.Fatalf("error tempfile: %s", err) } diff --git a/scripts/generate-plugins.go 
b/scripts/generate-plugins.go index e52c063b3fe..669da9d3f32 100755 --- a/scripts/generate-plugins.go +++ b/scripts/generate-plugins.go @@ -12,7 +12,6 @@ import ( "go/ast" "go/parser" "go/token" - "io/ioutil" "log" "os" "path/filepath" @@ -142,7 +141,7 @@ func makeImports(builders, provisioners, postProcessors, Datasources []plugin) s // listDirectories recursively lists directories under the specified path func listDirectories(path string) ([]string, error) { names := []string{} - items, err := ioutil.ReadDir(path) + items, err := os.ReadDir(path) if err != nil { return names, err } diff --git a/version/VERSION b/version/VERSION index f8e233b2733..6ecac68129f 100644 --- a/version/VERSION +++ b/version/VERSION @@ -1 +1 @@ -1.9.0 +1.9.5 \ No newline at end of file diff --git a/version/version.go b/version/version.go index e389cebd2e7..144c68eb324 100644 --- a/version/version.go +++ b/version/version.go @@ -22,7 +22,7 @@ var ( // A pre-release marker for the version can also be specified (e.g -dev). If this is omitted // The main version number that is being run at the moment. - Version = "1.9.0" + Version = "1.9.5" // A pre-release marker for the version. If this is "" (empty string) // then it means that it is a final release. Otherwise, this is a pre-release diff --git a/website/content/docs/commands/init.mdx b/website/content/docs/commands/init.mdx index 52b72eb8151..32008db86bd 100644 --- a/website/content/docs/commands/init.mdx +++ b/website/content/docs/commands/init.mdx @@ -19,6 +19,26 @@ the first command that should be executed when working with a new or existing template. This command is always safe to run multiple times. Though subsequent runs may give errors, this command will never delete anything. +You should invoke `packer init` on either an HCL2 template, or a directory that contains +at least a valid HCL2 template, and eventually other related dependencies like varfiles +for example. + +Example: + +```sh +$ ls . 
+template.pkr.hcl varfile.pkrvars.pkr.hcl + +$ packer init template.pkr.hcl # You can invoke packer init on a single template in this case + # This works if the template is self-contained, but may fail if + # the template is meant to be built as a bundle of partials. + +$ packer init . # Alternatively, you can invoke packer init on a directory instead, + # which behaves the same in a configuration like this one, but if + # the target is a collection of HCL2 templates, this is the + # preferred way to invoke it. +``` + Packer does not currently have the notion of a state like Terraform has. In other words, currently `packer init` is only in charge of installing Packer plugins. @@ -67,18 +87,6 @@ Directory](/packer/docs/configure#packer-s-plugin-directory). See [Installing Plugins](/packer/docs/plugins#installing-plugins) for more information on how plugin installation works. -### Implicit required plugin - -This is part of a set of breaking changes made to decouple Packer releases from -plugin releases. To make the transition easier, we will tag components of these -plugins as "moved out". If one of the components of a moved out plugin is used -in a config file, but there is no mention of that plugin in the -"required_plugin" block, then Packer init will automatically download and -install that plugin. Packer will then display a warning and suggest that you -add the plugin to your required_plugin block. We recommend you use the -required_plugin block even if you are only using official plugins, because it -allows you to set the plugin version to avoid surprises in the future. 
- ## Options - `-upgrade` - On top of installing missing plugins, update installed plugins to diff --git a/website/content/community-tools.mdx b/website/content/docs/community-tools.mdx similarity index 100% rename from website/content/community-tools.mdx rename to website/content/docs/community-tools.mdx diff --git a/website/content/docs/configure.mdx b/website/content/docs/configure.mdx index cce9f1b554b..18eeb409396 100644 --- a/website/content/docs/configure.mdx +++ b/website/content/docs/configure.mdx @@ -13,69 +13,43 @@ so you generally don't have to worry about it until you want to tweak a configuration. If you're just getting started with Packer, don't worry about core configuration for now. -## Packer's home directory - -Plugins and core configuration files can exist in the home directory of Packer. -The home directory of Packer will be the first one of the following env values -to be set : - -| Unix | Windows | -| ---------------------- | --------------------- | -| `${PACKER_CONFIG_DIR}` | `%PACKER_CONFIG_DIR%` | -| `${APPDATA}` | `%APPDATA%` | -| `${HOME}` | `%HOME%` | -| user dir of `${USER}` | user dir of `${USER}` | - --> Note: On this page "Packer's home directory" will be referenced as -`PACKER_HOME_DIR`. - -## Packer's config file - -Packer can optionally read a JSON file for the end user to set core settings. -The config file of Packer will be looked up on the following paths: - -| Unix | Windows | -| -------------------------------- | -------------------------------- | -| `${PACKER_CONFIG}` | `%PACKER_CONFIG%` | -| `PACKER_HOME_DIR/.packerconfig` | `PACKER_HOME_DIR/packer.config/` | -| `${XDG_CONFIG_HOME}/packer` | | -| `PACKER_HOME_DIR/.config/packer` | | - ## Packer's config directory Packer's configuration directory can potentially contain plugins and internal -Packer files. The config dir of Packer will be looked up on the following paths: +Packer files. 
The Packer config directory will be looked up on the following paths: | Unix | Windows | | --------------------------- | --------------------------- | -| `PACKER_HOME_DIR/.packer.d` | `PACKER_HOME_DIR/packer.d/` | +| `${HOME}/.config/packer/` | `%APPDATA%\packer.d\` | + +-> **Note:** On Unix systems, Packer defaults to using the XDG base directory specification. +When the environment variable `PACKER_CONFIG_DIR` is unset or empty a default equal to `$HOME/.config/packer` should be used. +In all other cases, where there is an existing older style `.packer.d` directory (e.g `$HOME/.packer.d/`) or PACKER_CONFIG_DIR is not empty +the older configuration directory will be used. Examples: -- On a Unix system, if the `$PACKER_CONFIG_DIR` env var is set to +- On a Unix system, if the `$PACKER_CONFIG_DIR` environment variable is set to `/home/packer`, the config directory will be: `/home/packer/.packer.d/` and other values will not be checked. -- On a Unix system, if the `HOME` env var is `/home/azr` or the `USER` env var - is `azr`, then the config directory will default to `/home/azr/.packer.d/`. -- On a Windows system, if the `PACKER_CONFIG_DIR` env var is set to `C:/`,the +- On a Windows system, if the `PACKER_CONFIG_DIR` environment variable is set to `C:/`,the config directory will be: `C:/packer.d/` and other values will not be checked. -## Packer's plugin directory + -@include "plugins/plugin-location.mdx" +## Packer's config file (deprecated) -The format of the configuration file is basic JSON. +Packer can optionally read a JSON file for the end user to set core settings. +The config file of Packer will be looked up on the following paths: -## Packer's cache directory +| Unix | Windows | +| -------------------------------- | --------------------------------- | +| `${PACKER_CONFIG}` | `%PACKER_CONFIG%` | +| `${HOME}/.packerconfig` | `%APPDATA%\packer.config\` | -Packer uses a cache directory to download large files and for logistics around -large file download. 
By default, Packer caches things in the current directory, -under: `./packer_cache/`. This can be changed by setting the `PACKER_CACHE_DIR` -env var. It is recommended to share the same Packer cache dir across your -builds if you have multiple builds doing similar things to avoid downloading the -same ISO twice for example. +The format of the configuration file is basic JSON. -## Packer config file configuration Reference +### Packer config file configuration Reference Below is the list of all available configuration parameters for the core configuration file. None of these are required, since all have defaults. @@ -94,13 +68,20 @@ configuration file. None of these are required, since all have defaults. and the [`packer init`](/packer/docs/commands/init) command to install plugins; if you are using both, the `required_plugin` config will take precedence. -### HCP Packer Configuration +## Packer's plugin directory -You can configure both legacy JSON and HCL2 Packer templates to publish image metadata to an active HCP Packer registry. The HCP Packer registry helps you track information about machine images, clearly designate which images are appropriate for test and production environments, and query the right images to use in both Packer and Terraform configurations. +@include "plugins/plugin-location.mdx" -For complete configuration details and examples, refer to [Packer Template Configuration](/hcp/docs/packer/store-image-metadata/template-configuration) in the HCP Packer documentation. +## Packer's cache directory + +Packer uses a cache directory to download large files and for logistics around +large file download. By default, Packer caches things in the current directory, +under: `./packer_cache/`. This can be changed by setting the `PACKER_CACHE_DIR` +env var. It is recommended to share the same Packer cache directory across your +builds if you have multiple builds doing similar things to avoid downloading the +same ISO twice for example. 
-## Full list of Environment Variables usable for Packer
+## Environment Variables usable for Packer
 
 Packer uses a variety of environmental variables. A listing and description of each can be
 found below:
diff --git a/website/content/docs/hcp/index.mdx b/website/content/docs/hcp/index.mdx
index de58397e8bf..d908cca70cc 100644
--- a/website/content/docs/hcp/index.mdx
+++ b/website/content/docs/hcp/index.mdx
@@ -4,6 +4,8 @@ description: |
 page_title: HCP Packer
 ---
 
+-> **Note:** On May 16th 2023, HCP introduced multi-project support to the platform. In order to use multiple projects in your organization, you will need to update Packer to version 1.9.1 or above. Starting with 1.9.1, you may specify a project ID to push builds to with the `HCP_PROJECT_ID` environment variable. If no project ID is specified, Packer will pick the project with the oldest creation date. Older versions of Packer are incompatible with multi-project support on HCP, and builds will fail for HCP organizations with multiple projects on versions before 1.9.1.
+
 # HCP Packer
 
 The HCP Packer registry bridges the gap between image factories and image deployments, allowing development and security teams to work together to create, manage, and consume images in a centralized way.
@@ -33,6 +35,12 @@ You can set these additional environment variables to control how metadata is pu
 
 - `HCP_PACKER_REGISTRY` - When set, Packer does not push image metadata to HCP Packer from an otherwise configured template. Allowed values are [0|OFF].
 
+- `HCP_ORGANIZATION_ID` - The ID of the HCP organization linked to your service principal. This environment variable is not required and is available for the sole purpose of keeping parity with the HCP SDK authentication options. Its use may change in a future release.
+
+- `HCP_PROJECT_ID` - The ID of the HCP project to use. This is useful if your service principal has access to multiple projects, as by default Packer will pick the one created first as target.
+ +-> **Note**: The HCP_PROJECT_ID environment variable must be set if you're authenticating with a project-level service principal, otherwise Packer will attempt to get the list of projects for an organization and error due to a lack of permissions for a project-level service principal. This is supported starting with Packer 1.9.3; older versions of Packer do not support using project-level service principals. + ### HCP Packer Registry Block The only metadata that Packer can infer from a template with the basic configuration are the build name and build fingerprint. For HCL2 templates, we recommend adding the `hcp_packer_registry` block to your template so that you can customize the metadata that Packer sends to the registry. @@ -45,6 +53,9 @@ Refer to [`hcp_packer_registry`](/packer/docs/templates/hcl_templates/blocks/bui Packer uses a unique fingerprint for tracking the completion of builds associated to an iteration. By default a fingerprint is automatically generated by Packer during each invocation of `packer build`, unless a fingerprint is manually provided via the `HCP_PACKER_BUILD_FINGERPRINT` environment variable. +In versions before 1.9.0, this fingerprint was computed from the Git SHA of the current HEAD in which your template is stored. If you were running builds using a non Git managed template, you had to set the `HCP_PACKER_BUILD_FINGERPRINT` environment variable prior to invoking `packer build`. +Starting with Packer 1.9.0, fingerprint generation does not rely on Git at all, and instead Packer now generates a Unique Lexicographically sortable Identifier (ULID) as the fingerprint for every `packer build` invocation. + #### Fingerprints and Incomplete Iterations When you build a template with Packer, there's always a chance that it does not succeed because of a network issue, a provisioning failure, or some upstream error. 
When that happens, Packer will output the generated fingerprint associated with the incomplete iteration so that you can resume building that iteration using the `HCP_PACKER_BUILD_FINGERPRINT` environment variable; an iteration can be resumed until it is marked as complete. This environment variable is necessary for resuming an incomplete iteration, otherwise Packer will create a new iteration for the build. diff --git a/website/content/docs/plugins/install-plugins.mdx b/website/content/docs/plugins/install-plugins.mdx index 86ec7c5c15f..0460e6c561c 100644 --- a/website/content/docs/plugins/install-plugins.mdx +++ b/website/content/docs/plugins/install-plugins.mdx @@ -13,6 +13,10 @@ post-processor components that ship with the Packer binary. Packer automatically This page explains how to install custom external plugins. Refer to [External Plugins](/packer/plugins) for a list of available plugins and their documentation. +## Plugin Loading Order + +@include "plugins/plugin-location.mdx" + ## Installation Guides Choose the tab that corresponds to the type of plugin you want to install. If you are not sure, check the plugin's name. @@ -22,10 +26,10 @@ Choose the tab that corresponds to the type of plugin you want to install. If yo -~> **Note**: Only _multi-component plugin binaries_ -- that is plugins named +-> **Note:** Only _multi-component plugin binaries_ -- plugins named packer-plugin-\*, like the `packer-plugin-amazon` -- are expected to work with Packer init. The legacy `builder`, `post-processor` and `provisioner` plugin -types will keep on being detected but Packer cannot install them automatically. +types will continue to be detected but Packer cannot install them automatically. If a plugin you use has not been upgraded to use the multi-component plugin architecture, contact your maintainer to request an upgrade. 
@@ -64,9 +68,8 @@ packer {
 
 Each plugin has two identifiers:
 
-- A `source` address, which is only necessary when requiring a plugin outside the HashiCorp domain.
-- A unique **local name**, which is used everywhere else in a Packer
-  configuration.
+- A `source` address, which is necessary when requiring a plugin not bundled with the Packer binary.
+- A unique **local name**, which is used everywhere else in a Packer configuration.
 
 ## Local Names
 
@@ -135,7 +138,8 @@ follows:
 
 For example, the fictional `myawesomecloud` plugin could belong to the
 `hashicorp` namespace on `github.com`, so its `source` could be
 `github.com/hashicorp/myawesomecloud`,
-Note: the actual _repository_ that myawesomecloud comes from must always have
+
+-> Note: the actual _repository_ that myawesomecloud comes from must always have
 the name format `github.com/hashicorp/packer-plugin-myawesomecloud`, but the
 `required_plugins` block omits the redundant `packer-plugin-` repository prefix
 for brevity.
@@ -144,9 +148,34 @@ The source address with all three components given explicitly is called the
 plugin's _fully-qualified address_. You will see fully-qualified address in
 various outputs, like error messages.
 
-## Plugin location
+## Plugin Installation Workflow
 
-@include "plugins/plugin-location.mdx"
+* [`packer init`](/packer/docs/commands/init) will install plugins in the **last** directory
+in the following numbered list.
+
+1. `PACKER_PLUGIN_PATH` if set will be the sole location for installing plugins. All other
+plugin directories will be ignored.
+1. `PACKER_CONFIG_DIR`\plugins on Windows systems, or `PACKER_CONFIG_DIR`/plugins on all other systems.
+
+* During the initialization of Packer, any plugin required in the
+**`required_plugins`** section will be looked up in all entries of the following
+list. **First** plugin found takes precedence. Two binaries of the same plugin
+with two different versions will be considered as two different plugins.
Highest +found version matching `required_plugins` will be taken into consideration. + +During initialization, on a `darwin_amd64` system, Packer will look-up for the +following files: + +* `PACKER_PLUGIN_PATH/github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64` +* `PACKER_CONFIG_DIR/plugins/github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64` + +The first plugin-name/version files found will take precedence. + +For plugins located under the `github.com/azr/happycloud/` directory structure an accompanying SHA256SUM file +will be required in order for `packer init` to ensure the plugin being loaded has not been tampered with. +The SHA256SUM file will be automatically generated when a plugin is installed via `packer init` if the plugin +was installed manually into `PACKER_CONFIG_DIR/plugins/github.com/azr/happycloud/` then the file +`PACKER_CONFIG_DIR/plugins/github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64_SHA256SUM` must be generated manually as well. ## Implicit Github urls @@ -171,37 +200,32 @@ will avoid conflicting with other plugins for other tools, like Terraform. --> The [`packer plugins`](/packer/docs/commands/plugins) command allows to install plugins without going through -`init`. For manual installation of plugin binaries, without the `packer plugins` command, please continue reading. +-> The [`packer plugins`](/packer/docs/commands/plugins), available from Packer v1.8.0, command allows +you to install plugins without going through `init`. -The easiest way to manually install a plugin is to name it correctly, then place -it in the proper directory. To name a plugin correctly, make sure the binary is -named `packer-plugin-NAME`. For example, `packer-plugin-amazon` for a "plugin" -binary named "amazon". This binary will make one or more components available to -use. Valid types for plugins are down this page. 
-
-Once the plugin is named properly, Packer automatically discovers plugins in
-the following directories in the given order. If a conflicting plugin is found
-later, it will take precedence over one found earlier.
+```shell
+packer plugins install github.com/hashicorp/vagrant
+```
 
-1. The directory where `packer` is, or the executable directory.
+## Plugin Installation Workflow
+Plugin installation via `packer plugins install` works similarly to that of the `packer init` command, with the following
+exceptions: no `required_plugins` block is required, and it can be used with both legacy JSON and HCL2 templates.
 
-2. The `$HOME/.packer.d/plugins` directory, if `$HOME` is defined (Unix)
+* [`packer plugins install`](/packer/docs/commands/plugins) will install plugins in the **last** directory
+in the following numbered list.
 
-3. The `%APPDATA%/packer.d/plugins` if `%APPDATA%` is defined (Windows)
+1. `PACKER_PLUGIN_PATH` if set will be the sole location for installing plugins. All other
+plugin directories will be ignored.
+1. `PACKER_CONFIG_DIR`\plugins on Windows systems, or `PACKER_CONFIG_DIR`/plugins on all other systems.
 
-4. The `%USERPROFILE%/packer.d/plugins` if `%USERPROFILE%` is defined
-   (Windows)
-5. The current working directory.
+For manual installation of plugin binaries, without the `packer plugins` command, please continue reading.
 
-6. The directory defined in the env var `PACKER_PLUGIN_PATH`. There can be more
-   than one directory defined; for example, `~/custom-dir-1:~/custom-dir-2`.
-   Separate directories in the PATH string using a colon (`:`) on POSIX systems and
-   a semicolon (`;`) on Windows systems. The above example path would be able to
-   find a provisioner named `packer-provisioner-foo` in either
-   `~/custom-dir-1/packer-provisioner-foo` or
-   `~/custom-dir-2/packer-provisioner-foo`.
+The easiest way to manually install a plugin is to name it correctly, then place
+it in the proper directory.
To name a plugin correctly, make sure the binary is +named `packer-plugin-NAME`. For example, `packer-plugin-amazon` for a "plugin" +binary named "amazon". This binary will make one or more components available to +use. Valid types for plugins are down this page. The valid types for plugins are: @@ -217,6 +241,7 @@ The valid types for plugins are: - `provisioner` - A provisioner to install software on images created by a builder. + @@ -226,29 +251,6 @@ named `packer-COMPONENT-NAME`. For example, `packer-provisioner-comment` for a " binary named "comment". This binary will make a single provisioner named `comment` available to use. Valid types for plugins are down this page. -Once the plugin is named properly, Packer automatically discovers plugins in -the following directories in the given order. If a conflicting plugin is found -later, it will take precedence over one found earlier. - -1. The directory where `packer` is, or the executable directory. - -2. The `$HOME/.packer.d/plugins` directory, if `$HOME` is defined (Unix) - -3. The `%APPDATA%/packer.d/plugins` if `%APPDATA%` is defined (Windows) - -4. The `%USERPROFILE%/packer.d/plugins` if `%USERPROFILE%` is defined - (Windows) - -5. The current working directory. - -6. The directory defined in the env var `PACKER_PLUGIN_PATH`. There can be more - than one directory defined; for example, `~/custom-dir-1:~/custom-dir-2`. - Separate directories in the PATH string using a colon (`:`) on POSIX systems and - a semicolon (`;`) on Windows systems. The above example path would be able to - find a provisioner named `packer-provisioner-foo` in either - `~/custom-dir-1/packer-provisioner-foo` or - `~/custom-dir-2/packer-provisioner-foo`. - The valid types for plugins are: - `plugin` - A plugin binary that can contain one or more of each Packer component @@ -264,4 +266,4 @@ The valid types for plugins are: builder. 
- \ No newline at end of file + diff --git a/website/content/docs/templates/hcl_templates/functions/collection/lookup.mdx b/website/content/docs/templates/hcl_templates/functions/collection/lookup.mdx index a936d02a70c..0eb686063f1 100644 --- a/website/content/docs/templates/hcl_templates/functions/collection/lookup.mdx +++ b/website/content/docs/templates/hcl_templates/functions/collection/lookup.mdx @@ -6,7 +6,7 @@ description: The lookup function retrieves an element value from a map given its # `lookup` Function `lookup` retrieves the value of a single element from a map, given its key. -If the given key does not exist, a the given default value is returned instead. +If the given key does not exist, the given default value is returned instead. ```hcl lookup(map, key, default) diff --git a/website/content/docs/templates/hcl_templates/functions/contextual/vault.mdx b/website/content/docs/templates/hcl_templates/functions/contextual/vault.mdx index f36e8f65823..8136a10114d 100644 --- a/website/content/docs/templates/hcl_templates/functions/contextual/vault.mdx +++ b/website/content/docs/templates/hcl_templates/functions/contextual/vault.mdx @@ -49,11 +49,12 @@ local "foo" { expression = vault("secrets/hello", "foo") sensitive = true } +``` The `local` block example accesses the Vault path `secrets/foo` and returns the value stored at the key `foo`, storing it as the local variable `local.foo`. However, the output of the newly stored local variable will be filtered from the Packer build output, and replaced -with the value ''. See [Local Values](/docs/templates/hcl_templates/locals) for more details. +with the value ``. See [Local Values](/packer/docs/templates/hcl_templates/locals) for more details. 
## Usage diff --git a/website/content/partials/plugins/plugin-location.mdx b/website/content/partials/plugins/plugin-location.mdx index eec78bf10b9..d8e997c7d44 100644 --- a/website/content/partials/plugins/plugin-location.mdx +++ b/website/content/partials/plugins/plugin-location.mdx @@ -1,58 +1,25 @@ -Plugins will usually be located in the -[PACKER_HOME_DIR](/packer/docs/configure#packer-s-home-directory). +Upon the initialization of Packer, any externally installed plugin will be automatically +discovered and loaded. -* [`packer init`](/packer/docs/commands/init) will install plugins in the **last** directory -in the following numbered list. +Packer plugins will usually be located within a plugins sub-directory under Packer's main config directory +[PACKER_CONFIG_DIR](/packer/docs/configure#packer-s-config-directory). If `PACKER_CONFIG_DIR` is +either not set or empty, a default equal to `$HOME/.config/packer/plugins` on UNIX, or `%APPDATA%\packer.d\plugins` +for Windows, will be used. -* During the initialization of Packer, any plugin required in the -**`required_plugins`** section will be looked up in all entries of the following -list. **First** plugin found takes precedence. Two binaries of the same plugin -with two different version will be considered as two different plugins. Highest -found version matching `required_plugins` will be taken into consideration. +Where applicable, some installation processes such as `packer init` may override the plugin loading process. +Refer to the specific installation guides for any plugin loading overrides. -1. The directory where `packer` is, or the executable directory. -1. The current working directory. (`"."`) -1. The `PACKER_HOME_DIR/plugins` directory. `PACKER_HOME_DIR` refers to *[Packer's home -directory](/packer/docs/configure#packer-s-home-directory)*, if it could be found. -1. The director(y/ies) under the `PACKER_PLUGIN_PATH` env var, if `PACKER_PLUGIN_PATH` -is set. 
+
+Packer uses the following process for loading the correct plugin:
 
-~> **Note**: There can be more than one directory in the `PACKER_PLUGIN_PATH`
-env var, it will be seperated by a semicolon (`;`) on Windows systems and a
-colon (`:`) on other systems. The order priority will be kept.
+1. All directories under the `PACKER_PLUGIN_PATH` environment variable, if `PACKER_PLUGIN_PATH`
+is set. The `PACKER_PLUGIN_PATH` takes precedence over all other plugin directories; no other directories will be checked.
+1. The directory where `packer` is installed, or the executable directory.
+1. The current working directory, where `packer build` is being invoked. (`"."`)
+1. The `PACKER_CONFIG_DIR/plugins` directory. `PACKER_CONFIG_DIR` refers to *[Packer's config
+directory](/packer/docs/configure#packer-s-config-directory)*, if it could be found.
 
-Using the following example :
-```hcl
-    required_plugins {
-        happycloud = {
-            version = ">= 2.7.0"
-            source  = "github.com/azr/happycloud"
-        }
-    }
-```
+-> **Note:** The `PACKER_PLUGIN_PATH` environment variable can be set to more than one directory;
+for example, ~/custom-dir-1:~/custom-dir-2. Separate directories in the PATH string using a colon (:) on UNIX systems
+and a semicolon (;) on Windows systems. The above example path would be able to find a single or multi-component plugin
+in either `~/custom-dir-1/packer/` or `~/custom-dir-2/`.
 
-The plugin getter will then install the binaries in the following location for a
-system with no `PACKER_PLUGIN_PATH` env var set.
- -* `PACKER_HOME_DIR/plugins/github.com/hashicorp/happycloud/` - -During initialization, on a `darwin_amd64` system, Packer will look-up for the -following files: - -* `PACKER_EXEC_DIR/github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64` -* `./github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64` -* `PACKER_HOME_DIR/plugins/github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64` -* `PACKER_PLUGIN_PATH/github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64` -* `./packer-plugin-happycloud` - -The first plugin-name/version files found will take precedence. - -For plugins located under the `github.com/azr/happycloud/` directory structure an accompanying SHA256SUM file -will be required in order for `packer init` to ensure the plugin being loaded has not been tampered with. -The SHA256SUM file will be automatically generated when a plugin is installed via `packer init` if the plugin -was installed manually into `PACKER_HOME_DIR/plugins/github.com/azr/happycloud/` then the file -`PACKER_HOME_DIR/plugins/github.com/azr/happycloud/packer-plugin-happycloud_*_x5.0_darwin_amd64_SHA256SUM` must be generated manually as well. - --> Note: `PACKER_HOME_DIR` is not an actual env var and refers to [Packer's home -directory](#packer-s-home-directory). `PACKER_EXEC_DIR` is not an actual env var -and refers to the directory where `packer` is, or the executable directory. 
diff --git a/website/data/docs-nav-data.json b/website/data/docs-nav-data.json index e1de91658f1..7c7e110d518 100644 --- a/website/data/docs-nav-data.json +++ b/website/data/docs-nav-data.json @@ -887,6 +887,10 @@ "title": "Integration Program", "path": "partnerships" }, + { + "title": "Community Tools", + "path": "community-tools" + }, { "divider": true }, diff --git a/website/data/plugins-manifest.json b/website/data/plugins-manifest.json index ee9c1f5daf5..63d9d26998b 100644 --- a/website/data/plugins-manifest.json +++ b/website/data/plugins-manifest.json @@ -1,18 +1,4 @@ [ - { - "title": "1&1", - "path": "oneandone", - "repo": "hashicorp/packer-plugin-oneandone", - "pluginTier": "community", - "version": "latest" - }, - { - "title": "Alibaba Cloud", - "path": "alicloud", - "repo": "hashicorp/packer-plugin-alicloud", - "pluginTier": "community", - "version": "latest" - }, { "title": "Anka", "path": "anka", @@ -21,49 +7,6 @@ "sourceBranch": "master", "version": "latest" }, - { - "title": "Ansible", - "path": "ansible", - "repo": "hashicorp/packer-plugin-ansible", - "version": "latest" - }, - { - "title": "Amazon EC2", - "path": "amazon", - "repo": "hashicorp/packer-plugin-amazon", - "version": "latest", - "isHcpPackerReady": true - }, - { - "title": "Azure", - "path": "azure", - "repo": "hashicorp/packer-plugin-azure", - "version": "latest", - "isHcpPackerReady": true - }, - { - "title": "Chef", - "path": "chef", - "repo": "hashicorp/packer-plugin-chef", - "pluginTier": "community", - "version": "latest", - "archived": true - }, - { - "title": "CloudStack", - "path": "cloudstack", - "repo": "hashicorp/packer-plugin-cloudstack", - "pluginTier": "community", - "version": "latest" - }, - { - "title": "Converge", - "path": "converge", - "repo": "hashicorp/packer-plugin-converge", - "pluginTier": "community", - "version": "latest", - "archived": true - }, { "title": "DigitalOcean", "path": "digitalocean", @@ -72,13 +15,6 @@ "version": "latest", "isHcpPackerReady": 
true }, - { - "title": "Docker", - "path": "docker", - "repo": "hashicorp/packer-plugin-docker", - "version": "latest", - "isHcpPackerReady": true - }, { "title": "External", "path": "external", @@ -93,13 +29,6 @@ "version": "latest", "sourceBranch": "main" }, - { - "title": "Google Cloud Platform", - "path": "googlecompute", - "repo": "hashicorp/packer-plugin-googlecompute", - "version": "latest", - "isHcpPackerReady": true - }, { "title": "Gridscale", "path": "gridscale", @@ -108,49 +37,6 @@ "pluginTier": "verified", "isHcpPackerReady": false }, - { - "title": "HashiCups", - "path": "hashicups", - "repo": "hashicorp/packer-plugin-hashicups", - "version": "latest", - "isHcpPackerReady": false - }, - { - "title": "Hetzner Cloud", - "path": "hetzner-cloud", - "repo": "hashicorp/packer-plugin-hcloud", - "version": "latest", - "pluginTier": "community" - }, - { - "title": "HyperOne", - "path": "hyperone", - "repo": "hashicorp/packer-plugin-hyperone", - "version": "latest", - "pluginTier": "community" - }, - { - "title": "Hyper-V", - "path": "hyperv", - "repo": "hashicorp/packer-plugin-hyperv", - "version": "latest", - "pluginTier": "community" - }, - { - "title": "InSpec", - "path": "inspec", - "repo": "hashicorp/packer-plugin-inspec", - "pluginTier": "community", - "version": "latest", - "archived": true - }, - { - "title": "JD Cloud", - "path": "jdcloud", - "repo": "hashicorp/packer-plugin-jdcloud", - "pluginTier": "community", - "version": "latest" - }, { "title": "Kamatera", "path": "kamatera", @@ -173,20 +59,6 @@ "pluginTier": "community", "version": "latest" }, - { - "title": "LXC", - "path": "lxc", - "repo": "hashicorp/packer-plugin-lxc", - "pluginTier": "community", - "version": "latest" - }, - { - "title": "LXD", - "path": "lxd", - "repo": "hashicorp/packer-plugin-lxd", - "pluginTier": "community", - "version": "latest" - }, { "title": "Mondoo", "path": "mondoo", @@ -194,13 +66,6 @@ "pluginTier": "verified", "version": "latest" }, - { - "title": "Naver 
Cloud", - "path": "ncloud", - "repo": "hashicorp/packer-plugin-ncloud", - "pluginTier": "community", - "version": "latest" - }, { "title": "Nutanix", "path": "nutanix", @@ -209,21 +74,6 @@ "pluginTier": "verified", "sourceBranch": "main" }, - { - "title": "OpenStack", - "path": "openstack", - "repo": "hashicorp/packer-plugin-openstack", - "pluginTier": "community", - "version": "latest" - }, - { - "title": "Oracle", - "path": "oracle", - "repo": "hashicorp/packer-plugin-oracle", - "pluginTier": "community", - "version": "latest", - "isHcpPackerReady": true - }, { "title": "Outscale", "path": "outscale", @@ -235,44 +85,9 @@ { "title": "Parallels", "path": "parallels", - "repo": "hashicorp/packer-plugin-parallels", - "version": "latest" - }, - { - "title": "Profitbricks", - "path": "profitbricks", - "repo": "hashicorp/packer-plugin-profitbricks", - "pluginTier": "community", - "version": "latest" - }, - { - "title": "Proxmox", - "path": "proxmox", - "repo": "hashicorp/packer-plugin-proxmox", - "pluginTier": "community", - "version": "latest" - }, - { - "title": "Puppet", - "path": "puppet", - "repo": "hashicorp/packer-plugin-puppet", - "version": "latest", - "pluginTier": "community", - "archived": true - }, - { - "title": "QEMU", - "path": "qemu", - "repo": "hashicorp/packer-plugin-qemu", - "version": "latest" - }, - { - "title": "Salt", - "path": "salt", - "repo": "hashicorp/packer-plugin-salt", - "pluginTier": "community", + "repo": "parallels/packer-plugin-parallels", "version": "latest", - "archived": true + "pluginTier": "verified" }, { "title": "Scaleway", @@ -286,7 +101,7 @@ "path": "sshkey", "repo": "ivoronin/packer-plugin-sshkey", "pluginTier": "community", - "version": "v1.0.1" + "version": "latest" }, { "title": "Tart", @@ -295,20 +110,6 @@ "pluginTier": "community", "version": "latest" }, - { - "title": "Tencent Cloud", - "path": "tencentcloud", - "repo": "hashicorp/packer-plugin-tencentcloud", - "pluginTier": "community", - "version": "latest" - }, - { 
- "title": "Triton", - "path": "triton", - "repo": "hashicorp/packer-plugin-triton", - "pluginTier": "community", - "version": "latest" - }, { "title": "UCloud", "path": "ucloud", @@ -325,20 +126,6 @@ "sourceBranch": "master", "isHcpPackerReady": true }, - { - "title": "Vagrant", - "path": "vagrant", - "repo": "hashicorp/packer-plugin-vagrant", - "pluginTier": "official", - "version": "latest" - }, - { - "title": "VirtualBox", - "path": "virtualbox", - "repo": "hashicorp/packer-plugin-virtualbox", - "pluginTier": "official", - "version": "latest" - }, { "title": "Volcengine", "path": "volcengine", @@ -346,19 +133,6 @@ "pluginTier": "community", "version": "latest" }, - { - "title": "VMware vSphere", - "path": "vsphere", - "repo": "hashicorp/packer-plugin-vsphere", - "version": "latest", - "isHcpPackerReady": true - }, - { - "title": "VMware", - "path": "vmware", - "repo": "hashicorp/packer-plugin-vmware", - "version": "latest" - }, { "title": "Vultr", "path": "vultr", @@ -367,10 +141,10 @@ "version": "latest" }, { - "title": "Yandex", - "path": "yandex", - "repo": "hashicorp/packer-plugin-yandex", - "version": "latest", - "pluginTier": "community" + "title": "Ksyun", + "path": "ksyun", + "repo": "kingsoftcloud/packer-plugin-ksyun", + "pluginTier": "community", + "version": "latest" } ] diff --git a/website/package-lock.json b/website/package-lock.json index b18174e0193..09bd084d46a 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -9851,6 +9851,13 @@ "node": ">=6" } }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true, + "peer": true + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -10314,6 +10321,13 @@ "dev": true, "peer": 
true }, + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true, + "peer": true + }, "node_modules/resolve": { "version": "1.22.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", @@ -12429,24 +12443,25 @@ } }, "node_modules/tough-cookie": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", - "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "dev": true, "peer": true, "dependencies": { "psl": "^1.1.33", "punycode": "^2.1.1", - "universalify": "^0.1.2" + "universalify": "^0.2.0", + "url-parse": "^1.5.3" }, "engines": { "node": ">=6" } }, "node_modules/tough-cookie/node_modules/universalify": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", "dev": true, "peer": true, "engines": { @@ -13058,6 +13073,17 @@ "dev": true, "peer": true }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "peer": true, + "dependencies": { + 
"querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "node_modules/url-regex": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/url-regex/-/url-regex-5.0.0.tgz", @@ -21357,6 +21383,13 @@ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", "dev": true }, + "querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true, + "peer": true + }, "queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -21703,6 +21736,13 @@ "dev": true, "peer": true }, + "requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true, + "peer": true + }, "resolve": { "version": "1.22.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", @@ -23372,21 +23412,22 @@ } }, "tough-cookie": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", - "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "dev": true, "peer": true, "requires": { "psl": "^1.1.33", "punycode": "^2.1.1", - "universalify": "^0.1.2" + "universalify": "^0.2.0", + "url-parse": "^1.5.3" }, "dependencies": { "universalify": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": 
"sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", "dev": true, "peer": true } @@ -23846,6 +23887,17 @@ "dev": true, "peer": true }, + "url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "peer": true, + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "url-regex": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/url-regex/-/url-regex-5.0.0.tgz",