diff --git a/.circleci/Dockerfile b/.circleci/Dockerfile index d70ab94e88..ec45e2d91e 100644 --- a/.circleci/Dockerfile +++ b/.circleci/Dockerfile @@ -34,6 +34,7 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ binutils-gold \ cmake \ composer \ + docker.io \ elixir \ faketime \ g++ \ @@ -64,6 +65,7 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ tzdata \ uuid-dev \ vim \ + xdg-utils \ zip \ && apt-get auto-remove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* RUN node -v @@ -108,6 +110,10 @@ ENV TEMP=/tmp ENV TMP=$TEMP ENV TMPDIR=$TEMP +# install uv (Python package manager) +RUN curl -LsSf https://astral.sh/uv/install.sh | sh +ENV PATH=/home/circleci/.local/bin:$PATH + # install rust and convco RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y RUN ~/.cargo/bin/cargo install convco diff --git a/.circleci/config.yml b/.circleci/config.yml index 5dc818dd63..45876967a2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ parameters: go_version: type: string # https://go.dev/doc/devel/release - default: '1.25.10' + default: '1.26.3' aws_version: type: string # https://github.com/aws/aws-cli/blob/v2/CHANGELOG.rst @@ -63,7 +63,7 @@ executors: resource_class: medium docker-amd64: docker: - - image: snyklabs/cli-build-private:20260219-152627 + - image: snyklabs/cli-build-private:20260506-072205 auth: username: $DOCKER_CLI_BUILD_USERNAME password: $DOCKER_CLI_BUILD_PASSWORD @@ -71,7 +71,7 @@ executors: resource_class: large docker-amd64-xl: docker: - - image: snyklabs/cli-build-private:20260219-152627 + - image: snyklabs/cli-build-private:20260506-072205 auth: username: $DOCKER_CLI_BUILD_USERNAME password: $DOCKER_CLI_BUILD_PASSWORD @@ -79,7 +79,7 @@ executors: resource_class: xlarge docker-arm64: docker: - - image: snyklabs/cli-build-private-arm64:20260219-152627 + - image: snyklabs/cli-build-private-arm64:20260506-072205 auth: username: 
$DOCKER_CLI_BUILD_USERNAME password: $DOCKER_CLI_BUILD_PASSWORD @@ -87,7 +87,7 @@ executors: resource_class: arm.large docker-arm64-xl: docker: - - image: snyklabs/cli-build-private-arm64:20260219-152627 + - image: snyklabs/cli-build-private-arm64:20260506-072205 auth: username: $DOCKER_CLI_BUILD_USERNAME password: $DOCKER_CLI_BUILD_PASSWORD @@ -276,7 +276,7 @@ commands: - restore_cache: name: Restoring Windows tools cache keys: - - windows-tools-cache-v2-{{ arch }} + - windows-tools-cache-v4-{{ arch }}-{{ checksum ".nvmrc" }} - run: name: Install Node.js (native) shell: powershell @@ -315,9 +315,10 @@ commands: .\scripts\windows\ensure-python-uv.ps1 - save_cache: name: Saving Windows tools cache - key: windows-tools-cache-v2-{{ arch }} + key: windows-tools-cache-v4-{{ arch }}-{{ checksum ".nvmrc" }} paths: - << pipeline.parameters.windows_cache_dir >> + - C:\ProgramData\nvm install-deps-windows-native-full-signing: steps: @@ -378,17 +379,6 @@ commands: name: No dependencies to install command: echo all done! 
- # this can be removed if we install the xdg-utils package in the docker image - install-deps-linux-acceptance-tests: - steps: - - run: - name: Installing linux acceptance tests dependencies - command: | - sudo apt-get update - sudo apt-get install xdg-utils docker.io -y - curl -LsSf https://astral.sh/uv/install.sh | sh - echo 'export PATH="$HOME/.local/bin:$PATH"' >> $BASH_ENV - install-deps-python: parameters: os: @@ -612,7 +602,7 @@ workflows: go_target_os: linux go_os: linux go_arch: amd64 - static_binary: false # TODO: set to true when we have confidence for v1.1304.0 release + static_binary: true go_download_base_url: << pipeline.parameters.go_download_base_url >> executor: docker-amd64-xl requires: @@ -655,7 +645,7 @@ workflows: go_target_os: linux go_os: linux go_arch: arm64 - static_binary: false # TODO: set to true when we have confidence for v1.1304.0 release + static_binary: true go_download_base_url: << pipeline.parameters.go_download_base_url >> executor: docker-arm64-xl requires: @@ -778,7 +768,6 @@ workflows: ignore: - main - '/release.*/' - install_deps_extension: linux-acceptance-tests pre_test_cmds: export BROWSER="curl -L" requires: - build linux static arm64 @@ -801,7 +790,6 @@ workflows: ignore: - main - '/release.*/' - install_deps_extension: linux-acceptance-tests pre_test_cmds: export BROWSER="curl -L" requires: - build linux amd64 @@ -824,7 +812,6 @@ workflows: ignore: - main - '/release.*/' - install_deps_extension: linux-acceptance-tests pre_test_cmds: export BROWSER="curl -L" requires: - build linux arm64 @@ -850,7 +837,6 @@ workflows: ignore: - main - '/release.*/' - install_deps_extension: linux-acceptance-tests pre_test_cmds: export BROWSER="curl -L" requires: - build linux fips arm64 @@ -1158,6 +1144,20 @@ workflows: - '/release.*/' - '/.*e2e.*/' + - test-release-static: + name: e2e snyk-linux tests (scratch-container-amd64) + context: + - team_hammerhead-cli + requires: + - upload version + cli_download_base_url: << 
pipeline.parameters.cli_download_base_url >> + filters: + branches: + only: + - main + - '/release.*/' + - '/.*e2e.*/' + - noop: name: Start Deployments requires: @@ -1180,6 +1180,7 @@ workflows: - e2e fips tests (win-server2022-amd64) - e2e experimental tests (linux-static-amd64) - e2e experimental tests (scratch-container-amd64) + - e2e snyk-linux tests (scratch-container-amd64) filters: branches: only: diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3c8f80f7f7..5ec73c6d03 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,5 +1,5 @@ # CLI -* @snyk/cli @snyk/productinfra_cli +* @snyk/cli @snyk/productinfra_cli @snyk/developer-experience_cli # Unify -src/lib/plugins/uv/ @snyk/codesec_unify @snyk/productinfra_cli +src/lib/plugins/uv/ @snyk/codesec_unify @snyk/open-source_unify @snyk/productinfra_cli @snyk/developer-experience_cli diff --git a/.github/workflows/check-dependencies.yml b/.github/workflows/check-dependencies.yml index aaa0b6dfce..4729272c6b 100644 --- a/.github/workflows/check-dependencies.yml +++ b/.github/workflows/check-dependencies.yml @@ -11,7 +11,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v4 with: - node-version: '16.16.0' + node-version-file: '.nvmrc' cache: 'npm' - run: npm ci - run: npx ts-node ./scripts/check-dependencies.ts diff --git a/.github/workflows/danger-zone.yml b/.github/workflows/danger-zone.yml index 4bac3ac82f..67195f1982 100644 --- a/.github/workflows/danger-zone.yml +++ b/.github/workflows/danger-zone.yml @@ -12,7 +12,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v4 with: - node-version: '18.19.1' + node-version-file: '.nvmrc' cache: 'npm' - run: npm ci - run: npx danger ci diff --git a/.github/workflows/snyk-protect-production-smoke-tests.yml b/.github/workflows/snyk-protect-production-smoke-tests.yml index 0094820804..80b2ef5738 100644 --- a/.github/workflows/snyk-protect-production-smoke-tests.yml +++ b/.github/workflows/snyk-protect-production-smoke-tests.yml 
@@ -14,7 +14,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macos, windows] - node_version: [16, 18, 20] + node_version: [16, 18, 20, 22] runs-on: ${{ matrix.os }}-latest steps: # Avoid modifying line endings in fixtures. diff --git a/.nvmrc b/.nvmrc index 2dbbe00e67..db49bb14d7 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -20.11.1 +22.22.2 diff --git a/Makefile b/Makefile index 933a7fd459..010300b4f1 100644 --- a/Makefile +++ b/Makefile @@ -230,6 +230,11 @@ test-binary-wrapper: build-binary-wrapper @echo "-- Testing binary wrapper" @cd $(BINARY_WRAPPER_DIR); npm run test +.PHONY: test-release-scripts +test-release-scripts: + @echo "-- Testing release scripts" + @cd release-scripts; go test ./... + # targets responsible for the complete CLI build .PHONY: pre-build diff --git a/README.md b/README.md index 77175621ef..cc4c3f082f 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,14 @@ # Getting started with the Snyk CLI -## Introduction to Snyk and the Snyk CLI +## Introduction to the Snyk CLI -[Snyk](https://snyk.io/) is a developer-first, cloud-native security tool to scan and monitor your software development projects for security vulnerabilities. Snyk scans multiple content types for security issues: +Snyk is a developer-first, cloud-native security tool to scan and monitor your software development projects for security vulnerabilities. 
Snyk scans multiple content types for security issues: - [Snyk Open Source](https://docs.snyk.io/scan-with-snyk/snyk-open-source): Find and automatically fix open-source vulnerabilities - [Snyk Code](https://docs.snyk.io/scan-with-snyk/snyk-code): Find and fix vulnerabilities in your application code in real time - [Snyk Container](https://docs.snyk.io/scan-with-snyk/snyk-container): Find and fix vulnerabilities in container images and Kubernetes applications - [Snyk IaC](https://docs.snyk.io/scan-with-snyk/snyk-iac): Find and fix insecure configurations in Terraform and Kubernetes code -[Learn more about what Snyk can do and sign up for a free account](https://snyk.io/). - The Snyk CLI brings the functionality of Snyk into your development workflow. You can run the CLI locally from the command line or in an IDE. You can also run the CLI in your CI/CD pipeline. The following shows an example of Snyk CLI test command output.

Snyk CLI test command output example

@@ -27,7 +25,9 @@ This page explains how to install, authenticate, and start scanning using the CL To use the CLI, you must install it and authenticate your machine. See [Install or update the Snyk CLI](https://docs.snyk.io/snyk-cli/install-or-update-the-snyk-cli) and [Authenticate the CLI with your account](authenticate-to-use-the-cli.md). You can refer to the [release notes](https://github.com/snyk/cli/releases) for a summary of changes in each release. Before scanning your code, review the [Code execution warning for Snyk CLI](https://docs.snyk.io/snyk-cli/code-execution-warning-for-snyk-cli). -**Note:** Before you can use the CLI for Open Source scanning, you must install your package manager. The needed third-party tools, such as Gradle or Maven, must be in the `PATH`. +{% hint style="info" %} +Before you can use the CLI for Open Source scanning, you must install your package manager. The needed third-party tools, such as Gradle or Maven, must be in the `PATH`. +{% endhint %} You can also install the CLI in your IDE or CI/CD environment. For details, see the [IDE and CI/CD documentation](https://docs.snyk.io/scm-ide-and-ci-cd-integrations) for instructions for each integration. @@ -41,17 +41,17 @@ Look at the `test` command report in your terminal. The report shows the vulnera ## Scan your development Project -**Note:** Before using the Snyk CLI to test your Open Source Project for vulnerabilities, with limited exceptions, you must **build your Project**. For details, see [Open Source Projects that must be built before testing](https://docs.snyk.io/snyk-cli/scan-and-maintain-projects-using-the-cli/snyk-cli-for-open-source/open-source-projects-that-must-be-built-before-testing-with-the-snyk-cli). +Before using the Snyk CLI to test your Open Source Project for vulnerabilities, with limited exceptions, you must build your Project. 
For details, see [Open Source Projects that must be built before testing](https://docs.snyk.io/snyk-cli/scan-and-maintain-projects-using-the-cli/snyk-cli-for-open-source/open-source-projects-that-must-be-built-before-testing-with-the-snyk-cli). -In addition, depending on the language of your open-source Project, you may need to **set up your language environment** before using the Snyk CLI. For details, refer to [Supported languages, package managers, and frameworks](https://docs.snyk.io/supported-languages-package-managers-and-frameworks). +In addition, depending on the language of your open-source Project, you may need to set up your language environment before using the Snyk CLI. For details, refer to [Supported languages, package managers, and frameworks](https://docs.snyk.io/supported-languages-package-managers-and-frameworks). After you have installed the CLI and authenticated your machine, to **scan an open-source Project**, use `cd /my/project/` to change the current directory to a folder containing a supported package manifest file, such as `package.json`, `pom.xml`, or `composer.lock`. Then run `snyk test`. All vulnerabilities identified are listed, including their path and fix guidance. -To scan your **source code,** run `snyk code test`. +To scan your source code, run `snyk code test`. -You can **scan a Docker image** by its tag running, for example: `snyk container test ubuntu:18.04`. +You can scan a Docker image by its tag running, for example: `snyk container test ubuntu:18.04`. -To scan a **Kubernetes (K8s) file,** run the following:\ +To scan a Kubernetes (K8s) file, run the following:\ `snyk iac test /path/to/kubernetes_file.yaml` For details about using the Snyk CLI to scan each content type, see the following: @@ -87,13 +87,13 @@ For more information, see [Monitor your Projects at regular intervals](https://d ## Running out of tests -Snyk allows unlimited tests for public repositories. 
If you are on the Free plan, you have a limited number of tests per month. Paid plans have unlimited tests on private and public repositories. If you are on the Free plan and notice that your test count is quickly being used, even with public repositories, you can remedy this by telling Snyk the public URL of the repository that is being scanned by the Snyk CLI. This ensures that Snyk does not count a public repository towards the test limits. +Test limits do not apply to public repositories. If you notice that the test limits related to your private repositories (where applicable) are being used by tests on public repositories, you can remedy this by telling Snyk the public URL of the repository that is being scanned by the Snyk CLI. This ensures that Snyk does not count a public repository towards the test limits. If you run out of tests on an open-source Project, follow these steps: - Run `snyk monitor`. -- Open the Snyk UI and navigate to the **settings** of the Project. -- Enter the URL of your open-source repository in **Git remote URL**. +- In the Snyk Web UI, navigate to the **Settings** of the Project. +- Enter the URL of your open-source repository under **Git remote URL**. ## Additional information about the Snyk CLI diff --git a/binary-releases/RELEASE_NOTES.md b/binary-releases/RELEASE_NOTES.md index 3ded2b2b3a..b9bb0a64ef 100644 --- a/binary-releases/RELEASE_NOTES.md +++ b/binary-releases/RELEASE_NOTES.md @@ -1,12 +1,20 @@ -## [1.1304.3](https://github.com/snyk/snyk/compare/v1.1304.2...v1.1304.3) (2026-05-13) - +## [1.1305.0](https://github.com/snyk/snyk/compare/v1.1304.3...v1.1305.0) (2026-05-20) The Snyk CLI is being deployed to different deployment channels, users can select the stability level according to their needs. 
For details please see [this documentation](https://docs.snyk.io/snyk-cli/releases-and-channels-for-the-snyk-cli) +### Features + +* **sbom**: Introduces the `--allow-incomplete-sbom` flag for `snyk sbom`, allowing the SBOM to be generated even when individual projects fail to resolve. Failed projects are surfaced as per-project errors alongside the successful results. ([29ba128](https://github.com/snyk/snyk/commit/29ba128c55afd9f41cfd0ffabe667c013b10a544)) +* **container**: Speed up `snyk container monitor` by sending dependency requests in parallel, configurable via the `SNYK_REQUEST_CONCURRENCY` environment variable. ([186c5fb](https://github.com/snyk/snyk/commit/186c5fb6b7c074d94bb9e5d4094f1909671c6f88), [6764f65](https://github.com/snyk/snyk/commit/6764f65247bb3c1c82ad5d8d3d331ba21f7c4a97)) +* **general**: Linux ARM64 and AMD64 binaries are now statically linked by default. ([f02b850](https://github.com/snyk/snyk/commit/f02b850ac8a4b6f527448874d5408ab1cfffeaab)) +* **mcp**: Adds an experimental breakability evaluation tool to the Snyk MCP Server. ([69806f5](https://github.com/snyk/snyk/commit/69806f5634b06551d46debb070553e454cf9ed54)) + + ### Bug Fixes -* **dependencies**: Updates dependencies to fix vulnerabilities: - - CVE-2026-45022 ([aa226a9](https://github.com/snyk/cli/commit/aa226a97b87572d87e81b6080dd2d291af8cfbfb)) - - CVE-2026-33814 ([1691c3b](https://github.com/snyk/cli/commit/1691c3be1b2f681767bfeac5d9b694992e122a20)) - - CVE-2026-33811 ([1691c3b](https://github.com/snyk/cli/commit/1691c3be1b2f681767bfeac5d9b694992e122a20)) - - CVE-2026-39836 ([1691c3b](https://github.com/snyk/cli/commit/1691c3be1b2f681767bfeac5d9b694992e122a20)) +* **test**: Fixes resolution of aliased npm packages so the alias from the lockfile is used instead of the target package name. ([9b0e4d9](https://github.com/snyk/snyk/commit/9b0e4d913af096aa1c5a8ce46e3ed1bd5d211e8f)) +* **test**: Fixes parsing of Python `.whl` files when scanning projects with `--all-projects`. 
([12ac0db](https://github.com/snyk/snyk/commit/12ac0dbb46b64526c584a13892317c80111a1d1a)) +* **deps**: Updates dependencies to fix vulnerabilities: + - CVE-2026-34165 ([1eec8f8](https://github.com/snyk/snyk/commit/1eec8f81e4738985a92dd8e9823e1ab846713fe7)) + - CVE-2026-33762 ([1eec8f8](https://github.com/snyk/snyk/commit/1eec8f81e4738985a92dd8e9823e1ab846713fe7)) + - CVE-2025-62718 ([0c4bdcc](https://github.com/snyk/snyk/commit/0c4bdcc0936bf3ba05e985895d7b44e9a1fcc962)) diff --git a/cliv2/Makefile b/cliv2/Makefile index e4f7da69d9..5f4cca958e 100644 --- a/cliv2/Makefile +++ b/cliv2/Makefile @@ -5,7 +5,6 @@ GOOS = $(shell go env GOOS) GOARCH = $(shell go env GOARCH) GOHOSTOS = $(shell go env GOHOSTOS) GOHOSTARCH = $(shell go env GOHOSTARCH) -LS_COMMIT_HASH = $(shell cat go.mod | grep snyk-ls | cut -d "-" -f 4 | head -1) FIPS_CRYPTO_BACKEND_DEFAULT = systemcrypto FIPS_CRYPTO_BACKEND = MS_GO_NOSYSTEMCRYPTO ?= 1 @@ -22,7 +21,7 @@ CGO_ENABLED = 1 # Build tools GO_BIN := $(shell pwd)/.bin -OVERRIDE_GOCI_LINT_V := v2.8.0 +OVERRIDE_GOCI_LINT_V := v2.9.0 SHELL := env PATH=$(GO_BIN):$(PATH) $(SHELL) # Make directories per convention @@ -34,6 +33,9 @@ bindir = $(exec_prefix)/bin WORKING_DIR = $(CURDIR) BUILD_DIR = $(WORKING_DIR)/_bin CACHE_DIR = $(WORKING_DIR)/_cache +LS_PROTOCOL_VERSION_FILE = $(CACHE_DIR)/ls-protocol-version +LS_COMMIT_HASH_FILE = $(CACHE_DIR)/ls-commit-hash +LS_COMMIT_HASH = $(if $(wildcard $(LS_COMMIT_HASH_FILE)),$(shell cat $(LS_COMMIT_HASH_FILE)),determined during LS metadata generation) SRCS = $(shell find $(WORKING_DIR) -type f -name '*.go') # load cached variables if available @@ -90,6 +92,11 @@ V1_BUILD_TYPE = build:prod V1_BINARY_FOLDER = ts-cli-binaries # default to empty string, which means no subfolder V1_BINARY_SUBFOLDER = +# ensures we set the correct ts-cli-binaries subfolder for static node binaries +# when STATIC_NODE_BINARY=true is passed to the make command +ifeq ($(STATIC_NODE_BINARY), true) + V1_BINARY_SUBFOLDER = experimental/ +endif 
HASH_STRING = $(HASH)$(HASH_ALGORITHM) SIGN_SCRIPT = $(WORKING_DIR)/scripts/sign_$(_GO_OS).sh ISSIGNED_SCRIPT = $(WORKING_DIR)/scripts/issigned_$(_GO_OS).sh @@ -155,6 +162,8 @@ summary: configure: summary $(CACHE_DIR) $(CACHE_DIR)/variables.mk $(V1_DIRECTORY)/$(V1_EMBEDDED_FILE_OUTPUT) dependencies $(CACHE_DIR)/prepare-3rd-party-licenses $(BUILD_DIR)/$(V2_EXECUTABLE_NAME): $(BUILD_DIR) $(SRCS) generate-ls-protocol-metadata + $(eval LS_PROTOCOL_VERSION := $(shell cat $(LS_PROTOCOL_VERSION_FILE))) + $(eval LS_COMMIT_HASH := $(shell cat $(LS_COMMIT_HASH_FILE))) $(eval EXTRA_FLAGS := -X github.com/snyk/snyk-ls/application/config.Version=$(LS_COMMIT_HASH) -X github.com/snyk/snyk-ls/application/config.LsProtocolVersion=$(LS_PROTOCOL_VERSION) -X main.internalOS=$(GOOS) -X github.com/snyk/cli/cliv2/internal/embedded/cliv1.snykCLIVersion=$(CLI_V1_VERSION_TAG) -X github.com/snyk/cli-extension-iac/internal/commands/iactest.internalRulesClientURL=$(IAC_RULES_URL) -X github.com/snyk/cli/cliv2/internal/constants.StaticNodeJsBinary=$(STATIC_NODE_BINARY)) @echo "$(LOG_PREFIX) Building ( $(BUILD_DIR)/$(V2_EXECUTABLE_NAME) )" @echo "$(LOG_PREFIX) CGO_ENABLED: $(CGO_ENABLED)" @@ -252,12 +261,13 @@ build-ts-cli: $(V1_WORKING_DIR)/$(V1_BINARY_FOLDER)/$(V1_BINARY_SUBFOLDER)$(V1_E $(eval CLI_V1_LOCATION := $(V1_WORKING_DIR)/$(V1_BINARY_FOLDER)/$(V1_BINARY_SUBFOLDER)) .PHONY: generate-ls-protocol-metadata -generate-ls-protocol-metadata: +generate-ls-protocol-metadata: $(CACHE_DIR) $(eval CLI_V1_VERSION_TAG := $(shell cat $(DESTDIR)$(bindir)/version)) @echo "$(LOG_PREFIX) Determining LS protocol version" @echo "$(LOG_PREFIX) Writing protocol version file to $(DESTDIR)$(bindir)" - $(eval LS_PROTOCOL_VERSION := $(shell $(GOCMD) run $(WORKING_DIR)/../release-scripts/write-ls-protocol-version.go $(LS_COMMIT_HASH) $(CLI_V1_VERSION_TAG) $(DESTDIR)$(bindir))) - @echo "$(LOG_PREFIX) LS protocol version: $(LS_PROTOCOL_VERSION)" + @GOOS=$(GOHOSTOS) GOARCH=$(GOHOSTARCH) $(GOCMD) run 
$(WORKING_DIR)/../release-scripts/write-ls-protocol-version.go $(CLI_V1_VERSION_TAG) $(DESTDIR)$(bindir) $(LS_COMMIT_HASH_FILE) > $(LS_PROTOCOL_VERSION_FILE) + @echo "$(LOG_PREFIX) LS protocol version: $$(cat $(LS_PROTOCOL_VERSION_FILE))" + @echo "$(LOG_PREFIX) LS commit hash: $$(cat $(LS_COMMIT_HASH_FILE))" .PHONY: clean-ts-cli clean-ts-cli: diff --git a/cliv2/cmd/cliv2/errorhandling.go b/cliv2/cmd/cliv2/errorhandling.go index f884532561..ba96677986 100644 --- a/cliv2/cmd/cliv2/errorhandling.go +++ b/cliv2/cmd/cliv2/errorhandling.go @@ -1,6 +1,7 @@ package main import ( + "context" "errors" "iter" "os/exec" @@ -28,6 +29,11 @@ func decorateError(err error) error { var errorCatalogError snyk_errors.Error if !errors.As(err, &errorCatalogError) { + // decorate DeadlineExceeded with a more user-friendly error + if errors.Is(err, context.DeadlineExceeded) { + return cli.NewCommandTimeoutError("", snyk_errors.WithCause(err)) + } + genericError := cli.NewGeneralCLIFailureError(err.Error(), snyk_errors.WithCause(err)) return genericError } diff --git a/cliv2/cmd/cliv2/instrumentation.go b/cliv2/cmd/cliv2/instrumentation.go index 09e7e81bf5..546f03cfbe 100644 --- a/cliv2/cmd/cliv2/instrumentation.go +++ b/cliv2/cmd/cliv2/instrumentation.go @@ -4,14 +4,19 @@ package main import _ "github.com/snyk/go-application-framework/pkg/networking/fips_enable" import ( + "context" + "encoding/json" "os/exec" + "strconv" "strings" "time" + "github.com/rs/zerolog" "github.com/snyk/go-application-framework/pkg/analytics" "github.com/snyk/go-application-framework/pkg/configuration" "github.com/snyk/go-application-framework/pkg/instrumentation" + "github.com/snyk/cli/cliv2/internal/constants" cli_utils "github.com/snyk/cli/cliv2/internal/utils" localworkflows "github.com/snyk/go-application-framework/pkg/local_workflows" @@ -74,3 +79,76 @@ func updateInstrumentationDataBeforeSending(cliAnalytics analytics.Analytics, st cliAnalytics.GetInstrumentation().SetStatus(analytics.Failure) } } + 
+func sendAnalytics(ctx context.Context, a analytics.Analytics, debugLogger *zerolog.Logger) { + debugLogger.Print("Sending Analytics") + + a.SetApiUrl(globalConfiguration.GetString(configuration.API_URL)) + + request, err := a.GetRequest() + if err != nil { + debugLogger.Err(err).Msg("Failed to create Analytics request") + return + } + + // Use context to respect teardown timeout + request = request.WithContext(ctx) + + client := globalEngine.GetNetworkAccess().GetHttpClient() + res, err := client.Do(request) + if err != nil { + debugLogger.Err(err).Msg("Failed to send Analytics") + return + } + defer func() { + _ = res.Body.Close() + }() + + successfullySend := 200 <= res.StatusCode && res.StatusCode < 300 + if successfullySend { + debugLogger.Print("Analytics successfully send") + } else { + debugLogger.Print("Failed to send Analytics:", res.Status) + } +} + +func sendInstrumentation(ctx context.Context, eng workflow.Engine, instrumentor analytics.InstrumentationCollector, logger *zerolog.Logger) { + // Avoid duplicate data to be sent for IDE integrations that use the CLI + if !shallSendInstrumentation(eng.GetConfiguration(), instrumentor) { + logger.Print("This CLI call is not instrumented!") + return + } + + // add temporary static nodejs binary flag, remove once linuxstatic is official + staticNodeJsBinaryBool, parseErr := strconv.ParseBool(constants.StaticNodeJsBinary) + if parseErr != nil { + logger.Print("Failed to parse staticNodeJsBinary:", parseErr) + } else { + // the legacycli:: prefix is added to maintain compatibility with our monitoring dashboard + instrumentor.AddExtension("legacycli::static-nodejs-binary", staticNodeJsBinaryBool) + } + + logger.Print("Sending Instrumentation") + data, err := analytics.GetV2InstrumentationObject(instrumentor, analytics.WithLogger(logger)) + if err != nil { + logger.Err(err).Msg("Failed to derive data object") + } + + v2InstrumentationData := utils.ValueOf(json.Marshal(data)) + localConfiguration := 
globalConfiguration.Clone() + // the report analytics workflow needs --experimental to run + // we pass the flag here so that we report at every interaction + localConfiguration.Set(configuration.FLAG_EXPERIMENTAL, true) + localConfiguration.Set("inputData", string(v2InstrumentationData)) + _, err = eng.Invoke( + localworkflows.WORKFLOWID_REPORT_ANALYTICS, + workflow.WithConfig(localConfiguration), + workflow.WithContext(ctx), + ) + + if err != nil { + logger.Err(err).Msg("Failed to send Instrumentation") + } else { + logger.Print("Instrumentation successfully sent") + } +} diff --git a/cliv2/cmd/cliv2/main.go b/cliv2/cmd/cliv2/main.go index cb2fb369c0..66930a0c9c 100644 --- a/cliv2/cmd/cliv2/main.go +++ b/cliv2/cmd/cliv2/main.go @@ -6,12 +6,10 @@ import _ "github.com/snyk/go-application-framework/pkg/networking/fips_enable" import ( "context" "encoding/json" - "errors" "fmt" "io" "os" "os/exec" - "strconv" "strings" "sync" "time" @@ -75,6 +73,7 @@ import ( var internalOS string var globalEngine workflow.Engine var globalConfiguration configuration.Configuration +var globalContext context.Context var helpProvided bool var noopLogger zerolog.Logger = zerolog.New(io.Discard) @@ -88,6 +87,7 @@ const ( debug_level_flag string = "log-level" integrationNameFlag string = "integration-name" maxNetworkRequestAttempts string = "max-attempts" + teardownTimeout = 5 * time.Second ) type JsonErrorStruct struct { @@ -121,6 +121,7 @@ func initApplicationConfiguration(config configuration.Configuration) { config.AddAlternativeKeys(configuration.LOG_LEVEL, []string{debug_level_flag}) config.AddAlternativeKeys(configuration.INTEGRATION_NAME, []string{integrationNameFlag}) config.AddAlternativeKeys(middleware.ConfigurationKeyRequestAttempts, []string{"snyk_max_attempts", maxNetworkRequestAttempts}) + config.AddAlternativeKeys(cliv2.ConfigKeyRequestConcurrency, []string{"snyk_request_concurrency"}) } func getFullCommandString(cmd *cobra.Command) string { @@ -194,98 +195,33 @@ func 
runMainWorkflow(config configuration.Configuration, cmd *cobra.Command, arg globalLogger.Print("Running ", name) globalEngine.GetAnalytics().SetCommand(name) - err = runWorkflowAndProcessData(globalEngine, globalLogger, name) + err = runWorkflowAndProcessData(globalContext, globalEngine, globalLogger, name) return err } -func runWorkflowAndProcessData(engine workflow.Engine, logger *zerolog.Logger, name string) error { +func runWorkflowAndProcessData(ctx context.Context, engine workflow.Engine, logger *zerolog.Logger, name string) error { ic := engine.GetAnalytics().GetInstrumentation() - output, err := engine.Invoke(workflow.NewWorkflowIdentifier(name), workflow.WithInstrumentationCollector(ic)) + output, err := engine.Invoke(workflow.NewWorkflowIdentifier(name), workflow.WithContext(ctx), workflow.WithInstrumentationCollector(ic)) if err != nil { logger.Print("Failed to execute the command! ", err) return err } - outputFiltered, err := engine.Invoke(localworkflows.WORKFLOWID_FILTER_FINDINGS, workflow.WithInput(output), workflow.WithInstrumentationCollector(ic)) + outputFiltered, err := engine.Invoke(localworkflows.WORKFLOWID_FILTER_FINDINGS, workflow.WithContext(ctx), workflow.WithInput(output), workflow.WithInstrumentationCollector(ic)) if err != nil { logger.Err(err).Msg(err.Error()) return err } - _, err = engine.Invoke(localworkflows.WORKFLOWID_OUTPUT_WORKFLOW, workflow.WithInput(outputFiltered), workflow.WithInstrumentationCollector(ic)) + _, err = engine.Invoke(localworkflows.WORKFLOWID_OUTPUT_WORKFLOW, workflow.WithContext(ctx), workflow.WithInput(outputFiltered), workflow.WithInstrumentationCollector(ic)) if err == nil { err = getErrorFromWorkFlowData(engine, outputFiltered) } return err } -func sendAnalytics(analytics analytics.Analytics, debugLogger *zerolog.Logger) { - debugLogger.Print("Sending Analytics") - - analytics.SetApiUrl(globalConfiguration.GetString(configuration.API_URL)) - - res, err := analytics.Send() - if err != nil { - 
debugLogger.Err(err).Msg("Failed to send Analytics") - return - } - defer func() { _ = res.Body.Close() }() - - successfullySend := 200 <= res.StatusCode && res.StatusCode < 300 - if successfullySend { - debugLogger.Print("Analytics successfully send") - } else { - var details string - if res != nil { - details = res.Status - } - - debugLogger.Print("Failed to send Analytics:", details) - } -} - -func sendInstrumentation(eng workflow.Engine, instrumentor analytics.InstrumentationCollector, logger *zerolog.Logger) { - // Avoid duplicate data to be sent for IDE integrations that use the CLI - if !shallSendInstrumentation(eng.GetConfiguration(), instrumentor) { - logger.Print("This CLI call is not instrumented!") - return - } - - // add temporary static nodejs binary flag, remove once linuxstatic is official - staticNodeJsBinaryBool, parseErr := strconv.ParseBool(constants.StaticNodeJsBinary) - if parseErr != nil { - logger.Print("Failed to parse staticNodeJsBinary:", parseErr) - } else { - // the legacycli:: prefix is added to maintain compatibility with our monitoring dashboard - instrumentor.AddExtension("legacycli::static-nodejs-binary", staticNodeJsBinaryBool) - } - - logger.Print("Sending Instrumentation") - data, err := analytics.GetV2InstrumentationObject(instrumentor, analytics.WithLogger(logger)) - if err != nil { - logger.Err(err).Msg("Failed to derive data object") - } - - v2InstrumentationData := utils.ValueOf(json.Marshal(data)) - localConfiguration := globalConfiguration.Clone() - // the report analytics workflow needs --experimental to run - // we pass the flag here so that we report at every interaction - localConfiguration.Set(configuration.FLAG_EXPERIMENTAL, true) - localConfiguration.Set("inputData", string(v2InstrumentationData)) - _, err = eng.InvokeWithConfig( - localworkflows.WORKFLOWID_REPORT_ANALYTICS, - localConfiguration, - ) - - if err != nil { - logger.Err(err).Msg("Failed to send Instrumentation") - } else { - 
logger.Print("Instrumentation successfully sent") - } -} - func help(_ *cobra.Command, _ []string) error { helpProvided = true args := utils.RemoveSimilar(os.Args[1:], "--") // remove all double dash arguments to avoid issues with the help command @@ -513,9 +449,6 @@ func displayError(err error, userInterface ui.UserInterface, config configuratio jsonErrorBuffer, _ := json.MarshalIndent(jsonError, "", " ") _ = userInterface.OutputError(fmt.Errorf("%s", jsonErrorBuffer)) } else { - if errors.Is(err, context.DeadlineExceeded) { - err = fmt.Errorf("command timed out") - } uiError := userInterface.OutputError(err, ui.WithContext(ctx)) if uiError != nil { globalLogger.Err(uiError).Msg("ui failed to show error") @@ -548,11 +481,65 @@ func initExtensions(engine workflow.Engine, config configuration.Configuration) } } +// tearDown handles sending analytics and instrumentation +// It is used both for normal exit and signal-triggered exit +func tearDown(err error, errorList []error, startTime time.Time, ua networking.UserAgentInfo, cliAnalytics analytics.Analytics, networkAccess networking.NetworkAccess) int { + // Create a context with timeout for teardown operations to ensure we don't hang indefinitely + teardownCtx, cancel := context.WithTimeout(context.Background(), teardownTimeout) + defer cancel() + + outputError := err + allErrors := errorList + + if err != nil { + allErrors, outputError = processError(err, errorList) + + for _, tempError := range allErrors { + if tempError != nil { + cliAnalytics.AddError(tempError) + } + } + } + + exitCode := cliv2.DeriveExitCode(outputError) + globalLogger.Printf("Deriving Exit Code %d (cause: %v)", exitCode, outputError) + + displayError(outputError, globalEngine.GetUserInterface(), globalConfiguration, globalContext) + + updateInstrumentationDataBeforeSending(cliAnalytics, startTime, ua, exitCode) + + if !globalConfiguration.GetBool(configuration.ANALYTICS_DISABLED) { + sendAnalytics(teardownCtx, cliAnalytics, globalLogger) + } + 
sendInstrumentation(teardownCtx, globalEngine, cliAnalytics.GetInstrumentation(), globalLogger) + + // cleanup resources in use + // WARNING: deferred actions will execute AFTER cleanup; only defer if not impacted by this + if _, cleanupErr := globalEngine.Invoke(basic_workflows.WORKFLOWID_GLOBAL_CLEANUP, workflow.WithContext(teardownCtx)); cleanupErr != nil { + globalLogger.Printf("Failed to cleanup %v", cleanupErr) + } + + if globalConfiguration.GetBool(configuration.DEBUG) { + writeLogFooter(exitCode, allErrors, globalConfiguration, networkAccess) + } + + return exitCode +} + func MainWithErrorCode() int { initDebugBuild() errorList := []error{} errorListMutex := sync.Mutex{} + var finalExitCode int + + // preparing the possibility to tearDown from different threads while ensure it is only called once + var tearDownOnce sync.Once + + // init context + ctx := context.Background() + ctx = context.WithValue(ctx, networking.InteractionIdKey, instrumentation.AssembleUrnFromUUID(interactionId)) + globalContext = ctx startTime := time.Now() var err error @@ -633,10 +620,6 @@ func MainWithErrorCode() int { return constants.SNYK_EXIT_CODE_ERROR } - // init context - ctx := context.Background() - ctx = context.WithValue(ctx, networking.InteractionIdKey, instrumentation.AssembleUrnFromUUID(interactionId)) - // add output flags as persistent flags outputWorkflow, _ := globalEngine.GetWorkflow(localworkflows.WORKFLOWID_OUTPUT_WORKFLOW) outputFlags := workflow.FlagsetFromConfigurationOptions(outputWorkflow.GetConfigurationOptions()) @@ -657,7 +640,14 @@ func MainWithErrorCode() int { cliAnalytics.GetInstrumentation().SetStatus(analytics.Success) setTimeout(globalConfiguration, func() { - os.Exit(constants.SNYK_EXIT_CODE_EX_UNAVAILABLE) + tearDownOnce.Do(func() { + errorListMutex.Lock() + errorListCopy := append([]error{}, errorList...) 
+ errorListMutex.Unlock() + + exitCode := tearDown(context.DeadlineExceeded, errorListCopy, startTime, ua, cliAnalytics, networkAccess) + os.Exit(exitCode) + }) }) // run the extensible cli @@ -681,43 +671,15 @@ func MainWithErrorCode() int { // ignore } - outputError := err - allErrors := errorList - - if err != nil { - allErrors, outputError = processError(err, errorList) - - for _, tempError := range allErrors { - if tempError != nil { - cliAnalytics.AddError(tempError) - } - } - } - - displayError(outputError, globalEngine.GetUserInterface(), globalConfiguration, ctx) - - exitCode := cliv2.DeriveExitCode(outputError) - globalLogger.Printf("Deriving Exit Code %d (cause: %v)", exitCode, outputError) - - updateInstrumentationDataBeforeSending(cliAnalytics, startTime, ua, exitCode) - - if !globalConfiguration.GetBool(configuration.ANALYTICS_DISABLED) { - sendAnalytics(cliAnalytics, globalLogger) - } - sendInstrumentation(globalEngine, cliAnalytics.GetInstrumentation(), globalLogger) - - // cleanup resources in use - // WARNING: deferred actions will execute AFTER cleanup; only defer if not impacted by this - _, err = globalEngine.Invoke(basic_workflows.WORKFLOWID_GLOBAL_CLEANUP) - if err != nil { - globalLogger.Printf("Failed to cleanup %v", err) - } + tearDownOnce.Do(func() { + errorListMutex.Lock() + errorListCopy := append([]error{}, errorList...) 
+ errorListMutex.Unlock() - if debugEnabled { - writeLogFooter(exitCode, allErrors, globalConfiguration, networkAccess) - } + finalExitCode = tearDown(err, errorListCopy, startTime, ua, cliAnalytics, networkAccess) + }) - return exitCode + return finalExitCode } func processError(err error, errorList []error) ([]error, error) { @@ -754,7 +716,6 @@ func setTimeout(config configuration.Configuration, onTimeout func()) { go func() { const gracePeriodForSubProcesses = 3 <-time.After(time.Duration(timeout+gracePeriodForSubProcesses) * time.Second) - _, _ = fmt.Fprintf(os.Stdout, "command timed out") onTimeout() }() } diff --git a/cliv2/cmd/cliv2/main_test.go b/cliv2/cmd/cliv2/main_test.go index 0a70446a24..f96cebe1e5 100644 --- a/cliv2/cmd/cliv2/main_test.go +++ b/cliv2/cmd/cliv2/main_test.go @@ -1,7 +1,6 @@ package main import ( - "context" "encoding/json" "errors" "fmt" @@ -466,7 +465,7 @@ func Test_runWorkflowAndProcessData(t *testing.T) { // invoke method under test logger := zerolog.New(os.Stderr) - err = runWorkflowAndProcessData(globalEngine, &logger, testCmnd) + err = runWorkflowAndProcessData(t.Context(), globalEngine, &logger, testCmnd) var expectedError *clierrors.ErrorWithExitCode assert.ErrorAs(t, err, &expectedError) @@ -560,7 +559,7 @@ func Test_runWorkflowAndProcessData_with_Filtering(t *testing.T) { assert.NoError(t, err) logger := zerolog.New(os.Stderr) - err = runWorkflowAndProcessData(globalEngine, &logger, testCmnd) + err = runWorkflowAndProcessData(t.Context(), globalEngine, &logger, testCmnd) } func Test_setTimeout(t *testing.T) { @@ -588,7 +587,7 @@ func Test_displayError(t *testing.T) { userInterface.EXPECT().OutputError(err, gomock.Any()).Times(1) config := configuration.NewWithOpts(configuration.WithAutomaticEnv()) - displayError(err, userInterface, config, context.Background()) + displayError(err, userInterface, config, t.Context()) }) scenarios := []struct { @@ -609,7 +608,7 @@ func Test_displayError(t *testing.T) { t.Run(fmt.Sprintf("%s 
does not display anything", scenario.name), func(t *testing.T) { config := configuration.NewWithOpts(configuration.WithAutomaticEnv()) err := scenario.err - displayError(err, userInterface, config, context.Background()) + displayError(err, userInterface, config, t.Context()) }) } @@ -618,7 +617,7 @@ func Test_displayError(t *testing.T) { userInterface.EXPECT().OutputError(err, gomock.Any()).Times(1) config := configuration.NewWithOpts(configuration.WithAutomaticEnv()) - displayError(err, userInterface, config, context.Background()) + displayError(err, userInterface, config, t.Context()) }) } diff --git a/cliv2/go.mod b/cliv2/go.mod index 1e5ef8e0b6..176815af31 100644 --- a/cliv2/go.mod +++ b/cliv2/go.mod @@ -1,6 +1,6 @@ module github.com/snyk/cli/cliv2 -go 1.25.8 +go 1.26.3 require ( github.com/elazarl/goproxy v1.7.2 @@ -13,20 +13,20 @@ require ( github.com/snyk/cli-extension-agent-scan v0.0.0-20260505093105-90d9442ea939 github.com/snyk/cli-extension-ai-bom v0.0.0-20260319140413-ac7392950603 github.com/snyk/cli-extension-ai-redteam v0.0.0-20260331152502-ce341aeaff9e - github.com/snyk/cli-extension-dep-graph v0.32.0 + github.com/snyk/cli-extension-dep-graph v1.7.0 github.com/snyk/cli-extension-iac v0.0.0-20260206082514-00c443ccee80 github.com/snyk/cli-extension-iac-rules v0.0.0-20260206080712-9cbb5f95465d - github.com/snyk/cli-extension-os-flows v0.0.0-20260330131038-f7539faafecf - github.com/snyk/cli-extension-sbom v0.0.0-20260327120356-9befea04c9b0 - github.com/snyk/cli-extension-secrets v0.0.0-20260421112643-c8c29ed060b9 - github.com/snyk/code-client-go v1.26.2 + github.com/snyk/cli-extension-os-flows v0.0.0-20260423112219-b7ba9dd68e57 + github.com/snyk/cli-extension-sbom v0.0.0-20260428131356-48881c6270fa + github.com/snyk/cli-extension-secrets v0.0.0-20260505103358-cc205308a93e + github.com/snyk/code-client-go v1.27.0 github.com/snyk/container-cli v0.0.0-20260213211631-cd2b2cf8f3ea - github.com/snyk/error-catalog-golang-public v0.0.0-20260316131845-f02d7f42046b 
- github.com/snyk/go-application-framework v0.0.0-20260422125240-4b46b7117457 + github.com/snyk/error-catalog-golang-public v0.0.0-20260505112649-a5103d411663 + github.com/snyk/go-application-framework v0.0.0-20260506111235-cca3157b9435 github.com/snyk/go-httpauth v0.0.0-20240307114523-1f5ea3f55c65 github.com/snyk/snyk-iac-capture v0.6.5 - github.com/snyk/snyk-ls v0.0.0-20260401163317-c1fe9ee766fd - github.com/snyk/studio-mcp v1.9.2 + github.com/snyk/snyk-ls v0.0.0-20260507075428-365bccd7be16 + github.com/snyk/studio-mcp v1.11.0 github.com/spf13/cobra v1.9.1 github.com/spf13/pflag v1.0.10 github.com/stretchr/testify v1.11.1 diff --git a/cliv2/go.sum b/cliv2/go.sum index 08676d74c2..f1150f5e78 100644 --- a/cliv2/go.sum +++ b/cliv2/go.sum @@ -541,38 +541,38 @@ github.com/snyk/cli-extension-ai-bom v0.0.0-20260319140413-ac7392950603 h1:uZyw9 github.com/snyk/cli-extension-ai-bom v0.0.0-20260319140413-ac7392950603/go.mod h1:RnMP+tFTeKygfXSx7z+heyMZoOps67u5HFytjptHjuk= github.com/snyk/cli-extension-ai-redteam v0.0.0-20260331152502-ce341aeaff9e h1:WZ4Ph3iX+fAu3/HgblVnA+AXfKiEvrgHsZq8GHjnzzo= github.com/snyk/cli-extension-ai-redteam v0.0.0-20260331152502-ce341aeaff9e/go.mod h1:445d735F53IuegetHs/S4GyWyng4Crd9TPj4vosmFmM= -github.com/snyk/cli-extension-dep-graph v0.32.0 h1:6otM3SIqYIRFonKXeIGg0tZVcNxZevAySg3sOOT+64Q= -github.com/snyk/cli-extension-dep-graph v0.32.0/go.mod h1:66H2oCkziptQrUDibPe3m8rx+S6XcpnUL5udT+wfrmY= +github.com/snyk/cli-extension-dep-graph v1.7.0 h1:QssY+rbCaKbc4TlwMVkZE5yRHVkahPk00qfGh6GXX2I= +github.com/snyk/cli-extension-dep-graph v1.7.0/go.mod h1:UbsWeKktdH+6XWBOfZqE8N2eTrC8GgZHog8l9ehPA9I= github.com/snyk/cli-extension-iac v0.0.0-20260206082514-00c443ccee80 h1:JHbnSkgGc2oUejjzdWdeTghl0BZV7QamcRuyh7ornVo= github.com/snyk/cli-extension-iac v0.0.0-20260206082514-00c443ccee80/go.mod h1:Ht5k+sWdi//fM2MjcmBMWjcJmr35iMvQpYlBWnUHL4I= github.com/snyk/cli-extension-iac-rules v0.0.0-20260206080712-9cbb5f95465d h1:xkxHgZ+DT4hRiIEeAEv1JWLJRYV4MbAFvtEUpUkndPA= 
github.com/snyk/cli-extension-iac-rules v0.0.0-20260206080712-9cbb5f95465d/go.mod h1:ztpTmC4n8MEO0B48M2nL/Q9tb6CkLZ61ZKZbjwhOKRo= -github.com/snyk/cli-extension-os-flows v0.0.0-20260330131038-f7539faafecf h1:CGZ3hKD5O0g6Zfg51WqKxVUZWuIi/g+CiAekxQVqqbg= -github.com/snyk/cli-extension-os-flows v0.0.0-20260330131038-f7539faafecf/go.mod h1:guMfnOGktk4XXZ5fXulGPn55IlH3/8J8pMDcSXBWm54= -github.com/snyk/cli-extension-sbom v0.0.0-20260327120356-9befea04c9b0 h1:QK9cPBpPYzm1jxT2VAUUjtdXZfJllRRjRwuslTERuco= -github.com/snyk/cli-extension-sbom v0.0.0-20260327120356-9befea04c9b0/go.mod h1:SJ624HENWG4yjM6jNuLebTeNsMriozf1LcKhMYVm1aY= -github.com/snyk/cli-extension-secrets v0.0.0-20260421112643-c8c29ed060b9 h1:jUC2++pNczGu1QK3K7E0Hx/hXk0niNy6ND38U+gkF/A= -github.com/snyk/cli-extension-secrets v0.0.0-20260421112643-c8c29ed060b9/go.mod h1:vGlGtPhCnO7VYRuVjUa6TSZkIsQ4c99Bafu4xQYGQTM= -github.com/snyk/code-client-go v1.26.2 h1:vto7ZKj9OoU1rnmKeoZ+i68qXrT0I94CEMhvwK03O24= -github.com/snyk/code-client-go v1.26.2/go.mod h1:0NcZZHB48Sr4UAucEH2H10HwV7gjI2Ue0c+FxPWaTNo= +github.com/snyk/cli-extension-os-flows v0.0.0-20260423112219-b7ba9dd68e57 h1:8dpjcHif/0D738R4HlMFo7mi2GKNsb20oJawqae62k4= +github.com/snyk/cli-extension-os-flows v0.0.0-20260423112219-b7ba9dd68e57/go.mod h1:f16TyLAOBiU485lN/odNfmiyWyMu1iVrDoSEieiiblQ= +github.com/snyk/cli-extension-sbom v0.0.0-20260428131356-48881c6270fa h1:9GSKXrRCaRkBAj+jglUBuhO09I1xs2G5GxwrPxlj34M= +github.com/snyk/cli-extension-sbom v0.0.0-20260428131356-48881c6270fa/go.mod h1:SJ624HENWG4yjM6jNuLebTeNsMriozf1LcKhMYVm1aY= +github.com/snyk/cli-extension-secrets v0.0.0-20260505103358-cc205308a93e h1:LLdPjkz6zRxYwos2iIyKvlaCL7x8bZAAxeUF+4nG+So= +github.com/snyk/cli-extension-secrets v0.0.0-20260505103358-cc205308a93e/go.mod h1:+Ey86Xjvw2HhDM8OisCVNzlatzO9wEfSQRPapaNfwIk= +github.com/snyk/code-client-go v1.27.0 h1:FOX4JzgHssm5fei4ALyrBAIL9eJGnG52yTkqWcM1Qew= +github.com/snyk/code-client-go v1.27.0/go.mod h1:DQHr0nRchfrc54aciYyrveKWEpBhJwUoc1XoDhWt6W4= 
github.com/snyk/container-cli v0.0.0-20260213211631-cd2b2cf8f3ea h1:/v48hCMPiZVjplylgE2FX1ib8Qd8LN/vf8ZIKfA+wkI= github.com/snyk/container-cli v0.0.0-20260213211631-cd2b2cf8f3ea/go.mod h1:P5yW8+jkwhYBsj5l2jtHeWujyX+SAtvkC8+LELKdlWI= github.com/snyk/dep-graph/go v0.0.0-20260127160647-c836da762c62 h1:kgZNQ5ztI4+n3YKLR5LJbqL8WJmUYgDSbFKREIY79g0= github.com/snyk/dep-graph/go v0.0.0-20260127160647-c836da762c62/go.mod h1:hTr91da/4ze2nk9q6ZW1BmfM2Z8rLUZSEZ3kK+6WGpc= -github.com/snyk/error-catalog-golang-public v0.0.0-20260316131845-f02d7f42046b h1:DM2SPu7rhsD/TNS7zhv4ZoqLLi2cFOqg1VTBCP6RfSg= -github.com/snyk/error-catalog-golang-public v0.0.0-20260316131845-f02d7f42046b/go.mod h1:Ytttq7Pw4vOCu9NtRQaOeDU2dhBYUyNBe6kX4+nIIQ4= -github.com/snyk/go-application-framework v0.0.0-20260422125240-4b46b7117457 h1:edpfJmcHr177YK2hQCeVi0TOO/eglFCJUCd2Nji9LuQ= -github.com/snyk/go-application-framework v0.0.0-20260422125240-4b46b7117457/go.mod h1:7IOOtKxiQhtTbkrX7rax20QNJ/rwGill6n2Rejtld2I= +github.com/snyk/error-catalog-golang-public v0.0.0-20260505112649-a5103d411663 h1:j2ZPhi78wKIHTiL9EFTNVXMIbsk56FVF2d5Sy1ZwSYk= +github.com/snyk/error-catalog-golang-public v0.0.0-20260505112649-a5103d411663/go.mod h1:Ytttq7Pw4vOCu9NtRQaOeDU2dhBYUyNBe6kX4+nIIQ4= +github.com/snyk/go-application-framework v0.0.0-20260506111235-cca3157b9435 h1:R6tOMqc6GyC8ncoTT+eO6I38qs9/SxgPi9AEYoIEubQ= +github.com/snyk/go-application-framework v0.0.0-20260506111235-cca3157b9435/go.mod h1:yTGCJKf6RmqdwrNs5B9zmukL9x1D8EhfSK8mzaPB1Rk= github.com/snyk/go-httpauth v0.0.0-20240307114523-1f5ea3f55c65 h1:CEQuYv0Go6MEyRCD3YjLYM2u3Oxkx8GpCpFBd4rUTUk= github.com/snyk/go-httpauth v0.0.0-20240307114523-1f5ea3f55c65/go.mod h1:88KbbvGYlmLgee4OcQ19yr0bNpXpOr2kciOthaSzCAg= github.com/snyk/policy-engine v1.1.3 h1:MU+K8pxbN6VZ9P5wALUt8BwTjrPDpoEtmTtQqj7sKfY= github.com/snyk/policy-engine v1.1.3/go.mod h1:yOc6YZLWhVGUZjzskDTen8zxrNTUHL4FyZXM0Ezlb8M= github.com/snyk/snyk-iac-capture v0.6.5 h1:992DXCAJSN97KtUh8T5ndaWwd/6ZCal2bDkRXqM1u/E= 
github.com/snyk/snyk-iac-capture v0.6.5/go.mod h1:e47i55EmM0F69ZxyFHC4sCi7vyaJW6DLoaamJJCzWGk= -github.com/snyk/snyk-ls v0.0.0-20260401163317-c1fe9ee766fd h1:U1LAtMHLmLK1lxfvFX4errx7cA1l5jKKSLFoZFdPsmw= -github.com/snyk/snyk-ls v0.0.0-20260401163317-c1fe9ee766fd/go.mod h1:3lXmUS3pmxjyIQEfnuf8yFjfSmaOP7XuhTVusJkllhU= -github.com/snyk/studio-mcp v1.9.2 h1:XdS+JBBmQKjKPgBYru3mKnWrzrExc018DR/SfTGX0aw= -github.com/snyk/studio-mcp v1.9.2/go.mod h1:CiMKFs/PJrAMjG8Zjkvx+XwuM0XlxGJ7jP5yCr5hm5w= +github.com/snyk/snyk-ls v0.0.0-20260507075428-365bccd7be16 h1:CtZ8OWjWy6X0ErVvywCONoWpeQvSE5Ca3NGuP8OcDJQ= +github.com/snyk/snyk-ls v0.0.0-20260507075428-365bccd7be16/go.mod h1:N8TjmSBn40TyZMR2g5El+WOIVyCxMT2fIW56Kdh6Ca4= +github.com/snyk/studio-mcp v1.11.0 h1:WfvWqqHu5+Stb/y+LTVdWWiazc0i4BapxqMCJyGRArA= +github.com/snyk/studio-mcp v1.11.0/go.mod h1:CiMKFs/PJrAMjG8Zjkvx+XwuM0XlxGJ7jP5yCr5hm5w= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/sourcegraph/go-lsp v0.0.0-20240223163137-f80c5dd31dfd h1:Dq5WSzWsP1TbVi10zPWBI5LKEBDg4Y1OhWEph1wr5WQ= diff --git a/cliv2/internal/cliv2/cliv2.go b/cliv2/internal/cliv2/cliv2.go index 98233e462b..cb62d2ebce 100644 --- a/cliv2/internal/cliv2/cliv2.go +++ b/cliv2/internal/cliv2/cliv2.go @@ -66,6 +66,13 @@ const ( const ( configKeyErrFile = "INTERNAL_ERR_FILE_PATH" ERROR_HAS_BEEN_DISPLAYED = "hasBeenDisplayed" + // ConfigKeyRequestConcurrency is the configuration key holding the + // resolved maximum number of concurrent in-flight Snyk dependency-test + // or dependency-monitor HTTP requests issued by the legacy CLI. The + // user-facing SNYK_REQUEST_CONCURRENCY env var feeds this key (registered + // in main.go via AddAlternativeKeys); the resolved value is forwarded to + // the legacy CLI process via constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV. 
+ ConfigKeyRequestConcurrency = "internal_request_concurrency" ) var ( @@ -263,8 +270,8 @@ func (c *CLI) commandVersion(passthroughArgs []string) error { } } -func (c *CLI) commandAbout(proxyInfo *proxy.ProxyInfo, passthroughArgs []string) error { - err := c.executeV1Default(proxyInfo, passthroughArgs) +func (c *CLI) commandAbout(ctx context.Context, proxyInfo *proxy.ProxyInfo, passthroughArgs []string) error { + err := c.executeV1Default(ctx, proxyInfo, passthroughArgs) if err != nil { return err } @@ -355,6 +362,7 @@ func PrepareV1EnvironmentVariables( constants.SNYK_NPM_ALL_PROXY, constants.SNYK_OPENSSL_CONF, constants.SNYK_INTERNAL_PREVIEW_FEATURES_ENABLED, + constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV, constants.DEBUG_CONST, } @@ -391,6 +399,16 @@ func fillEnvironmentFromConfig(inputAsMap map[string]string, config configuratio inputAsMap[constants.SNYK_INTERNAL_ERR_FILE] = config.GetString(configKeyErrFile) inputAsMap[constants.SNYK_TEMP_PATH] = config.GetString(configuration.TEMP_DIR_PATH) + // Forward the resolved request concurrency to the legacy CLI when the user + // set the value. We can't use config.IsSet here: in GAF, IsSet does not + // pre-bind env vars for alternative keys, so it returns false even when + // the SNYK_REQUEST_CONCURRENCY env var is set under WithSupportedEnvVarPrefixes + // (the production setup). GetString goes through GAF's get(), which binds + // the alt key before reading, so it returns the resolved value correctly. 
+ if v := config.GetString(ConfigKeyRequestConcurrency); v != "" { + inputAsMap[constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV] = v + } + if config.GetBool(configuration.PREVIEW_FEATURES_ENABLED) { inputAsMap[constants.SNYK_INTERNAL_PREVIEW_FEATURES_ENABLED] = "1" } @@ -433,14 +451,11 @@ func (c *CLI) PrepareV1Command( return snykCmd, err } -func (c *CLI) executeV1Default(proxyInfo *proxy.ProxyInfo, passThroughArgs []string) error { +func (c *CLI) executeV1Default(ctx context.Context, proxyInfo *proxy.ProxyInfo, passThroughArgs []string) error { timeout := c.globalConfig.GetInt(configuration.TIMEOUT) - var ctx context.Context var cancel context.CancelFunc - if timeout == 0 { - ctx = context.Background() - } else { - ctx, cancel = context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second) + if timeout > 0 { + ctx, cancel = context.WithTimeout(ctx, time.Duration(timeout)*time.Second) defer cancel() } @@ -545,7 +560,7 @@ func GetErrorFromFile(execErr error, errFilePath string, config configuration.Co return nil, ErrIPCNoDataSent } -func (c *CLI) Execute(proxyInfo *proxy.ProxyInfo, passThroughArgs []string) error { +func (c *CLI) Execute(ctx context.Context, proxyInfo *proxy.ProxyInfo, passThroughArgs []string) error { var err error handler := determineHandler(passThroughArgs) @@ -553,11 +568,11 @@ func (c *CLI) Execute(proxyInfo *proxy.ProxyInfo, passThroughArgs []string) erro case V2_VERSION: err = c.commandVersion(passThroughArgs) case V2_ABOUT: - err = c.commandAbout(proxyInfo, passThroughArgs) + err = c.commandAbout(ctx, proxyInfo, passThroughArgs) case V1_DEFAULT: fallthrough default: - err = c.executeV1Default(proxyInfo, passThroughArgs) + err = c.executeV1Default(ctx, proxyInfo, passThroughArgs) } return err diff --git a/cliv2/internal/cliv2/cliv2_test.go b/cliv2/internal/cliv2/cliv2_test.go index be38d023fc..54da22d7ae 100644 --- a/cliv2/internal/cliv2/cliv2_test.go +++ b/cliv2/internal/cliv2/cliv2_test.go @@ -59,7 +59,7 @@ func 
Test_NewCLIv2_SubprocessEnv_OverridesIfSet_AndDefaultsToOsEnv(t *testing.T) assert.NoError(t, err) cmd, err := cli.PrepareV1Command( - context.Background(), + t.Context(), "someExecutable", []string{"--help"}, getProxyInfoForTest(), @@ -83,7 +83,7 @@ func Test_NewCLIv2_SubprocessEnv_OverridesIfSet_AndDefaultsToOsEnv(t *testing.T) assert.NoError(t, err) cmd, err := cli.PrepareV1Command( - context.Background(), + t.Context(), "someExecutable", []string{"--help"}, getProxyInfoForTest(), @@ -270,6 +270,56 @@ func Test_PrepareV1EnvironmentVariables_OnlyExplicitlySetValues(t *testing.T) { }) } +func Test_PrepareV1EnvironmentVariables_RequestConcurrency(t *testing.T) { + // Mirror main.go's production configuration setup. Crucially, this uses + // WithSupportedEnvVarPrefixes (NOT WithAutomaticEnv): under that setup, + // GAF's IsSet does not pre-bind env vars for alternative keys, so any + // implementation that gates forwarding on IsSet would fail to forward + // the value. This test catches that regression. 
+ newConfig := func() configuration.Configuration { + c := configuration.NewWithOpts( + configuration.WithSupportedEnvVarPrefixes("snyk_", "internal_", "test_"), + ) + c.AddAlternativeKeys(cliv2.ConfigKeyRequestConcurrency, []string{"snyk_request_concurrency"}) + return c + } + + t.Run("forwards resolved value to internal env when alt key is set via env", func(t *testing.T) { + t.Setenv("SNYK_REQUEST_CONCURRENCY", "17") + + actual, err := cliv2.PrepareV1EnvironmentVariables([]string{}, "foo", "bar", "proxy", "cacertlocation", newConfig(), []string{}) + + assert.Nil(t, err) + assert.Contains(t, actual, constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV+"=17") + }) + + t.Run("does not set internal env when alt key is unset", func(t *testing.T) { + // guard against a stray env var leaking into the test environment + t.Setenv("SNYK_REQUEST_CONCURRENCY", "") + _ = os.Unsetenv("SNYK_REQUEST_CONCURRENCY") + + actual, err := cliv2.PrepareV1EnvironmentVariables([]string{}, "foo", "bar", "proxy", "cacertlocation", newConfig(), []string{}) + + assert.Nil(t, err) + for _, kv := range actual { + assert.NotContains(t, kv, constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV+"=") + } + }) + + t.Run("user-set internal env is stripped before Go reapplies it", func(t *testing.T) { + t.Setenv("SNYK_REQUEST_CONCURRENCY", "9") + + // Simulate a user trying to bypass Go config by setting the internal var directly. 
+ input := []string{constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV + "=999"} + + actual, err := cliv2.PrepareV1EnvironmentVariables(input, "foo", "bar", "proxy", "cacertlocation", newConfig(), []string{}) + + assert.Nil(t, err) + assert.Contains(t, actual, constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV+"=9") + assert.NotContains(t, actual, constants.SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV+"=999") + }) +} + func Test_PrepareV1EnvironmentVariables_Fail_DontOverrideExisting(t *testing.T) { orgid := "orgid" testapi := "https://api.snyky.io" @@ -352,7 +402,7 @@ func Test_prepareV1Command(t *testing.T) { assert.NoError(t, err) snykCmd, err := cli.PrepareV1Command( - context.Background(), + t.Context(), "someExecutable", expectedArgs, getProxyInfoForTest(), @@ -376,7 +426,7 @@ func Test_prepareV1Command_InjectsExecutablePath(t *testing.T) { assert.NoError(t, err) snykCmd, err := cli.PrepareV1Command( - context.Background(), + t.Context(), "someExecutable", []string{"--help"}, getProxyInfoForTest(), @@ -408,7 +458,7 @@ func Test_extractOnlyOnce(t *testing.T) { assert.NoError(t, cli.Init()) // run once - err = cli.Execute(getProxyInfoForTest(), []string{"--help"}) + err = cli.Execute(t.Context(), getProxyInfoForTest(), []string{"--help"}) assert.Error(t, err) // invalid binary expected here assert.FileExists(t, cli.GetBinaryLocation()) fileInfo1, err := os.Stat(cli.GetBinaryLocation()) @@ -419,7 +469,7 @@ func Test_extractOnlyOnce(t *testing.T) { // run twice assert.Nil(t, cli.Init()) - err = cli.Execute(getProxyInfoForTest(), []string{"--help"}) + err = cli.Execute(t.Context(), getProxyInfoForTest(), []string{"--help"}) assert.Error(t, err) // invalid binary expected here assert.FileExists(t, cli.GetBinaryLocation()) fileInfo2, err := os.Stat(cli.GetBinaryLocation()) @@ -479,7 +529,7 @@ func Test_executeRunV2only(t *testing.T) { assert.NoError(t, err) assert.NoError(t, cli.Init()) - actualReturnCode := cliv2.DeriveExitCode(cli.Execute(getProxyInfoForTest(), 
[]string{"--version"})) + actualReturnCode := cliv2.DeriveExitCode(cli.Execute(t.Context(), getProxyInfoForTest(), []string{"--version"})) assert.Equal(t, expectedReturnCode, actualReturnCode) assert.FileExists(t, cli.GetBinaryLocation()) } @@ -496,7 +546,7 @@ func Test_executeUnknownCommand(t *testing.T) { assert.NoError(t, err) assert.NoError(t, cli.Init()) - actualReturnCode := cliv2.DeriveExitCode(cli.Execute(getProxyInfoForTest(), []string{"bogusCommand"})) + actualReturnCode := cliv2.DeriveExitCode(cli.Execute(t.Context(), getProxyInfoForTest(), []string{"bogusCommand"})) assert.Equal(t, expectedReturnCode, actualReturnCode) } @@ -590,7 +640,7 @@ func Test_setTimeout(t *testing.T) { // sleep for 2s cli.SetV1BinaryLocation("/bin/sleep") - err = cli.Execute(getProxyInfoForTest(), []string{"2"}) + err = cli.Execute(t.Context(), getProxyInfoForTest(), []string{"2"}) assert.ErrorIs(t, err, context.DeadlineExceeded) } diff --git a/cliv2/internal/constants/constants.go b/cliv2/internal/constants/constants.go index 7b22c85e5d..ff082d0ea4 100644 --- a/cliv2/internal/constants/constants.go +++ b/cliv2/internal/constants/constants.go @@ -28,6 +28,7 @@ const DEBUG_CONST = "DEBUG" const SNYK_INTERNAL_ORGID_ENV = "SNYK_INTERNAL_ORGID" const SNYK_INTERNAL_ERR_FILE = "SNYK_ERR_FILE" const SNYK_INTERNAL_PREVIEW_FEATURES_ENABLED = "SNYK_INTERNAL_PREVIEW_FEATURES" +const SNYK_INTERNAL_REQUEST_CONCURRENCY_ENV = "SNYK_INTERNAL_REQUEST_CONCURRENCY" const SNYK_ENDPOINT_ENV = "SNYK_API" const SNYK_ORG_ENV = "SNYK_CFG_ORG" const SNYK_OPENSSL_CONF = "OPENSSL_CONF" diff --git a/cliv2/pkg/basic_workflows/legacycli.go b/cliv2/pkg/basic_workflows/legacycli.go index 9a9f711f37..24e4a7ffac 100644 --- a/cliv2/pkg/basic_workflows/legacycli.go +++ b/cliv2/pkg/basic_workflows/legacycli.go @@ -151,9 +151,9 @@ func legacycliWorkflow( return output, err } - // run the cli + // run the cli with context from invocation (allows cancellation on signal) proxyInfo := wrapperProxy.ProxyInfo() - err = 
cli.Execute(proxyInfo, finalizeArguments(args, config.GetStringSlice(configuration.UNKNOWN_ARGS))) + err = cli.Execute(invocation.Context(), proxyInfo, finalizeArguments(args, config.GetStringSlice(configuration.UNKNOWN_ARGS))) if !useStdIo { _ = outWriter.Flush() diff --git a/package-lock.json b/package-lock.json index 522ba3aaed..7fdf45320f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,7 +13,7 @@ ], "dependencies": { "@open-policy-agent/opa-wasm": "^1.6.0", - "@sentry/node": "^7.34.0", + "@sentry/node": "^7.120.4", "@snyk/cli-interface": "2.15.0", "@snyk/cloud-config-parser": "^1.14.5", "@snyk/code-client": "^4.23.5", @@ -63,27 +63,27 @@ "ora": "5.4.0", "os-name": "^5.1.0", "p-map": "^4.0.0", - "proxy-from-env": "^1.0.0", + "proxy-from-env": "^2.1.0", "rimraf": "^2.6.3", "semver": "^6.0.0", "snyk-config": "^5.0.0", "snyk-cpp-plugin": "^2.24.3", - "snyk-docker-plugin": "^9.6.0", + "snyk-docker-plugin": "9.7.0", "snyk-go-plugin": "2.1.1", - "snyk-gradle-plugin": "5.1.1", + "snyk-gradle-plugin": "6.0.0", "snyk-module": "3.1.0", - "snyk-mvn-plugin": "^4.6.1", - "snyk-nodejs-lockfile-parser": "2.6.1", - "snyk-nodejs-plugin": "^1.6.1", + "snyk-mvn-plugin": "^4.7.0", + "snyk-nodejs-lockfile-parser": "2.7.1", + "snyk-nodejs-plugin": "^2.0.0", "snyk-nuget-plugin": "4.2.0", "snyk-php-plugin": "1.12.1", "snyk-policy": "^4.1.6", - "snyk-python-plugin": "^3.2.0", + "snyk-python-plugin": "^3.2.1", "snyk-resolve-deps": "4.10.0", "snyk-sbt-plugin": "3.1.0", "snyk-swiftpm-plugin": "1.4.1", "strip-ansi": "^6.0.1", - "tar": "^7.5.11", + "tar": "^7.5.8", "uuid": "^8.3.2", "wrap-ansi": "^5.1.0" }, @@ -104,7 +104,7 @@ "@types/sinon": "^7.5.0", "@typescript-eslint/eslint-plugin": "^4.30.0", "@typescript-eslint/parser": "^4.30.0", - "@yao-pkg/pkg": "5.16.1", + "@yao-pkg/pkg": "6.19.0", "ajv": "^8.18.0", "ajv-draft-04": "^1.0.0", "body-parser": "^1.19.0", @@ -143,7 +143,7 @@ "webpack-merge": "^5.8.0" }, "engines": { - "node": "^20" + "node": "^22" } }, 
"node_modules/@alcalzone/ansi-tokenize": { @@ -892,6 +892,448 @@ "node": "^12.20.0 || ^14.13.0 || ^15 || ^16 || ^17 || ^18 || ^19 || ^20 || ^21 || ^22 || ^23 || ^24 || ^25" } }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + 
"integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": 
">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": 
"sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@hutson/parse-repository-url": { "version": "5.0.0", "dev": true, @@ -2229,6 +2671,7 @@ }, "node_modules/@octokit/auth-token": { "version": "2.4.5", + "dev": true, "license": "MIT", "dependencies": { "@octokit/types": "^6.0.3" @@ -2236,6 +2679,7 @@ }, "node_modules/@octokit/core": { "version": "3.5.1", + "dev": true, "license": "MIT", "dependencies": { "@octokit/auth-token": "^2.4.4", @@ -2249,6 +2693,7 @@ }, "node_modules/@octokit/core/node_modules/@octokit/request-error": { "version": "2.1.0", + "dev": true, "license": "MIT", "dependencies": { "@octokit/types": "^6.0.3", @@ -2258,10 +2703,12 @@ }, "node_modules/@octokit/core/node_modules/universal-user-agent": { "version": "6.0.0", + "dev": true, "license": "ISC" }, "node_modules/@octokit/endpoint": { "version": "6.0.12", + "dev": true, "license": "MIT", "dependencies": { "@octokit/types": "^6.0.3", @@ -2271,10 +2718,12 @@ }, 
"node_modules/@octokit/endpoint/node_modules/universal-user-agent": { "version": "6.0.0", + "dev": true, "license": "ISC" }, "node_modules/@octokit/graphql": { "version": "4.6.4", + "dev": true, "license": "MIT", "dependencies": { "@octokit/request": "^5.6.0", @@ -2284,14 +2733,17 @@ }, "node_modules/@octokit/graphql/node_modules/universal-user-agent": { "version": "6.0.0", + "dev": true, "license": "ISC" }, "node_modules/@octokit/openapi-types": { "version": "12.11.0", + "dev": true, "license": "MIT" }, "node_modules/@octokit/plugin-paginate-rest": { "version": "2.21.3", + "dev": true, "license": "MIT", "dependencies": { "@octokit/types": "^6.40.0" @@ -2302,6 +2754,7 @@ }, "node_modules/@octokit/plugin-request-log": { "version": "1.0.4", + "dev": true, "license": "MIT", "peerDependencies": { "@octokit/core": ">=3" @@ -2309,6 +2762,7 @@ }, "node_modules/@octokit/plugin-rest-endpoint-methods": { "version": "5.16.2", + "dev": true, "license": "MIT", "dependencies": { "@octokit/types": "^6.39.0", @@ -2320,6 +2774,7 @@ }, "node_modules/@octokit/request": { "version": "5.6.1", + "dev": true, "license": "MIT", "dependencies": { "@octokit/endpoint": "^6.0.1", @@ -2350,6 +2805,7 @@ }, "node_modules/@octokit/request/node_modules/@octokit/request-error": { "version": "2.1.0", + "dev": true, "license": "MIT", "dependencies": { "@octokit/types": "^6.0.3", @@ -2359,10 +2815,12 @@ }, "node_modules/@octokit/request/node_modules/universal-user-agent": { "version": "6.0.0", + "dev": true, "license": "ISC" }, "node_modules/@octokit/rest": { "version": "18.12.0", + "dev": true, "license": "MIT", "dependencies": { "@octokit/core": "^3.5.1", @@ -2373,6 +2831,7 @@ }, "node_modules/@octokit/types": { "version": "6.41.0", + "dev": true, "license": "MIT", "dependencies": { "@octokit/openapi-types": "^12.11.0" @@ -2386,17 +2845,6 @@ "yaml": "^1.10.2" } }, - "node_modules/@pagerduty/pdjs": { - "version": "2.2.4", - "license": "Apache-2.0", - "dependencies": { - "browser-or-node": "^2.0.0", - 
"cross-fetch": "^3.0.6" - }, - "engines": { - "node": ">=10.0.0" - } - }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "dev": true, @@ -2429,75 +2877,90 @@ "url": "https://opencollective.com/pnpm" } }, - "node_modules/@sentry/core": { - "version": "7.34.0", + "node_modules/@roberts_lando/vfs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@roberts_lando/vfs/-/vfs-0.3.3.tgz", + "integrity": "sha512-YjkxVSLw5WMZQoARaryRAjcxA+GbBzWMJdwYZX5oLUt9cC/gew9as4Dn7tcLzPp7BPoR221VpTZ+78TRPawnjg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 22" + } + }, + "node_modules/@sentry-internal/tracing": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.120.4.tgz", + "integrity": "sha512-Fz5+4XCg3akeoFK+K7g+d7HqGMjmnLoY2eJlpONJmaeT9pXY7yfUyXKZMmMajdE2LxxKJgQ2YKvSCaGVamTjHw==", "license": "MIT", "dependencies": { - "@sentry/types": "7.34.0", - "@sentry/utils": "7.34.0", - "tslib": "^1.9.3" + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, - "node_modules/@sentry/node": { - "version": "7.34.0", + "node_modules/@sentry/core": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": "sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", "license": "MIT", "dependencies": { - "@sentry/core": "7.34.0", - "@sentry/types": "7.34.0", - "@sentry/utils": "7.34.0", - "cookie": "^0.4.1", - "https-proxy-agent": "^5.0.0", - "lru_map": "^0.3.3", - "tslib": "^1.9.3" + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, - "node_modules/@sentry/node/node_modules/agent-base": { - "version": "6.0.2", + "node_modules/@sentry/integrations": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/integrations/-/integrations-7.120.4.tgz", + "integrity": 
"sha512-kkBTLk053XlhDCg7OkBQTIMF4puqFibeRO3E3YiVc4PGLnocXMaVpOSCkMqAc1k1kZ09UgGi8DxfQhnFEjUkpA==", "license": "MIT", "dependencies": { - "debug": "4" + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4", + "localforage": "^1.8.1" }, "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/@sentry/node/node_modules/cookie": { - "version": "0.4.2", - "license": "MIT", - "engines": { - "node": ">= 0.6" + "node": ">=8" } }, - "node_modules/@sentry/node/node_modules/https-proxy-agent": { - "version": "5.0.1", + "node_modules/@sentry/node": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/node/-/node-7.120.4.tgz", + "integrity": "sha512-qq3wZAXXj2SRWhqErnGCSJKUhPSlZ+RGnCZjhfjHpP49KNpcd9YdPTIUsFMgeyjdh6Ew6aVCv23g1hTP0CHpYw==", "license": "MIT", "dependencies": { - "agent-base": "6", - "debug": "4" + "@sentry-internal/tracing": "7.120.4", + "@sentry/core": "7.120.4", + "@sentry/integrations": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { - "node": ">= 6" + "node": ">=8" } }, "node_modules/@sentry/types": { - "version": "7.34.0", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@sentry/utils": { - "version": "7.34.0", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", "license": "MIT", "dependencies": { - "@sentry/types": "7.34.0", - "tslib": "^1.9.3" + "@sentry/types": "7.120.4" }, "engines": { "node": ">=8" @@ -2633,14 +3096,6 @@ "dev": true, "license": "(Unlicense OR Apache-2.0)" }, - "node_modules/@slack/types": { - "version": "2.11.0", - "license": "MIT", - "engines": { - 
"node": ">= 12.13.0", - "npm": ">= 6.12.0" - } - }, "node_modules/@snyk/child-process": { "version": "0.4.1", "license": "Apache-2.0", @@ -2657,10 +3112,6 @@ "version": "2.6.0", "license": "0BSD" }, - "node_modules/@snyk/cli-alert": { - "resolved": "packages/cli-alert", - "link": true - }, "node_modules/@snyk/cli-interface": { "version": "2.15.0", "resolved": "https://registry.npmjs.org/@snyk/cli-interface/-/cli-interface-2.15.0.tgz", @@ -4457,7 +4908,9 @@ } }, "node_modules/@types/debug": { - "version": "4.1.7", + "version": "4.1.13", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.13.tgz", + "integrity": "sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==", "license": "MIT", "dependencies": { "@types/ms": "*" @@ -4691,7 +5144,9 @@ "license": "MIT" }, "node_modules/@types/ms": { - "version": "0.7.31", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", "license": "MIT" }, "node_modules/@types/needle": { @@ -5285,30 +5740,43 @@ "license": "Apache-2.0" }, "node_modules/@yao-pkg/pkg": { - "version": "5.16.1", + "version": "6.19.0", + "resolved": "https://registry.npmjs.org/@yao-pkg/pkg/-/pkg-6.19.0.tgz", + "integrity": "sha512-Ys9Fn/F44C3nOTlNyhwviLyMxydgFzsB13jAAXKxEuIR7aAz8PrFlUpxTUlwaXUU847nlmg5mnaGIyOJpnVtRw==", "dev": true, "license": "MIT", "dependencies": { "@babel/generator": "^7.23.0", "@babel/parser": "^7.23.0", + "@babel/traverse": "^7.23.0", "@babel/types": "^7.23.0", - "@yao-pkg/pkg-fetch": "3.5.16", - "into-stream": "^6.0.0", - "minimist": "^1.2.6", + "@roberts_lando/vfs": "^0.3.3", + "@yao-pkg/pkg-fetch": "3.5.33", + "esbuild": "^0.27.3", + "into-stream": "^9.1.0", "multistream": "^4.1.0", "picocolors": "^1.1.0", "picomatch": "^4.0.2", + "postject": "^1.0.0-alpha.6", "prebuild-install": "^7.1.1", - "resolve": "^1.22.0", + "resolve": 
"^1.22.10", + "resolve.exports": "^2.0.3", "stream-meter": "^1.0.4", - "tinyglobby": "^0.2.9" + "tar": "^7.5.7", + "tinyglobby": "^0.2.11", + "unzipper": "^0.12.3" }, "bin": { "pkg": "lib-es5/bin.js" + }, + "engines": { + "node": ">=22.0.0" } }, "node_modules/@yao-pkg/pkg-fetch": { - "version": "3.5.16", + "version": "3.5.33", + "resolved": "https://registry.npmjs.org/@yao-pkg/pkg-fetch/-/pkg-fetch-3.5.33.tgz", + "integrity": "sha512-j2UoH+eP4VobfovQg1gkWwDoB4O/tv8rlLnEjUEEHuWXJ5eBLNUIrobMSEp773/2pgUJUfqqPUFIhS1pN8OZuQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5317,7 +5785,7 @@ "picocolors": "^1.1.0", "progress": "^2.0.3", "semver": "^7.3.5", - "tar-fs": "^2.1.1", + "tar-fs": "^3.1.1", "yargs": "^16.2.0" }, "bin": { @@ -5326,6 +5794,8 @@ }, "node_modules/@yao-pkg/pkg-fetch/node_modules/agent-base": { "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5335,8 +5805,25 @@ "node": ">= 6.0.0" } }, + "node_modules/@yao-pkg/pkg-fetch/node_modules/b4a": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.8.0.tgz", + "integrity": "sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg==", + "dev": true, + "license": "Apache-2.0", + "peerDependencies": { + "react-native-b4a": "*" + }, + "peerDependenciesMeta": { + "react-native-b4a": { + "optional": true + } + } + }, "node_modules/@yao-pkg/pkg-fetch/node_modules/https-proxy-agent": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "dev": true, "license": "MIT", "dependencies": { @@ -5348,7 +5835,9 @@ } }, "node_modules/@yao-pkg/pkg-fetch/node_modules/semver": { 
- "version": "7.7.2", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -5358,6 +5847,34 @@ "node": ">=10" } }, + "node_modules/@yao-pkg/pkg-fetch/node_modules/tar-fs": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.1.2.tgz", + "integrity": "sha512-QGxxTxxyleAdyM3kpFs14ymbYmNFrfY+pHj7Z8FgtbZ7w2//VAgLMac7sT6nRpIHjppXO2AwwEOg0bPFVRcmXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "pump": "^3.0.0", + "tar-stream": "^3.1.5" + }, + "optionalDependencies": { + "bare-fs": "^4.0.1", + "bare-path": "^3.0.0" + } + }, + "node_modules/@yao-pkg/pkg-fetch/node_modules/tar-stream": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.2.0.tgz", + "integrity": "sha512-ojzvCvVaNp6aOTFmG7jaRD0meowIAuPc3cMMhSgKiVWws1GyHbGd/xvnyuRKcKlMpt3qvxx6r0hreCNITP9hIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "b4a": "^1.6.4", + "bare-fs": "^4.5.5", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, "node_modules/@yao-pkg/pkg/node_modules/@babel/generator": { "version": "7.27.3", "dev": true, @@ -6514,6 +7031,7 @@ }, "node_modules/asynckit": { "version": "0.4.0", + "dev": true, "license": "MIT" }, "node_modules/at-least-node": { @@ -6614,8 +7132,8 @@ "version": "4.5.5", "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.5.5.tgz", "integrity": "sha512-XvwYM6VZqKoqDll8BmSww5luA5eflDzY0uEFfBJtFKe4PAAtxBjU3YIxzIBzhyaEQBy1VXEQBto4cpN5RZJw+w==", + "devOptional": true, "license": "Apache-2.0", - "optional": true, "dependencies": { "bare-events": "^2.5.4", "bare-path": "^3.0.0", @@ -6639,8 +7157,8 @@ "version": "3.7.0", "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.7.0.tgz", "integrity": 
"sha512-64Rcwj8qlnTZU8Ps6JJEdSmxBEUGgI7g8l+lMtsJLl4IsfTcHMTfJ188u2iGV6P6YPRZrtv72B2kjn+hp+Yv3g==", + "devOptional": true, "license": "Apache-2.0", - "optional": true, "engines": { "bare": ">=1.14.0" } @@ -6649,8 +7167,8 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz", "integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==", + "devOptional": true, "license": "Apache-2.0", - "optional": true, "dependencies": { "bare-os": "^3.0.1" } @@ -6659,8 +7177,8 @@ "version": "2.8.0", "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.8.0.tgz", "integrity": "sha512-reUN0M2sHRqCdG4lUK3Fw8w98eeUIZHL5c3H7Mbhk2yVBL+oofgaIp0ieLfD5QXwPCypBpmEEKU2WZKzbAk8GA==", + "devOptional": true, "license": "Apache-2.0", - "optional": true, "dependencies": { "streamx": "^2.21.0", "teex": "^1.0.1" @@ -6682,8 +7200,8 @@ "version": "2.3.2", "resolved": "https://registry.npmjs.org/bare-url/-/bare-url-2.3.2.tgz", "integrity": "sha512-ZMq4gd9ngV5aTMa5p9+UfY0b3skwhHELaDkhEHetMdX0LRkW9kzaym4oo/Eh+Ghm0CCDuMTsRIGM/ytUc1ZYmw==", + "devOptional": true, "license": "Apache-2.0", - "optional": true, "dependencies": { "bare-path": "^3.0.0" } @@ -6717,6 +7235,7 @@ }, "node_modules/before-after-hook": { "version": "2.2.2", + "dev": true, "license": "Apache-2.0" }, "node_modules/big.js": { @@ -6850,10 +7369,6 @@ "node": ">=8" } }, - "node_modules/browser-or-node": { - "version": "2.1.1", - "license": "MIT" - }, "node_modules/browserify-zlib": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz", @@ -7338,6 +7853,7 @@ }, "node_modules/call-bind-apply-helpers": { "version": "1.0.2", + "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -7780,6 +8296,7 @@ }, "node_modules/combined-stream": { "version": "1.0.8", + "dev": true, "license": "MIT", "dependencies": { "delayed-stream": "~1.0.0" @@ -8390,13 +8907,6 @@ "dev": true, 
"license": "MIT" }, - "node_modules/cross-fetch": { - "version": "3.1.8", - "license": "MIT", - "dependencies": { - "node-fetch": "^2.6.12" - } - }, "node_modules/cross-spawn": { "version": "6.0.5", "dev": true, @@ -8699,6 +9209,7 @@ }, "node_modules/delayed-stream": { "version": "1.0.0", + "dev": true, "license": "MIT", "engines": { "node": ">=0.4.0" @@ -8834,6 +9345,7 @@ }, "node_modules/deprecation": { "version": "2.3.1", + "dev": true, "license": "ISC" }, "node_modules/deps-regex": { @@ -8982,6 +9494,7 @@ }, "node_modules/dunder-proto": { "version": "1.0.1", + "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.1", @@ -8992,6 +9505,49 @@ "node": ">= 0.4" } }, + "node_modules/duplexer2": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", + "integrity": "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "readable-stream": "^2.0.2" + } + }, + "node_modules/duplexer2/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/duplexer2/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/duplexer2/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, "node_modules/duplexify": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", @@ -9114,6 +9670,31 @@ "node": ">= 0.8" } }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/end-of-stream": { "version": "1.4.4", "license": "MIT", @@ -9212,6 +9793,7 @@ }, "node_modules/es-define-property": { "version": "1.0.1", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -9219,6 +9801,7 @@ }, "node_modules/es-errors": { "version": "1.3.0", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -9231,6 +9814,7 @@ }, "node_modules/es-object-atoms": { "version": "1.1.1", + "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0" @@ -9280,6 +9864,48 @@ "es6-promise": "^4.0.3" } }, + "node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": 
"sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.7", + "@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + "@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, "node_modules/escalade": { "version": "3.1.1", "dev": true, @@ -10547,42 +11173,6 @@ "node": ">= 0.6" } }, - "node_modules/from2": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.0" - } - }, - "node_modules/from2/node_modules/isarray": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/from2/node_modules/readable-stream": { - "version": "2.3.7", - "dev": true, - "license": "MIT", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/from2/node_modules/string_decoder": { - 
"version": "1.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/fromentries": { "version": "1.3.2", "dev": true, @@ -10658,6 +11248,7 @@ }, "node_modules/function-bind": { "version": "1.1.2", + "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" @@ -10705,6 +11296,7 @@ }, "node_modules/get-intrinsic": { "version": "1.3.0", + "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.2", @@ -10746,6 +11338,7 @@ }, "node_modules/get-proto": { "version": "1.0.1", + "dev": true, "license": "MIT", "dependencies": { "dunder-proto": "^1.0.1", @@ -10988,6 +11581,7 @@ }, "node_modules/gopd": { "version": "1.2.0", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -11100,6 +11694,7 @@ }, "node_modules/has-symbols": { "version": "1.1.0", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -11110,6 +11705,7 @@ }, "node_modules/has-tostringtag": { "version": "1.0.2", + "dev": true, "license": "MIT", "dependencies": { "has-symbols": "^1.0.3" @@ -11128,6 +11724,7 @@ }, "node_modules/hasown": { "version": "2.0.2", + "dev": true, "license": "MIT", "dependencies": { "function-bind": "^1.1.2" @@ -11778,15 +12375,13 @@ } }, "node_modules/into-stream": { - "version": "6.0.0", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-9.1.0.tgz", + "integrity": "sha512-DRsRnQrbzdFjaQ1oe4C6/EIUymIOEix1qROEJTF9dbMq+M4Zrm6VaLp6SD/B9IsiEjPZuBSnWWFN+udajugdWA==", "dev": true, "license": "MIT", - "dependencies": { - "from2": "^2.3.0", - "p-is-promise": "^3.0.0" - }, "engines": { - "node": ">=10" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -11895,11 +12490,16 @@ "license": "MIT" }, "node_modules/is-core-module": { - "version": "2.9.0", + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": 
"sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", "dev": true, "license": "MIT", "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -12070,6 +12670,7 @@ }, "node_modules/is-plain-object": { "version": "5.0.0", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -14775,6 +15376,24 @@ "json5": "lib/cli.js" } }, + "node_modules/localforage": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/localforage/-/localforage-1.10.0.tgz", + "integrity": "sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==", + "license": "Apache-2.0", + "dependencies": { + "lie": "3.1.1" + } + }, + "node_modules/localforage/node_modules/lie": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.1.1.tgz", + "integrity": "sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==", + "license": "MIT", + "dependencies": { + "immediate": "~3.0.5" + } + }, "node_modules/locate-path": { "version": "5.0.0", "dev": true, @@ -15091,10 +15710,6 @@ "node": ">=8" } }, - "node_modules/lru_map": { - "version": "0.3.3", - "license": "MIT" - }, "node_modules/lru-cache": { "version": "5.1.1", "license": "ISC", @@ -15237,6 +15852,7 @@ }, "node_modules/math-intrinsics": { "version": "1.1.0", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -15342,6 +15958,7 @@ }, "node_modules/mime-db": { "version": "1.49.0", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.6" @@ -15349,6 +15966,7 @@ }, "node_modules/mime-types": { "version": "2.1.32", + "dev": true, "license": "MIT", "dependencies": { "mime-db": "1.49.0" @@ -15725,6 +16343,7 @@ }, "node_modules/node-fetch": { "version": "2.7.0", + "dev": true, "license": "MIT", "dependencies": { "whatwg-url": "^5.0.0" @@ -15743,14 +16362,17 @@ }, 
"node_modules/node-fetch/node_modules/tr46": { "version": "0.0.3", + "dev": true, "license": "MIT" }, "node_modules/node-fetch/node_modules/webidl-conversions": { "version": "3.0.1", + "dev": true, "license": "BSD-2-Clause" }, "node_modules/node-fetch/node_modules/whatwg-url": { "version": "5.0.0", + "dev": true, "license": "MIT", "dependencies": { "tr46": "~0.0.3", @@ -16460,14 +17082,6 @@ "node": ">=4" } }, - "node_modules/p-is-promise": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/p-limit": { "version": "3.1.0", "dev": true, @@ -17196,6 +17810,32 @@ "dev": true, "license": "MIT" }, + "node_modules/postject": { + "version": "1.0.0-alpha.6", + "resolved": "https://registry.npmjs.org/postject/-/postject-1.0.0-alpha.6.tgz", + "integrity": "sha512-b9Eb8h2eVqNE8edvKdwqkrY6O7kAwmI8kcnBv1NScolYJbo59XUF0noFq+lxbC1yN20bmC0WBEbDC5H/7ASb0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "commander": "^9.4.0" + }, + "bin": { + "postject": "dist/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/postject/node_modules/commander": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", + "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || >=14" + } + }, "node_modules/prelude-ls": { "version": "1.1.2", "dev": true, @@ -17415,8 +18055,13 @@ } }, "node_modules/proxy-from-env": { - "version": "1.1.0", - "license": "MIT" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-2.1.0.tgz", + "integrity": "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } }, "node_modules/proxyquire": { "version": "1.8.0", @@ -18190,17 +18835,23 @@ "license": "MIT" }, 
"node_modules/resolve": { - "version": "1.22.1", + "version": "1.22.12", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.12.tgz", + "integrity": "sha512-TyeJ1zif53BPfHootBGwPRYT1RUt6oGWsaQr8UyZW/eAm9bKoijtvruSDEmZHm92CwS9nj7/fWttqPCgzep8CA==", "dev": true, "license": "MIT", "dependencies": { - "is-core-module": "^2.9.0", + "es-errors": "^1.3.0", + "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -18304,7 +18955,9 @@ } }, "node_modules/resolve.exports": { - "version": "2.0.2", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", "dev": true, "license": "MIT", "engines": { @@ -18685,26 +19338,40 @@ "license": "MIT" }, "node_modules/shescape": { - "version": "1.6.1", + "version": "2.1.11", + "resolved": "https://registry.npmjs.org/shescape/-/shescape-2.1.11.tgz", + "integrity": "sha512-kR+oVEEgfo2TzK6FZAXtZMZ4aaVFuy5WeToxgjh95KPqUOLoFb1M+PYOWrDV7QqfHgdwnBpPBeBbf07sDgyAtA==", "license": "MPL-2.0", "dependencies": { - "which": "^2.0.0" + "@ericcornelissen/lregexp": "^1.0.7", + "which": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" }, "engines": { - "node": "^10.13.0 || ^12 || ^14 || ^16 || ^18" + "node": "^14.18.0 || ^16.13.0 || ^18 || ^19 || ^20 || ^22 || ^24 || ^25" + } + }, + "node_modules/shescape/node_modules/isexe": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-4.0.0.tgz", + "integrity": "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw==", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=20" } }, "node_modules/shescape/node_modules/which": { - "version": "2.0.2", + "version": "6.0.1", + "resolved": 
"https://registry.npmjs.org/which/-/which-6.0.1.tgz", + "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", "license": "ISC", "dependencies": { - "isexe": "^2.0.0" + "isexe": "^4.0.0" }, "bin": { - "node-which": "bin/node-which" + "node-which": "bin/which.js" }, "engines": { - "node": ">= 8" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/side-channel": { @@ -19008,9 +19675,10 @@ "license": "ISC" }, "node_modules/snyk-docker-plugin": { - "version": "9.6.0", - "resolved": "https://registry.npmjs.org/snyk-docker-plugin/-/snyk-docker-plugin-9.6.0.tgz", - "integrity": "sha512-3MWKqxHQJSLa1MPuqaewVQWiQzfRfPPxfrrFcH8HIEPS/PcMKFFGOaSNCreU5sfeGzQOi471S/+GCCUkAoGkaQ==", + "version": "9.7.0", + "resolved": "https://registry.npmjs.org/snyk-docker-plugin/-/snyk-docker-plugin-9.7.0.tgz", + "integrity": "sha512-DsfbmfPJzWars4VyCI4WcMkwdn5nV6YkYW5W0aRO3xeBVvJnWsVQFuVE/e58W2Wo9igMXHO+4V8oWQRe0Q1D2g==", + "license": "Apache-2.0", "dependencies": { "@snyk/composer-lockfile-parser": "^1.4.1", "@snyk/dep-graph": "^2.12.1", @@ -19028,17 +19696,14 @@ "fzstd": "^0.1.1", "gunzip-maybe": "^1.4.2", "minimatch": "^9.0.0", - "mkdirp": "^1.0.4", "packageurl-js": "1.2.0", "semver": "^7.7.3", "shescape": "^2.1.7", - "snyk-nodejs-lockfile-parser": "^2.2.2", + "snyk-nodejs-lockfile-parser": "^2.7.0", "snyk-poetry-lockfile-parser": "1.9.1", "snyk-resolve-deps": "^4.9.1", "tar-stream": "^2.2.0", - "tmp": "^0.2.5", "tslib": "^1", - "uuid": "^8.2.0", "varint": "^6.0.0" }, "engines": { @@ -19070,15 +19735,6 @@ } } }, - "node_modules/snyk-docker-plugin/node_modules/isexe": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-4.0.0.tgz", - "integrity": "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=20" - } - }, "node_modules/snyk-docker-plugin/node_modules/minimatch": { "version": "9.0.9", "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", @@ -19093,18 +19749,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/snyk-docker-plugin/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/snyk-docker-plugin/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -19123,43 +19767,6 @@ "node": ">=10" } }, - "node_modules/snyk-docker-plugin/node_modules/shescape": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/shescape/-/shescape-2.1.8.tgz", - "integrity": "sha512-owfw+5BB1A51KyNiCuUOyDSh1JOw+DrxgX0Uac7eORpF6YBEoLjbDlHn6tHQudnyi3pt3HeHl9jJ30YPmtEMHQ==", - "license": "MPL-2.0", - "dependencies": { - "@ericcornelissen/lregexp": "^1.0.3", - "which": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" - }, - "engines": { - "node": "^14.18.0 || ^16.13.0 || ^18 || ^19 || ^20 || ^22 || ^24 || ^25" - } - }, - "node_modules/snyk-docker-plugin/node_modules/tmp": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", - "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", - "license": "MIT", - "engines": { - "node": ">=14.14" - } - }, - "node_modules/snyk-docker-plugin/node_modules/which": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-6.0.1.tgz", - "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", - "license": "ISC", - "dependencies": { - "isexe": "^4.0.0" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, "node_modules/snyk-go-parser": { "version": "1.13.0", 
"license": "Apache-2.0", @@ -19210,37 +19817,31 @@ } }, "node_modules/snyk-gradle-plugin": { - "version": "5.1.1", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/snyk-gradle-plugin/-/snyk-gradle-plugin-6.0.0.tgz", + "integrity": "sha512-QyQTKEGuaiwPg9qzzePpmTFuQpfGFJwCBUXnhEv4ETXGtK7dWKsniEnDUu8oFq63G8wEX1py9YBptXAdeL/neQ==", "license": "Apache-2.0", "dependencies": { "@common.js/yocto-queue": "^1.1.1", - "@snyk/cli-interface": "2.11.3", + "@snyk/cli-interface": "2.15.0", "@snyk/dep-graph": "^1.28.0", - "@types/debug": "^4.1.4", - "chalk": "^3.0.0", - "debug": "^4.1.1", + "@types/debug": "^4.1.13", + "chalk": "^4.1.2", "p-map": "^4.0.0", - "packageurl-js": "^1.0.0", - "shescape": "1.6.1", - "tmp": "0.2.1", - "tslib": "^2.0.0" + "packageurl-js": "^1.2.1", + "shescape": "2.1.11", + "tmp": "^0.2.5", + "tslib": "^2.8.1" }, "engines": { - "node": ">=16" - } - }, - "node_modules/snyk-gradle-plugin/node_modules/@snyk/cli-interface": { - "version": "2.11.3", - "license": "Apache-2.0", - "dependencies": { - "@types/graphlib": "^2" - }, - "peerDependencies": { - "@snyk/dep-graph": "^1" + "node": "^20.19.0 || ^22.13.0 || >=24.0.0", + "npm": ">=10" } }, "node_modules/snyk-gradle-plugin/node_modules/@snyk/dep-graph": { "version": "1.31.0", + "resolved": "https://registry.npmjs.org/@snyk/dep-graph/-/dep-graph-1.31.0.tgz", + "integrity": "sha512-nGSua40dcI/ISDDW46EYSjwVZxdWohb4bDlHFYtudL5bxo0PV9wFA1QeZewKQVeHLVaGkrESXdqQubP0pFf4vA==", "license": "Apache-2.0", "dependencies": { "event-loop-spinner": "^2.1.0", @@ -19269,10 +19870,14 @@ }, "node_modules/snyk-gradle-plugin/node_modules/@snyk/dep-graph/node_modules/tslib": { "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", "license": "0BSD" }, "node_modules/snyk-gradle-plugin/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "license": "MIT", "dependencies": { "color-convert": "^2.0.1" @@ -19285,18 +19890,25 @@ } }, "node_modules/snyk-gradle-plugin/node_modules/chalk": { - "version": "3.0.0", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/snyk-gradle-plugin/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "license": "MIT", "dependencies": { "color-name": "~1.1.4" @@ -19307,51 +19919,39 @@ }, "node_modules/snyk-gradle-plugin/node_modules/color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, "node_modules/snyk-gradle-plugin/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/snyk-gradle-plugin/node_modules/lru-cache": { - "version": "6.0.0", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, 
"node_modules/snyk-gradle-plugin/node_modules/object-hash": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", + "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==", "license": "MIT", "engines": { "node": ">= 6" } }, - "node_modules/snyk-gradle-plugin/node_modules/rimraf": { - "version": "3.0.2", - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } + "node_modules/snyk-gradle-plugin/node_modules/packageurl-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/packageurl-js/-/packageurl-js-1.2.1.tgz", + "integrity": "sha512-cZ6/MzuXaoFd16/k0WnwtI298UCaDHe/XlSh85SeOKbGZ1hq0xvNbx3ILyCMyk7uFQxl6scF3Aucj6/EO9NwcA==", + "license": "MIT" }, "node_modules/snyk-gradle-plugin/node_modules/semver": { - "version": "7.5.4", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -19361,6 +19961,8 @@ }, "node_modules/snyk-gradle-plugin/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "license": "MIT", "dependencies": { "has-flag": "^4.0.0" @@ -19370,23 +19972,20 @@ } }, "node_modules/snyk-gradle-plugin/node_modules/tmp": { - "version": "0.2.1", + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", "license": "MIT", - "dependencies": { - "rimraf": 
"^3.0.0" - }, "engines": { - "node": ">=8.17.0" + "node": ">=14.14" } }, "node_modules/snyk-gradle-plugin/node_modules/tslib": { - "version": "2.3.1", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, - "node_modules/snyk-gradle-plugin/node_modules/yallist": { - "version": "4.0.0", - "license": "ISC" - }, "node_modules/snyk-module": { "version": "3.1.0", "license": "Apache-2.0", @@ -19420,9 +20019,9 @@ "license": "ISC" }, "node_modules/snyk-mvn-plugin": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/snyk-mvn-plugin/-/snyk-mvn-plugin-4.6.1.tgz", - "integrity": "sha512-0gNiEzscx10E7anHHdvakKYcwz2fppNXpTrQAq26oYUT7Rfd/Vg9+Y1PtvdcLRI2PJBKHbK571Hjl/rKqgu4DQ==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/snyk-mvn-plugin/-/snyk-mvn-plugin-4.7.0.tgz", + "integrity": "sha512-edSS9J3pE60IdDhe1La/CMH2D9+GoPWPLv22/6YSd6f+lEquSVBgO+P0F0DON2YKC4xqkfsr0Kw4m6GtTxt22w==", "license": "Apache-2.0", "dependencies": { "@common.js/yocto-queue": "^1.1.1", @@ -19436,7 +20035,7 @@ "tslib": "^2.4.0" }, "engines": { - "node": "^20" + "node": "^20.18.1 || ^22.13.0 || >=24.0.0" } }, "node_modules/snyk-mvn-plugin/node_modules/@snyk/cli-interface": { @@ -19453,37 +20052,15 @@ "version": "2.0.1", "license": "MIT" }, - "node_modules/snyk-mvn-plugin/node_modules/shescape": { - "version": "2.1.4", - "license": "MPL-2.0", - "dependencies": { - "which": "^3.0.0" - }, - "engines": { - "node": "^14.18.0 || ^16.13.0 || ^18 || ^19 || ^20 || ^22 || ^24" - } - }, "node_modules/snyk-mvn-plugin/node_modules/tslib": { "version": "2.8.1", "license": "0BSD" }, - "node_modules/snyk-mvn-plugin/node_modules/which": { - "version": "3.0.1", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || 
>=18.0.0" - } - }, "node_modules/snyk-nodejs-lockfile-parser": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/snyk-nodejs-lockfile-parser/-/snyk-nodejs-lockfile-parser-2.6.1.tgz", - "integrity": "sha512-PGeG4XOjjPeCF1H4M2ki6oiWm6RPTHU0bPD0HQE52KT3Zi7Iumr6mLDQFlvs9NEao9wthbsKm0A0k1FHOaNAXw==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/snyk-nodejs-lockfile-parser/-/snyk-nodejs-lockfile-parser-2.7.1.tgz", + "integrity": "sha512-ViG434ZhiWXRtAEXVS2yjkHKz6Yk1lj9FxyMYWjRDZN/VplvrTGhkI/6BISgKotkR9h+QPsmVHiwWdoeVoqRog==", + "license": "Apache-2.0", "dependencies": { "@snyk/dep-graph": "^2.12.0", "@snyk/error-catalog-nodejs-public": "^5.16.0", @@ -19536,23 +20113,24 @@ } }, "node_modules/snyk-nodejs-plugin": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/snyk-nodejs-plugin/-/snyk-nodejs-plugin-1.6.1.tgz", - "integrity": "sha512-X8PTZ0tZwoxIDzu4wMolHA83Gs5KljiUY86k3+sJUo+hKRdtrz70sIaaI5zKNHqq719vb7D7GAsQqTqRmwiK5g==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/snyk-nodejs-plugin/-/snyk-nodejs-plugin-2.0.0.tgz", + "integrity": "sha512-syXL9COUXqIDDLxpjSmQbtAfhnu1NTaSlpe71Uexy7wxc1dvl4ukkdMasS38wAQI/9JUt/pEGYoK5BcwzFL5jw==", + "license": "Apache-2.0", "dependencies": { "@snyk/cli-interface": "^2.13.0", "@snyk/dep-graph": "^2.7.4", "debug": "^4.3.4", - "lodash": "^4.17.21", + "lodash": "^4.18.1", "lodash.groupby": "^4.6.0", "lodash.isempty": "^4.4.0", "lodash.sortby": "^4.7.0", "micromatch": "4.0.8", - "snyk-nodejs-lockfile-parser": "2.6.1", + "snyk-nodejs-lockfile-parser": "2.7.1", "snyk-resolve-deps": "4.8.0" }, "engines": { - "node": "^18" + "node": "^20.18.1 || ^22.13.0 || >=24.0.0" } }, "node_modules/snyk-nodejs-plugin/node_modules/hosted-git-info": { @@ -19845,10 +20423,9 @@ "license": "ISC" }, "node_modules/snyk-python-plugin": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/snyk-python-plugin/-/snyk-python-plugin-3.2.0.tgz", - "integrity": 
"sha512-vLM2sIOOZAiuYhilDbAWX55FwEBfAe8XKNRYfHZIlir5refU2daGu63aq8IXYQOBWyrvmdgf2umZj3uJY1Mt+A==", - "license": "Apache-2.0", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/snyk-python-plugin/-/snyk-python-plugin-3.2.1.tgz", + "integrity": "sha512-OQhwWTKkWQjdQZFRNsfToeP1hJUDnqTnh7xm0xPUQQqaEx0qnQR+0Kda7YiwtyiWU5ZQbCkvcv2wZ1Y7+q28Mw==", "dependencies": { "@snyk/cli-interface": "^2.11.2", "@snyk/dep-graph": "^1.28.1", @@ -20845,6 +21422,7 @@ "version": "7.5.11", "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.11.tgz", "integrity": "sha512-ChjMH33/KetonMTAtpYdgUFr0tbz69Fp2v7zWxQfYZX4g5ZN2nOBXm1R2xyA+lMIKrLKIoKAwFj93jE/avX9cQ==", + "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", @@ -20940,8 +21518,8 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/teex/-/teex-1.0.1.tgz", "integrity": "sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg==", + "devOptional": true, "license": "MIT", - "optional": true, "dependencies": { "streamx": "^2.12.5" } @@ -21701,6 +22279,7 @@ }, "node_modules/typescript": { "version": "4.9.5", + "dev": true, "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", @@ -21736,10 +22315,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/undici-types": { - "version": "5.26.5", - "license": "MIT" - }, "node_modules/unique-filename": { "version": "3.0.0", "dev": true, @@ -21938,6 +22513,35 @@ "node": ">= 0.8" } }, + "node_modules/unzipper": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.12.3.tgz", + "integrity": "sha512-PZ8hTS+AqcGxsaQntl3IRBw65QrBI6lxzqDEL7IAo/XCEqRTKGfOX56Vea5TH9SZczRVxuzk1re04z/YjuYCJA==", + "dev": true, + "license": "MIT", + "dependencies": { + "bluebird": "~3.7.2", + "duplexer2": "~0.1.4", + "fs-extra": "^11.2.0", + "graceful-fs": "^4.2.2", + "node-int64": "^0.4.0" + } + }, + "node_modules/unzipper/node_modules/fs-extra": { + "version": "11.3.4", + 
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, "node_modules/upath": { "version": "2.0.1", "license": "MIT", @@ -22744,6 +23348,7 @@ "packages/cli-alert": { "name": "@snyk/cli-alert", "version": "1.0.0", + "extraneous": true, "license": "Apache-2.0", "dependencies": { "@octokit/rest": "^18.0.5", @@ -22755,90 +23360,6 @@ "@types/node": "^20.11.30" } }, - "packages/cli-alert/node_modules/@slack/webhook": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/@slack/webhook/-/webhook-7.0.6.tgz", - "integrity": "sha512-RvNCcOjNbzl5uQ2TZsbTJ+A+5ptoWMwnyd/W4lKzeXFToIwebeaZiuntcP0usmhZHj1LH9H1T9WN6Bt1B/DLyg==", - "license": "MIT", - "dependencies": { - "@slack/types": "^2.9.0", - "@types/node": ">=18.0.0", - "axios": "^1.11.0" - }, - "engines": { - "node": ">= 18", - "npm": ">= 8.6.0" - } - }, - "packages/cli-alert/node_modules/@slack/webhook/node_modules/axios": { - "version": "1.13.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", - "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", - "license": "MIT", - "dependencies": { - "follow-redirects": "^1.15.11", - "form-data": "^4.0.5", - "proxy-from-env": "^1.1.0" - } - }, - "packages/cli-alert/node_modules/@slack/webhook/node_modules/axios/node_modules/follow-redirects": { - "version": "1.15.11", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", - "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": 
"MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "packages/cli-alert/node_modules/@slack/webhook/node_modules/axios/node_modules/form-data": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", - "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", - "license": "MIT", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "hasown": "^2.0.2", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "packages/cli-alert/node_modules/@slack/webhook/node_modules/axios/node_modules/form-data/node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "packages/cli-alert/node_modules/@types/node": { - "version": "20.11.30", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, "packages/snyk-fix": { "name": "@snyk/fix", "version": "1.0.0-monorepo", diff --git a/package.json b/package.json index d85ffef3ff..6120f657c3 100644 --- a/package.json +++ b/package.json @@ -21,7 +21,7 @@ "snyk": "bin/snyk" }, "engines": { - "node": "^20" + "node": "^22" }, "workspaces": [ "packages/*" @@ -61,7 +61,7 @@ "license": "Apache-2.0", "dependencies": { "@open-policy-agent/opa-wasm": "^1.6.0", - "@sentry/node": "^7.34.0", + "@sentry/node": "^7.120.4", "@snyk/cli-interface": "2.15.0", "@snyk/cloud-config-parser": "^1.14.5", "@snyk/code-client": "^4.23.5", @@ -111,27 +111,27 @@ "ora": "5.4.0", "os-name": "^5.1.0", "p-map": 
"^4.0.0", - "proxy-from-env": "^1.0.0", + "proxy-from-env": "^2.1.0", "rimraf": "^2.6.3", "semver": "^6.0.0", "snyk-config": "^5.0.0", "snyk-cpp-plugin": "^2.24.3", - "snyk-docker-plugin": "9.6.0", + "snyk-docker-plugin": "9.7.0", "snyk-go-plugin": "2.1.1", - "snyk-gradle-plugin": "5.1.1", + "snyk-gradle-plugin": "6.0.0", "snyk-module": "3.1.0", - "snyk-mvn-plugin": "^4.6.1", - "snyk-nodejs-lockfile-parser": "2.6.1", - "snyk-nodejs-plugin": "^1.6.1", + "snyk-mvn-plugin": "^4.7.0", + "snyk-nodejs-lockfile-parser": "2.7.1", + "snyk-nodejs-plugin": "^2.0.0", "snyk-nuget-plugin": "4.2.0", "snyk-php-plugin": "1.12.1", "snyk-policy": "^4.1.6", - "snyk-python-plugin": "^3.2.0", + "snyk-python-plugin": "^3.2.1", "snyk-resolve-deps": "4.10.0", "snyk-sbt-plugin": "3.1.0", "snyk-swiftpm-plugin": "1.4.1", "strip-ansi": "^6.0.1", - "tar": "^7.5.11", + "tar": "^7.5.8", "uuid": "^8.3.2", "wrap-ansi": "^5.1.0" }, @@ -149,7 +149,7 @@ "@types/sinon": "^7.5.0", "@typescript-eslint/eslint-plugin": "^4.30.0", "@typescript-eslint/parser": "^4.30.0", - "@yao-pkg/pkg": "5.16.1", + "@yao-pkg/pkg": "6.19.0", "ajv": "^8.18.0", "ajv-draft-04": "^1.0.0", "body-parser": "^1.19.0", @@ -195,7 +195,8 @@ }, "@types/jest-json-schema@^6.1.5": { "ajv@6.10.2": "8.18.0" - } + }, + "axios": "^1.15.0" }, "repository": { "type": "git", diff --git a/patches/needle+3.3.0.patch b/patches/needle+3.3.0.patch new file mode 100644 index 0000000000..6c97056974 --- /dev/null +++ b/patches/needle+3.3.0.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/needle/lib/needle.js b/node_modules/needle/lib/needle.js +index 543f7e5..e8b2a1d 100644 +--- a/node_modules/needle/lib/needle.js ++++ b/node_modules/needle/lib/needle.js +@@ -559,6 +559,8 @@ Needle.prototype.send_request = function(count, method, uri, config, post_data, + + var redirect_url = utils.resolve_url(headers.location, uri); + debug('Redirecting to ' + redirect_url.toString()); ++ // Consume the redirect response body to prevent socket hang ++ resp.resume(); + 
return self.send_request(++count, method, redirect_url.toString(), config, post_data, out, callback); + } else if (config.follow_max > 0) { + return done(new Error('Max redirects reached. Possible loop in: ' + headers.location)); diff --git a/release-scripts/go.mod b/release-scripts/go.mod new file mode 100644 index 0000000000..1d8fb7edee --- /dev/null +++ b/release-scripts/go.mod @@ -0,0 +1,3 @@ +module github.com/snyk/cli/release-scripts + +go 1.26.2 diff --git a/release-scripts/write-ls-protocol-version.go b/release-scripts/write-ls-protocol-version.go index c0de4cc7c9..ba3a132917 100644 --- a/release-scripts/write-ls-protocol-version.go +++ b/release-scripts/write-ls-protocol-version.go @@ -1,7 +1,9 @@ package main import ( + "encoding/json" "fmt" + "io" "os" "os/exec" "path/filepath" @@ -10,23 +12,121 @@ import ( "strings" ) +const snykLSModulePath = "github.com/snyk/snyk-ls" + +type lsProtocolVersion struct { + version int + commitHash string + source string +} + +var resolveLSProtocolVersionFromCommit = getLSProtocolVersionFromCommit +var resolveCommitHashFromDir = gitCommitHashFromDir + func getGoreleaserYAML(commit string) (int, error) { - installOutput, err := exec.Command("go", "install", "github.com/snyk/snyk-ls@"+commit).CombinedOutput() + resolved, err := getLSProtocolVersion() + if err != nil { + return -3, err + } + return resolved.version, nil +} + +func getLSProtocolVersion() (lsProtocolVersion, error) { + goModPath, err := currentGoModPath() + if err != nil { + return lsProtocolVersion{version: -3}, fmt.Errorf("failed to locate go.mod: %w", err) + } + if goModPath == "" { + return lsProtocolVersion{version: -3}, fmt.Errorf("failed to locate go.mod") + } + + return getLSProtocolVersionWithGoMod(goModPath) +} + +func getGoreleaserYAMLWithGoMod(commit string, goModPath string) (int, error) { + resolved, err := getLSProtocolVersionWithGoMod(goModPath) + if err != nil { + return -3, err + } + return resolved.version, nil +} + +func 
getLSProtocolVersionWithGoMod(goModPath string) (lsProtocolVersion, error) { + dependency, err := snykLSDependency(goModPath) + if err != nil { + return lsProtocolVersion{version: -3}, err + } + if dependency.replacementDir != "" { + goreleaserPath := filepath.Join(dependency.replacementDir, ".goreleaser.yaml") + protocolVersion, err := readLSProtocolVersion(goreleaserPath) + if err != nil { + return lsProtocolVersion{version: -3}, fmt.Errorf("failed to read LS_PROTOCOL_VERSION from .goreleaser.yaml in replaced snyk-ls path %q: %w", goreleaserPath, err) + } + + commitHash, err := resolveCommitHashFromDir(dependency.replacementDir) + if err != nil { + return lsProtocolVersion{version: -3}, fmt.Errorf("failed to determine commit hash from replaced snyk-ls path %q: %w", dependency.replacementDir, err) + } + return lsProtocolVersion{ + version: protocolVersion, + commitHash: commitHash, + source: fmt.Sprintf(".goreleaser.yaml from replaced snyk-ls path %q", goreleaserPath), + }, nil + } + + commitHash, err := commitHashFromModuleVersion(dependency.version) + if err != nil { + return lsProtocolVersion{version: -3}, err + } + + resolved, err := resolveLSProtocolVersionFromCommit(commitHash) + if err != nil { + return lsProtocolVersion{version: -3}, err + } + if resolved.commitHash == "" { + resolved.commitHash = commitHash + } + return resolved, nil +} + +func getGoreleaserYAMLFromCommit(commit string) (int, error) { + resolved, err := getLSProtocolVersionFromCommit(commit) + if err != nil { + return -3, err + } + return resolved.version, nil +} + +func getLSProtocolVersionFromCommit(commit string) (lsProtocolVersion, error) { + installOutput, err := exec.Command("go", "install", snykLSModulePath+"@"+commit).CombinedOutput() if err != nil { - return -3, fmt.Errorf("go install failed: %w: %q", err, string(installOutput)) + return lsProtocolVersion{version: -3}, fmt.Errorf("go install failed: %w: %q", err, string(installOutput)) } modCacheDir, err := goModCache() if err != 
nil { - return -3, fmt.Errorf("failed to locate go module cache: %w", err) + return lsProtocolVersion{version: -3}, fmt.Errorf("failed to locate go module cache: %w", err) } snykLsPkgPaths, err := filepath.Glob(filepath.Join(modCacheDir, "github.com", "snyk", "snyk-ls@v*-"+commit[:12])) if err != nil { - return -3, fmt.Errorf("failed to match snyk-ls: %w", err) + return lsProtocolVersion{version: -3}, fmt.Errorf("failed to match snyk-ls: %w", err) } if len(snykLsPkgPaths) == 0 { - return -3, fmt.Errorf("snyk-ls @ %s not found in module cache; try `go get`?", commit) + return lsProtocolVersion{version: -3}, fmt.Errorf("snyk-ls @ %s not found in module cache; try `go get`?", commit) } - goReleaserContents, err := os.ReadFile(filepath.Join(snykLsPkgPaths[0], ".goreleaser.yaml")) + goreleaserPath := filepath.Join(snykLsPkgPaths[0], ".goreleaser.yaml") + protocolVersion, err := readLSProtocolVersion(goreleaserPath) + if err != nil { + return lsProtocolVersion{version: -3}, err + } + return lsProtocolVersion{ + version: protocolVersion, + commitHash: commit, + source: fmt.Sprintf("commit-hash-based resolution from %s@%s", snykLSModulePath, commit), + }, nil +} + +func readLSProtocolVersion(goreleaserPath string) (int, error) { + goReleaserContents, err := os.ReadFile(goreleaserPath) if err != nil { return -3, fmt.Errorf("failed to read goreleaser file: %w", err) } @@ -44,6 +144,128 @@ func getGoreleaserYAML(commit string) (int, error) { return protocolVersion, nil } +func currentGoModPath() (string, error) { + goModPath, ok, err := findGoModFromWorkingDir() + if err != nil { + return "", err + } + if ok { + return goModPath, nil + } + + stdout, err := exec.Command("go", "env", "GOMOD").Output() + if err != nil { + return "", err + } + + goModPath = strings.TrimSpace(string(stdout)) + if goModPath == "" || goModPath == os.DevNull { + return "", nil + } + return goModPath, nil +} + +func findGoModFromWorkingDir() (string, bool, error) { + dir, err := os.Getwd() + if err != 
nil { + return "", false, err + } + + for { + goModPath := filepath.Join(dir, "go.mod") + if _, err := os.Stat(goModPath); err == nil { + return goModPath, true, nil + } else if !os.IsNotExist(err) { + return "", false, err + } + + parent := filepath.Dir(dir) + if parent == dir { + return "", false, nil + } + dir = parent + } +} + +type moduleVersion struct { + Path string + Version string +} + +type replaceDirective struct { + Old moduleVersion + New moduleVersion +} + +type goMod struct { + Require []moduleVersion + Replace []replaceDirective +} + +type snykLSModule struct { + version string + replacementDir string +} + +func parseGoMod(goModPath string) (goMod, error) { + output, err := exec.Command("go", "mod", "edit", "-json", goModPath).CombinedOutput() + if err != nil { + return goMod{}, fmt.Errorf("failed to inspect go.mod: %w: %q", err, string(output)) + } + + var parsed goMod + if err := json.Unmarshal(output, &parsed); err != nil { + return goMod{}, fmt.Errorf("failed to parse go.mod: %w", err) + } + + return parsed, nil +} + +func snykLSDependency(goModPath string) (snykLSModule, error) { + parsed, err := parseGoMod(goModPath) + if err != nil { + return snykLSModule{}, err + } + + var dependency snykLSModule + for _, required := range parsed.Require { + if required.Path == snykLSModulePath { + dependency.version = required.Version + break + } + } + if dependency.version == "" { + return snykLSModule{}, fmt.Errorf("%s dependency not found in %s", snykLSModulePath, goModPath) + } + + for _, replacement := range parsed.Replace { + if replacement.Old.Path != snykLSModulePath || replacement.New.Version != "" { + continue + } + + replacementPath := replacement.New.Path + if filepath.IsAbs(replacementPath) { + dependency.replacementDir = filepath.Clean(replacementPath) + return dependency, nil + } + dependency.replacementDir = filepath.Clean(filepath.Join(filepath.Dir(goModPath), replacementPath)) + return dependency, nil + } + + return dependency, nil +} + 
+func snykLSReplacementDir(goModPath string) (string, bool, error) { + dependency, err := snykLSDependency(goModPath) + if err != nil { + return "", false, err + } + if dependency.replacementDir == "" { + return "", false, nil + } + return dependency.replacementDir, true, nil +} + func goModCache() (string, error) { stdout, err := exec.Command("go", "env", "GOMODCACHE").Output() if err != nil { @@ -62,25 +284,60 @@ func extractLSProtocolVersion(yamlContent []byte) string { return "" } -func main() { - if len(os.Args) != 4 { - fmt.Println("Usage: go run script.go ") - os.Exit(1) +func commitHashFromModuleVersion(version string) (string, error) { + parts := strings.Split(version, "-") + if len(parts) >= 3 { + return parts[len(parts)-1], nil + } + if version != "" { + return version, nil } + return "", fmt.Errorf("failed to determine snyk-ls commit hash from empty module version") +} - commitHash := os.Args[1] - version := os.Args[2] - outputDir := os.Args[3] +func gitCommitHashFromDir(dir string) (string, error) { + output, err := exec.Command("git", "-C", dir, "rev-parse", "--short=12", "HEAD").CombinedOutput() + if err != nil { + return "", fmt.Errorf("git rev-parse failed: %w: %q", err, string(output)) + } + return strings.TrimSpace(string(output)), nil +} - lsProtocolVersion, err := getGoreleaserYAML(commitHash) - if err != nil || lsProtocolVersion < 0 { - fmt.Printf("Failed to retrieve LS_PROTOCOL_VERSION: %v\n", err) - os.Exit(1) +func writeLSProtocolVersionMetadata(version string, outputDir string, commitHashFile string, logWriter io.Writer) (int, error) { + resolved, err := getLSProtocolVersion() + if err != nil || resolved.version < 0 { + return -1, err } - filePath := filepath.Join(outputDir, fmt.Sprintf("ls-protocol-version-%d", lsProtocolVersion)) + if logWriter != nil { + fmt.Fprintf(logWriter, "-- Resolved LS protocol version %d using %s\n", resolved.version, resolved.source) + fmt.Fprintf(logWriter, "-- Resolved LS commit hash %s\n", 
resolved.commitHash) + } + + filePath := filepath.Join(outputDir, fmt.Sprintf("ls-protocol-version-%d", resolved.version)) if err := os.WriteFile(filePath, []byte(version), 0644); err != nil { - fmt.Printf("Failed to write to file: %v\n", err) + return -1, fmt.Errorf("failed to write to file: %w", err) + } + if err := os.WriteFile(commitHashFile, []byte(resolved.commitHash), 0644); err != nil { + return -1, fmt.Errorf("failed to write commit hash file: %w", err) + } + + return resolved.version, nil +} + +func main() { + if len(os.Args) != 4 { + fmt.Println("Usage: go run script.go ") + os.Exit(1) + } + + version := os.Args[1] + outputDir := os.Args[2] + commitHashFile := os.Args[3] + + lsProtocolVersion, err := writeLSProtocolVersionMetadata(version, outputDir, commitHashFile, os.Stderr) + if err != nil { + fmt.Fprintf(os.Stderr, "Failed to retrieve LS_PROTOCOL_VERSION: %v\n", err) os.Exit(1) } diff --git a/release-scripts/write-ls-protocol-version_test.go b/release-scripts/write-ls-protocol-version_test.go new file mode 100644 index 0000000000..58bad2ba9a --- /dev/null +++ b/release-scripts/write-ls-protocol-version_test.go @@ -0,0 +1,652 @@ +package main + +import ( + "bytes" + "os" + "os/exec" + "path/filepath" + "runtime" + "strings" + "testing" +) + +func TestGetGoreleaserYAMLUsesLocalSnykLSReplace(t *testing.T) { + tempDir := t.TempDir() + moduleDir := filepath.Join(tempDir, "cliv2") + replacedDir := filepath.Join(tempDir, "snyk-ls") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(replacedDir, 0755); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 + +replace github.com/snyk/snyk-ls => ../snyk-ls +`) + goModPath := filepath.Join(moduleDir, "go.mod") + if err := os.WriteFile(goModPath, goMod, 0644); err != nil { + t.Fatal(err) + } + + goreleaserYAML := []byte(`env: + - LS_PROTOCOL_VERSION=123 
+`) + if err := os.WriteFile(filepath.Join(replacedDir, ".goreleaser.yaml"), goreleaserYAML, 0644); err != nil { + t.Fatal(err) + } + + originalReplacementResolver := resolveCommitHashFromDir + t.Cleanup(func() { + resolveCommitHashFromDir = originalReplacementResolver + }) + resolveCommitHashFromDir = func(dir string) (string, error) { + return "replacement123", nil + } + + protocolVersion, err := getGoreleaserYAMLWithGoMod("commit-is-ignored", goModPath) + if err != nil { + t.Fatalf("expected protocol version from replacement: %v", err) + } + if protocolVersion != 123 { + t.Fatalf("expected protocol version 123, got %d", protocolVersion) + } +} + +func TestWriteLSProtocolVersionMetadataUsesReplacementAndLogsSource(t *testing.T) { + tempDir := t.TempDir() + moduleDir := filepath.Join(tempDir, "cliv2") + replacedDir := filepath.Join(tempDir, "snyk-ls") + outputDir := filepath.Join(tempDir, "output") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(replacedDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(outputDir, 0755); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 + +replace github.com/snyk/snyk-ls => ../snyk-ls +`) + if err := os.WriteFile(filepath.Join(moduleDir, "go.mod"), goMod, 0644); err != nil { + t.Fatal(err) + } + goreleaserYAML := []byte(`env: + - LS_PROTOCOL_VERSION=456 +`) + if err := os.WriteFile(filepath.Join(replacedDir, ".goreleaser.yaml"), goreleaserYAML, 0644); err != nil { + t.Fatal(err) + } + + t.Chdir(moduleDir) + var log bytes.Buffer + commitHashFile := filepath.Join(tempDir, "ls-commit-hash") + + originalReplacementResolver := resolveCommitHashFromDir + t.Cleanup(func() { + resolveCommitHashFromDir = originalReplacementResolver + }) + resolveCommitHashFromDir = func(dir string) (string, error) { + if dir != replacedDir { + t.Fatalf("expected replacement 
commit hash to be resolved from %q, got %q", replacedDir, dir) + } + return "replacement123", nil + } + + protocolVersion, err := writeLSProtocolVersionMetadata("9.9.9", outputDir, commitHashFile, &log) + if err != nil { + t.Fatalf("expected metadata to be written from replacement: %v", err) + } + if protocolVersion != 456 { + t.Fatalf("expected protocol version 456, got %d", protocolVersion) + } + + outputFile := filepath.Join(outputDir, "ls-protocol-version-456") + contents, err := os.ReadFile(outputFile) + if err != nil { + t.Fatalf("expected output file to be written: %v", err) + } + if string(contents) != "9.9.9" { + t.Fatalf("expected output file to contain CLI version, got %q", string(contents)) + } + commitHash, err := os.ReadFile(commitHashFile) + if err != nil { + t.Fatalf("expected commit hash file to be written: %v", err) + } + if string(commitHash) != "replacement123" { + t.Fatalf("expected replacement commit hash, got %q", commitHash) + } + + logOutput := log.String() + if !strings.Contains(logOutput, "replaced snyk-ls path") || !strings.Contains(logOutput, filepath.Join(replacedDir, ".goreleaser.yaml")) { + t.Fatalf("expected log to identify replacement source, got %q", logOutput) + } +} + +func TestReplacementGoreleaserFailureDoesNotFallBackToCommitResolution(t *testing.T) { + tempDir := t.TempDir() + moduleDir := filepath.Join(tempDir, "cliv2") + replacedDir := filepath.Join(tempDir, "snyk-ls") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(replacedDir, 0755); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 + +replace github.com/snyk/snyk-ls => ../snyk-ls +`) + goModPath := filepath.Join(moduleDir, "go.mod") + if err := os.WriteFile(goModPath, goMod, 0644); err != nil { + t.Fatal(err) + } + + _, err := getGoreleaserYAMLWithGoMod("commit-should-not-be-used", goModPath) + if err == 
nil { + t.Fatal("expected missing replacement .goreleaser.yaml to fail") + } + + errorMessage := err.Error() + if !strings.Contains(errorMessage, "replaced snyk-ls path") || !strings.Contains(errorMessage, ".goreleaser.yaml") { + t.Fatalf("expected replacement-specific goreleaser error, got %q", errorMessage) + } + if strings.Contains(errorMessage, "go install") || strings.Contains(errorMessage, "commit-should-not-be-used") { + t.Fatalf("expected no commit-hash fallback, got %q", errorMessage) + } +} + +func TestWriteLSProtocolVersionMetadataFallsBackToCommitResolutionWhenNoReplaceExists(t *testing.T) { + tempDir := t.TempDir() + moduleDir := filepath.Join(tempDir, "cliv2") + outputDir := filepath.Join(tempDir, "output") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(outputDir, 0755); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-abcdef123456 +`) + if err := os.WriteFile(filepath.Join(moduleDir, "go.mod"), goMod, 0644); err != nil { + t.Fatal(err) + } + + originalResolver := resolveLSProtocolVersionFromCommit + t.Cleanup(func() { + resolveLSProtocolVersionFromCommit = originalResolver + }) + + var resolvedCommit string + resolveLSProtocolVersionFromCommit = func(commit string) (lsProtocolVersion, error) { + resolvedCommit = commit + return lsProtocolVersion{ + version: 789, + source: "commit-hash-based resolution from test", + }, nil + } + + t.Chdir(moduleDir) + var log bytes.Buffer + + commitHashFile := filepath.Join(tempDir, "ls-commit-hash") + protocolVersion, err := writeLSProtocolVersionMetadata("8.8.8", outputDir, commitHashFile, &log) + if err != nil { + t.Fatalf("expected metadata to be written from commit fallback: %v", err) + } + if protocolVersion != 789 { + t.Fatalf("expected protocol version 789, got %d", protocolVersion) + } + if resolvedCommit != "abcdef123456" { + t.Fatalf("expected commit 
resolver to use commit hash, got %q", resolvedCommit) + } + commitHash, err := os.ReadFile(commitHashFile) + if err != nil { + t.Fatalf("expected commit hash file to be written: %v", err) + } + if string(commitHash) != "abcdef123456" { + t.Fatalf("expected commit hash file to contain dependency commit, got %q", commitHash) + } + + outputFile := filepath.Join(outputDir, "ls-protocol-version-789") + contents, err := os.ReadFile(outputFile) + if err != nil { + t.Fatalf("expected output file to be written: %v", err) + } + if string(contents) != "8.8.8" { + t.Fatalf("expected output file to contain CLI version, got %q", string(contents)) + } + + if !strings.Contains(log.String(), "commit-hash-based resolution") { + t.Fatalf("expected log to identify commit source, got %q", log.String()) + } +} + +func TestSnykLSReplacementDirReturnsFalseWhenNoReplaceExists(t *testing.T) { + tempDir := t.TempDir() + goModPath := filepath.Join(tempDir, "go.mod") + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 +`) + if err := os.WriteFile(goModPath, goMod, 0644); err != nil { + t.Fatal(err) + } + + _, ok, err := snykLSReplacementDir(goModPath) + if err != nil { + t.Fatalf("expected no error when replace is absent: %v", err) + } + if ok { + t.Fatal("expected no replacement to be found") + } +} + +func TestCurrentGoModPathFindsGoModFromWorkingDirectory(t *testing.T) { + tempDir := t.TempDir() + moduleDir := filepath.Join(tempDir, "cliv2") + nestedDir := filepath.Join(moduleDir, "nested") + if err := os.MkdirAll(nestedDir, 0755); err != nil { + t.Fatal(err) + } + + goModPath := filepath.Join(moduleDir, "go.mod") + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 +`) + if err := os.WriteFile(goModPath, goMod, 0644); err != nil { + t.Fatal(err) + } + + t.Chdir(nestedDir) + + actual, err := currentGoModPath() + if err != nil { + t.Fatalf("expected go.mod path from working directory: %v", err) + } + 
if actual != goModPath { + t.Fatalf("expected %q, got %q", goModPath, actual) + } +} + +func TestGenerateLSProtocolMetadataFailsWhenReplacementGoreleaserIsMissing(t *testing.T) { + tempDir := t.TempDir() + rootDir := filepath.Join(tempDir, "cli") + moduleDir := filepath.Join(rootDir, "cliv2") + releaseScriptsDir := filepath.Join(rootDir, "release-scripts") + binDir := filepath.Join(tempDir, "bin") + replacedDir := filepath.Join(rootDir, "snyk-ls") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(releaseScriptsDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(binDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(replacedDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(binDir, "version"), []byte("9.9.9"), 0644); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 + +replace github.com/snyk/snyk-ls => ../snyk-ls +`) + if err := os.WriteFile(filepath.Join(moduleDir, "go.mod"), goMod, 0644); err != nil { + t.Fatal(err) + } + copyReleaseScriptForMakefileTest(t, releaseScriptsDir) + + cmd := exec.Command("make", "-f", repoPath(t, "cliv2", "Makefile"), "-C", moduleDir, "generate-ls-protocol-metadata", "bindir="+binDir) + output, err := cmd.CombinedOutput() + if err == nil { + t.Fatalf("expected make target to fail when replacement .goreleaser.yaml is missing; output:\n%s", output) + } + + outputString := string(output) + if !strings.Contains(outputString, "replaced snyk-ls path") || !strings.Contains(outputString, ".goreleaser.yaml") { + t.Fatalf("expected replacement-specific goreleaser error, got:\n%s", outputString) + } + if strings.Contains(outputString, "LS protocol version: Failed") { + t.Fatalf("expected failure before printing an invalid protocol version, got:\n%s", outputString) + } +} + +func 
TestGenerateLSProtocolMetadataPrintsNumericProtocolVersion(t *testing.T) { + tempDir := t.TempDir() + rootDir := filepath.Join(tempDir, "cli") + moduleDir := filepath.Join(rootDir, "cliv2") + releaseScriptsDir := filepath.Join(rootDir, "release-scripts") + binDir := filepath.Join(tempDir, "bin") + replacedDir := filepath.Join(rootDir, "snyk-ls") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(releaseScriptsDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(binDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(replacedDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(binDir, "version"), []byte("9.9.9"), 0644); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 + +replace github.com/snyk/snyk-ls => ../snyk-ls +`) + if err := os.WriteFile(filepath.Join(moduleDir, "go.mod"), goMod, 0644); err != nil { + t.Fatal(err) + } + goreleaserYAML := []byte(`env: + - LS_PROTOCOL_VERSION=321 +`) + if err := os.WriteFile(filepath.Join(replacedDir, ".goreleaser.yaml"), goreleaserYAML, 0644); err != nil { + t.Fatal(err) + } + expectedCommitHash := initGitRepo(t, replacedDir) + copyReleaseScriptForMakefileTest(t, releaseScriptsDir) + + cmd := exec.Command("make", "-f", repoPath(t, "cliv2", "Makefile"), "-C", moduleDir, "generate-ls-protocol-metadata", "bindir="+binDir) + output, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("expected make target to succeed: %v\n%s", err, output) + } + + outputString := string(output) + if !strings.Contains(outputString, "LS protocol version: 321") { + t.Fatalf("expected numeric LS protocol version in output, got:\n%s", outputString) + } + + metadata, err := os.ReadFile(filepath.Join(binDir, "ls-protocol-version-321")) + if err != nil { + t.Fatalf("expected protocol metadata file to be written: %v", err) + } + 
if string(metadata) != "9.9.9" { + t.Fatalf("expected metadata file to contain CLI version, got %q", metadata) + } + commitHash, err := os.ReadFile(filepath.Join(moduleDir, "_cache", "ls-commit-hash")) + if err != nil { + t.Fatalf("expected LS commit hash cache file to be written: %v", err) + } + if string(commitHash) != expectedCommitHash { + t.Fatalf("expected LS commit hash %q, got %q", expectedCommitHash, commitHash) + } +} + +func TestGenerateLSProtocolMetadataUsesHostGoPlatformForAlpineTarget(t *testing.T) { + tempDir := t.TempDir() + rootDir := filepath.Join(tempDir, "cli") + moduleDir := filepath.Join(rootDir, "cliv2") + releaseScriptsDir := filepath.Join(rootDir, "release-scripts") + binDir := filepath.Join(tempDir, "bin") + replacedDir := filepath.Join(rootDir, "snyk-ls") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(releaseScriptsDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(binDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(replacedDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(binDir, "version"), []byte("9.9.9"), 0644); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 + +replace github.com/snyk/snyk-ls => ../snyk-ls +`) + if err := os.WriteFile(filepath.Join(moduleDir, "go.mod"), goMod, 0644); err != nil { + t.Fatal(err) + } + goreleaserYAML := []byte(`env: + - LS_PROTOCOL_VERSION=432 +`) + if err := os.WriteFile(filepath.Join(replacedDir, ".goreleaser.yaml"), goreleaserYAML, 0644); err != nil { + t.Fatal(err) + } + expectedCommitHash := initGitRepo(t, replacedDir) + copyReleaseScriptForMakefileTest(t, releaseScriptsDir) + + cmd := exec.Command( + "make", + "-f", repoPath(t, "cliv2", "Makefile"), + "-C", moduleDir, + "generate-ls-protocol-metadata", + "bindir="+binDir, + "GOOS=alpine", + "GOARCH=arm64", + 
"GOHOSTOS="+runtime.GOOS, + "GOHOSTARCH="+runtime.GOARCH, + ) + output, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("expected metadata generation to use host GOOS/GOARCH for alpine target: %v\n%s", err, output) + } + + outputString := string(output) + if strings.Contains(outputString, "unsupported GOOS/GOARCH pair alpine/arm64") { + t.Fatalf("expected host platform go run, got target platform failure:\n%s", outputString) + } + if !strings.Contains(outputString, "LS protocol version: 432") { + t.Fatalf("expected LS protocol version in output, got:\n%s", outputString) + } + if !strings.Contains(outputString, "LS commit hash: "+expectedCommitHash) { + t.Fatalf("expected LS commit hash in output, got:\n%s", outputString) + } +} + +func TestBuildRecipeUsesGeneratedLSMetadataInLdflags(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("test uses a POSIX fake go wrapper; script behavior remains covered by unit tests") + } + + tempDir := t.TempDir() + rootDir := filepath.Join(tempDir, "cli") + moduleDir := filepath.Join(rootDir, "cliv2") + releaseScriptsDir := filepath.Join(rootDir, "release-scripts") + binDir := filepath.Join(tempDir, "bin") + replacedDir := filepath.Join(rootDir, "snyk-ls") + fakeGoPath := filepath.Join(tempDir, "fake-go") + fakeGoBuildLog := filepath.Join(tempDir, "fake-go-build.log") + + if err := os.MkdirAll(moduleDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(releaseScriptsDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(binDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(replacedDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(binDir, "version"), []byte("9.9.9"), 0644); err != nil { + t.Fatal(err) + } + + goMod := []byte(`module github.com/snyk/cli/cliv2 + +go 1.26 + +require github.com/snyk/snyk-ls v0.0.0-20260414093345-2a6d7434eb91 + +replace github.com/snyk/snyk-ls => ../snyk-ls +`) + if err := os.WriteFile(filepath.Join(moduleDir, 
"go.mod"), goMod, 0644); err != nil { + t.Fatal(err) + } + goreleaserYAML := []byte(`env: + - LS_PROTOCOL_VERSION=654 +`) + if err := os.WriteFile(filepath.Join(replacedDir, ".goreleaser.yaml"), goreleaserYAML, 0644); err != nil { + t.Fatal(err) + } + expectedCommitHash := initGitRepo(t, replacedDir) + copyReleaseScriptForMakefileTest(t, releaseScriptsDir) + writeFakeGo(t, fakeGoPath, fakeGoBuildLog) + makeModuleDir, err := filepath.EvalSymlinks(moduleDir) + if err != nil { + t.Fatal(err) + } + + cmd := exec.Command( + "make", + "-f", repoPath(t, "cliv2", "Makefile"), + "-C", makeModuleDir, + filepath.Join(makeModuleDir, "_bin", "snyk_darwin_arm64"), + "GOOS=darwin", + "GOARCH=arm64", + "GOCMD="+fakeGoPath, + "bindir="+binDir, + ) + output, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("expected build recipe to succeed with fake go: %v\n%s", err, output) + } + + outputString := string(output) + if !strings.Contains(outputString, "Version="+expectedCommitHash) { + t.Fatalf("expected LS commit hash in EXTRA_FLAGS, got:\n%s", outputString) + } + if !strings.Contains(outputString, "LsProtocolVersion=654") { + t.Fatalf("expected LS protocol version in EXTRA_FLAGS, got:\n%s", outputString) + } + + buildArgs, err := os.ReadFile(fakeGoBuildLog) + if err != nil { + t.Fatalf("expected fake go build to be called: %v", err) + } + if !strings.Contains(string(buildArgs), "Version="+expectedCommitHash) || !strings.Contains(string(buildArgs), "LsProtocolVersion=654") { + t.Fatalf("expected generated LS metadata in go build args, got:\n%s", buildArgs) + } +} + +func copyReleaseScriptForMakefileTest(t *testing.T, releaseScriptsDir string) { + t.Helper() + + contents, err := os.ReadFile(repoPath(t, "release-scripts", "write-ls-protocol-version.go")) + if err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(releaseScriptsDir, "write-ls-protocol-version.go"), contents, 0644); err != nil { + t.Fatal(err) + } +} + +func writeFakeGo(t *testing.T, fakeGoPath 
string, fakeGoBuildLog string) { + t.Helper() + + realGo, err := exec.LookPath("go") + if err != nil { + t.Fatal(err) + } + + script := "#!/bin/sh\n" + + "if [ \"$1\" = \"run\" ]; then exec " + realGo + " \"$@\"; fi\n" + + "if [ \"$1\" = \"build\" ]; then printf '%s\\n' \"$*\" > \"" + fakeGoBuildLog + "\"; exit 0; fi\n" + + "exec " + realGo + " \"$@\"\n" + if err := os.WriteFile(fakeGoPath, []byte(script), 0755); err != nil { + t.Fatal(err) + } +} + +func initGitRepo(t *testing.T, dir string) string { + t.Helper() + + runGit(t, dir, "init") + if err := os.WriteFile(filepath.Join(dir, "README.md"), []byte("snyk-ls test repo"), 0644); err != nil { + t.Fatal(err) + } + runGit(t, dir, "add", "README.md") + runGit(t, dir, "-c", "user.email=test@example.com", "-c", "user.name=Test User", "commit", "-m", "initial") + + cmd := exec.Command("git", "-C", dir, "rev-parse", "--short=12", "HEAD") + output, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("failed to read git commit hash: %v\n%s", err, output) + } + return strings.TrimSpace(string(output)) +} + +func runGit(t *testing.T, dir string, args ...string) { + t.Helper() + + cmdArgs := append([]string{"-C", dir}, args...) + cmd := exec.Command("git", cmdArgs...) + output, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("git %s failed: %v\n%s", strings.Join(args, " "), err, output) + } +} + +func repoPath(t *testing.T, elements ...string) string { + t.Helper() + + _, currentFile, _, ok := runtime.Caller(0) + if !ok { + t.Fatal("failed to locate current test file") + } + repoRoot := filepath.Dir(filepath.Dir(currentFile)) + pathElements := append([]string{repoRoot}, elements...) + return filepath.Join(pathElements...) 
+} diff --git a/scripts/install-snyk.py b/scripts/install-snyk.py index e04635c1e1..a20676e949 100755 --- a/scripts/install-snyk.py +++ b/scripts/install-snyk.py @@ -42,13 +42,18 @@ def get_os_arch(): return None, None -def download_snyk_cli(download_version, base_url): +def download_snyk_cli(download_version, base_url, os_override, arch_override): success = 0 fail = 1 abort = 2 os_type, arch_type = get_os_arch() + if os_override: + os_type = os_override + if arch_override: + arch_type = arch_override + if not os_type or not arch_type: return abort @@ -108,7 +113,7 @@ def download_snyk_cli(download_version, base_url): # will try to download via the base_url 'retries' amount of times -def download_with_retry(retries, base_url): +def download_with_retry(retries, base_url, os_type, arch_type): for retry in range(1, retries + 1): print( "Trying to download version " @@ -118,7 +123,7 @@ def download_with_retry(retries, base_url): + " of #" + str(retries) ) - download_status = download_snyk_cli(args.version, base_url) + download_status = download_snyk_cli(args.version, base_url, os_type, arch_type) # download failed - retry if download_status == 1: @@ -136,6 +141,15 @@ def download_with_retry(retries, base_url): return download_status +def is_alpine(): + try: + with open("/etc/os-release", "r") as f: + content = f.read().lower() + return "id=alpine" in content + except Exception: + return False + + def get_filename(arch_type, os_type): filename = "" output_filename = "snyk" @@ -143,13 +157,11 @@ def get_filename(arch_type, os_type): if os_type == "linux" and arch_type == "arm64": filename = "snyk-linux-arm64" - stat_result = os.path.exists("/lib/ld-musl-aarch64.so.1") - if stat_result: + if is_alpine(): filename = "snyk-alpine-arm64" if os_type == "linux" and arch_type == "amd64": filename = "snyk-linux" - stat_result = os.path.exists("/lib/ld-musl-x86_64.so.1") - if stat_result: + if is_alpine(): filename = "snyk-alpine" if os_type == "windows" and arch_type == "amd64": 
filename = "snyk-win" @@ -186,6 +198,12 @@ def verify_checksum(file_path, expected_checksum): parser.add_argument( "--base_url", help="Base URL to download from", default=primary_url ) + parser.add_argument( + "--arch", help="Explicitly specify the architecture to download", default=None + ) + parser.add_argument( + "--os", help="Explicitly specify the OS to download", default=None + ) parser.add_argument("--retry", help="number of retries", default=3) args = parser.parse_args() @@ -199,6 +217,6 @@ def verify_checksum(file_path, expected_checksum): # retry 'args.retry' times before iterating to the next URL # aborted downloads will iterate to the next URL without retrying for url in urls: - download_status = download_with_retry(args.retry, url) + download_status = download_with_retry(args.retry, url, args.os, args.arch) if download_status == 0: break diff --git a/scripts/windows/install-node.ps1 b/scripts/windows/install-node.ps1 index d567d19ac6..eb6a348730 100644 --- a/scripts/windows/install-node.ps1 +++ b/scripts/windows/install-node.ps1 @@ -13,10 +13,7 @@ if (Test-Path $envScript) { } try { - $expectedNodeVersion = '20.11.1' - $expectedSha256 = 'c54f5f7e2416e826fd84e878f28e3b53363ae9c3f60a140af4434b2453b5ae89' - - # Resolve repo root from script location (script is in .circleci/windows) + # Resolve repo root from script location (script is in scripts/windows) $repoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..') $nvmrcPath = Join-Path $repoRoot '.nvmrc' @@ -24,42 +21,33 @@ try { throw ".nvmrc not found at $nvmrcPath" } - $nvmVersion = (Get-Content $nvmrcPath -Raw).Trim() - if ([string]::IsNullOrWhiteSpace($nvmVersion)) { + $nodeVersion = (Get-Content $nvmrcPath -Raw).Trim().TrimStart('v') + if ([string]::IsNullOrWhiteSpace($nodeVersion)) { throw ".nvmrc is empty at $nvmrcPath" } - if ($nvmVersion -ne $expectedNodeVersion) { - throw ".nvmrc version '$nvmVersion' does not match expected Node.js version '$expectedNodeVersion' used by Windows CI." 
+ # nvm-windows is the only supported installer here. It manages + # C:\Program Files\nodejs as a junction; using a parallel MSI install on the + # same machine leaves stale npm internals. + $nvmExe = Get-Command nvm -ErrorAction SilentlyContinue + if (-not $nvmExe) { + throw "nvm-windows is required but was not found on PATH. Ensure the runner image provides nvm-windows." } - $nodeVersion = $expectedNodeVersion - - $msiPath = Join-Path $cacheDir "node-v$nodeVersion-x64.msi" - - if (-not (Test-Path $cacheDir)) { - New-Item -ItemType Directory -Path $cacheDir | Out-Null - } + Write-Host "nvm-windows: $($nvmExe.Source)" + Write-Host "NVM_HOME: $env:NVM_HOME" + Write-Host "NVM_SYMLINK: $env:NVM_SYMLINK" - if (-not (Test-Path $msiPath)) { - Write-Host "Downloading Node.js v$nodeVersion (x64 MSI)..." - $url = "https://nodejs.org/dist/v$nodeVersion/node-v$nodeVersion-x64.msi" - curl.exe -L $url -o $msiPath + $nvmList = & nvm list 2>&1 | Out-String + if ($nvmList -notmatch ('\b' + [regex]::Escape($nodeVersion) + '\b')) { + Write-Host "[nvm-cache] MISS: Node.js v$nodeVersion not present; installing via nvm..." + & nvm install $nodeVersion + } else { + Write-Host "[nvm-cache] HIT: Node.js v$nodeVersion already installed in nvm; skipping download." } - Write-Host 'Verifying Node.js installer checksum...' - $hash = Get-FileHash -Path $msiPath -Algorithm SHA256 - if ($hash.Hash.ToLower() -ne $expectedSha256.ToLower()) { - throw "Checksum verification failed for $msiPath. Expected $expectedSha256 but got $($hash.Hash.ToLower())." - } - - Write-Host "Installing Node.js v$nodeVersion..." - $msiArgs = "/i `"$msiPath`" /qn /norestart" - $process = Start-Process -FilePath msiexec.exe -ArgumentList $msiArgs -PassThru - $process.WaitForExit() - if ($process.ExitCode -ne 0) { - throw "Node.js MSI installer exited with code $($process.ExitCode)." - } + Write-Host "Activating Node.js v$nodeVersion via nvm..." 
+ & nvm use $nodeVersion # Verify installation using the known default installation path $nodeExe = "C:\Program Files\nodejs\node.exe" @@ -72,44 +60,10 @@ try { throw "Installed Node.js version '$reportedVersion' does not match expected 'v$nodeVersion'." } - Write-Host "Node.js $reportedVersion installed successfully at $nodeExe" + Write-Host "Node.js $reportedVersion installed and active at $nodeExe" - try { - $nodeDir = Split-Path $nodeExe -Parent - $envScript = Join-Path $cacheDir $cacheFileName - if (-not (Test-Path $envScript)) { - New-Item -Path $envScript -ItemType File -Force | Out-Null - } - - $pathUpdateLine = '$Env:Path = "' + $nodeDir + ';" + $Env:Path' - $existing = Get-Content -Path $envScript -ErrorAction SilentlyContinue - if (-not $existing -or -not ($existing -contains $pathUpdateLine)) { - $pathUpdateLine | Out-File -FilePath $envScript -Append -Encoding UTF8 - } - } - catch { - Write-Host "Warning: failed to persist Node.js PATH update to env script: $($_.Exception.Message)" - } - - # Also create a bash-compatible version for non-PowerShell envs - try { - $bashEnvScript = Join-Path $cacheDir $bashCacheFileName - if (-not (Test-Path $bashEnvScript)) { - New-Item -Path $bashEnvScript -ItemType File -Force | Out-Null - } - $bashPath = $nodeDir.Replace('\', '/').Replace('C:', '/c') - $bashUpdateLine = 'export PATH="' + $bashPath + ':$PATH"' - $bashExisting = Get-Content -Path $bashEnvScript -ErrorAction SilentlyContinue - if (-not $bashExisting -or -not ($bashExisting -contains $bashUpdateLine)) { - $bashUpdateLine | Out-File -FilePath $bashEnvScript -Append -Encoding UTF8 - } - } - catch { - Write-Host "Warning: failed to persist Node.js PATH update to bash env script: $($_.Exception.Message)" - } } catch { Write-Error "Failed to install Node.js: $($_.Exception.Message)" exit 1 } - diff --git a/src/cli/args.ts b/src/cli/args.ts index 49816d776e..61718394a4 100644 --- a/src/cli/args.ts +++ b/src/cli/args.ts @@ -219,6 +219,7 @@ export function 
args(rawArgv: string[]): Args { 'include-provenance', 'fingerprint-algorithm', 'detection-depth', + 'exclude-paths', 'init-script', 'integration-name', 'integration-version', diff --git a/src/cli/commands/monitor/index.ts b/src/cli/commands/monitor/index.ts index effdcf9659..70e6fbbc35 100644 --- a/src/cli/commands/monitor/index.ts +++ b/src/cli/commands/monitor/index.ts @@ -219,7 +219,8 @@ export default async function monitor(...args0: MethodArgs): Promise { const verboseEnabled = args.includes('-Dverbose') || args.includes('-Dverbose=true') || - !!options['print-graph']; + !!options['print-graph'] || + !!options['print-output-jsonl-with-errors']; if (verboseEnabled) { enableMavenDverboseExhaustiveDeps = (await hasFeatureFlag( MAVEN_DVERBOSE_EXHAUSTIVE_DEPS_FF, diff --git a/src/cli/main.ts b/src/cli/main.ts index b36292e677..dbec95ca3e 100755 --- a/src/cli/main.ts +++ b/src/cli/main.ts @@ -32,6 +32,7 @@ import { import { IaCErrorCodes } from './commands/test/iac/local-execution/types'; import stripAnsi = require('strip-ansi'); import { ExcludeFlagInvalidInputError } from '../lib/errors/exclude-flag-invalid-input'; +import { ExcludePathsFlagInvalidInputError } from '../lib/errors/exclude-paths-flag-invalid-input'; import { modeValidation } from './modes'; import { JsonFileOutputBadInputError } from '../lib/errors/json-file-output-bad-input-error'; import { @@ -446,6 +447,18 @@ function validateUnsupportedOptionCombinations( throw new ExcludeFlagInvalidInputError(); } } + + if (options.excludePaths) { + if (!(options.allProjects || options.yarnWorkspaces)) { + throw new MissingOptionError('--exclude-paths', [ + '--yarn-workspaces', + '--all-projects', + ]); + } + if (typeof options.excludePaths !== 'string') { + throw new ExcludePathsFlagInvalidInputError(); + } + } } function validateUnsupportedSarifCombinations(args) { diff --git a/src/lib/ecosystems/monitor.ts b/src/lib/ecosystems/monitor.ts index 04e95fde30..19936044f3 100644 --- a/src/lib/ecosystems/monitor.ts 
+++ b/src/lib/ecosystems/monitor.ts @@ -1,8 +1,10 @@ import { InspectResult } from '@snyk/cli-interface/legacy/plugin'; import chalk from 'chalk'; +import * as pMap from 'p-map'; import config from '../config'; import { isCI } from '../is-ci'; import { makeRequest } from '../request/promise'; +import { getRequestConcurrency } from '../snyk-test/common'; import { Contributor, MonitorOptions, @@ -39,6 +41,7 @@ import { validateTags, } from '../../cli/commands/monitor'; import { isUnmanagedEcosystem, filterDockerFacts } from './common'; +import { extractAndApplyPluginAnalytics } from './plugin-analytics'; import { findAndLoadPolicy } from '../policy'; const SEPARATOR = '\n-------------------------------------------------------\n'; @@ -65,6 +68,11 @@ export async function monitorEcosystem( ecosystem, options, ); + + if (pluginResponse.analytics) { + extractAndApplyPluginAnalytics(pluginResponse.analytics); + } + scanResultsByPath[path] = filteredResponse.scanResults; const policy = await findAndLoadPolicy(path, 'cpp', options); @@ -142,47 +150,21 @@ async function monitorDependencies( ): Promise<[EcosystemMonitorResult[], EcosystemMonitorError[]]> { const results: EcosystemMonitorResult[] = []; const errors: EcosystemMonitorError[] = []; + const concurrency = getRequestConcurrency(); + for (const [path, scanResults] of Object.entries(scans)) { await spinner(`Monitoring dependencies in ${path}`); - for (const scanResult of scanResults) { - const monitorDependenciesRequest = - await generateMonitorDependenciesRequest(scanResult, options); - - const configOrg = config.org ? 
decodeURIComponent(config.org) : undefined; - - const payload = { - method: 'PUT', - url: `${config.API}/monitor-dependencies`, - json: true, - headers: { - 'x-is-ci': isCI(), - authorization: getAuthHeader(), - }, - body: monitorDependenciesRequest, - qs: { - org: options.org || configOrg, - }, - }; - try { - const response = - await makeRequest(payload); - results.push({ - ...response, - path, - scanResult, - }); - } catch (error) { - if (error.code === 401) { - throw AuthFailedError(); - } - if (error.code >= 400 && error.code < 500) { - throw new MonitorError(error.code, error.message); - } - errors.push({ - error: 'Could not monitor dependencies in ' + path, - path, - scanResult, - }); + const perScanResults = await pMap( + scanResults, + (scanResult) => monitorOneScanResult(scanResult, options, path), + { concurrency }, + ); + for (const r of perScanResults) { + if (r.result) { + results.push(r.result); + } + if (r.error) { + errors.push(r.error); } } spinner.clearAll(); @@ -190,6 +172,61 @@ async function monitorDependencies( return [results, errors]; } +async function monitorOneScanResult( + scanResult: ScanResult, + options: Options & MonitorOptions, + path: string, +): Promise<{ + result?: EcosystemMonitorResult; + error?: EcosystemMonitorError; +}> { + const monitorDependenciesRequest = await generateMonitorDependenciesRequest( + scanResult, + options, + ); + + const configOrg = config.org ? 
decodeURIComponent(config.org) : undefined; + + const payload = { + method: 'PUT', + url: `${config.API}/monitor-dependencies`, + json: true, + headers: { + 'x-is-ci': isCI(), + authorization: getAuthHeader(), + }, + body: monitorDependenciesRequest, + qs: { + org: options.org || configOrg, + }, + }; + + try { + const response = await makeRequest(payload); + return { + result: { + ...response, + path, + scanResult, + }, + }; + } catch (error) { + if (error.code === 401) { + throw AuthFailedError(); + } + if (error.code >= 400 && error.code < 500) { + throw new MonitorError(error.code, error.message); + } + return { + error: { + error: 'Could not monitor dependencies in ' + path, + path, + scanResult, + }, + }; + } +} + export async function getFormattedMonitorOutput( results: Array, monitorResults: EcosystemMonitorResult[], diff --git a/src/lib/ecosystems/test.ts b/src/lib/ecosystems/test.ts index 6424419eec..3cff71bda6 100644 --- a/src/lib/ecosystems/test.ts +++ b/src/lib/ecosystems/test.ts @@ -12,6 +12,7 @@ import { TestDependenciesResponse } from '../snyk-test/legacy'; import { assembleQueryString, printDepGraph, + printDepGraphJsonl, shouldPrintDepGraph, } from '../snyk-test/common'; import { getAuthHeader } from '../api-token'; @@ -54,9 +55,12 @@ export async function testEcosystem( } spinner.clearAll(); - if (isUnmanagedEcosystem(ecosystem) && shouldPrintDepGraph(options)) { + if ( + isUnmanagedEcosystem(ecosystem) && + (shouldPrintDepGraph(options) || options['print-output-jsonl-with-errors']) + ) { const [target] = paths; - return printUnmanagedDepGraph(results, target, process.stdout); + return printUnmanagedDepGraph(results, target, process.stdout, options); } const [testResults, errors] = await selectAndExecuteTestStrategy( @@ -99,11 +103,25 @@ export async function printUnmanagedDepGraph( results: ScanResultsByPath, target: string, destination: Writable, + options: Options, ): Promise { const [result] = await getUnmanagedDepGraph(results); const 
depGraph = convertDepGraph(result); - await printDepGraph(depGraph, target, destination); + if (options['print-output-jsonl-with-errors']) { + await printDepGraphJsonl( + depGraph, + target, + undefined, + undefined, + undefined, + undefined, + undefined, + destination, + ); + } else { + await printDepGraph(depGraph, target, destination); + } return TestCommandResult.createJsonTestCommandResult(''); } diff --git a/src/lib/ecosystems/types.ts b/src/lib/ecosystems/types.ts index 640006a04a..279ea6b6df 100644 --- a/src/lib/ecosystems/types.ts +++ b/src/lib/ecosystems/types.ts @@ -9,6 +9,7 @@ export type FindingType = 'iacIssue'; export interface PluginResponse { scanResults: ScanResult[]; + analytics?: Analytics[]; } export interface GitTarget { diff --git a/src/lib/errors/exclude-paths-flag-invalid-input.ts b/src/lib/errors/exclude-paths-flag-invalid-input.ts new file mode 100644 index 0000000000..b189c3416c --- /dev/null +++ b/src/lib/errors/exclude-paths-flag-invalid-input.ts @@ -0,0 +1,15 @@ +import { CLI } from '@snyk/error-catalog-nodejs-public'; +import { CustomError } from './custom-error'; + +export class ExcludePathsFlagInvalidInputError extends CustomError { + private static ERROR_CODE = 422; + private static ERROR_MESSAGE = + 'The --exclude-paths argument must be a comma separated list of file or directory paths.'; + + constructor() { + super(ExcludePathsFlagInvalidInputError.ERROR_MESSAGE); + this.code = ExcludePathsFlagInvalidInputError.ERROR_CODE; + this.userMessage = ExcludePathsFlagInvalidInputError.ERROR_MESSAGE; + this.errorCatalog = new CLI.InvalidFlagOptionError(''); + } +} diff --git a/src/lib/errors/index.ts b/src/lib/errors/index.ts index 205f3bb38a..2b828a213a 100644 --- a/src/lib/errors/index.ts +++ b/src/lib/errors/index.ts @@ -22,6 +22,7 @@ export { FeatureNotSupportedForOrgError } from './unsupported-feature-for-org-er export { MissingOptionError } from './missing-option-error'; export { MissingArgError } from './missing-arg-error'; export 
{ ExcludeFlagBadInputError } from './exclude-flag-bad-input'; +export { ExcludePathsFlagInvalidInputError } from './exclude-paths-flag-invalid-input'; export { UnsupportedOptionCombinationError } from './unsupported-option-combination-error'; export { FeatureNotSupportedByPackageManagerError } from './feature-not-supported-by-package-manager-error'; export { DockerImageNotFoundError } from './docker-image-not-found-error'; diff --git a/src/lib/find-files.ts b/src/lib/find-files.ts index 3ed7d6d12a..f75d8849c0 100644 --- a/src/lib/find-files.ts +++ b/src/lib/find-files.ts @@ -54,6 +54,7 @@ const ignoreFolders = ['node_modules', '.build']; interface FindFilesConfig { path: string; ignore?: string[]; + excludePaths?: string[]; filter?: string[]; levelsDeep?: number; featureFlags?: Set; @@ -62,6 +63,7 @@ interface FindFilesConfig { type DefaultFindConfig = { path: string; ignore: string[]; + excludePaths: string[]; filter: string[]; levelsDeep: number; featureFlags: Set; @@ -70,6 +72,7 @@ type DefaultFindConfig = { const defaultFindConfig: DefaultFindConfig = { path: '', ignore: [], + excludePaths: [], filter: [], levelsDeep: 4, featureFlags: new Set(), @@ -137,6 +140,20 @@ export async function find(findConfig: FindFilesConfig): Promise { } } +export function isExcludedPath( + resolvedPath: string, + excludePaths: string[], +): boolean { + if (excludePaths.length === 0) { + return false; + } + if (process.platform === 'win32') { + const lowerPath = resolvedPath.toLowerCase(); + return excludePaths.some((ep) => ep.toLowerCase() === lowerPath); + } + return excludePaths.includes(resolvedPath); +} + function findFile(path: string, filter: string[] = []): string | null { if (filter.length > 0) { const filename = pathLib.basename(path); @@ -156,17 +173,16 @@ async function findInDirectory( const files = await readDirectory(config.path); const toFind = files .filter((file) => !config.ignore.includes(file)) - .map((file) => { - const resolvedPath = 
pathLib.resolve(config.path, file); + .map((file) => pathLib.resolve(config.path, file)) + .filter( + (resolvedPath) => !isExcludedPath(resolvedPath, config.excludePaths), + ) + .map((resolvedPath) => { if (!fs.existsSync(resolvedPath)) { - debug('File does not seem to exist, skipping: ', file); + debug('File does not seem to exist, skipping: ', resolvedPath); return { files: [], allFilesFound: [] }; } - const findconfig = { - ...config, - path: resolvedPath, - }; - return find(findconfig); + return find({ ...config, path: resolvedPath }); }); const found = await Promise.all(toFind); diff --git a/src/lib/plugins/get-deps-from-plugin.ts b/src/lib/plugins/get-deps-from-plugin.ts index 4873571d24..b977f1783e 100644 --- a/src/lib/plugins/get-deps-from-plugin.ts +++ b/src/lib/plugins/get-deps-from-plugin.ts @@ -3,7 +3,7 @@ import * as pathLib from 'path'; import chalk from 'chalk'; import { icon } from '../theme'; import { legacyPlugin as pluginApi } from '@snyk/cli-interface'; -import { find } from '../find-files'; +import { find, isExcludedPath } from '../find-files'; import { Options, TestOptions, MonitorOptions } from '../types'; import { NoSupportedManifestsFoundError } from '../errors'; import { @@ -45,10 +45,16 @@ export async function getDepsFromPlugin( const scanType = options.yarnWorkspaces ? 'yarnWorkspaces' : 'allProjects'; const levelsDeep = options.detectionDepth; const ignore = options.exclude ? options.exclude.split(',') : []; + const excludePaths = options.excludePaths + ? 
options.excludePaths + .split(',') + .map((p) => pathLib.resolve(root, p.trim())) + : []; const { files: targetFiles, allFilesFound } = await find({ path: root, ignore, + excludePaths, filter: multiProjectProcessors[scanType].files, featureFlags, levelsDeep, @@ -58,7 +64,21 @@ export async function getDepsFromPlugin( targetFiles, ); if (targetFiles.length === 0) { - throw NoSupportedManifestsFoundError([root]); + const error = NoSupportedManifestsFoundError([root]); + if (options['print-output-jsonl-with-errors']) { + return { + plugin: { name: 'custom-auto-detect' }, + scannedProjects: [], + failedResults: [ + { + targetFile: options.file, + error, + errMessage: error.userMessage, + }, + ], + } as MultiProjectResultCustom; + } + throw error; } // enable full sub-project scan for gradle options.allSubProjects = true; @@ -68,6 +88,22 @@ export async function getDepsFromPlugin( targetFiles, featureFlags, ); + + if (excludePaths.length > 0) { + // Workspace parsers (e.g. pnpm) discover projects by reading workspace + // config files rather than walking the filesystem, so they bypass the + // exclusion in find(). Re-apply isExcludedPath here so both code paths + // share the same matching semantics (including Windows case handling). 
+ inspectRes.scannedProjects = inspectRes.scannedProjects.filter( + (project) => { + const targetFile = project.meta?.targetFile || project.targetFile; + if (!targetFile) return true; + const resolved = pathLib.resolve(root, targetFile); + return !isExcludedPath(resolved, excludePaths); + }, + ); + } + const scannedProjects = inspectRes.scannedProjects; const analyticData = { scannedProjects: scannedProjects.length, @@ -77,6 +113,7 @@ export async function getDepsFromPlugin( ), levelsDeep, ignore, + excludePaths, }; analytics.add(scanType, analyticData); debug( @@ -100,14 +137,49 @@ export async function getDepsFromPlugin( options.file = options.file || detectPackageFile(root, featureFlags); } if (!options.docker && !(options.file || options.packageManager)) { - throw NoSupportedManifestsFoundError([...root]); + const error = NoSupportedManifestsFoundError([root]); + if (options['print-output-jsonl-with-errors']) { + return { + plugin: { name: 'custom-auto-detect' }, + scannedProjects: [], + failedResults: [ + { + targetFile: options.file, + error, + errMessage: error.userMessage, + }, + ], + } as MultiProjectResultCustom; + } + throw error; + } + + let inspectRes: pluginApi.InspectResult; + try { + inspectRes = await getSinglePluginResult(root, options, '', featureFlags); + } catch (error) { + if (options['print-output-jsonl-with-errors']) { + const errMessage = + error?.message ?? 
'Something went wrong getting dependencies'; + debug( + `Single plugin scan failed for ${options.file}, collecting as failed result: ${errMessage}`, + ); + return { + plugin: { + name: options.packageManager || 'unknown', + }, + scannedProjects: [], + failedResults: [ + { + targetFile: options.file, + error, + errMessage, + }, + ], + } as MultiProjectResultCustom; + } + throw error; } - const inspectRes = await getSinglePluginResult( - root, - options, - '', - featureFlags, - ); if (!pluginApi.isMultiResult(inspectRes)) { if (!inspectRes.package && !inspectRes.dependencyGraph) { diff --git a/src/lib/plugins/get-multi-plugin-result.ts b/src/lib/plugins/get-multi-plugin-result.ts index 2f7c44dd77..c3f124b117 100644 --- a/src/lib/plugins/get-multi-plugin-result.ts +++ b/src/lib/plugins/get-multi-plugin-result.ts @@ -11,7 +11,7 @@ import { SUPPORTED_MANIFEST_FILES, SupportedPackageManagers, } from '../package-managers'; -const { SHOW_NPM_SCOPE } = require('../feature-flags'); +import { SHOW_NPM_SCOPE } from '../feature-flags'; import { getSinglePluginResult } from './get-single-plugin-result'; import { convertSingleResultToMultiCustom } from './convert-single-splugin-res-to-multi-custom'; import { convertMultiResultToMultiCustom } from './convert-multi-plugin-res-to-multi-custom'; @@ -85,6 +85,13 @@ export async function getMultiPluginResult( featureFlags, ); unprocessedFilesfromWorkspaces = unprocessedFilesFromPnpm; + // Annotate each scanned project with the workspace plugin name for later identification + scannedPnpmResults.forEach((project) => { + if (!project.meta) { + project.meta = {}; + } + project.meta.workspacePluginName = 'snyk-nodejs-pnpm-workspaces'; + }); allResults.push(...scannedPnpmResults); const { @@ -97,6 +104,13 @@ export async function getMultiPluginResult( 'yarn', featureFlags, ); + // Annotate each scanned project with the workspace plugin name for later identification + scannedYarnResults.forEach((project) => { + if (!project.meta) { + 
project.meta = {}; + } + project.meta.workspacePluginName = 'snyk-nodejs-yarn-workspaces'; + }); allResults.push(...scannedYarnResults); const { scannedProjects: scannedNpmResults, unprocessedFiles } = @@ -107,6 +121,13 @@ export async function getMultiPluginResult( 'npm', featureFlags, ); + // Annotate each scanned project with the workspace plugin name for later identification + scannedNpmResults.forEach((project) => { + if (!project.meta) { + project.meta = {}; + } + project.meta.workspacePluginName = 'snyk-nodejs-npm-workspaces'; + }); allResults.push(...scannedNpmResults); debug(`Not part of a workspace: ${unprocessedFiles.join(', ')}}`); @@ -182,6 +203,16 @@ export async function getMultiPluginResult( } if (!allResults.length) { + if (options['print-output-jsonl-with-errors']) { + return { + plugin: { + name: 'custom-auto-detect', + }, + scannedProjects: allResults, + failedResults, + }; + } + // No projects were scanned successfully let message = `Failed to get dependencies for all ${targetFiles.length} potential projects.\n`; diff --git a/src/lib/snyk-test/assemble-payloads.ts b/src/lib/snyk-test/assemble-payloads.ts index 42c47f5100..fd7481dc51 100644 --- a/src/lib/snyk-test/assemble-payloads.ts +++ b/src/lib/snyk-test/assemble-payloads.ts @@ -6,6 +6,7 @@ import { isCI } from '../is-ci'; import { getPlugin } from '../ecosystems'; import { Ecosystem, ContainerTarget, ScanResult } from '../ecosystems/types'; import { filterDockerFacts } from '../ecosystems/common'; +import { extractAndApplyPluginAnalytics } from '../ecosystems/plugin-analytics'; import { Options, PolicyOptions, TestOptions } from '../types'; import { Payload } from './types'; import { assembleQueryString } from './common'; @@ -46,6 +47,10 @@ export async function assembleEcosystemPayloads( options, ); + if (pluginResponse.analytics) { + extractAndApplyPluginAnalytics(pluginResponse.analytics); + } + const payloads: Payload[] = []; // TODO: This is a temporary workaround until the plugins 
themselves can read policy files and set names! diff --git a/src/lib/snyk-test/common.ts b/src/lib/snyk-test/common.ts index 86724edc67..50c239c8b0 100644 --- a/src/lib/snyk-test/common.ts +++ b/src/lib/snyk-test/common.ts @@ -12,6 +12,37 @@ import { CLI, ProblemError } from '@snyk/error-catalog-nodejs-public'; import { CustomError } from '../errors'; import { FailedProjectScanError } from '../plugins/get-multi-plugin-result'; +/** + * Determines workspace information from the plugin name and scanned project metadata. + * Returns workspace metadata if the plugin is a workspace plugin, otherwise undefined. + */ +function getWorkspaceInfo( + pluginName: string | undefined, + workspacePluginName: string | undefined, +): { type: string } | undefined { + // Check workspace plugin name from scannedProject.meta (--all-projects) or parent plugin (--yarn-workspaces) + if ( + workspacePluginName === 'snyk-nodejs-yarn-workspaces' || + pluginName === 'snyk-nodejs-yarn-workspaces' + ) { + return { type: 'yarn' }; + } + if ( + workspacePluginName === 'snyk-nodejs-npm-workspaces' || + pluginName === 'snyk-nodejs-npm-workspaces' + ) { + return { type: 'npm' }; + } + if ( + workspacePluginName === 'snyk-nodejs-pnpm-workspaces' || + pluginName === 'snyk-nodejs-pnpm-workspaces' + ) { + return { type: 'pnpm' }; + } + + return undefined; +} + export function assembleQueryString(options) { const org = options.org || config.org || null; const qs: { @@ -80,6 +111,30 @@ export type FailOn = 'all' | 'upgradable' | 'patchable'; export const RETRY_ATTEMPTS = 3; export const RETRY_DELAY = 500; +const DEFAULT_REQUEST_CONCURRENCY = 5; +const MIN_REQUEST_CONCURRENCY = 1; +const MAX_REQUEST_CONCURRENCY = 50; + +/** + * Returns the maximum number of in-flight Snyk dependency-test or + * dependency-monitor HTTP requests permitted at once. 
The wrapping Go CLI + * resolves the user-facing SNYK_REQUEST_CONCURRENCY env var (and any future + * config-file/flag sources) and forwards the resolved value via the internal + * SNYK_INTERNAL_REQUEST_CONCURRENCY env var read here. Values are clamped to + * [MIN_REQUEST_CONCURRENCY, MAX_REQUEST_CONCURRENCY]. + */ +export function getRequestConcurrency(): number { + const raw = process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + if (!raw) { + return DEFAULT_REQUEST_CONCURRENCY; + } + const parsed = parseInt(raw, 10); + if (!Number.isFinite(parsed) || parsed < MIN_REQUEST_CONCURRENCY) { + return DEFAULT_REQUEST_CONCURRENCY; + } + return Math.min(parsed, MAX_REQUEST_CONCURRENCY); +} + /** * printDepGraph writes the given dep-graph and target name to the destination * stream as expected by the `depgraph` CLI workflow. @@ -106,29 +161,31 @@ export function shouldPrintDepGraph(opts: Options): boolean { } /** - * printEffectiveDepGraph writes the given, possibly pruned dep-graph and target file to the destination - * stream as a JSON object containing both depGraph, normalisedTargetFile and targetFile from plugin. - * This allows extracting the effective dep-graph which is being used for the test. + * printDepGraphJsonl writes dep-graph metadata to the destination stream as one JSON object + * per line (JSONL): depGraph, normalisedTargetFile, optional targetFileFromPlugin, optional target. + * Callers supply the dep-graph payload (full or pruned) they want to serialize. 
*/ -export async function printEffectiveDepGraph( +export async function printDepGraphJsonl( depGraph: DepGraphData, normalisedTargetFile: string, targetFileFromPlugin: string | undefined, target: GitTarget | ContainerTarget | null | undefined, + targetRuntime: string | undefined, + pluginName: string | undefined, + workspacePluginName: string | undefined, destination: Writable, ): Promise { return new Promise((res, rej) => { - const effectiveGraphOutput = { + const graphOutput: any = { depGraph, normalisedTargetFile, targetFileFromPlugin, target, + targetRuntime, + workspace: getWorkspaceInfo(pluginName, workspacePluginName), }; - new ConcatStream( - new JsonStreamStringify(effectiveGraphOutput), - Readable.from('\n'), - ) + new ConcatStream(new JsonStreamStringify(graphOutput), Readable.from('\n')) .on('end', res) .on('error', rej) .pipe(destination); @@ -136,17 +193,16 @@ export async function printEffectiveDepGraph( } /** - * printEffectiveDepGraphError writes an error output for failed dependency graph resolution - * to the destination stream in a format consistent with printEffectiveDepGraph. - * This is used when --print-effective-graph-with-errors is set but dependency resolution failed. + * printDepGraphError writes an error output for failed dependency graph resolution + * to the destination stream in a format consistent with printDepGraphJsonl. */ -export async function printEffectiveDepGraphError( +export async function printDepGraphError( root: string, failedProjectScanError: FailedProjectScanError, destination: Writable, ): Promise { return new Promise((res, rej) => { - // Normalize the target file path to be relative to root, consistent with printEffectiveDepGraph + // Normalize the target file path to be relative to root, consistent with printDepGraphJsonl const normalisedTargetFile = failedProjectScanError.targetFile ? 
path.relative(root, failedProjectScanError.targetFile) : failedProjectScanError.targetFile; @@ -154,15 +210,12 @@ export async function printEffectiveDepGraphError( const problemError = getOrCreateErrorCatalogError(failedProjectScanError); const serializedError = problemError.toJsonApi().body(); - const effectiveGraphErrorOutput = { + const errorRecord = { error: serializedError, normalisedTargetFile, }; - new ConcatStream( - new JsonStreamStringify(effectiveGraphErrorOutput), - Readable.from('\n'), - ) + new ConcatStream(new JsonStreamStringify(errorRecord), Readable.from('\n')) .on('end', res) .on('error', rej) .pipe(destination); diff --git a/src/lib/snyk-test/index.js b/src/lib/snyk-test/index.js index 8006dbf63a..03c98980fb 100644 --- a/src/lib/snyk-test/index.js +++ b/src/lib/snyk-test/index.js @@ -19,6 +19,7 @@ const { DISABLE_GO_PACKAGE_URLS_IN_CLI_FEATURE_FLAG, } = require('../package-managers'); const { getOrganizationID } = require('../organization'); +const { printDepGraphError } = require('./common'); const debug = require('debug')('snyk-test'); async function test(root, options, callback) { @@ -56,7 +57,8 @@ async function executeTest(root, options) { const verboseEnabled = args.includes('-Dverbose') || args.includes('-Dverbose=true') || - !!options['print-graph']; + !!options['print-graph'] || + !!options['print-output-jsonl-with-errors']; if (verboseEnabled) { enableMavenDverboseExhaustiveDeps = await hasFeatureFlag( MAVEN_DVERBOSE_EXHAUSTIVE_DEPS_FF, @@ -108,11 +110,26 @@ async function executeTest(root, options) { } if (!options.allProjects) { - options.packageManager = detect.detectPackageManager( - root, - options, - featureFlags, - ); + try { + options.packageManager = detect.detectPackageManager( + root, + options, + featureFlags, + ); + } catch (error) { + if (options['print-output-jsonl-with-errors']) { + await printDepGraphError( + root, + { + error, + errMessage: error.userMessage, + }, + process.stdout, + ); + return []; + } + throw 
error; + } } return run(root, options, featureFlags).then((results) => { diff --git a/src/lib/snyk-test/run-test.ts b/src/lib/snyk-test/run-test.ts index dc9a41e150..89906cc347 100644 --- a/src/lib/snyk-test/run-test.ts +++ b/src/lib/snyk-test/run-test.ts @@ -39,9 +39,10 @@ import { isCI } from '../is-ci'; import { RETRY_ATTEMPTS, RETRY_DELAY, + getRequestConcurrency, printDepGraph, - printEffectiveDepGraph, - printEffectiveDepGraphError, + printDepGraphJsonl, + printDepGraphError, assembleQueryString, shouldPrintDepGraph, shouldPrintEffectiveDepGraph, @@ -94,9 +95,6 @@ import { ProblemError } from '@snyk/error-catalog-nodejs-public'; const debug = debugModule('snyk:run-test'); -// Controls the number of simultaneous test requests that can be in-flight. -const MAX_CONCURRENCY = 5; - function prepareResponseForParsing( payload: Payload, response: TestDependenciesResponse, @@ -246,7 +244,11 @@ async function sendAndParseResults( ): Promise { const results: TestResult[] = []; const ecosystem = getEcosystem(options); - const depGraphs = new Map(); + const depGraphs: { + graph: depGraphLib.DepGraphData; + targetName: string; + targetFile: string; + }[] = []; await spinner.clear(spinnerLbl)(); if (!options.quiet) { @@ -293,7 +295,7 @@ async function sendAndParseResults( }; const responses = await pMap(payloads, sendRequest, { - concurrency: MAX_CONCURRENCY, + concurrency: getRequestConcurrency(), }); for (const { payload, originalPayload, response } of responses) { @@ -322,7 +324,11 @@ async function sendAndParseResults( if (ecosystem && depGraph) { const targetName = scanResult ? 
constructProjectName(scanResult) : ''; - depGraphs.set(targetName, depGraph.toJSON()); + depGraphs.push({ + targetName, + graph: depGraph.toJSON(), + targetFile: targetFile || displayTargetFile || '', + }); } const legacyRes = convertIssuesToAffectedPkgs(response); @@ -352,9 +358,28 @@ async function sendAndParseResults( if (ecosystem && shouldPrintDepGraph(options)) { await spinner.clear(spinnerLbl)(); - for (const [targetName, depGraph] of depGraphs.entries()) { - await printDepGraph(depGraph, targetName, process.stdout); + if (options['print-output-jsonl-with-errors']) { + for (const { graph, targetFile, targetName } of depGraphs) { + await printDepGraphJsonl( + graph, + targetFile || targetName, + undefined, + undefined, + undefined, + undefined, + undefined, + process.stdout, + ); + } + } else { + const depGraphsByTarget = new Map( + depGraphs.map(({ targetName, graph }) => [targetName, graph]), + ); + for (const [targetName, graph] of depGraphsByTarget) { + await printDepGraph(graph, targetName, process.stdout); + } } + return []; } @@ -376,10 +401,11 @@ export async function runTest( // dependency graph artifacts for printing. 
if ( !options.docker && - (shouldPrintDepGraph(options) || shouldPrintEffectiveDepGraph(options)) + (shouldPrintDepGraph(options) || + shouldPrintEffectiveDepGraph(options) || + options['print-output-jsonl-with-errors']) ) { - const results: TestResult[] = []; - return results; + return []; } return await sendAndParseResults(payloads, spinnerLbl, root, options); @@ -675,9 +701,12 @@ async function assembleLocalPayloads( failedResults, ); - if (shouldPrintEffectiveDepGraphWithErrors(options)) { + if ( + shouldPrintEffectiveDepGraphWithErrors(options) || + options['print-output-jsonl-with-errors'] + ) { for (const failed of failedResults) { - await printEffectiveDepGraphError(root, failed, process.stdout); + await printDepGraphError(root, failed, process.stdout); } } @@ -828,7 +857,20 @@ async function assembleLocalPayloads( ); } - await printDepGraph(root.toJSON(), targetFile || '', process.stdout); + if (options['print-output-jsonl-with-errors']) { + await printDepGraphJsonl( + root.toJSON(), + targetFile || '', + project.plugin.targetFile, + target, + scannedProject.meta?.targetRuntime ?? project.plugin?.targetRuntime, + deps.plugin.name, + scannedProject.meta?.workspacePluginName, + process.stdout, + ); + } else { + await printDepGraph(root.toJSON(), targetFile || '', process.stdout); + } } const body: PayloadBody = { @@ -869,17 +911,20 @@ async function assembleLocalPayloads( const pruneIsRequired = options.pruneRepeatedSubdependencies; - if (packageManager) { + if (packageManager && !options['print-output-jsonl-with-errors']) { depGraph = await pruneGraph(depGraph, packageManager, pruneIsRequired); } if (shouldPrintEffectiveDepGraph(options)) { spinner.clear(spinnerLbl)(); - await printEffectiveDepGraph( + await printDepGraphJsonl( depGraph.toJSON(), targetFile, project.plugin.targetFile, target, + scannedProject.meta?.targetRuntime ?? 
project.plugin?.targetRuntime, + deps.plugin.name, + scannedProject.meta?.workspacePluginName, process.stdout, ); } diff --git a/src/lib/types.ts b/src/lib/types.ts index d87db84e70..948aeee47f 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -70,12 +70,14 @@ export interface Options { 'print-dep-paths'?: boolean; 'print-effective-graph'?: boolean; 'print-effective-graph-with-errors'?: boolean; + 'print-output-jsonl-with-errors'?: boolean; 'remote-repo-url'?: string; criticality?: string; scanAllUnmanaged?: boolean; allProjects?: boolean; detectionDepth?: number; exclude?: string; + excludePaths?: string; strictOutOfSync?: boolean; // Used only with the IaC mode & Docker plugin. Allows requesting some experimental/unofficial features. experimental?: boolean; @@ -288,6 +290,7 @@ export type SupportedUserReachableFacingCliArgs = | 'maven-skip-wrapper' | 'include-provenance' | 'fingerprint-algorithm' + | 'exclude-paths' | 'gradle-normalize-deps'; export enum SupportedCliCommands { diff --git a/test/acceptance/fake-server.ts b/test/acceptance/fake-server.ts index 264f84b537..b5073ac470 100644 --- a/test/acceptance/fake-server.ts +++ b/test/acceptance/fake-server.ts @@ -78,6 +78,7 @@ export type FakeServer = { responses: Record[], ) => void; setStatusCode: (c: number) => void; + setResponseDelay: (delayMs: number) => void; setLocalCodeEngineConfiguration: (next: Record) => void; setFeatureFlag: (featureFlag: string, enabled: boolean) => void; setOrgSetting: (setting: string, enabled: boolean) => void; @@ -122,6 +123,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { let sarifResponse: Record | undefined = undefined; let redteamNextCallCount: Record = {}; let server: http.Server | undefined = undefined; + let responseDelayMs = 0; const sockets = new Set(); const getOrCreateEndpointConfig = (endpoint: string): EndpointConfig => { @@ -151,6 +153,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { 
availableSettings = new Map(); unauthorizedActions = new Map(); redteamNextCallCount = {}; + responseDelayMs = 0; }; const getRequests = () => { @@ -201,6 +204,10 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { statusCode = code; }; + const setResponseDelay = (delayMs: number) => { + responseDelayMs = delayMs; + }; + const setGlobalResponse = ( response: Record, code: number, @@ -362,6 +369,20 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { next(); }); + // Apply response delay if configured (exclude analytics/instrumentation/init endpoints) + app.use((req, res, next) => { + const isExcludedEndpoint = + req.url?.includes('/analytics') || + req.url?.includes('/instrumentation') || + req.url?.includes('/v1/track') || + req.url?.includes('/api/rest/orgs/'); + if (responseDelayMs > 0 && !isExcludedEndpoint) { + global.setTimeout(() => next(), responseDelayMs); + } else { + next(); + } + }); + app.use((req, res, next) => { // check and handle specific responses first if (handleSpecificResponses(req, res)) { @@ -684,29 +705,39 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { }); }); - app.post(`/api/hidden/orgs/:orgId/upload_revisions`, (req, res) => { - res.status(201).send({ - data: { - attributes: { - revision_type: 'snapshot', - sealed: false, + // Both prefixes are required due to API URL canonicalisation, performed in some extensions. 
+ app.post( + [ + `/hidden/orgs/:orgId/upload_revisions`, + `/api/hidden/orgs/:orgId/upload_revisions`, + ], + (req, res) => { + res.status(201).send({ + data: { + attributes: { + revision_type: 'snapshot', + sealed: false, + }, + id: 'bc0729a7-109f-4fe9-a048-aac410e28c9a', + type: 'upload_revision', }, - id: 'bc0729a7-109f-4fe9-a048-aac410e28c9a', - type: 'upload_revision', - }, - jsonapi: { - version: '1.0', - }, - links: { - self: { - href: '/orgs/bb262a15-d798-458b-81fa-30a92cb3475c/upload_revisions/bc0729a7-109f-4fe9-a048-aac410e28c9a', + jsonapi: { + version: '1.0', }, - }, - }); - }); + links: { + self: { + href: `/orgs/${req.params.orgId}/upload_revisions/bc0729a7-109f-4fe9-a048-aac410e28c9a`, + }, + }, + }); + }, + ); app.post( - `/api/hidden/orgs/:orgId/upload_revisions/:uploadRevisionId/files`, + [ + `/hidden/orgs/:orgId/upload_revisions/:uploadRevisionId/files`, + `/api/hidden/orgs/:orgId/upload_revisions/:uploadRevisionId/files`, + ], (_, res) => { res.status(204); res.send(); @@ -714,7 +745,10 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { ); app.patch( - `/api/hidden/orgs/:orgId/upload_revisions/:uploadRevisionId`, + [ + `/hidden/orgs/:orgId/upload_revisions/:uploadRevisionId`, + `/api/hidden/orgs/:orgId/upload_revisions/:uploadRevisionId`, + ], (req, res) => { res.status(200).send({ data: { @@ -722,7 +756,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { revision_type: 'snapshot', sealed: true, }, - id: 'fbdb5cc0-6e34-4191-b088-0dff740faf38', + id: req.params.uploadRevisionId, type: 'upload_revision', }, jsonapi: { @@ -730,7 +764,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { }, links: { self: { - href: '/orgs/bb262a15-d798-458b-81fa-30a92cb3475c/upload_revisions/fbdb5cc0-6e34-4191-b088-0dff740faf38', + href: `/orgs/${req.params.orgId}/upload_revisions/${req.params.uploadRevisionId}`, }, }, }); @@ -1830,6 +1864,7 @@ ${componentsXml} 
setEndpointResponses, setGlobalResponse, setStatusCode, + setResponseDelay, setFeatureFlag, setOrgSetting, unauthorizeAction, diff --git a/test/acceptance/workspaces/pip-app-whl-all-projects/project-a/offline_packages/requests-2.32.3-py3-none-any.whl b/test/acceptance/workspaces/pip-app-whl-all-projects/project-a/offline_packages/requests-2.32.3-py3-none-any.whl new file mode 100644 index 0000000000..23662ce7ee Binary files /dev/null and b/test/acceptance/workspaces/pip-app-whl-all-projects/project-a/offline_packages/requests-2.32.3-py3-none-any.whl differ diff --git a/test/acceptance/workspaces/pip-app-whl-all-projects/project-a/requirements.txt b/test/acceptance/workspaces/pip-app-whl-all-projects/project-a/requirements.txt new file mode 100644 index 0000000000..5a7864990f --- /dev/null +++ b/test/acceptance/workspaces/pip-app-whl-all-projects/project-a/requirements.txt @@ -0,0 +1 @@ +./offline_packages/requests-2.32.3-py3-none-any.whl diff --git a/test/acceptance/workspaces/pip-app-whl-all-projects/project-b/offline_packages/requests-2.32.3-py3-none-any.whl b/test/acceptance/workspaces/pip-app-whl-all-projects/project-b/offline_packages/requests-2.32.3-py3-none-any.whl new file mode 100644 index 0000000000..23662ce7ee Binary files /dev/null and b/test/acceptance/workspaces/pip-app-whl-all-projects/project-b/offline_packages/requests-2.32.3-py3-none-any.whl differ diff --git a/test/acceptance/workspaces/pip-app-whl-all-projects/project-b/requirements.txt b/test/acceptance/workspaces/pip-app-whl-all-projects/project-b/requirements.txt new file mode 100644 index 0000000000..5a7864990f --- /dev/null +++ b/test/acceptance/workspaces/pip-app-whl-all-projects/project-b/requirements.txt @@ -0,0 +1 @@ +./offline_packages/requests-2.32.3-py3-none-any.whl diff --git a/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/broken-project/build.gradle b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/broken-project/build.gradle new file mode 100644 
index 0000000000..bef7557847 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/broken-project/build.gradle @@ -0,0 +1,10 @@ +// Intentionally invalid Gradle script to trigger a per-project scan error +// for the --allow-incomplete-sbom acceptance tests. +plugins { + id 'java' +} + +dependencies { + implementation 'this is :: not :: a :: valid :: gradle :: coordinate' + apply((((( // unbalanced parens / invalid groovy +} diff --git a/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/broken-project/settings.gradle b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/broken-project/settings.gradle new file mode 100644 index 0000000000..d831a2d171 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/broken-project/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'gradle-broken-project' diff --git a/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/valid-project/build.gradle b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/valid-project/build.gradle new file mode 100644 index 0000000000..eb4a72e658 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/valid-project/build.gradle @@ -0,0 +1,19 @@ +// Gradle fixture with transitive dependencies for --allow-incomplete-sbom. +// +// org.apache.logging.log4j:log4j-core:2.17.1 transitively depends on +// org.apache.logging.log4j:log4j-api:2.17.1, so the resulting SBOM +// contains both, plus the project itself. 
+plugins { + id 'java' +} + +group = 'io.snyk.example' +version = '1.0.0' + +repositories { + mavenCentral() +} + +dependencies { + implementation 'org.apache.logging.log4j:log4j-core:2.17.1' +} diff --git a/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/valid-project/settings.gradle b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/valid-project/settings.gradle new file mode 100644 index 0000000000..b08eba6a6b --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/gradle-multi-partial-broken/valid-project/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'gradle-valid-project' diff --git a/test/fixtures/sbom-allow-incomplete/maven-multi-partial-broken/broken-project/pom.xml b/test/fixtures/sbom-allow-incomplete/maven-multi-partial-broken/broken-project/pom.xml new file mode 100644 index 0000000000..7c25e886c4 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/maven-multi-partial-broken/broken-project/pom.xml @@ -0,0 +1,10 @@ + +4.0.0 + io.snyk.example + maven-broken-project + 1.0-SNAPSHOT diff --git a/test/fixtures/sbom-allow-incomplete/maven-multi-partial-broken/valid-project/pom.xml b/test/fixtures/sbom-allow-incomplete/maven-multi-partial-broken/valid-project/pom.xml new file mode 100644 index 0000000000..4089e22b37 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/maven-multi-partial-broken/valid-project/pom.xml @@ -0,0 +1,29 @@ + + + 4.0.0 + + io.snyk.example + maven-valid-project + jar + 1.0-SNAPSHOT + maven-valid-project + Maven fixture with transitive dependencies for --allow-incomplete-sbom. 
+ + + + + axis + axis + 1.4 + + + + diff --git a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/broken-app/package.json b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/broken-app/package.json new file mode 100644 index 0000000000..fd31ddbaa0 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/broken-app/package.json @@ -0,0 +1 @@ +{foo "name":"multi-lang-broken-app","version":"1.0.0"} diff --git a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/gradle-app/build.gradle b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/gradle-app/build.gradle new file mode 100644 index 0000000000..dd607e7d99 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/gradle-app/build.gradle @@ -0,0 +1,16 @@ +// log4j-core has transitive dependency on log4j-api, so the SBOM +// contains both Gradle components plus the project itself. +plugins { + id 'java' +} + +group = 'io.snyk.example' +version = '1.0.0' + +repositories { + mavenCentral() +} + +dependencies { + implementation 'org.apache.logging.log4j:log4j-core:2.17.1' +} diff --git a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/gradle-app/settings.gradle b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/gradle-app/settings.gradle new file mode 100644 index 0000000000..ce08395343 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/gradle-app/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'multi-lang-gradle-app' diff --git a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/maven-app/pom.xml b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/maven-app/pom.xml new file mode 100644 index 0000000000..8cd3abf77a --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/maven-app/pom.xml @@ -0,0 +1,24 @@ + + + 4.0.0 + + io.snyk.example + multi-lang-maven-app + jar + 1.0-SNAPSHOT + multi-lang-maven-app + + + + + axis + 
axis + 1.4 + + + diff --git a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/node_modules/debug/package.json b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/node_modules/debug/package.json new file mode 100644 index 0000000000..5eed2e22b6 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/node_modules/debug/package.json @@ -0,0 +1,9 @@ +{ + "name": "debug", + "version": "2.2.0", + "description": "small debugging utility", + "main": "./node.js", + "dependencies": { + "ms": "0.7.1" + } +} diff --git a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/node_modules/ms/package.json b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/node_modules/ms/package.json new file mode 100644 index 0000000000..9b6c997c26 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/node_modules/ms/package.json @@ -0,0 +1,6 @@ +{ + "name": "ms", + "version": "0.7.1", + "description": "Tiny ms conversion utility", + "main": "./index" +} diff --git a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/package-lock.json b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/package-lock.json new file mode 100644 index 0000000000..150c3c0934 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/package-lock.json @@ -0,0 +1,21 @@ +{ + "name": "multi-lang-npm-app", + "version": "1.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "debug": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "integrity": "sha1-+HBX6ZWxofauaklgZkE3vFbwOdo=", + "requires": { + "ms": "0.7.1" + } + }, + "ms": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "integrity": "sha1-nNE8A62/8ltl7/3nzoZO6VIBcJg=" + } + } +} diff --git 
a/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/package.json b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/package.json new file mode 100644 index 0000000000..bae7a76c6e --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/multi-lang-partial-broken/npm-app/package.json @@ -0,0 +1,7 @@ +{ + "name": "multi-lang-npm-app", + "version": "1.0.0", + "dependencies": { + "debug": "2.2.0" + } +} diff --git a/test/fixtures/sbom-allow-incomplete/npm-missing-lockfile/package.json b/test/fixtures/sbom-allow-incomplete/npm-missing-lockfile/package.json new file mode 100644 index 0000000000..8a03092e7d --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/npm-missing-lockfile/package.json @@ -0,0 +1 @@ +{"name":"npm-missing-lockfile","version":"1.0.0","dependencies":{"debug":"2.2.0"}} diff --git a/test/fixtures/sbom-allow-incomplete/npm-multi-all-broken/broken-project-a/package.json b/test/fixtures/sbom-allow-incomplete/npm-multi-all-broken/broken-project-a/package.json new file mode 100644 index 0000000000..96ae82375d --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/npm-multi-all-broken/broken-project-a/package.json @@ -0,0 +1 @@ +{foo "name":"broken-project-a","version":"1.0.0"} diff --git a/test/fixtures/sbom-allow-incomplete/npm-multi-all-broken/broken-project-b/package.json b/test/fixtures/sbom-allow-incomplete/npm-multi-all-broken/broken-project-b/package.json new file mode 100644 index 0000000000..9752e16be1 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/npm-multi-all-broken/broken-project-b/package.json @@ -0,0 +1 @@ +{foo "name":"broken-project-b","version":"1.0.0"} diff --git a/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/broken-project/package.json b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/broken-project/package.json new file mode 100644 index 0000000000..1b7bd0292b --- /dev/null +++ 
b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/broken-project/package.json @@ -0,0 +1 @@ +{foo "name":"broken-project","version":"1.0.0"} diff --git a/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/node_modules/debug/package.json b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/node_modules/debug/package.json new file mode 100644 index 0000000000..5eed2e22b6 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/node_modules/debug/package.json @@ -0,0 +1,9 @@ +{ + "name": "debug", + "version": "2.2.0", + "description": "small debugging utility", + "main": "./node.js", + "dependencies": { + "ms": "0.7.1" + } +} diff --git a/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/node_modules/ms/package.json b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/node_modules/ms/package.json new file mode 100644 index 0000000000..9b6c997c26 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/node_modules/ms/package.json @@ -0,0 +1,6 @@ +{ + "name": "ms", + "version": "0.7.1", + "description": "Tiny ms conversion utility", + "main": "./index" +} diff --git a/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/package-lock.json b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/package-lock.json new file mode 100644 index 0000000000..fdd7214857 --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/package-lock.json @@ -0,0 +1,21 @@ +{ + "name": "valid-project", + "version": "1.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "debug": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "integrity": "sha1-+HBX6ZWxofauaklgZkE3vFbwOdo=", + "requires": { + "ms": "0.7.1" + } + }, + "ms": { + "version": "0.7.1", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "integrity": "sha1-nNE8A62/8ltl7/3nzoZO6VIBcJg=" + } + } +} diff --git a/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/package.json b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/package.json new file mode 100644 index 0000000000..d670d8649d --- /dev/null +++ b/test/fixtures/sbom-allow-incomplete/npm-multi-partial-broken/valid-project/package.json @@ -0,0 +1,7 @@ +{ + "name": "valid-project", + "version": "1.0.0", + "dependencies": { + "debug": "2.2.0" + } +} diff --git a/test/fixtures/sbom/uv-findings-response.json b/test/fixtures/sbom/uv-findings-response.json new file mode 100644 index 0000000000..efcb87d448 --- /dev/null +++ b/test/fixtures/sbom/uv-findings-response.json @@ -0,0 +1,404 @@ +{ + "data": [ + { + "attributes": { + "cause_of_failure": false, + "description": "## Overview\n[Jinja2](https://pypi.org/project/Jinja2/) is a template engine written in pure Python. It provides a Django inspired non-XML syntax but supports inline expressions and an optional sandboxed environment.\n\nAffected versions of this package are vulnerable to Regular Expression Denial of Service (ReDoS). The ReDoS vulnerability is mainly due to the `_punctuation_re regex` operator and its use of multiple wildcards. 
The last wildcard is the most exploitable as it searches for trailing punctuation.\r\n\r\nThis issue can be mitigated by using Markdown to format user content instead of the urlize filter, or by implementing request timeouts or limiting process memory.\r\n\r\n### PoC by Yeting Li\r\n```\r\nfrom jinja2.utils import urlize\r\nfrom time import perf_counter\r\n\r\nfor i in range(3):\r\n text = \"abc@\" + \".\" * (i+1)*5000 + \"!\"\r\n LEN = len(text)\r\n BEGIN = perf_counter()\r\n urlize(text)\r\n DURATION = perf_counter() - BEGIN\r\n print(f\"{LEN}: took {DURATION} seconds!\")\r\n```\n\n## Details\n\nDenial of Service (DoS) describes a family of attacks, all aimed at making a system inaccessible to its original and legitimate users. There are many types of DoS attacks, ranging from trying to clog the network pipes to the system by generating a large volume of traffic from many machines (a Distributed Denial of Service - DDoS - attack) to sending crafted requests that cause a system to crash or take a disproportional amount of time to process.\n\nThe Regular expression Denial of Service (ReDoS) is a type of Denial of Service attack. Regular expressions are incredibly powerful, but they aren't very intuitive and can ultimately end up making it easy for attackers to take your site down.\n\nLet\u2019s take the following regular expression as an example:\n```js\nregex = /A(B|C+)+D/\n```\n\nThis regular expression accomplishes the following:\n- `A` The string must start with the letter 'A'\n- `(B|C+)+` The string must then follow the letter A with either the letter 'B' or some number of occurrences of the letter 'C' (the `+` matches one or more times). 
The `+` at the end of this section states that we can look for one or more matches of this section.\n- `D` Finally, we ensure this section of the string ends with a 'D'\n\nThe expression would match inputs such as `ABBD`, `ABCCCCD`, `ABCBCCCD` and `ACCCCCD`\n\nIt most cases, it doesn't take very long for a regex engine to find a match:\n\n```bash\n$ time node -e '/A(B|C+)+D/.test(\"ACCCCCCCCCCCCCCCCCCCCCCCCCCCCD\")'\n0.04s user 0.01s system 95% cpu 0.052 total\n\n$ time node -e '/A(B|C+)+D/.test(\"ACCCCCCCCCCCCCCCCCCCCCCCCCCCCX\")'\n1.79s user 0.02s system 99% cpu 1.812 total\n```\n\nThe entire process of testing it against a 30 characters long string takes around ~52ms. But when given an invalid string, it takes nearly two seconds to complete the test, over ten times as long as it took to test a valid string. The dramatic difference is due to the way regular expressions get evaluated.\n\nMost Regex engines will work very similarly (with minor differences). The engine will match the first possible way to accept the current character and proceed to the next one. If it then fails to match the next one, it will backtrack and see if there was another way to digest the previous character. If it goes too far down the rabbit hole only to find out the string doesn\u2019t match in the end, and if many characters have multiple valid regex paths, the number of backtracking steps can become very large, resulting in what is known as _catastrophic backtracking_.\n\nLet's look at how our expression runs into this problem, using a shorter string: \"ACCCX\". While it seems fairly straightforward, there are still four different ways that the engine could match those three C's:\n1. CCC\n2. CC+C\n3. C+CC\n4. C+C+C.\n\nThe engine has to try each of those combinations to see if any of them potentially match against the expression. 
When you combine that with the other steps the engine must take, we can use [RegEx 101 debugger](https://regex101.com/debugger) to see the engine has to take a total of 38 steps before it can determine the string doesn't match.\n\nFrom there, the number of steps the engine must use to validate a string just continues to grow.\n\n| String | Number of C's | Number of steps |\n| -------|-------------:| -----:|\n| ACCCX | 3 | 38\n| ACCCCX | 4 | 71\n| ACCCCCX | 5 | 136\n| ACCCCCCCCCCCCCCX | 14 | 65,553\n\n\nBy the time the string includes 14 C's, the engine has to take over 65,000 steps just to see if the string is valid. These extreme situations can cause them to work very slowly (exponentially related to input size, as shown above), allowing an attacker to exploit this and can cause the service to excessively consume CPU, resulting in a Denial of Service.\n\n## Remediation\nUpgrade `Jinja2` to version 2.11.3 or higher.\n## References\n- [GitHub Additional Information](https://github.com/pallets/jinja/blob/ab81fd9c277900c85da0c322a2ff9d68a235b2e6/src/jinja2/utils.py#L20)\n- [GitHub PR](https://github.com/pallets/jinja/pull/1343)\n", + "evidence": [ + { + "path": [ + { + "name": "simple-project", + "version": "0.1.0" + }, + { + "name": "jinja2", + "version": "2.11.2" + } + ], + "source": "dependency_path" + }, + { + "reachability": "not_applicable", + "source": "reachability" + } + ], + "finding_type": "sca", + "key": "1713892d-d0ce-41d4-8623-e179e3c33128", + "locations": [ + { + "package": { + "name": "jinja2", + "version": "2.11.2" + }, + "type": "package" + } + ], + "policy_modifications": [], + "problems": [ + { + "id": "CVE-2020-28493", + "source": "cve" + }, + { + "id": "GHSA-g3rq-g295-4j3m", + "source": "ghsa" + }, + { + "affected_hash_ranges": [], + "affected_hashes": [], + "affected_versions": [ + "[,2.11.3)" + ], + "alternative_ids": [], + "created_at": "2020-09-25T17:30:26Z", + "credits": [ + "Yeting Li" + ], + "cvss_base_score": 5.3, + "cvss_sources": [ + { 
+ "assigner": "Snyk", + "base_score": 5.3, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "medium", + "type": "primary", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L/E:P" + }, + { + "assigner": "SUSE", + "base_score": 7.5, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "high", + "type": "secondary", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" + }, + { + "assigner": "NVD", + "base_score": 5.3, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "medium", + "type": "secondary", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" + }, + { + "assigner": "Red Hat", + "base_score": 7.5, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "high", + "type": "secondary", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" + } + ], + "cvss_vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L/E:P", + "disclosed_at": "2020-09-25T17:29:19Z", + "ecosystem": { + "language": "python", + "package_manager": "uv", + "type": "build" + }, + "epss_details": { + "model_version": "v2025.03.14", + "percentile": "0.42975", + "probability": "0.00207" + }, + "exploit_details": { + "maturity_levels": [ + { + "format": "CVSSv3", + "level": "proof of concept", + "type": "secondary" + }, + { + "format": "CVSSv4", + "level": "proof of concept", + "type": "primary" + } + ], + "sources": [ + "Snyk" + ] + }, + "id": "SNYK-PYTHON-JINJA2-1012994", + "initially_fixed_in_versions": [ + "2.11.3" + ], + "is_fixable": true, + "is_malicious": false, + "is_social_media_trending": false, + "modified_at": "2024-03-11T09:53:50Z", + "package_name": "jinja2", + "package_version": "2.11.2", + "published_at": "2021-02-01T19:52:17Z", + "references": [ + { + "title": "GitHub Additional Information", + "url": "https://github.com/pallets/jinja/blob/ab81fd9c277900c85da0c322a2ff9d68a235b2e6/src/jinja2/utils.py%23L20" + }, + { + "title": 
"GitHub PR", + "url": "https://github.com/pallets/jinja/pull/1343" + } + ], + "severity": "medium", + "source": "snyk_vuln" + }, + { + "id": "CWE-400", + "source": "cwe" + } + ], + "rating": { + "severity": "medium" + }, + "risk": {}, + "title": "Regular Expression Denial of Service (ReDoS)" + }, + "id": "1713892d-d0ce-41d4-8623-e179e3c33128", + "links": {}, + "relationships": { + "fix": { + "data": { + "attributes": { + "action": { + "format": "upgrade_package_advice", + "package_name": "jinja2", + "upgrade_paths": [ + { + "dependency_path": [ + { + "name": "simple-project", + "version": "0.1.0" + }, + { + "name": "jinja2", + "version": "2.11.3" + } + ], + "is_drop": false + } + ] + }, + "outcome": "fully_resolved" + }, + "id": "1713892d-d0ce-41d4-8623-e179e3c33128", + "type": "fixes" + } + } + }, + "type": "findings" + }, + { + "attributes": { + "cause_of_failure": false, + "description": "## Overview\n[urllib3](https://pypi.org/project/urllib3/) is a HTTP library with thread-safe connection pooling, file post, and more.\n\nAffected versions of this package are vulnerable to Improper Handling of Highly Compressed Data (Data Amplification) in the Streaming API. The `ContentDecoder` class can be forced to allocate disproportionate resources when processing a single chunk with very high compression, such as via the `stream()`, `read(amt=256)`, `read1(amt=256)`, `read_chunked(amt=256)`, and `readinto(b)` functions.\r\n\r\n**Note:** It is recommended to patch Brotli dependencies (upgrade to at least 1.2.0) if they are installed outside of `urllib3` as well, to avoid other instances of the same vulnerability.\n\n## Details\n\nDenial of Service (DoS) describes a family of attacks, all aimed at making a system inaccessible to its intended and legitimate users.\n\nUnlike other vulnerabilities, DoS attacks usually do not aim at breaching security. 
Rather, they are focused on making websites and services unavailable to genuine users resulting in downtime.\n\nOne popular Denial of Service vulnerability is DDoS (a Distributed Denial of Service), an attack that attempts to clog network pipes to the system by generating a large volume of traffic from many machines.\n\nWhen it comes to open source libraries, DoS vulnerabilities allow attackers to trigger such a crash or crippling of the service by using a flaw either in the application code or from the use of open source libraries.\n\nTwo common types of DoS vulnerabilities:\n\n* High CPU/Memory Consumption- An attacker sending crafted requests that could cause the system to take a disproportionate amount of time to process. For example, [commons-fileupload:commons-fileupload](https://security.snyk.io/vuln/SNYK-JAVA-COMMONSFILEUPLOAD-30082).\n\n* Crash - An attacker sending crafted requests that could cause the system to crash. For Example, [npm `ws` package](https://snyk.io/vuln/npm:ws:20171108)\n\n## Remediation\nUpgrade `urllib3` to version 2.6.0 or higher.\n## References\n- [GitHub Commit](https://github.com/urllib3/urllib3/commit/c19571de34c47de3a766541b041637ba5f716ed7)\n", + "evidence": [ + { + "path": [ + { + "name": "simple-project", + "version": "0.1.0" + }, + { + "name": "urllib3", + "version": "1.24.3" + } + ], + "source": "dependency_path" + }, + { + "reachability": "not_applicable", + "source": "reachability" + } + ], + "finding_type": "sca", + "key": "192e81de-cce6-4b8d-b18a-064041a22fdc", + "locations": [ + { + "package": { + "name": "urllib3", + "version": "1.24.3" + }, + "type": "package" + } + ], + "policy_modifications": [], + "problems": [ + { + "id": "CWE-409", + "source": "cwe" + }, + { + "id": "CVE-2025-66471", + "source": "cve" + }, + { + "id": "GHSA-2xpw-w6gg-jr37", + "source": "ghsa" + }, + { + "affected_hash_ranges": [], + "affected_hashes": [], + "affected_versions": [ + "[1.0,2.6.0)" + ], + "alternative_ids": [], + "created_at": 
"2025-12-05T17:34:10Z", + "credits": [ + "Rui Xi", + "Miroslav Stampar" + ], + "cvss_base_score": 8.9, + "cvss_sources": [ + { + "assigner": "Snyk", + "base_score": 8.9, + "cvss_version": "CVSSv4", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "high", + "type": "primary", + "vector": "CVSS:4.0/AV:N/AC:L/AT:P/PR:N/UI:N/VC:N/VI:N/VA:H/SC:N/SI:N/SA:H" + }, + { + "assigner": "Snyk", + "base_score": 6.8, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "medium", + "type": "secondary", + "vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:C/C:N/I:N/A:H" + }, + { + "assigner": "NVD", + "base_score": 7.5, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "high", + "type": "secondary", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" + }, + { + "assigner": "SUSE", + "base_score": 5.3, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "medium", + "type": "secondary", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" + }, + { + "assigner": "Red Hat", + "base_score": 7.5, + "cvss_version": "CVSSv31", + "modified_at": "0001-01-01T00:00:00Z", + "severity": "high", + "type": "secondary", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" + } + ], + "cvss_vector": "CVSS:4.0/AV:N/AC:L/AT:P/PR:N/UI:N/VC:N/VI:N/VA:H/SC:N/SI:N/SA:H", + "disclosed_at": "2025-12-05T16:40:41Z", + "ecosystem": { + "language": "python", + "package_manager": "uv", + "type": "build" + }, + "epss_details": { + "model_version": "v2025.03.14", + "percentile": "0.08913", + "probability": "0.00031" + }, + "exploit_details": { + "maturity_levels": [ + { + "format": "CVSSv3", + "level": "unproven", + "type": "secondary" + }, + { + "format": "CVSSv4", + "level": "unreported", + "type": "primary" + } + ], + "sources": [] + }, + "id": "SNYK-PYTHON-URLLIB3-14192442", + "initially_fixed_in_versions": [ + "2.6.0" + ], + "is_fixable": true, + "is_malicious": false, + "is_social_media_trending": 
false, + "modified_at": "2026-02-07T05:35:23Z", + "package_name": "urllib3", + "package_version": "1.24.3", + "published_at": "2025-12-07T11:04:17Z", + "references": [ + { + "title": "GitHub Commit", + "url": "https://github.com/urllib3/urllib3/commit/c19571de34c47de3a766541b041637ba5f716ed7" + } + ], + "severity": "high", + "source": "snyk_vuln" + } + ], + "rating": { + "severity": "high" + }, + "risk": {}, + "title": "Improper Handling of Highly Compressed Data (Data Amplification)" + }, + "id": "192e81de-cce6-4b8d-b18a-064041a22fdc", + "links": {}, + "relationships": { + "fix": { + "data": { + "attributes": { + "action": { + "format": "upgrade_package_advice", + "package_name": "urllib3", + "upgrade_paths": [ + { + "dependency_path": [ + { + "name": "simple-project", + "version": "0.1.0" + }, + { + "name": "urllib3", + "version": "2.6.0" + } + ], + "is_drop": false + } + ] + }, + "outcome": "fully_resolved" + }, + "id": "192e81de-cce6-4b8d-b18a-064041a22fdc", + "type": "fixes" + } + } + }, + "type": "findings" + } + ], + "jsonapi": { + "version": "1.0" + }, + "links": { + "first": "/orgs/6d66a729-298d-4055-a3e2-94e9b44e6c26/tests/916196fb-c6ad-4958-93d2-0e6cefe777bb/findings?version=2024-10-15&limit=100", + "self": "/orgs/6d66a729-298d-4055-a3e2-94e9b44e6c26/tests/916196fb-c6ad-4958-93d2-0e6cefe777bb/findings?version=2024-10-15&limit=100" + }, + "meta": { + "count": 2, + "order": "asc", + "sorted-by": "id" + } +} \ No newline at end of file diff --git a/test/fixtures/sbom/uv-sbom-cdx15.json b/test/fixtures/sbom/uv-sbom-cdx15.json new file mode 100644 index 0000000000..596a454611 --- /dev/null +++ b/test/fixtures/sbom/uv-sbom-cdx15.json @@ -0,0 +1,74 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "serialNumber": "urn:uuid:360d9923-6729-4332-9370-ee0dc4758e04", + "metadata": { + "timestamp": "2026-04-10T10:40:16.133339000Z", + "tools": [ + { + "vendor": "Astral Software Inc.", + "name": "uv", + "version": "0.11.3" + } + ], + 
"component": { + "type": "library", + "bom-ref": "simple-project-1@0.1.0", + "name": "simple-project", + "version": "0.1.0", + "properties": [ + { + "name": "uv:package:is_project_root", + "value": "true" + } + ] + } + }, + "components": [ + { + "type": "library", + "bom-ref": "jinja2-2@2.11.2", + "name": "jinja2", + "version": "2.11.2", + "purl": "pkg:pypi/jinja2@2.11.2" + }, + { + "type": "library", + "bom-ref": "markupsafe-3@3.0.3", + "name": "markupsafe", + "version": "3.0.3", + "purl": "pkg:pypi/markupsafe@3.0.3" + }, + { + "type": "library", + "bom-ref": "urllib3-4@1.24.3", + "name": "urllib3", + "version": "1.24.3", + "purl": "pkg:pypi/urllib3@1.24.3" + } + ], + "dependencies": [ + { + "ref": "jinja2-2@2.11.2", + "dependsOn": [ + "markupsafe-3@3.0.3" + ] + }, + { + "ref": "markupsafe-3@3.0.3", + "dependsOn": [] + }, + { + "ref": "simple-project-1@0.1.0", + "dependsOn": [ + "jinja2-2@2.11.2", + "urllib3-4@1.24.3" + ] + }, + { + "ref": "urllib3-4@1.24.3", + "dependsOn": [] + } + ] +} diff --git a/test/jest/acceptance/cli-args.spec.ts b/test/jest/acceptance/cli-args.spec.ts index 08420f33b6..b23c3dc399 100644 --- a/test/jest/acceptance/cli-args.spec.ts +++ b/test/jest/acceptance/cli-args.spec.ts @@ -288,6 +288,19 @@ describe.each(userJourneyWorkflows)( expect(code).toEqual(2); }); + test('snyk test --exclude-paths without --all-projects displays error message', async () => { + const { code, stdout } = await runSnykCLI( + `test --exclude-paths=packages/api/package.json`, + { + env, + }, + ); + expect(stdout).toContainText( + 'The --exclude-paths option can only be used in combination with --all-projects or --yarn-workspaces.', + ); + expect(code).toEqual(2); + }); + test('snyk iac test --exclude=path/to/dir displays error message', async () => { const exclude = path.normalize('path/to/dir'); const { code, stdout } = await runSnykCLI( diff --git a/test/jest/acceptance/language-server-extension.spec.ts b/test/jest/acceptance/language-server-extension.spec.ts index 
c58b34924f..12100bec26 100644 --- a/test/jest/acceptance/language-server-extension.spec.ts +++ b/test/jest/acceptance/language-server-extension.spec.ts @@ -1,12 +1,22 @@ import { runSnykCLI } from '../util/runSnykCLI'; +import * as path from 'path'; import { pathToFileURL } from 'url'; import { sleep } from '../../../src/lib/common'; import * as cp from 'child_process'; import * as rpc from 'vscode-jsonrpc/node'; import { withFipsEnvIfNeeded } from '../util/fipsTestHelper'; +import { requireSnykToken } from '../util/requireSnykToken'; jest.setTimeout(1000 * 120); +/** snyk-ls v25+ wire type: only entries with `changed: true` are applied (see ConfigSetting in snyk-ls). */ +function lspConfigSetting(value: unknown): { + value: unknown; + changed: boolean; +} { + return { value, changed: true }; +} + describe('Language Server Extension', () => { it('get ls licenses', async () => { const result = await runSnykCLI('language-server --licenses -d'); @@ -30,10 +40,16 @@ describe('Language Server Extension', () => { }); it('run and wait for diagnostics', async () => { + const token = requireSnykToken(); let cmd = ''; if (process.env.TEST_SNYK_COMMAND !== undefined) { cmd = process.env.TEST_SNYK_COMMAND; } + if (!cmd) { + throw new Error( + 'Set TEST_SNYK_COMMAND to the built CLI binary (e.g. ./binary-releases/snyk-macos-arm64).', + ); + } const cli = cp.spawn(cmd, ['language-server'], { stdio: 'pipe', // Use stdin and stdout for communication: @@ -51,6 +67,33 @@ describe('Language Server Extension', () => { new rpc.StreamMessageWriter(cli.stdin), ); + const workspaceFixture = path.resolve( + path.join(__dirname, '../../fixtures/npm/with-vulnerable-lodash-dep'), + ); + const cliPathResolved = cmd !== '' ? path.resolve(cmd) : ''; + + // Keys are internal/pflag names (e.g. internal/types/ldx_sync_config.go in snyk-ls). 
+ const initOptions = { + settings: { + api_endpoint: lspConfigSetting(process.env.TEST_SNYK_API), + token: lspConfigSetting(token), + authentication_method: lspConfigSetting('token'), + automatic_authentication: lspConfigSetting(false), + trust_enabled: lspConfigSetting(true), + trusted_folders: lspConfigSetting([workspaceFixture]), + snyk_oss_enabled: lspConfigSetting(true), + snyk_code_enabled: lspConfigSetting(false), + snyk_iac_enabled: lspConfigSetting(false), + snyk_secrets_enabled: lspConfigSetting(false), + automatic_download: lspConfigSetting(false), + cli_path: lspConfigSetting(cliPathResolved), + scan_automatic: lspConfigSetting(true), + send_error_reports: lspConfigSetting(false), + }, + integrationName: 'MyFakePlugin', + integrationVersion: '1.2.3', + }; + // create an RPC endpoint for the process connection.listen(); @@ -68,25 +111,11 @@ describe('Language Server Extension', () => { workspaceFolders: [ { name: 'workspace', - uri: pathToFileURL('./test/fixtures/npm/with-vulnerable-lodash-dep') - .href, + uri: pathToFileURL(workspaceFixture).href, }, ], rootUri: null, - initializationOptions: { - activateSnykCodeSecurity: 'false', - activateSnykCodeQuality: 'false', - activateSnykOpenSource: 'true', - activateSnykIac: 'false', - endpoint: process.env.TEST_SNYK_API, - token: process.env.TEST_SNYK_TOKEN, - manageBinariesAutomatically: 'false', - enableTrustedFoldersFeature: 'false', - integrationName: 'MyFakePlugin', - integrationVersion: '1.2.3', - enableTelemetry: 'false', - cliPath: cmd, - }, + initializationOptions: initOptions, }); let diagnosticCount = 0; diff --git a/test/jest/acceptance/print-effective-dep-graph-with-errors.spec.ts b/test/jest/acceptance/print-effective-dep-graph-with-errors.spec.ts index f9f687811e..40e66d4838 100644 --- a/test/jest/acceptance/print-effective-dep-graph-with-errors.spec.ts +++ b/test/jest/acceptance/print-effective-dep-graph-with-errors.spec.ts @@ -1,7 +1,11 @@ import { fakeServer } from 
'../../acceptance/fake-server'; -import { createProjectFromFixture } from '../util/createProject'; -import { runSnykCLI } from '../util/runSnykCLI'; +import { + createProjectFromFixture, + createProjectFromWorkspace, +} from '../util/createProject'; import { getServerPort } from '../util/getServerPort'; +import { parseJSONL } from '../util/parseJSONL'; +import { runSnykCLI } from '../util/runSnykCLI'; import { ProblemError } from '@snyk/error-catalog-nodejs-public'; jest.setTimeout(1000 * 30); @@ -163,20 +167,7 @@ describe('`test` command with `--print-effective-graph-with-errors` option', () expect(code).toBe(0); - // Parse JSONL output - const lines = stdout - .trim() - .split('\n') - .filter((line) => line.trim()); - - const jsonObjects: any[] = []; - for (const line of lines) { - try { - jsonObjects.push(JSON.parse(line)); - } catch { - // Skip non-JSON lines - } - } + const jsonObjects = parseJSONL(stdout) as any[]; // Should have at least one output (either success or error) expect(jsonObjects.length).toBeGreaterThan(0); @@ -225,4 +216,103 @@ describe('`test` command with `--print-effective-graph-with-errors` option', () // stderr should contain the failure warning expect(stderr).toMatch(/failed to get dependencies/i); }); + + it('outputs the target framework for nuget/dotnet projects', async () => { + const project = await createProjectFromWorkspace('nuget-app-6-7-8'); + const { code, stdout } = await runSnykCLI( + 'test --print-effective-graph-with-errors', + { + cwd: project.path(), + env, + }, + ); + + expect(code).toBe(0); + + const outputs = parseJSONL(stdout) as any[]; + + expect(outputs[0]).toMatchObject({ + targetRuntime: 'net6.0', + normalisedTargetFile: 'obj/project.assets.json', + }); + expect(outputs[0].depGraph).toBeDefined(); + }); + + it('includes workspace type for yarn workspaces', async () => { + const project = await createProjectFromWorkspace('yarn-workspaces'); + const { code, stdout } = await runSnykCLI( + 'test --yarn-workspaces 
--print-effective-graph-with-errors', + { + cwd: project.path(), + env, + }, + ); + + expect(code).toBe(0); + + const outputs = parseJSONL(stdout) as any[]; + + // All workspace projects should have workspace field + expect(outputs.length).toBeGreaterThan(0); + for (const output of outputs) { + expect(output).toHaveProperty('workspace'); + expect(output.workspace).toEqual({ type: 'yarn' }); + expect(output).toHaveProperty('depGraph'); + expect(output.depGraph.pkgManager.name).toBe('yarn'); + } + }); + + it('includes workspace type for workspaces with --all-projects', async () => { + const project = await createProjectFromWorkspace('yarn-workspaces'); + + const { code, stdout } = await runSnykCLI( + 'test --all-projects --print-effective-graph-with-errors', + { + cwd: project.path(), + env, + }, + ); + + expect(code).toBe(0); + + const outputs = parseJSONL(stdout) as any[]; + + // Should have outputs with workspace field + expect(outputs.length).toBeGreaterThan(0); + + // All outputs should have workspace field when detected as workspace + const workspaceOutputs = outputs.filter((output) => output.workspace); + expect(workspaceOutputs.length).toBeGreaterThan(0); + + // Verify workspace field structure + for (const output of workspaceOutputs) { + expect(output.workspace).toHaveProperty('type'); + expect(['yarn', 'npm', 'pnpm']).toContain(output.workspace.type); + expect(output).toHaveProperty('depGraph'); + } + }); + + it('does not include workspace field for non-workspace projects', async () => { + const project = await createProjectFromFixture( + 'npm/with-vulnerable-lodash-dep', + ); + server.setCustomResponse( + await project.readJSON('test-dep-graph-result.json'), + ); + const { code, stdout } = await runSnykCLI( + 'test --print-effective-graph-with-errors', + { + cwd: project.path(), + env, + }, + ); + + expect(code).toEqual(0); + + const jsonOutput = JSON.parse(stdout); + + // Non-workspace project should NOT have workspace field + 
expect(jsonOutput).not.toHaveProperty('workspace'); + expect(jsonOutput).toHaveProperty('depGraph'); + }); }); diff --git a/test/jest/acceptance/snyk-sbom-test/snyk-sbom-test.spec.ts b/test/jest/acceptance/snyk-sbom-test/snyk-sbom-test.spec.ts new file mode 100644 index 0000000000..842efb3fe9 --- /dev/null +++ b/test/jest/acceptance/snyk-sbom-test/snyk-sbom-test.spec.ts @@ -0,0 +1,78 @@ +import * as fs from 'fs'; +import * as path from 'path'; + +import { fakeServer } from '../../../acceptance/fake-server'; +import { getAvailableServerPort } from '../../util/getServerPort'; +import { getFixturePath } from '../../util/getFixturePath'; +import { runSnykCLI } from '../../util/runSnykCLI'; +import { EXIT_CODES } from '../../../../src/cli/exit-codes'; +import { testIf } from '../../../utils'; + +jest.setTimeout(1000 * 60 * 5); + +const hasBinary = !!process.env.TEST_SNYK_COMMAND; + +// Org ID returned by the fake server's /rest/self endpoint. +const FAKE_SERVER_ORG_ID = '55555555-5555-5555-5555-555555555555'; +// Test ID the fake server assigns via the test_jobs 303 redirect. 
+const FAKE_SERVER_TEST_ID = 'aaaaaaaa-bbbb-cccc-dddd-000000000002'; + +describe('snyk sbom test (mocked server only)', () => { + let server; + let env: Record; + + beforeAll(async () => { + const port = await getAvailableServerPort(process); + const baseApi = '/v1'; + env = { + ...process.env, + SNYK_API: 'http://localhost:' + port + baseApi, + SNYK_HOST: 'http://localhost:' + port, + SNYK_TOKEN: '123456789', + SNYK_HTTP_PROTOCOL_UPGRADE: '0', + }; + server = fakeServer(baseApi, env.SNYK_TOKEN); + await server.listenPromise(port); + }); + + afterEach(() => { + jest.resetAllMocks(); + server.restore(); + }); + + afterAll((done) => { + server.close(() => { + done(); + }); + }); + + testIf(hasBinary)( + '`sbom test` succeeds for a uv CycloneDX SBOM', + async () => { + server.setFeatureFlag('enableUvCLI', true); + + const findingsResponse = JSON.parse( + fs.readFileSync( + path.resolve(getFixturePath('sbom'), 'uv-findings-response.json'), + 'utf8', + ), + ); + server.setEndpointResponse( + `/rest/orgs/${FAKE_SERVER_ORG_ID}/tests/${FAKE_SERVER_TEST_ID}/findings`, + findingsResponse, + ); + + const sbomFilePath = getFixturePath('sbom/uv-sbom-cdx15.json'); + const { code, stdout, stderr } = await runSnykCLI( + `sbom test --file=${sbomFilePath}`, + { env }, + ); + + expect(stderr).toBe(''); + expect(stdout).not.toContain('no semver library defined for ecosystem'); + expect(stdout).toContain('Test Summary'); + expect(stdout).toContain('Issues to fix by upgrading'); + expect(code).toBe(EXIT_CODES.VULNS_FOUND); + }, + ); +}); diff --git a/test/jest/acceptance/snyk-sbom/allow-incomplete-sbom.spec.ts b/test/jest/acceptance/snyk-sbom/allow-incomplete-sbom.spec.ts new file mode 100644 index 0000000000..4a6c139fe4 --- /dev/null +++ b/test/jest/acceptance/snyk-sbom/allow-incomplete-sbom.spec.ts @@ -0,0 +1,567 @@ +import { + fakeServer, + getFirstIPv4Address, +} from '../../../acceptance/fake-server'; +import { isWindowsOperatingSystem, testIf } from '../../../utils'; +import { 
createProject } from '../../util/createProject'; +import { getAvailableServerPort } from '../../util/getServerPort'; +import { runSnykCLI } from '../../util/runSnykCLI'; + +jest.setTimeout(1000 * 60 * 5); + +/** + * Acceptance tests for `snyk sbom --allow-incomplete-sbom`. + * + * What the flag is supposed to do + * ──────────────────────────────── + * When `--allow-incomplete-sbom` is passed, the CLI is expected to: + * 1. Scan every detected project (npm, Maven, Gradle, …) without aborting + * on the first failure (`fail-fast=false`). + * 2. Produce a single dep-graph payload that contains: + * - `depGraphs[]` – every successfully resolved project + * - `scanErrors[]` – one entry per project that failed to resolve + * 3. Return exit code `0` even when some projects could not be resolved. + * 4. Not prune the dep-graph (so transitive dependencies stay visible + * in the resulting SBOM, even if a sub-tree is repeated). + * + * The behaviour is identical for both supported SBOM formats: + * - cyclonedx1.6+json + * - spdx2.3+json + * + * These tests run the real CLI binary against a `fakeServer` that mocks the + * `/sboms` endpoint. 
The fake server synthesises a CycloneDX/SPDX document + * out of the dep-graph(s) it received, so we can verify both: + * - the wire payload that left the CLI (depGraphs / scanErrors) + * - the SBOM document the CLI ultimately handed back to the user (stdout) + * + * Fixture layout (`test/fixtures/sbom-allow-incomplete/`): + * + * npm-multi-partial-broken/ + * valid-project/ – npm project with debug@2.2.0 → ms@0.7.1 (transitive) + * broken-project/ – npm project with an unparseable package.json + * + * npm-multi-all-broken/ + * broken-project-a/ – unparseable package.json + * broken-project-b/ – unparseable package.json + * + * maven-multi-partial-broken/ + * valid-project/ – pom.xml with axis:axis:1.4 + transitive deps + * broken-project/ – malformed pom.xml + * + * gradle-multi-partial-broken/ + * valid-project/ – build.gradle with log4j-core:2.17.1 → log4j-api + * broken-project/ – invalid Groovy / build.gradle + * + * multi-lang-partial-broken/ + * npm-app/ – npm with debug → ms (transitive) + * maven-app/ – maven with axis 1.4 (transitive) + * gradle-app/ – gradle with log4j-core 2.17.1 (transitive) + * broken-app/ – broken package.json + */ + +// ────────────────────────────────────────────────────────────────────────── +// Types +// ────────────────────────────────────────────────────────────────────────── + +interface DepGraphRequest { + pkgs: Array<{ id: string; info: { name: string; version?: string } }>; + graph: { + rootNodeId: string; + nodes: Array<{ nodeId: string; pkgId: string }>; + }; + pkgManager?: { name?: string }; +} + +interface SbomPayload { + depGraph?: DepGraphRequest; + depGraphs?: DepGraphRequest[]; + subject?: { name: string }; + scanErrors?: Array<{ subject?: string; text: string }>; + tools?: Array<{ name: string; vendor?: string; version?: string }>; +} + +interface SbomRequest { + method: string; + path: string; + body: SbomPayload; +} + +interface CycloneDxBom { + specVersion: string; + $schema: string; + components: Array<{ name: 
string; version?: string; purl?: string }>; + metadata: { component: { name: string } }; +} + +interface SpdxBom { + spdxVersion: string; + name: string; + packages: Array<{ name: string; version?: string }>; + creators: unknown[]; +} + +type AnyBom = CycloneDxBom | SpdxBom; + +// ────────────────────────────────────────────────────────────────────────── +// Test fixture helpers +// ────────────────────────────────────────────────────────────────────────── + +const ORG = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'; + +function isCycloneDx(bom: AnyBom): bom is CycloneDxBom { + return (bom as CycloneDxBom).specVersion !== undefined; +} + +function logSbomSummary(bom: AnyBom, payload: SbomPayload): void { + const summary = isCycloneDx(bom) + ? { + format: `CycloneDX ${bom.specVersion}`, + rootName: bom.metadata?.component?.name, + components: bom.components?.map((c) => c.name).sort(), + } + : { + format: bom.spdxVersion, + rootName: bom.name, + components: bom.packages?.map((p) => p.name).sort(), + }; + + const wire = { + depGraphCount: payload.depGraphs?.length ?? (payload.depGraph ? 1 : 0), + scanErrorTexts: (payload.scanErrors ?? []).map((e) => ({ + subject: e.subject, + // truncate noisy stack traces so the test log stays readable + text: e.text.length > 200 ? 
`${e.text.slice(0, 200)}…` : e.text, + })), + }; + + // eslint-disable-next-line no-console + console.log('SBOM received:', JSON.stringify(summary, null, 2)); + // eslint-disable-next-line no-console + console.log('Wire payload:', JSON.stringify(wire, null, 2)); +} + +// ────────────────────────────────────────────────────────────────────────── +// Test suite +// ────────────────────────────────────────────────────────────────────────── + +describe('snyk sbom --allow-incomplete-sbom (acceptance, mocked server)', () => { + let server: ReturnType; + let env: Record; + + beforeAll(async () => { + const ipAddress = getFirstIPv4Address(); + const port = await getAvailableServerPort(process); + const baseApi = '/api/v1'; + env = { + ...process.env, + SNYK_API: `http://${ipAddress}:${port}${baseApi}`, + SNYK_HOST: `http://${ipAddress}:${port}`, + SNYK_TOKEN: '123456789', + SNYK_DISABLE_ANALYTICS: '1', + SNYK_HTTP_PROTOCOL_UPGRADE: '0', + }; + server = fakeServer(baseApi, env.SNYK_TOKEN); + await server.listenPromise(port); + }); + + afterEach(() => { + jest.resetAllMocks(); + server.restore(); + }); + + afterAll((done) => { + server.close(() => done()); + }); + + // ─── Helpers using the closure of the suite ────────────────────────────── + + const getSbomRequests = (): SbomRequest[] => + (server.getRequests() as unknown as SbomRequest[]).filter( + (req) => req.method === 'POST' && req.path.includes('/sbom'), + ); + + const getSbomRequest = (): SbomRequest => { + const requests = getSbomRequests(); + expect(requests).toHaveLength(1); + return requests[0]; + }; + + const runSbom = async ( + cwd: string, + extraArgs: string, + ): Promise<{ code: number; stdout: string; stderr: string }> => { + return runSnykCLI(`sbom --org ${ORG} --debug ${extraArgs}`, { cwd, env }); + }; + + const parseSbom = (stdout: string): AnyBom => { + let bom: AnyBom | undefined; + expect(() => { + bom = JSON.parse(stdout) as AnyBom; + }).not.toThrow(); + if (!bom) { + throw new Error('failed to 
parse SBOM from stdout'); + } + return bom; + }; + + const componentNames = (bom: AnyBom): string[] => + isCycloneDx(bom) + ? (bom.components ?? []).map((c) => c.name) + : (bom.packages ?? []).map((p) => p.name); + + // ────────────────────────────────────────────────────────────────────── + // 1. NPM – partial failure (transitive deps in SBOM, scanError on broken) + // ────────────────────────────────────────────────────────────────────── + + const expectFormat = (bom: AnyBom, format: string): void => { + if (format === 'cyclonedx1.6+json') { + expect(isCycloneDx(bom)).toBe(true); + expect((bom as CycloneDxBom).specVersion).toBe('1.6'); + } else { + expect(isCycloneDx(bom)).toBe(false); + expect((bom as SpdxBom).spdxVersion).toBe('SPDX-2.3'); + } + }; + + describe.each([ + { format: 'cyclonedx1.6+json' }, + { format: 'spdx2.3+json' }, + ] as const)('NPM workspace, --format $format', ({ format }) => { + test( + 'GIVEN a multi-project NPM workspace with one broken project ' + + 'WHEN running `sbom --all-projects --allow-incomplete-sbom` ' + + 'THEN the CLI exits 0, the SBOM contains the transitive deps ' + + 'of the healthy project, and the wire payload has scanErrors ' + + 'for the broken project', + async () => { + // ── GIVEN ──────────────────────────────────────────────────────── + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-partial-broken', + ); + + // ── WHEN ───────────────────────────────────────────────────────── + const { code, stdout, stderr } = await runSbom( + project.path(), + `--format ${format} --all-projects --allow-incomplete-sbom`, + ); + if (code !== 0) { + // eslint-disable-next-line no-console + console.error('CLI stderr:', stderr); + } + + // ── THEN ───────────────────────────────────────────────────────── + expect(code).toBe(0); + + const bom = parseSbom(stdout); + const { body: payload } = getSbomRequest(); + logSbomSummary(bom, payload); + + expectFormat(bom, format); + + // The SBOM exposes both the direct dep 
(`debug`) and the transitive + // (`ms`) – this proves the dep-graph was *not* pruned. + const names = componentNames(bom); + expect(names).toEqual(expect.arrayContaining(['debug', 'ms'])); + + // Wire payload: at least one resolved dep-graph and one scan error + expect(payload.depGraphs ?? []).toHaveLength(1); + const [graph] = payload.depGraphs ?? []; + expect(graph.pkgs.map((p) => p.info.name)).toEqual( + expect.arrayContaining(['debug', 'ms']), + ); + + expect(payload.scanErrors ?? []).toHaveLength(1); + expect((payload.scanErrors ?? [])[0].text.length).toBeGreaterThan(0); + }, + ); + + test( + 'GIVEN a workspace where every NPM project is broken ' + + 'WHEN running `sbom --all-projects --allow-incomplete-sbom` ' + + 'THEN the CLI exits 0, the SBOM has zero components, ' + + 'and the wire payload has one scanError per broken project', + async () => { + // ── GIVEN ──────────────────────────────────────────────────────── + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-all-broken', + ); + + // ── WHEN ───────────────────────────────────────────────────────── + const { code, stdout, stderr } = await runSbom( + project.path(), + `--format ${format} --all-projects --allow-incomplete-sbom`, + ); + if (code !== 0) { + // eslint-disable-next-line no-console + console.error('CLI stderr:', stderr); + } + + // ── THEN ───────────────────────────────────────────────────────── + expect(code).toBe(0); + + const bom = parseSbom(stdout); + const { body: payload } = getSbomRequest(); + logSbomSummary(bom, payload); + + expectFormat(bom, format); + expect(componentNames(bom)).toHaveLength(0); + + expect(payload.depGraphs ?? []).toHaveLength(0); + expect(payload.scanErrors ?? []).toHaveLength(2); + for (const err of payload.scanErrors ?? []) { + expect(err.text.length).toBeGreaterThan(0); + } + }, + ); + }); + + // ────────────────────────────────────────────────────────────────────── + // 2. 
Maven – transitive deps survive in the SBOM (no pruning) + // ────────────────────────────────────────────────────────────────────── + + describe('Maven workspace', () => { + test( + 'GIVEN a multi-module workspace with one valid and one malformed pom.xml ' + + 'WHEN running `sbom --all-projects --allow-incomplete-sbom` ' + + 'THEN the CLI exits 0, the SBOM exposes the full transitive Maven graph, ' + + 'and the wire payload has a scanError for the malformed pom.xml', + async () => { + // ── GIVEN ──────────────────────────────────────────────────────── + const project = await createProject( + 'sbom-allow-incomplete/maven-multi-partial-broken', + ); + + // ── WHEN ───────────────────────────────────────────────────────── + const { code, stdout, stderr } = await runSbom( + project.path(), + '--format cyclonedx1.6+json --all-projects --allow-incomplete-sbom', + ); + if (code !== 0) { + // eslint-disable-next-line no-console + console.error('CLI stderr:', stderr); + } + + // ── THEN ───────────────────────────────────────────────────────── + expect(code).toBe(0); + + const bom = parseSbom(stdout) as CycloneDxBom; + const { body: payload } = getSbomRequest(); + logSbomSummary(bom, payload); + + expect(bom.specVersion).toBe('1.6'); + + // axis:axis:1.4 has well-known transitive deps; assert direct + at + // least one transitive component is present in the SBOM. + const names = componentNames(bom); + expect(names).toEqual( + expect.arrayContaining([ + 'axis:axis', + // commons-logging is a transitive of axis:axis through commons-discovery. + 'commons-logging:commons-logging', + ]), + ); + + // Exactly one resolved Maven graph and one scan error from the + // malformed pom.xml. The error's text must reference Maven so we + // know the right plugin produced it. + expect(payload.depGraphs ?? []).toHaveLength(1); + expect(payload.scanErrors ?? []).toHaveLength(1); + expect((payload.scanErrors ?? 
[])[0].text.toLowerCase()).toBe( + 'error parsing the xml file', + ); + }, + ); + }); + + // ────────────────────────────────────────────────────────────────────── + // 3. Gradle – transitive deps survive in the SBOM (no pruning) + // ────────────────────────────────────────────────────────────────────── + + describe('Gradle workspace', () => { + testIf(!isWindowsOperatingSystem())( + 'GIVEN a Gradle project with a broken sibling build.gradle ' + + 'WHEN running `sbom --all-projects --allow-incomplete-sbom` ' + + 'THEN the CLI exits 0, the SBOM exposes the full transitive Gradle graph, ' + + 'and the wire payload has a scanError for the broken build script', + async () => { + // ── GIVEN ──────────────────────────────────────────────────────── + const project = await createProject( + 'sbom-allow-incomplete/gradle-multi-partial-broken', + ); + + // ── WHEN ───────────────────────────────────────────────────────── + const { code, stdout, stderr } = await runSbom( + project.path(), + '--format cyclonedx1.6+json --all-projects --allow-incomplete-sbom', + ); + if (code !== 0) { + // eslint-disable-next-line no-console + console.error('CLI stderr:', stderr); + } + + // ── THEN ───────────────────────────────────────────────────────── + expect(code).toBe(0); + + const bom = parseSbom(stdout) as CycloneDxBom; + const { body: payload } = getSbomRequest(); + logSbomSummary(bom, payload); + + // log4j-core has a known transitive dep on log4j-api – both must + // be present, proving the dep-graph was not pruned. + const names = componentNames(bom); + expect(names).toEqual( + expect.arrayContaining([ + 'org.apache.logging.log4j:log4j-core', + 'org.apache.logging.log4j:log4j-api', + ]), + ); + + expect(payload.depGraphs ?? []).toHaveLength(1); + expect(payload.scanErrors ?? []).toHaveLength(1); + expect((payload.scanErrors ?? 
[])[0].text.toLowerCase()).toMatch( + /gradle error/, + ); + }, + ); + }); + + // ────────────────────────────────────────────────────────────────────── + // 4. Multi-language workspace (NPM + Maven + Gradle + broken NPM) + // ────────────────────────────────────────────────────────────────────── + + describe('multi-language workspace', () => { + testIf(!isWindowsOperatingSystem())( + 'GIVEN a workspace mixing healthy NPM, Maven and Gradle projects with one broken NPM project ' + + 'WHEN running `sbom --all-projects --allow-incomplete-sbom` ' + + 'THEN the CLI exits 0, the SBOM contains the transitive deps from all three ecosystems, ' + + 'and the wire payload contains exactly three depGraphs and one scanError', + async () => { + // ── GIVEN ──────────────────────────────────────────────────────── + const project = await createProject( + 'sbom-allow-incomplete/multi-lang-partial-broken', + ); + + // ── WHEN ───────────────────────────────────────────────────────── + const { code, stdout, stderr } = await runSbom( + project.path(), + '--format cyclonedx1.6+json --all-projects --allow-incomplete-sbom', + ); + if (code !== 0) { + // eslint-disable-next-line no-console + console.error('CLI stderr:', stderr); + } + + // ── THEN ───────────────────────────────────────────────────────── + expect(code).toBe(0); + + const bom = parseSbom(stdout) as CycloneDxBom; + const { body: payload } = getSbomRequest(); + logSbomSummary(bom, payload); + + const names = componentNames(bom); + + // NPM transitive + expect(names).toEqual(expect.arrayContaining(['debug', 'ms'])); + // Maven transitive + expect(names).toEqual( + expect.arrayContaining([ + 'axis:axis', + 'commons-logging:commons-logging', + ]), + ); + // Gradle transitive + expect(names).toEqual( + expect.arrayContaining([ + 'org.apache.logging.log4j:log4j-core', + 'org.apache.logging.log4j:log4j-api', + ]), + ); + + // Three healthy graphs (npm, maven, gradle) and exactly one error + expect(payload.depGraphs ?? 
[]).toHaveLength(3); + expect(payload.scanErrors ?? []).toHaveLength(1); + + // Sanity: every healthy graph identifies its package manager so + // we know each ecosystem actually contributed a graph. + const pkgManagers = (payload.depGraphs ?? []) + .map((g) => g.pkgManager?.name) + .filter((n): n is string => !!n) + .sort(); + expect(pkgManagers).toEqual( + expect.arrayContaining(['gradle', 'maven', 'npm']), + ); + }, + ); + }); + + // ────────────────────────────────────────────────────────────────────── + // 5. Backward compatibility (without --allow-incomplete-sbom) + // ────────────────────────────────────────────────────────────────────── + + describe('backward compatibility (flag absent / off)', () => { + test( + 'GIVEN a multi-project NPM workspace with one broken project ' + + 'WHEN running `sbom --all-projects` WITHOUT --allow-incomplete-sbom ' + + 'THEN the CLI fails with a non-zero exit code (legacy fail-fast behaviour)', + async () => { + // ── GIVEN ──────────────────────────────────────────────────────── + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-partial-broken', + ); + + // ── WHEN ───────────────────────────────────────────────────────── + const { code } = await runSbom( + project.path(), + '--format cyclonedx1.6+json --all-projects', + ); + + // ── THEN ───────────────────────────────────────────────────────── + expect(code).not.toBe(0); + + expect(getSbomRequests()).toHaveLength(0); + }, + ); + + test( + 'GIVEN a healthy single NPM project ' + + 'WHEN running `sbom` WITHOUT --allow-incomplete-sbom ' + + 'THEN the wire payload has neither depGraphs[] nor scanErrors[] ' + + 'and the SBOM is generated as before the feature was introduced', + async () => { + // ── GIVEN ──────────────────────────────────────────────────────── + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-partial-broken', + ); + + // ── WHEN ───────────────────────────────────────────────────────── + const { code, stdout, 
stderr } = await runSnykCLI( + `sbom --org ${ORG} --format cyclonedx1.6+json --debug --file=valid-project/package.json`, + { cwd: project.path(), env }, + ); + if (code !== 0) { + // eslint-disable-next-line no-console + console.error('CLI stderr:', stderr); + } + + // ── THEN ───────────────────────────────────────────────────────── + expect(code).toBe(0); + + const bom = parseSbom(stdout) as CycloneDxBom; + const { body: payload } = getSbomRequest(); + logSbomSummary(bom, payload); + + expect(bom.specVersion).toBe('1.6'); + expect(componentNames(bom)).toEqual( + expect.arrayContaining(['debug', 'ms']), + ); + + // Single-project payload uses `depGraph` (singular) – the multi + // shape (`depGraphs[]` + `scanErrors[]`) must not appear, which + // would otherwise be a breaking change for the SBOM service. + expect(payload.depGraph).toBeDefined(); + expect(payload.depGraphs).toBeUndefined(); + expect(payload.scanErrors).toBeUndefined(); + }, + ); + }); +}); diff --git a/test/jest/acceptance/snyk-secrets/snyk-secrets-test-user-journey.spec.ts b/test/jest/acceptance/snyk-secrets/snyk-secrets-test-user-journey.spec.ts index 8b42f82843..e1044f3516 100644 --- a/test/jest/acceptance/snyk-secrets/snyk-secrets-test-user-journey.spec.ts +++ b/test/jest/acceptance/snyk-secrets/snyk-secrets-test-user-journey.spec.ts @@ -1,10 +1,19 @@ import { execSync } from 'child_process'; -import { existsSync, unlinkSync } from 'fs'; - +import { + existsSync, + unlinkSync, + mkdirSync, + rmSync, + copyFileSync, + readdirSync, + statSync, +} from 'fs'; import { matchers } from 'jest-json-schema'; import { runSnykCLI } from '../../util/runSnykCLI'; import { EXIT_CODES } from '../../../../src/cli/exit-codes'; -import { resolve } from 'path'; +import { join, resolve } from 'path'; +import { randomUUID } from 'crypto'; +import { makeTmpDirectory } from '../../../utils'; expect.extend(matchers); jest.setTimeout(1000 * 300); @@ -15,44 +24,128 @@ const TEST_REPO_COMMIT = 
'366ae0080cc67973619584080fc85734ba2658b2'; const TEST_REPO_URL = 'https://github.com/leaktk/fake-leaks'; const TEST_DIR = 'examples'; const TEST_FILE = 'some/long/path/server.key'; -const TEMP_LOCAL_PATH = '/tmp/snyk-secrets-test'; + +// Global variable to store the path of the cloned repo for this run +let TEMP_LOCAL_PATH: string; const env = { ...process.env, INTERNAL_SNYK_FEATURE_FLAG_IS_SECRETS_ENABLED: 'true', }; -beforeAll(() => { - if (!existsSync(TEMP_LOCAL_PATH)) { - try { - // Currently fake-leaks doesn't have any release tags, so we pin it to a commit instead - // and that's why we're cloning the full repo without --depth 1, which may slow down the tests - execSync( - `git clone ${TEST_REPO_URL} ${TEMP_LOCAL_PATH} && cd ${TEMP_LOCAL_PATH} && git checkout ${TEST_REPO_COMMIT}`, - { - stdio: 'pipe', - timeout: 30000, - }, - ); - } catch (error) { - throw new Error( - `Failed to clone test repository: ${error.message}. This test requires network access.`, - ); - } +beforeAll(async () => { + TEMP_LOCAL_PATH = await makeTmpDirectory(); + + try { + // Currently fake-leaks doesn't have any release tags, so we pin it to a commit instead + // and that's why we're cloning the full repo without --depth 1, which may slow down the tests + execSync( + `git clone ${TEST_REPO_URL} ${TEMP_LOCAL_PATH} && cd ${TEMP_LOCAL_PATH} && git checkout ${TEST_REPO_COMMIT}`, + { + stdio: 'pipe', + timeout: 30000, + }, + ); + } catch (error: any) { + throw new Error( + `Failed to clone test repository: ${error.message}. 
This test requires network access.`, + ); } }); afterAll(() => { if (existsSync(TEMP_LOCAL_PATH)) { try { - execSync(`rm -rf ${TEMP_LOCAL_PATH}`, { stdio: 'pipe' }); - } catch (err) { + rmSync(TEMP_LOCAL_PATH, { recursive: true, force: true }); + } catch (err: any) { console.warn('Failed to cleanup test repository:', err.message); } } }); -describe.skip('snyk secrets test', () => { +const copyFolderSync = (from: string, to: string) => { + mkdirSync(to, { recursive: true }); + readdirSync(from).forEach((element) => { + const fromPath = join(from, element); + const toPath = join(to, element); + if (statSync(fromPath).isFile()) copyFileSync(fromPath, toPath); + else copyFolderSync(fromPath, toPath); + }); +}; + +/** + * Sets up an isolated environment for testing the 'ignore' functionality. + * * Why this is necessary: + * - Generates unique secret identities within a dedicated temporary folder. + * - Isolates local state mutations (like .snyk file creation). + * - Prevents race conditions during concurrent test execution, guaranteeing + * zero side-effects on other acceptance tests. 
+ */ +const setupIsolatedIgnoreEnv = async (basePath: string) => { + const uuid = randomUUID(); + const testDir = `${basePath}/ignores_test_${uuid}`; + + // Calculate an expiry date 15 minutes from now in YYYY-MM-DDThh:mm:ss.fffZ format + const expiryDate = new Date(Date.now() + 15 * 60000).toISOString(); + + const cleanup = () => { + if (existsSync(testDir)) { + try { + rmSync(testDir, { recursive: true, force: true }); + } catch (err: any) { + console.warn( + `Failed to cleanup isolated ignore directory:`, + err.message, + ); + } + } + }; + + try { + mkdirSync(testDir, { recursive: true }); + + // Copy the same file twice to trigger the same rule ID for multiple locations in SARIF validation + const sourceFile = join( + basePath, + 'semgrep-rules-examples', + 'detected-sendgrid-api-key.txt', + ); + + copyFileSync(sourceFile, join(testDir, `sendgrid-keys_1_${uuid}.txt`)); + copyFileSync(sourceFile, join(testDir, `sendgrid-keys_2_${uuid}.txt`)); + + // Run a base JSON scan to extract the exact finding IDs for these files + const { stdout: jsonStdout } = await runSnykCLI( + `secrets test ${testDir} --json`, + { env }, + ); + const jsonOutput = JSON.parse(jsonStdout); + const results = jsonOutput.runs[0].results || []; + + const findingIds = [ + ...new Set( + results.map((r: any) => r.fingerprints?.identity).filter(Boolean), + ), + ]; + const issuesToIgnore = findingIds.slice(0, 2); + + // Ignore the target issues + for (const [index, issueId] of issuesToIgnore.entries()) { + const reason = `Test ignore reason metadata ${index}`; + await runSnykCLI( + `ignore --id=${issueId} --expiry=${expiryDate} --reason=${reason}`, + { env, cwd: testDir }, + ); + } + + return { testDir, cleanup }; + } catch (error) { + cleanup(); + throw error; + } +}; + +describe('snyk secrets test', () => { describe('output formats', () => { it('should display human-readable output by default', async () => { const { code, stderr } = await runSnykCLI( @@ -64,7 +157,7 @@ describe.skip('snyk 
secrets test', () => { expect(code).toBe(EXIT_CODES.VULNS_FOUND); }); - it('should display sarif output with --sarif', async () => { + it.skip('should display sarif output with --sarif', async () => { const { code, stderr } = await runSnykCLI( `secrets test ${TEMP_LOCAL_PATH}/${TEST_DIR} --sarif`, { env }, @@ -74,7 +167,7 @@ describe.skip('snyk secrets test', () => { expect(code).toBe(EXIT_CODES.VULNS_FOUND); }); - it('should write sarif to output file with --sarif-file-output', async () => { + it.skip('should write sarif to output file with --sarif-file-output', async () => { const outputFile = 'test-sarif.json'; const outputFilePath = `${projectRoot}/${outputFile}`; @@ -90,7 +183,8 @@ describe.skip('snyk secrets test', () => { }); }); - it('filters out secret findings when using --severity-threshold', async () => { + // TODO: Re-enable once SARIF and JSON WIP outputs are finalized [PS-533] + it.skip('filters out secret findings when using --severity-threshold', async () => { const { code, stdout } = await runSnykCLI( `secrets test --severity-threshold=critical --sarif ${TEMP_LOCAL_PATH}/${TEST_DIR}`, { env }, @@ -168,14 +262,251 @@ describe.skip('snyk secrets test', () => { }); }); + describe('Human-readable output validation', () => { + it('should generate properly formatted human-readable output and map Finding IDs correctly', async () => { + const { code, stdout, stderr } = await runSnykCLI( + `secrets test ${TEMP_LOCAL_PATH}/${TEST_DIR}`, + { env }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODES.VULNS_FOUND); + + expect(stdout).toContain('Open Secrets issues:'); + expect(stdout).toContain('Test Summary'); + expect(stdout).toContain('Total secrets issues:'); + + // Validates: Finding ID is mapped correctly to a UUID string + const uuidRegex = + /Finding ID: [0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i; + expect(stdout).toMatch(uuidRegex); + }); + + it('should omit Finding IDs that start with UNDEFINED', async () 
=> { + // Create a path outside of the repo to remove source used to generate secret identities + const NO_GIT_DIR = await makeTmpDirectory(); + + try { + // Copy just the test files + copyFolderSync(join(TEMP_LOCAL_PATH, TEST_DIR), NO_GIT_DIR); + + const { code, stdout, stderr } = await runSnykCLI( + `secrets test ${NO_GIT_DIR}`, + { env }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODES.VULNS_FOUND); + + // Finding ID should not be included when it starts with UNDEFINED + expect(stdout).not.toContain('Finding ID'); + } finally { + try { + rmSync(NO_GIT_DIR, { recursive: true, force: true }); + } catch (err: any) { + console.warn( + `Failed to cleanup non-git test directory:`, + err.message, + ); + } + } + }); + // TODO: Re-enable once SARIF and JSON WIP outputs are finalized [PS-533] + it.skip('should correctly render multiple ignores and their metadata in the output', async () => { + const { testDir, cleanup } = + await setupIsolatedIgnoreEnv(TEMP_LOCAL_PATH); + + try { + // Get human-readable with the ignores included + const { stdout, stderr, code } = await runSnykCLI( + `secrets test ${testDir} --include-ignores`, + { env, cwd: testDir }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODES.VULNS_FOUND); + + // Multiple ignores are rendered properly + expect(stdout).toMatch(/Ignored:\s*[2-9]/); + expect(stdout).toContain('! 
[IGNORED]'); + + // Validate ignores metadata is mapped and rendered correctly + // Validates Expiration format + expect(stdout).toMatch(/Expiration:\s+[A-Z][a-z]+\s+\d{2},\s+\d{4}/); + + // Validates the Reason field and spacing + expect(stdout).toMatch(/Reason:\s+Test ignore reason metadata 0/); + expect(stdout).toMatch(/Reason:\s+Test ignore reason metadata 1/); + expect(stdout).toMatch(/Ignored on:\s+[A-Z][a-z]+\s+\d{2},\s+\d{4}/); + } finally { + cleanup(); + } + }); + }); + + // TODO: Re-enable once SARIF and JSON WIP outputs are finalized [PS-533] + describe.skip('JSON output payload validation', () => { + it('should return a valid SARIF when json flag is used', async () => { + const { code, stdout, stderr } = await runSnykCLI( + `secrets test ${TEMP_LOCAL_PATH}/${TEST_DIR} --json`, + { env }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODES.VULNS_FOUND); + + const output = JSON.parse(stdout); + + // Basic SARIF schema requirements + expect(output).toHaveProperty('version'); + expect(typeof output.version).toBe('string'); + expect(Array.isArray(output.runs)).toBe(true); + expect(output.runs.length).toBeGreaterThan(0); + + const run = output.runs[0]; + + expect(run.tool.driver.name).toBe('Snyk Secrets'); + + // Results array exists and is populated + expect(Array.isArray(run.results)).toBe(true); + expect(run.results.length).toBeGreaterThan(0); + + // Ensure the first result has the expected ruleId mapping + expect(run.results[0]).toHaveProperty('ruleId'); + }); + }); + // TODO: Re-enable once SARIF and JSON WIP outputs are finalized [PS-533] + describe.skip('SARIF output payload validation', () => { + it('should generate an enriched SARIF payload with ignores', async () => { + const { testDir, cleanup } = + await setupIsolatedIgnoreEnv(TEMP_LOCAL_PATH); + + try { + const { code, stdout, stderr } = await runSnykCLI( + `secrets test ${testDir} --include-ignores --sarif`, + { env, cwd: testDir }, + ); + + expect(stderr).toBe(''); + 
expect(code).toBe(EXIT_CODES.VULNS_FOUND); + + const sarifOutput = JSON.parse(stdout); + const fingerprintRegex = /^[a-f0-9]{64}$/i; + const slugRegex = /^[a-z0-9-]+$/; + + // Only one run is performed + const run = sarifOutput.runs[0]; + + expect(run.tool.driver.name).toBe('Snyk Secrets'); + + const rules = run.tool.driver.rules || []; + const ruleIds = rules.map((rule: any) => rule.id); + const uniqueRuleIds = new Set(ruleIds); + + // Rules should only be included once in the SARIF, and not multiple times + expect(ruleIds.length).toBe(uniqueRuleIds.size); + + rules.forEach((rule: any) => { + expect(rule.id).toMatch(slugRegex); + + // Rules should have name + expect(rule).toHaveProperty('name'); + + // Validates: the properties from the rules include the severity + expect(rule.properties).toBeDefined(); + expect(rule.properties).toHaveProperty('severity'); + + // General structural checks + expect(rule).toHaveProperty('shortDescription.text'); + }); + + let foundMultipleLocations = false; + const results = run.results || []; + + results.forEach((result: any) => { + expect(result.ruleId).toMatch(slugRegex); + + // Validates: fingerprint is included in the result + expect(result).toHaveProperty('fingerprints'); + expect(result.fingerprints).toHaveProperty('fingerprint'); + expect(result.fingerprints.fingerprint).toMatch(fingerprintRegex); + + expect(Array.isArray(result.locations)).toBe(true); + expect(result.locations.length).toBeGreaterThan(0); + + // Tracks if we successfully grouped multiple locations into a single result + if (result.locations.length > 1) { + foundMultipleLocations = true; + } + + // Validate ignores metadata includes only these fields: status, justification, kind + if (result.suppressions && result.suppressions.length > 0) { + result.suppressions.forEach((suppression: any) => { + const suppressionKeys = Object.keys(suppression).sort(); + const expectedKeys = ['justification', 'kind', 'status'].sort(); + 
expect(suppressionKeys).toEqual(expectedKeys); + }); + } + }); + + expect(foundMultipleLocations).toBe(true); + } finally { + cleanup(); + } + }); + it('should ensure consistent secret identities regardless of the working directory', async () => { + // Use existing directories from the repo tree to test different path depths + // DIR_A is 1 level deep, DIR_C is 2 levels deep + const DIR_A = `${TEMP_LOCAL_PATH}/auth0`; + const DIR_C = `${TEMP_LOCAL_PATH}/aws/valid`; + + const targetDir = 'semgrep-rules-examples'; + + // Run scan from DIR_A + const { stdout: stdoutA, stderr: stderrA } = await runSnykCLI( + `secrets test ../${targetDir} --sarif`, + { env, cwd: DIR_A }, + ); + expect(stderrA).toBe(''); + + // Run scan from DIR_C + const { stdout: stdoutC, stderr: stderrC } = await runSnykCLI( + `secrets test ../../${targetDir} --sarif`, + { env, cwd: DIR_C }, + ); + expect(stderrC).toBe(''); + + const sarifA = JSON.parse(stdoutA); + const sarifC = JSON.parse(stdoutC); + + const resultsA = sarifA.runs[0].results || []; + const resultsC = sarifC.runs[0].results || []; + + // Ensure we actually scanned and found the secrets + expect(resultsA.length).toBeGreaterThan(0); + expect(resultsA.length).toBe(resultsC.length); + + // Helper to extract and sort fingerprints so order doesn't cause false failures + const getFingerprints = (results: any[]) => + results.map((r: any) => r.fingerprints?.fingerprint).sort(); + + const fingerprintsA = getFingerprints(resultsA); + const fingerprintsC = getFingerprints(resultsC); + + // Identities must be exactly the same, as they are computed relative to the git root + expect(fingerprintsA).toEqual(fingerprintsC); + }); + }); + describe('validation', () => { - it('should return an error for --report', async () => { + // Skipped because --report functionality is not yet fully functional [PS-533] + it.skip('should return an error for --report', async () => { const { code, stdout } = await runSnykCLI( `secrets test 
${TEMP_LOCAL_PATH}/${TEST_DIR} --report`, { env }, ); - expect(stdout).toContain('Feature under development'); + expect(stdout).toContain('Feature not enabled'); expect(code).toBe(EXIT_CODES.ERROR); }); diff --git a/test/jest/acceptance/snyk-test/all-projects.spec.ts b/test/jest/acceptance/snyk-test/all-projects.spec.ts index 1bbbf8266d..f12e4f2831 100644 --- a/test/jest/acceptance/snyk-test/all-projects.spec.ts +++ b/test/jest/acceptance/snyk-test/all-projects.spec.ts @@ -413,6 +413,107 @@ describe('snyk test --all-projects (mocked server only)', () => { expect(code).toEqual(0); }); + test('`test pnpm-workspace --all-projects --exclude-paths=shared/package.json` excludes only the specified file', async () => { + server.setFeatureFlag('enablePnpmCli', true); + + const project = await createProjectFromFixture( + 'pnpm-workspace-with-exclude-issue/workspace', + ); + + const { code, stdout } = await runSnykCLI( + 'test --all-projects --exclude-paths=shared/package.json', + { + cwd: project.path(), + env, + }, + ); + + const backendRequests = server.getRequests().filter((req: any) => { + return req.url.includes('/api/v1/test'); + }); + + expect(backendRequests.length).toBe(3); + expect(stdout).not.toMatch(join('shared', 'package.json')); + expect(stdout).toMatch(join('app1', 'package.json')); + expect(stdout).toMatch(join('app2', 'package.json')); + expect(code).toEqual(0); + }); + + test('`test pnpm-workspace --all-projects --exclude-paths` with multiple paths excludes all specified files', async () => { + server.setFeatureFlag('enablePnpmCli', true); + + const project = await createProjectFromFixture( + 'pnpm-workspace-with-exclude-issue/workspace', + ); + + const { code, stdout } = await runSnykCLI( + 'test --all-projects --exclude-paths=shared/package.json,app2/package.json', + { + cwd: project.path(), + env, + }, + ); + + const backendRequests = server.getRequests().filter((req: any) => { + return req.url.includes('/api/v1/test'); + }); + + 
expect(backendRequests.length).toBe(2); + expect(stdout).not.toMatch(join('shared', 'package.json')); + expect(stdout).not.toMatch(join('app2', 'package.json')); + expect(stdout).toMatch(join('app1', 'package.json')); + expect(code).toEqual(0); + }); + + test('`test pnpm-workspace --all-projects --exclude-paths` accepts absolute paths', async () => { + server.setFeatureFlag('enablePnpmCli', true); + + const project = await createProjectFromFixture( + 'pnpm-workspace-with-exclude-issue/workspace', + ); + + const absolutePath = join(project.path(), 'shared', 'package.json'); + + const { code, stdout } = await runSnykCLI( + `test --all-projects --exclude-paths=${absolutePath}`, + { + cwd: project.path(), + env, + }, + ); + + const backendRequests = server.getRequests().filter((req: any) => { + return req.url.includes('/api/v1/test'); + }); + + expect(backendRequests.length).toBe(3); + expect(stdout).not.toMatch(join('shared', 'package.json')); + expect(stdout).toMatch(join('app1', 'package.json')); + expect(stdout).toMatch(join('app2', 'package.json')); + expect(code).toEqual(0); + }); + + test('`test pnpm-workspace --all-projects --exclude-paths=shared/package.json` does not affect other package.json files', async () => { + server.setFeatureFlag('enablePnpmCli', true); + + const project = await createProjectFromFixture( + 'pnpm-workspace-with-exclude-issue/workspace', + ); + + const { code, stdout } = await runSnykCLI( + 'test --all-projects --exclude-paths=shared/package.json', + { + cwd: project.path(), + env, + }, + ); + + expect(stdout).toMatch('package.json'); + expect(stdout).toMatch(join('app1', 'package.json')); + expect(stdout).toMatch(join('app2', 'package.json')); + expect(code).toEqual(0); + }); + test('`test pnpm-workspace --all-projects --exclude=shared --detection-depth=2` excludes specified directory', async () => { server.setFeatureFlag('enablePnpmCli', true); diff --git a/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts 
b/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts index 4f992b7928..ac9ef427cb 100644 --- a/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts +++ b/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts @@ -836,6 +836,12 @@ describe.each(userJourneyWorkflows)( expect(code).toEqual(2); }); + + test('run `snyk test xlsx` on npm package with redirect', async () => { + const { code } = await runSnykCLI('test xlsx'); + + expect([0, 1]).toContain(code); + }); }); }); }, diff --git a/test/jest/acceptance/snyk-test/npm-alias.spec.ts b/test/jest/acceptance/snyk-test/npm-alias.spec.ts index b99178c7ce..c70c02ff7b 100644 --- a/test/jest/acceptance/snyk-test/npm-alias.spec.ts +++ b/test/jest/acceptance/snyk-test/npm-alias.spec.ts @@ -76,7 +76,7 @@ describe('npm alias support', () => { expect(code).toEqual(0); expect(stdout).toContain('"pkgId": "@yao-pkg/pkg@6.5.0",'); - expect(stdout).toContain('"nodeId": "pkg@6.5.0",'); + expect(stdout).toContain('"nodeId": "@yao-pkg/pkg@6.5.0",'); expect(stdout).toContain('"alias": "pkg=>@yao-pkg/pkg@6.5.0"'); }); @@ -91,7 +91,7 @@ describe('npm alias support', () => { expect(code).toEqual(0); expect(stdout).toContain('"pkgId": "@yao-pkg/pkg@6.5.0",'); - expect(stdout).toContain('"nodeId": "pkg@6.5.0",'); + expect(stdout).toContain('"nodeId": "@yao-pkg/pkg@6.5.0",'); expect(stdout).toContain('"alias": "pkg=>@yao-pkg/pkg@6.5.0"'); }); @@ -129,7 +129,7 @@ describe('npm alias support', () => { expect(stdout).toContain('"pkgId": "hello-world-npm@1.1.0",'); expect(stdout).toContain('"nodeId": "hello-world-npm@1.1.0",'); - expect(stdout).toContain('"nodeId": "hello-world-npm-v1_1_1@1.1.1",'); + expect(stdout).toContain('"nodeId": "hello-world-npm@1.1.1",'); expect(stdout).toContain('"pkgId": "hello-world-npm@1.1.1",'); expect(stdout).toContain( @@ -153,7 +153,7 @@ describe('npm alias support', () => { expect(stdout).toContain('"pkgId": "hello-world-npm@1.1.0",'); expect(stdout).toContain('"nodeId": 
"hello-world-npm@1.1.0",'); - expect(stdout).toContain('"nodeId": "hello-world-npm-v1_1_1@1.1.1",'); + expect(stdout).toContain('"nodeId": "hello-world-npm@1.1.1",'); expect(stdout).toContain('"pkgId": "hello-world-npm@1.1.1",'); expect(stdout).toContain( diff --git a/test/jest/acceptance/snyk-test/test-python-whl-all-projects.spec.ts b/test/jest/acceptance/snyk-test/test-python-whl-all-projects.spec.ts new file mode 100644 index 0000000000..936d832c4e --- /dev/null +++ b/test/jest/acceptance/snyk-test/test-python-whl-all-projects.spec.ts @@ -0,0 +1,181 @@ +import { createProjectFromWorkspace } from '../../util/createProject'; +import { runSnykCLI } from '../../util/runSnykCLI'; +import { + fakeServer, + getFirstIPv4Address, +} from '../../../acceptance/fake-server'; +import { runCommand } from '../../util/runCommand'; +import { getServerPort } from '../../util/getServerPort'; +import * as path from 'path'; + +jest.setTimeout(1000 * 60 * 5); + +describe('`snyk test --all-projects` with .whl file references in requirements.txt', () => { + let server; + let env: Record; + + beforeAll((done) => { + const port = getServerPort(process); + const baseApi = '/api/v1'; + const ipAddress = getFirstIPv4Address(); + env = { + ...process.env, + SNYK_API: `http://${ipAddress}:${port}${baseApi}`, + SNYK_HOST: `http://${ipAddress}:${port}`, + SNYK_TOKEN: '123456789', + SNYK_DISABLE_ANALYTICS: '1', + SNYK_HTTP_PROTOCOL_UPGRADE: '0', + }; + server = fakeServer(baseApi, env.SNYK_TOKEN); + server.listen(port, () => { + done(); + }); + }); + + afterAll((done) => { + server.close(() => { + done(); + }); + }); + + it('should successfully scan projects with .whl files when using --all-projects', async () => { + const project = await createProjectFromWorkspace( + 'pip-app-whl-all-projects', + ); + + // Install packages in each project subdirectory + const projects = ['project-a', 'project-b']; + + let pythonCommand = 'python'; + await runCommand(pythonCommand, ['--version']).catch(() => { 
+ pythonCommand = 'python3'; + }); + + const pythonPaths: string[] = []; + + for (const projectDir of projects) { + const projectPath = project.path(projectDir); + const sitePackagesPath = path.join(projectPath, 'site-packages'); + + await runCommand( + pythonCommand, + [ + '-m', + 'pip', + 'install', + '-r', + 'requirements.txt', + '--target', + 'site-packages', + ], + { + cwd: projectPath, + env, + }, + ).catch(() => { + // Ignore installation errors - test focuses on parsing behavior + }); + + pythonPaths.push(sitePackagesPath); + } + + // Set PYTHONPATH to include all site-packages directories (Windows only) + const testEnv = { + ...env, + }; + + if (process.platform === 'win32') { + testEnv.PYTHONPATH = pythonPaths.join(';'); + } + + // Run snyk test with --all-projects using JSON output + const { stdout, stderr } = await runSnykCLI( + `test --all-projects --json --command=${pythonCommand}`, + { + cwd: project.path(), + env: testEnv, + }, + ); + + // With the fix, .whl files should be parsed correctly + // No "Unparsable requirement line" errors (this was the bug) + expect(stderr).not.toContain('Unparsable requirement line'); + expect(stderr).not.toContain( + 'Expected package name at the start of dependency specifier', + ); + + // Parse JSON output + const parsed = JSON.parse(stdout); + const results = Array.isArray(parsed) ? parsed : [parsed]; + + // Extract project identifiers from displayTargetFile (e.g., "project-a/requirements.txt") + const projectIdentifiers = results + .map((r) => { + const targetFile = r.displayTargetFile || r.targetFile || ''; + // Extract "project-a" or "project-b" from paths like "project-a/requirements.txt" + const match = targetFile.match(/(project-[ab])/); + return match ? 
match[1] : null; + }) + .filter(Boolean); + + // Should successfully test both projects + expect(projectIdentifiers).toContain('project-a'); + expect(projectIdentifiers).toContain('project-b'); + }); + + it('should succeed when scanning individual projects (not using --all-projects)', async () => { + const project = await createProjectFromWorkspace( + 'pip-app-whl-all-projects', + ); + + let pythonCommand = 'python'; + await runCommand(pythonCommand, ['--version']).catch(() => { + pythonCommand = 'python3'; + }); + + const projectPath = project.path('project-a'); + const sitePackagesPath = path.join(projectPath, 'site-packages'); + + // Install packages into project's local directory using --target + await runCommand( + pythonCommand, + [ + '-m', + 'pip', + 'install', + '-r', + 'requirements.txt', + '--target', + 'site-packages', + ], + { + cwd: projectPath, + env, + }, + ).catch(() => { + // Ignore installation errors - test focuses on parsing behavior + }); + + // Set PYTHONPATH to include site-packages directory (Windows only) + const testEnv = { + ...env, + }; + + if (process.platform === 'win32') { + testEnv.PYTHONPATH = sitePackagesPath; + } + + // Run snyk test on individual project using JSON output + const { stderr } = await runSnykCLI( + `test --json --command=${pythonCommand}`, + { + cwd: projectPath, + env: testEnv, + }, + ); + + // Individual scanning should work correctly with .whl files + // No parse errors should occur + expect(stderr).not.toContain('Unparsable requirement line'); + }); +}); diff --git a/test/jest/acceptance/timeout.spec.ts b/test/jest/acceptance/timeout.spec.ts new file mode 100644 index 0000000000..49c2271cdc --- /dev/null +++ b/test/jest/acceptance/timeout.spec.ts @@ -0,0 +1,103 @@ +import { fakeServer, getFirstIPv4Address } from '../../acceptance/fake-server'; +import { runSnykCLI } from '../util/runSnykCLI'; +import { getAvailableServerPort } from '../util/getServerPort'; +import { EXIT_CODES } from 
'../../../src/cli/exit-codes';
+import { getCliConfig, restoreCliConfig } from '../../acceptance/config-helper';
+
+jest.setTimeout(1000 * 60); // 60 seconds - tests involve timeouts
+
+// SNYK_TIMEOUT_SECS=5, server delay=10s, grace period=5s
+// Expected: CLI should timeout within 10s (5+5), strictly before the server responds at 10s
+const TIMEOUT_SECS = 5;
+const GRACE_PERIOD_SECS = 5;
+const SERVER_DELAY_MS = 10000;
+const EXPECTED_MIN_MS = TIMEOUT_SECS * 1000; // At least the timeout duration
+const EXPECTED_MAX_MS = (TIMEOUT_SECS + GRACE_PERIOD_SECS) * 1000; // Timeout + grace
+const orgId = '11111111-2222-3333-4444-555555555555';
+
+describe('timeout behavior [exit code 69]', () => {
+  let server: ReturnType<typeof fakeServer>;
+  let env: Record<string, string>;
+  let initialConfig: Record<string, string> = {};
+
+  beforeAll(async () => {
+    const ipAddr = getFirstIPv4Address();
+    const port = await getAvailableServerPort(process);
+    const baseApi = '/api/v1';
+
+    env = {
+      ...process.env,
+      SNYK_API: 'http://' + ipAddr + ':' + port + baseApi,
+      SNYK_TOKEN: '123456789',
+      SNYK_HTTP_PROTOCOL_UPGRADE: '0',
+      SNYK_CFG_ORG: orgId,
+      // Disable retries to speed up tests
+      INTERNAL_NETWORK_REQUEST_MAX_ATTEMPTS: '1',
+      // Set a short timeout for testing (5 seconds)
+      SNYK_TIMEOUT_SECS: String(TIMEOUT_SECS),
+    };
+
+    server = fakeServer(baseApi, env.SNYK_TOKEN);
+    await server.listenPromise(port);
+  });
+
+  beforeEach(async () => {
+    initialConfig = await getCliConfig();
+    // Set server to delay responses longer than the timeout (10s > 5s timeout)
+    server.setResponseDelay(SERVER_DELAY_MS);
+  });
+
+  afterEach(async () => {
+    server.restore();
+    await restoreCliConfig(initialConfig);
+  });
+
+  afterAll(async () => {
+    await server.closePromise();
+  });
+
+  it.each([
+    ['code test'],
+    ['test'],
+    ['container test scratch'],
+    ['container monitor scratch'],
+    ['iac test'],
+    ['monitor'],
+    ['whoami'],
+    ['auth 11111111-2222-3333-4444-555555555555'],
+    ['sbom --format=cyclonedx1.4+json -d'],
+  ])(
+    'returns exit code 69 
(EX_UNAVAILABLE) on timeout for "%s"', + async (args) => { + const startTime = Date.now(); + const { code, stdout } = await runSnykCLI(args, { + env, + }); + const duration = Date.now() - startTime; + + console.log(stdout); + + // print duration and min and max in seconds + console.log( + `Duration: ${duration / 1000} seconds, Min: ${EXPECTED_MIN_MS / 1000} seconds, Max: ${EXPECTED_MAX_MS / 1000} seconds`, + ); + + // Should return exit code 69 for timeout + expect(code).toEqual(EXIT_CODES.EX_UNAVAILABLE); + + // Should contain timeout-related message + expect(stdout).toContain('SNYK-CLI-0026'); + + // Should timeout within expected bounds (not wait for full server delay) + expect(duration).toBeGreaterThanOrEqual(EXPECTED_MIN_MS); + expect(duration).toBeLessThan(EXPECTED_MAX_MS); + + // Should send instrumentation data even on timeout + const requests = server.getRequests(); + const instrumentationRequest = requests.find((r) => + r.url?.includes(`/api/hidden/orgs/${orgId}/analytics`), + ); + expect(instrumentationRequest).toBeDefined(); + }, + ); +}); diff --git a/test/jest/unit/allow-incomplete-sbom.spec.ts b/test/jest/unit/allow-incomplete-sbom.spec.ts new file mode 100644 index 0000000000..9d82076242 --- /dev/null +++ b/test/jest/unit/allow-incomplete-sbom.spec.ts @@ -0,0 +1,260 @@ +import * as path from 'path'; +import { getDepsFromPlugin } from '../../../src/lib/plugins/get-deps-from-plugin'; +import { + getMultiPluginResult, + MultiProjectResultCustom, +} from '../../../src/lib/plugins/get-multi-plugin-result'; +import { Options, TestOptions } from '../../../src/lib/types'; +import { + createProject, + createProjectFromWorkspace, +} from '../util/createProject'; + +jest.setTimeout(1000 * 60 * 5); + +describe('allow-incomplete-sbom: error handling in plugin layer', () => { + const baseOptions: Options & TestOptions = { + path: '', + showVulnPaths: 'some', + }; + + describe('getDepsFromPlugin — single project', () => { + it('returns failedResults when plugin 
throws and flag is set', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-missing-lockfile', + ); + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + 'print-output-jsonl-with-errors': true, + }; + + const result = (await getDepsFromPlugin( + project.path(), + options, + )) as MultiProjectResultCustom; + + expect(result.scannedProjects).toHaveLength(0); + expect(result.failedResults).toBeDefined(); + expect(result.failedResults).toHaveLength(1); + expect(result.failedResults![0].targetFile).toBe('package.json'); + expect(result.failedResults![0].errMessage).toBeDefined(); + expect(result.failedResults![0].errMessage.length).toBeGreaterThan(0); + }); + + it('throws when plugin fails and flag is NOT set', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-missing-lockfile', + ); + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + }; + + await expect( + getDepsFromPlugin(project.path(), options), + ).rejects.toThrow(); + }); + + it('succeeds for a valid project with the flag set', async () => { + const project = await createProjectFromWorkspace('npm-package'); + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + packageManager: 'npm', + 'print-output-jsonl-with-errors': true, + }; + + const result = (await getDepsFromPlugin( + project.path(), + options, + )) as MultiProjectResultCustom; + + expect(result.scannedProjects.length).toBeGreaterThanOrEqual(1); + expect(result.failedResults ?? 
[]).toHaveLength(0); + }); + }); + + describe('getMultiPluginResult — multi project', () => { + it('collects failedResults for broken projects alongside successful ones', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-partial-broken', + ); + + const targetFiles = [ + path.join(project.path(), 'valid-project', 'package.json'), + path.join(project.path(), 'broken-project', 'package.json'), + ]; + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + allProjects: true, + 'print-output-jsonl-with-errors': true, + }; + + const result = await getMultiPluginResult( + project.path(), + options, + targetFiles, + ); + + expect(result.scannedProjects.length).toBeGreaterThanOrEqual(1); + expect(result.failedResults).toBeDefined(); + expect(result.failedResults!.length).toBeGreaterThanOrEqual(1); + + const failed = result.failedResults![0]; + expect(failed.targetFile).toBeDefined(); + expect(failed.errMessage).toBeDefined(); + expect(failed.errMessage.length).toBeGreaterThan(0); + }); + + it('returns empty scannedProjects with failedResults when all projects fail and flag is set', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-all-broken', + ); + + const targetFiles = [ + path.join(project.path(), 'broken-project-a', 'package.json'), + path.join(project.path(), 'broken-project-b', 'package.json'), + ]; + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + allProjects: true, + 'print-output-jsonl-with-errors': true, + }; + + const result = await getMultiPluginResult( + project.path(), + options, + targetFiles, + ); + + expect(result.scannedProjects).toHaveLength(0); + expect(result.failedResults).toBeDefined(); + expect(result.failedResults).toHaveLength(2); + expect(result.failedResults![0].errMessage).toBeDefined(); + expect(result.failedResults![1].errMessage).toBeDefined(); + }); + + it('throws when all projects fail and flag 
is NOT set', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-all-broken', + ); + + const targetFiles = [ + path.join(project.path(), 'broken-project-a', 'package.json'), + path.join(project.path(), 'broken-project-b', 'package.json'), + ]; + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + allProjects: true, + }; + + await expect( + getMultiPluginResult(project.path(), options, targetFiles), + ).rejects.toThrow(/Failed to get dependencies/); + }); + + it('still returns failedResults for partial failure even without the flag', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-partial-broken', + ); + + const targetFiles = [ + path.join(project.path(), 'valid-project', 'package.json'), + path.join(project.path(), 'broken-project', 'package.json'), + ]; + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + allProjects: true, + }; + + // With partial failure + some successes, getMultiPluginResult + // always returns (doesn't throw). The flag only matters when ALL fail. 
+ const result = await getMultiPluginResult( + project.path(), + options, + targetFiles, + ); + + expect(result.scannedProjects.length).toBeGreaterThanOrEqual(1); + expect(result.failedResults).toBeDefined(); + expect(result.failedResults!.length).toBeGreaterThanOrEqual(1); + }); + }); + + describe('getDepsFromPlugin — allProjects integration', () => { + it('collects both successes and failures with flag via allProjects', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-partial-broken', + ); + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + allProjects: true, + 'print-output-jsonl-with-errors': true, + }; + + const result = (await getDepsFromPlugin( + project.path(), + options, + )) as MultiProjectResultCustom; + + expect(result.scannedProjects.length).toBeGreaterThanOrEqual(1); + expect(result.failedResults).toBeDefined(); + expect(result.failedResults!.length).toBeGreaterThanOrEqual(1); + }); + + it('returns empty results with errors when all projects fail with flag via allProjects', async () => { + const project = await createProject( + 'sbom-allow-incomplete/npm-multi-all-broken', + ); + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + allProjects: true, + 'print-output-jsonl-with-errors': true, + }; + + const result = (await getDepsFromPlugin( + project.path(), + options, + )) as MultiProjectResultCustom; + + expect(result.scannedProjects).toHaveLength(0); + expect(result.failedResults).toBeDefined(); + expect(result.failedResults!.length).toBeGreaterThanOrEqual(1); + }); + + it('flag has no adverse effect when all projects succeed', async () => { + const project = await createProjectFromWorkspace('npm-package'); + + const options: Options & TestOptions = { + ...baseOptions, + path: project.path(), + allProjects: true, + 'print-output-jsonl-with-errors': true, + }; + + const result = (await getDepsFromPlugin( + project.path(), + options, + )) as 
MultiProjectResultCustom; + + expect(result.scannedProjects.length).toBeGreaterThanOrEqual(1); + expect(result.failedResults ?? []).toHaveLength(0); + }); + }); +}); diff --git a/test/jest/unit/ecosystems-monitor-docker.spec.ts b/test/jest/unit/ecosystems-monitor-docker.spec.ts index b2f113b9cc..d8717a66b1 100644 --- a/test/jest/unit/ecosystems-monitor-docker.spec.ts +++ b/test/jest/unit/ecosystems-monitor-docker.spec.ts @@ -285,4 +285,160 @@ describe('monitorEcosystem docker/container', () => { ); expect(parsedOutput.projectName).not.toBe('my-custom-project-name'); }); + + describe('parallelization of monitor-dependencies requests', () => { + const ORIGINAL_CONCURRENCY = process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + + afterEach(() => { + if (ORIGINAL_CONCURRENCY === undefined) { + delete process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + } else { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = ORIGINAL_CONCURRENCY; + } + }); + + function makeMavenScanResult(targetFile: string): ScanResult { + const base = readJsonFixture( + 'maven-project-0-dependencies-scan-result.json', + ) as ScanResult; + return { + ...base, + identity: { ...base.identity, targetFile }, + }; + } + + function makeMonitorResponse(identity: string) { + const base = readJsonFixture( + 'monitor-dependencies-response-with-project-name.json', + ) as ecosystemsTypes.MonitorDependenciesResponse; + return { + ...base, + id: `${identity}-id`, + projectName: identity, + }; + } + + async function runMonitor(scanResults: ScanResult[]) { + jest.spyOn(dockerPlugin, 'scan').mockResolvedValue({ scanResults }); + return ecosystems.monitorEcosystem('docker', ['/srv'], { + path: '/srv', + docker: true, + org: 'my-org', + }); + } + + it('caps in-flight requests at the default concurrency (5)', async () => { + delete process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + const scanResults = Array.from({ length: 25 }, (_, i) => + makeMavenScanResult(`app-${i}`), + ); + + let inFlight = 0; + let peakInFlight = 0; + 
jest.spyOn(request, 'makeRequest').mockImplementation((payload: any) => { + inFlight++; + peakInFlight = Math.max(peakInFlight, inFlight); + const identity = payload.body.scanResult.identity.targetFile; + return new Promise((resolve) => { + setTimeout(() => { + inFlight--; + resolve(makeMonitorResponse(identity)); + }, 10); + }); + }); + + await runMonitor(scanResults); + + expect(peakInFlight).toBeLessThanOrEqual(5); + expect(peakInFlight).toBeGreaterThan(1); + }); + + it('respects SNYK_INTERNAL_REQUEST_CONCURRENCY override', async () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = '3'; + const scanResults = Array.from({ length: 15 }, (_, i) => + makeMavenScanResult(`app-${i}`), + ); + + let inFlight = 0; + let peakInFlight = 0; + jest.spyOn(request, 'makeRequest').mockImplementation((payload: any) => { + inFlight++; + peakInFlight = Math.max(peakInFlight, inFlight); + const identity = payload.body.scanResult.identity.targetFile; + return new Promise((resolve) => { + setTimeout(() => { + inFlight--; + resolve(makeMonitorResponse(identity)); + }, 10); + }); + }); + + await runMonitor(scanResults); + + expect(peakInFlight).toBeLessThanOrEqual(3); + }); + + it('preserves result order matching input order', async () => { + delete process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + const scanResults = ['os', 'app-1', 'app-2', 'app-3', 'app-4'].map( + makeMavenScanResult, + ); + + // Stagger response times in reverse so completion order != input order. + jest.spyOn(request, 'makeRequest').mockImplementation((payload: any) => { + const identity = payload.body.scanResult.identity.targetFile; + const delay = + { os: 30, 'app-1': 20, 'app-2': 5, 'app-3': 25, 'app-4': 10 }[ + identity + ] ?? 
0; + return new Promise((resolve) => + setTimeout(() => resolve(makeMonitorResponse(identity)), delay), + ); + }); + + const [results] = await runMonitor(scanResults); + + expect(results.map((r) => r.projectName)).toEqual([ + 'os', + 'app-1', + 'app-2', + 'app-3', + 'app-4', + ]); + }); + + it('throws MonitorError when any request returns 4xx (fail-fast)', async () => { + delete process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + const scanResults = ['app-1', 'app-2', 'app-3'].map(makeMavenScanResult); + + jest.spyOn(request, 'makeRequest').mockImplementation((payload: any) => { + const identity = payload.body.scanResult.identity.targetFile; + if (identity === 'app-2') { + return Promise.reject({ code: 403, message: 'forbidden' }); + } + return Promise.resolve(makeMonitorResponse(identity)); + }); + + await expect(runMonitor(scanResults)).rejects.toThrow('forbidden'); + }); + + it('accumulates 5xx errors per scan-result without aborting', async () => { + delete process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + const scanResults = ['app-1', 'app-2', 'app-3'].map(makeMavenScanResult); + + jest.spyOn(request, 'makeRequest').mockImplementation((payload: any) => { + const identity = payload.body.scanResult.identity.targetFile; + if (identity === 'app-2') { + return Promise.reject({ code: 503, message: 'unavailable' }); + } + return Promise.resolve(makeMonitorResponse(identity)); + }); + + const [results, errors] = await runMonitor(scanResults); + + expect(results.map((r) => r.projectName)).toEqual(['app-1', 'app-3']); + expect(errors).toHaveLength(1); + expect(errors[0].error).toContain('Could not monitor dependencies'); + }); + }); }); diff --git a/test/jest/unit/lib/ecosystems/common.spec.ts b/test/jest/unit/lib/ecosystems/common.spec.ts index 74f586b5f9..1e2b04e857 100644 --- a/test/jest/unit/lib/ecosystems/common.spec.ts +++ b/test/jest/unit/lib/ecosystems/common.spec.ts @@ -90,7 +90,9 @@ describe('printUnmanagedDepGraph fn', () => { }, }); - const { result } = await 
printUnmanagedDepGraph({}, 'foo/bar', mockDest); + const { result } = await printUnmanagedDepGraph({}, 'foo/bar', mockDest, { + path: '.', + }); expect(result).toBe(''); expect(buffer.toString()).toMatchSnapshot(); diff --git a/test/jest/unit/lib/snyk-test/common.spec.ts b/test/jest/unit/lib/snyk-test/common.spec.ts index 5afe8cad37..07a1d6ad75 100644 --- a/test/jest/unit/lib/snyk-test/common.spec.ts +++ b/test/jest/unit/lib/snyk-test/common.spec.ts @@ -1,7 +1,10 @@ import { CLI, ProblemError } from '@snyk/error-catalog-nodejs-public'; import { CustomError } from '../../../../../src/lib/errors'; import { FailedProjectScanError } from '../../../../../src/lib/plugins/get-multi-plugin-result'; -import { getOrCreateErrorCatalogError } from '../../../../../src/lib/snyk-test/common'; +import { + getOrCreateErrorCatalogError, + getRequestConcurrency, +} from '../../../../../src/lib/snyk-test/common'; describe('getOrCreateErrorCatalogError', () => { const defaultErrMessage = 'Default error message'; @@ -116,3 +119,60 @@ describe('getOrCreateErrorCatalogError', () => { expect(result.detail).toBe(defaultErrMessage); }); }); + +describe('getRequestConcurrency', () => { + const originalValue = process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + + afterEach(() => { + if (originalValue === undefined) { + delete process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + } else { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = originalValue; + } + }); + + it('returns the default of 5 when SNYK_INTERNAL_REQUEST_CONCURRENCY is unset', () => { + delete process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY; + expect(getRequestConcurrency()).toBe(5); + }); + + it('returns the default of 5 when SNYK_INTERNAL_REQUEST_CONCURRENCY is empty', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = ''; + expect(getRequestConcurrency()).toBe(5); + }); + + it('returns the parsed value when SNYK_INTERNAL_REQUEST_CONCURRENCY is a valid integer', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = '15'; + 
expect(getRequestConcurrency()).toBe(15); + }); + + it('clamps to the maximum of 50 when SNYK_INTERNAL_REQUEST_CONCURRENCY exceeds the cap', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = '500'; + expect(getRequestConcurrency()).toBe(50); + }); + + it('returns the default when SNYK_INTERNAL_REQUEST_CONCURRENCY is below the minimum', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = '0'; + expect(getRequestConcurrency()).toBe(5); + }); + + it('returns the default when SNYK_INTERNAL_REQUEST_CONCURRENCY is negative', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = '-5'; + expect(getRequestConcurrency()).toBe(5); + }); + + it('returns the default when SNYK_INTERNAL_REQUEST_CONCURRENCY is non-numeric', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = 'abc'; + expect(getRequestConcurrency()).toBe(5); + }); + + it('honors the minimum boundary', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = '1'; + expect(getRequestConcurrency()).toBe(1); + }); + + it('honors the maximum boundary', () => { + process.env.SNYK_INTERNAL_REQUEST_CONCURRENCY = '50'; + expect(getRequestConcurrency()).toBe(50); + }); +}); diff --git a/test/jest/util/parseJSONL.ts b/test/jest/util/parseJSONL.ts new file mode 100644 index 0000000000..25469649a9 --- /dev/null +++ b/test/jest/util/parseJSONL.ts @@ -0,0 +1,16 @@ +/** + * Parses JSON Lines: one JSON value per line. Lines that are not valid JSON are skipped. 
+ */ +export function parseJSONL(jsonl: string): unknown[] { + const lines: string[] = jsonl.trim().split('\n').filter(Boolean); + + const result: unknown[] = []; + for (const line of lines) { + try { + result.push(JSON.parse(line.trim())); + } catch { + // Skip non-JSON lines + } + } + return result; +} diff --git a/test/tap/find-files.test.ts b/test/tap/find-files.test.ts index fa1c6305fc..4599dc1451 100644 --- a/test/tap/find-files.test.ts +++ b/test/tap/find-files.test.ts @@ -273,3 +273,83 @@ test('find returns a single valid manifest after filtering', async (t) => { const expected = [path.join(mavenPath, 'pom.xml')]; t.same(result, expected, 'should return the single manifest'); }); + +test('find excludes specific files by absolute path via excludePaths', async (t) => { + const npmPackageJson = path.join(testFixture, 'npm', 'package.json'); + const { files: result } = await find({ + path: testFixture, + filter: ['package.json'], + excludePaths: [npmPackageJson], + levelsDeep: 6, + }); + const expected = [ + path.join(testFixture, 'npm-with-lockfile', 'package.json'), + path.join(testFixture, 'yarn', 'package.json'), + ]; + t.same( + result.sort(), + expected.sort(), + 'should exclude only the specified file', + ); +}); + +test('find excludePaths does not affect files with the same basename at different paths', async (t) => { + const yarnPackageJson = path.join(testFixture, 'yarn', 'package.json'); + const { files: result } = await find({ + path: testFixture, + filter: ['package.json'], + excludePaths: [yarnPackageJson], + levelsDeep: 6, + }); + t.ok( + result.includes(path.join(testFixture, 'npm', 'package.json')), + 'should still include npm/package.json', + ); + t.ok( + result.includes( + path.join(testFixture, 'npm-with-lockfile', 'package.json'), + ), + 'should still include npm-with-lockfile/package.json', + ); + t.notOk(result.includes(yarnPackageJson), 'should exclude yarn/package.json'); +}); + +test('find excludePaths can exclude directories', async 
(t) => { + const npmDir = path.join(testFixture, 'npm'); + const { files: result } = await find({ + path: testFixture, + filter: ['package.json'], + excludePaths: [npmDir], + levelsDeep: 6, + }); + t.notOk( + result.includes(path.join(testFixture, 'npm', 'package.json')), + 'should not include files from excluded directory', + ); +}); + +test('find still recurses into nested subdirectories when excludePaths is set', async (t) => { + // Guards against regressions in findInDirectory's pipeline: the excludePaths + // filter must not short-circuit recursion. Using a non-matching exclusion + // path exercises the filter without actually excluding anything, then we + // assert that manifests several directories deep are still discovered. + const unrelatedExclusion = path.join(testFixture, 'does-not-exist'); + const { files: result } = await find({ + path: testFixture, + filter: ['build.gradle', 'Gopkg.lock', 'Package.swift'], + excludePaths: [unrelatedExclusion], + levelsDeep: 6, + }); + const expectedNested = [ + path.join(testFixture, 'gradle-multiple', 'gradle', 'build.gradle'), + path.join(testFixture, 'gradle-multiple', 'gradle-another', 'build.gradle'), + path.join(testFixture, 'golang', 'golang-app', 'Gopkg.lock'), + path.join(testFixture, 'swift', 'test.build', 'Package.swift'), + ]; + for (const expectedFile of expectedNested) { + t.ok( + result.includes(expectedFile), + `should recurse and find ${path.relative(testFixture, expectedFile)}`, + ); + } +});