diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index ae0f0db498f..6a68f9ca170 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -26,7 +26,7 @@ This checklist is for your information. - [ ] Bugfixes should be submitted against the `bugfix` branch. - [ ] Give a meaningful name to your PR, as it may end up being used in the release notes. - [ ] Your code is flake8 compliant. -- [ ] Your code is python 3.12 compliant. +- [ ] Your code is python 3.13 compliant. - [ ] If this is a new feature and not a bug fix, you've included the proper documentation in the docs at https://github.com/DefectDojo/django-DefectDojo/tree/dev/docs as part of this PR. - [ ] Model changes must include the necessary migrations in the dojo/db_migrations folder. - [ ] Add applicable tests to the unit tests. diff --git a/.github/workflows/build-docker-images-for-testing.yml b/.github/workflows/build-docker-images-for-testing.yml index 53e44b5e6a9..9175b7c2993 100644 --- a/.github/workflows/build-docker-images-for-testing.yml +++ b/.github/workflows/build-docker-images-for-testing.yml @@ -67,7 +67,7 @@ jobs: # export docker images to be used in next jobs below - name: Upload image ${{ matrix.docker-image }} as artifact timeout-minutes: 15 - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: built-docker-image-${{ matrix.docker-image }}-${{ matrix.os }}-${{ env.PLATFORM }} path: ${{ matrix.docker-image }}-${{ matrix.os }}-${{ env.PLATFORM }}_img diff --git a/.github/workflows/close-stale.yml b/.github/workflows/close-stale.yml index 0b371f1cb60..857f619c78b 100644 --- a/.github/workflows/close-stale.yml +++ b/.github/workflows/close-stale.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Close issues and PRs that are pending closure - uses: 
actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 + uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 with: # Disable automatic stale marking - only close manually labeled items days-before-stale: -1 diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml index d6ff0ffbc28..4569439e20a 100644 --- a/.github/workflows/fetch-oas.yml +++ b/.github/workflows/fetch-oas.yml @@ -51,7 +51,7 @@ jobs: run: docker compose down - name: Upload oas.${{ matrix.file-type }} as artifact - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: oas-${{ matrix.file-type }} path: oas.${{ matrix.file-type }} diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index ce956ce5a0f..217f0317688 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -19,9 +19,9 @@ jobs: extended: true - name: Setup Node - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: - node-version: '22.20.0' + node-version: '24.11.0' # TODO: Renovate helper might not be needed here - needs to be fully tested - name: Cache dependencies uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 diff --git a/.github/workflows/helm-docs-updates.yml b/.github/workflows/helm-docs-updates.yml deleted file mode 100644 index 0d70215e146..00000000000 --- a/.github/workflows/helm-docs-updates.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Update HELM docs for Renovate & Dependabot - -on: - pull_request: - branches: - - master - - dev - - bugfix - - release/** - - hotfix/** - -jobs: - docs_updates: - name: Update documentation - runs-on: ubuntu-latest - if: startsWith(github.head_ref, 'renovate/') || startsWith(github.head_ref, 'dependabot/') - steps: - - name: Checkout - uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - - name: Run helm-docs - uses: losisin/helm-docs-github-action@a57fae5676e4c55a228ea654a1bcaec8dd3cf5b5 # v1.6.2 - with: - chart-search-root: "helm/defectdojo" - git-push: true diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 784ee42b676..140c4f2befd 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -58,7 +58,7 @@ jobs: # load docker images from build jobs - name: Load images from artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: path: built-docker-image pattern: built-docker-image-* @@ -76,7 +76,7 @@ jobs: run: ln -s docker-compose.override.integration_tests.yml docker-compose.override.yml - name: Start Dojo - run: docker compose up --no-deps -d postgres nginx celerybeat celeryworker mailhog uwsgi redis + run: docker compose up --no-deps -d postgres nginx celerybeat celeryworker mailhog uwsgi valkey env: DJANGO_VERSION: ${{ matrix.os }} NGINX_VERSION: alpine diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 3d70879630d..237c27e4dc5 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -5,15 +5,6 @@ on: env: DD_HOSTNAME: defectdojo.default.minikube.local - HELM_REDIS_BROKER_SETTINGS: " \ - --set redis.enabled=true \ - --set celery.broker=redis \ - --set createRedisSecret=true \ - " - HELM_PG_DATABASE_SETTINGS: " \ - --set postgresql.enabled=true \ - --set createPostgresqlSecret=true \ - " jobs: setting_minikube_cluster: name: Kubernetes Deployment @@ -23,11 +14,11 @@ jobs: matrix: include: # databases, broker and k8s are independent, so we don't need to test each combination - # lastest k8s version (https://kubernetes.io/releases/) and oldest supported version from aws - # are tested 
(https://docs.aws.amazon.com/eks/latest/userguide/kubernetes-versions.html#available-versions) - - databases: pgsql - brokers: redis - k8s: 'v1.34.0' # renovate: datasource=github-releases depName=kubernetes/kubernetes + # latest k8s version (https://kubernetes.io/releases/) and the oldest officially supported version + # are tested (https://kubernetes.io/releases/) + - k8s: 'v1.34.0' # renovate: datasource=github-releases depName=kubernetes/kubernetes versioning=loose + os: debian + - k8s: 'v1.31.13' # Do not track with renovate as we likely want to rev this manually os: debian steps: - name: Checkout @@ -47,7 +38,7 @@ minikube status - name: Load images from artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: path: built-docker-image pattern: built-docker-image-* @@ -68,12 +59,6 @@ helm dependency list ./helm/defectdojo helm dependency update ./helm/defectdojo - - name: Set confings into Outputs - id: set - run: |- - echo "pgsql=${{ env.HELM_PG_DATABASE_SETTINGS }}" >> $GITHUB_ENV - echo "redis=${{ env.HELM_REDIS_BROKER_SETTINGS }}" >> $GITHUB_ENV - - name: Deploying Django application with ${{ matrix.databases }} ${{ matrix.brokers }} timeout-minutes: 15 run: |- @@ -84,10 +69,14 @@ defectdojo \ ./helm/defectdojo \ --set django.ingress.enabled=true \ + --set images.django.image.tag=latest \ + --set images.nginx.image.tag=latest \ --set imagePullPolicy=Never \ --set initializer.keepSeconds="-1" \ - ${{ env[matrix.databases] }} \ - ${{ env[matrix.brokers] }} \ + --set redis.enabled=true \ + --set createRedisSecret=true \ + --set postgresql.enabled=true \ + --set createPostgresqlSecret=true \ --set createSecret=true - name: Check deployment status diff --git a/.github/workflows/release-1-create-pr.yml b/.github/workflows/release-1-create-pr.yml index 4e4b710400f..7d3f9bb64a0 100644 --- 
a/.github/workflows/release-1-create-pr.yml +++ b/.github/workflows/release-1-create-pr.yml @@ -98,7 +98,7 @@ jobs: chart-search-root: "helm/defectdojo" - name: Push version changes - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1 + uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0 with: commit_user_name: "${{ env.GIT_USERNAME }}" commit_user_email: "${{ env.GIT_EMAIL }}" diff --git a/.github/workflows/release-3-master-into-dev.yml b/.github/workflows/release-3-master-into-dev.yml index d13ce0a9323..15674b5af40 100644 --- a/.github/workflows/release-3-master-into-dev.yml +++ b/.github/workflows/release-3-master-into-dev.yml @@ -86,7 +86,7 @@ jobs: chart-search-root: "helm/defectdojo" - name: Push version changes - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1 + uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0 with: commit_user_name: "${{ env.GIT_USERNAME }}" commit_user_email: "${{ env.GIT_EMAIL }}" @@ -162,7 +162,7 @@ jobs: chart-search-root: "helm/defectdojo" - name: Push version changes - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1 + uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0 with: commit_user_name: "${{ env.GIT_USERNAME }}" commit_user_email: "${{ env.GIT_EMAIL }}" diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 17e8324ca27..baa804441a0 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -47,7 +47,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Load OAS files from artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: pattern: oas-* diff --git 
a/.github/workflows/release-x-manual-docker-containers.yml b/.github/workflows/release-x-manual-docker-containers.yml index a492bed7518..eb3c001e680 100644 --- a/.github/workflows/release-x-manual-docker-containers.yml +++ b/.github/workflows/release-x-manual-docker-containers.yml @@ -89,7 +89,7 @@ jobs: # upload the digest file as artifact - name: Upload digest - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: name: digests-${{ matrix.docker-image}}-${{ matrix.os }}-${{ env.PLATFORM }} path: ${{ runner.temp }}/digests/* diff --git a/.github/workflows/release-x-manual-helm-chart.yml b/.github/workflows/release-x-manual-helm-chart.yml index b6b88edc1c2..a1105697c7d 100644 --- a/.github/workflows/release-x-manual-helm-chart.yml +++ b/.github/workflows/release-x-manual-helm-chart.yml @@ -69,16 +69,6 @@ jobs: helm dependency list ./helm/defectdojo helm dependency update ./helm/defectdojo - - name: Add yq - uses: mikefarah/yq@6251e95af8df3505def48c71f3119836701495d6 # v4.47.2 - - - name: Pin version docker version - id: pin_image - run: |- - yq --version - yq -i '.tag="${{ inputs.release_number }}"' helm/defectdojo/values.yaml - echo "Current image tag:`yq -r '.tag' helm/defectdojo/values.yaml`" - - name: Package Helm chart id: package-helm-chart run: | @@ -87,7 +77,7 @@ jobs: echo "chart_version=$(ls build | cut -d '-' -f 2,3 | sed 's|\.tgz||')" >> $GITHUB_ENV - name: Create release ${{ inputs.release_number }} - uses: softprops/action-gh-release@62c96d0c4e8a889135c1f3a25910db8dbe0e85f7 # v2.3.4 + uses: softprops/action-gh-release@6da8fa9354ddfdc4aeace5fc48d7f679b5214090 # v2.4.1 with: name: '${{ inputs.release_number }} 🌈' tag_name: ${{ inputs.release_number }} diff --git a/.github/workflows/release-x-manual-merge-container-digests.yml b/.github/workflows/release-x-manual-merge-container-digests.yml index 65abfdb7e08..156d3dfb28f 100644 --- 
a/.github/workflows/release-x-manual-merge-container-digests.yml +++ b/.github/workflows/release-x-manual-merge-container-digests.yml @@ -41,7 +41,7 @@ jobs: # only download digests for this image and this os - name: Download digests - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: path: ${{ runner.temp }}/digests pattern: digests-${{ matrix.docker-image}}-${{ matrix.os }}-* diff --git a/.github/workflows/renovate.yaml b/.github/workflows/renovate.yaml index 0b9ee77e1c7..4639ecea596 100644 --- a/.github/workflows/renovate.yaml +++ b/.github/workflows/renovate.yaml @@ -21,4 +21,4 @@ jobs: uses: suzuki-shunsuke/github-action-renovate-config-validator@c22827f47f4f4a5364bdba19e1fe36907ef1318e # v1.1.1 with: strict: "true" - validator_version: 41.146.0 # renovate: datasource=github-releases depName=renovatebot/renovate + validator_version: 41.168.0 # renovate: datasource=github-releases depName=renovatebot/renovate diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml index 5df066ec486..23aa9a0af0c 100644 --- a/.github/workflows/rest-framework-tests.yml +++ b/.github/workflows/rest-framework-tests.yml @@ -36,7 +36,7 @@ jobs: # load docker images from build jobs - name: Load images from artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: path: built-docker-image pattern: built-docker-image-* diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index ab338bfa37b..99a51ddcf6d 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -2,10 +2,7 @@ name: Shellcheck on: pull_request: -env: - SHELLCHECK_REPO: 'koalaman/shellcheck' - SHELLCHECK_VERSION: 'v0.9.0' # renovate: datasource=github-releases 
depName=koalaman/shellcheck - SHELLCHECK_SHA: '038fd81de6b7e20cc651571362683853670cdc71' # Renovate config is not currently adjusted to update hash - it needs to be done manually for now + jobs: shellcheck: runs-on: ubuntu-latest @@ -13,113 +10,9 @@ jobs: - name: Checkout uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - name: Grab shellcheck - run: | - set -e - - SHELLCHECK_TARBALL_URL="https://github.com/${SHELLCHECK_REPO}/releases/download/${SHELLCHECK_VERSION}/shellcheck-${SHELLCHECK_VERSION}.linux.x86_64.tar.xz" - SHELLCHECK_TARBALL_LOC="shellcheck.tar.xz" - curl -L "${SHELLCHECK_TARBALL_URL}" -o "${SHELLCHECK_TARBALL_LOC}" - tarball_sha=$(shasum ${SHELLCHECK_TARBALL_LOC} | awk '{print $1}') - if [ "${tarball_sha}" != "${SHELLCHECK_SHA}" ]; then - echo "Got invalid SHA for shellcheck: ${tarball_sha}" - exit 1 - fi - tar -xvf "${SHELLCHECK_TARBALL_LOC}" - cd "shellcheck-${SHELLCHECK_VERSION}" || exit 1 - mv shellcheck "${GITHUB_WORKSPACE}/shellcheck" - - - name: Run shellcheck - shell: bash - run: | - set -o pipefail - - # Make sure we already put the proper shellcheck binary in place - if [ ! -f "./shellcheck" ]; then - echo "shellcheck not found" - exit 1 - fi - - # Make sure we know what to compare the PR's changes against - if [ -z "${GITHUB_BASE_REF}" ]; then - echo "No base reference supplied" - exit 1 - fi - - num_findings=0 - - # Execute shellcheck and add errors based on the output - run_shellcheck() { - local modified_shell_script="${1}" - local findings_file="findings.txt" - - # Remove leftover findings file from previous iterations - if [ -f "${findings_file}" ]; then - rm "${findings_file}" - fi - - echo "Running shellcheck against ${modified_shell_script}..." 
- - # If shellcheck reported no errors (exited with 0 status code), return - if ./shellcheck -f json -S warning "${modified_shell_script}" | jq -c '.[]' > "${findings_file}"; then - return 0 - fi - - # Walk each of the individual findings - while IFS= read -r finding; do - num_findings=$((num_findings+1)) - - line=$(echo "${finding}" | jq '.line') - end_line=$(echo "${finding}" | jq '.endLine') - column=$(echo "${finding}" | jq '.column') - end_column=$(echo "${finding}" | jq '.endColumn') - code=$(echo "${finding}" | jq '.code') - title="SC${code}" - message="$(echo "${finding}" | jq -r '.message') See https://github.com/koalaman/shellcheck/wiki/${title}" - - echo "Line: ${line}" - echo "End line: ${end_line}" - echo "Column: ${column}" - echo "End column: ${end_column}" - echo "Title: ${title}" - echo "Message: ${message}" - - # Raise an error with the file/line/etc - echo "::error file=${modified_shell_script},line=${line},endLine=${end_line},column=${column},endColumn=${end_column},title=${title}::${message}" - done < ${findings_file} - } - - # Find the shell scripts that were created or modified by this PR - find_modified_shell_scripts() { - shell_scripts="shell_scripts.txt" - modified_files="modified_files.txt" - modified_shell_scripts="modified_shell_scripts.txt" - - find . -name "*.sh" -or -name "*.bash" | sed 's#^\./##' > "${shell_scripts}" - git diff --name-only "origin/${GITHUB_BASE_REF}" HEAD > "${modified_files}" - - if [ ! -s "${shell_scripts}" ] || [ ! -s "${modified_files}" ]; then - echo "No modified shell scripts detected" - exit 0 - fi - - if ! 
grep -Fxf "${shell_scripts}" "${modified_files}" > "${modified_shell_scripts}"; then - echo "No modified shell scripts detected" - exit 0 - fi - } - - git fetch origin "${GITHUB_BASE_REF}" || exit 1 - - find_modified_shell_scripts - - # Loop through the modified shell scripts - while IFS= read -r modified_shell_script; do - run_shellcheck "${modified_shell_script}" - done < ${modified_shell_scripts} - - # If shellcheck reported any findings, fail the workflow - if [ ${num_findings} -gt 0 ]; then - echo "shellcheck reported ${num_findings} findings." - exit 1 - fi + - name: Run ShellCheck + uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0 + with: + version: 'v0.11.0' # renovate: datasource=github-releases depName=koalaman/shellcheck versioning=loose + env: + SHELLCHECK_OPTS: -e SC1091 -e SC2086 # TODO: fix following findings diff --git a/.github/workflows/test-helm-chart.yml b/.github/workflows/test-helm-chart.yml index f7e9199ab67..c35698e51e9 100644 --- a/.github/workflows/test-helm-chart.yml +++ b/.github/workflows/test-helm-chart.yml @@ -24,7 +24,7 @@ jobs: - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: - python-version: 3.13 # Renovate helper is not needed here + python-version: 3.14 # Renovate helper is not needed here - name: Configure Helm repos run: |- @@ -34,8 +34,8 @@ jobs: - name: Set up chart-testing uses: helm/chart-testing-action@0d28d3144d3a25ea2cc349d6e59901c4ff469b3b # v2.7.0 with: - yamale_version: 4.0.4 # renovate: datasource=pypi depName=yamale versioning=semver - yamllint_version: 1.35.1 # renovate: datasource=pypi depName=yamllint versioning=semver + yamale_version: 6.0.0 # renovate: datasource=pypi depName=yamale versioning=semver + yamllint_version: 1.37.1 # renovate: datasource=pypi depName=yamllint versioning=semver - name: Determine target branch id: ct-branch-target @@ -68,15 +68,23 @@ jobs: - name: Check update of "artifacthub.io/changes" HELM annotation if: 
env.changed == 'true' run: | + # fast fail if `git show` fails + set -e + set -o pipefail + target_branch=${{ env.ct-branch }} echo "Checking Chart.yaml annotation changes" # Get current branch annotation current_annotation=$(yq e '.annotations."artifacthub.io/changes"' "helm/defectdojo/Chart.yaml") + echo "Current annotation: " + echo $current_annotation # Get target branch version of Chart.yaml annotation - target_annotation=$(git show "${{ env.ct-branch }}:helm/defectdojo/Chart.yaml" | yq e '.annotations."artifacthub.io/changes"' -) + target_annotation=$(git show "origin/${{ env.ct-branch }}:helm/defectdojo/Chart.yaml" | yq e '.annotations."artifacthub.io/changes"' -) + echo "Target annotation: " + echo $target_annotation if [[ "$current_annotation" == "$target_annotation" ]]; then echo "::error file=helm/defectdojo/Chart.yaml::The 'artifacthub.io/changes' annotation has not been updated compared to ${{ env.ct-branch }}. For more, check the hint in 'helm/defectdojo/Chart.yaml'" @@ -99,12 +107,25 @@ jobs: steps: - name: Checkout uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - + + - name: Update values in HELM chart + if: startsWith(github.head_ref, 'renovate/') || startsWith(github.head_ref, 'dependabot/') + run: | + yq -i '.annotations."artifacthub.io/changes" += "- kind: changed\n description: ${{ github.event.pull_request.title }}\n"' helm/defectdojo/Chart.yaml + + - name: Run helm-docs (update) + uses: losisin/helm-docs-github-action@a57fae5676e4c55a228ea654a1bcaec8dd3cf5b5 # v1.6.2 + if: startsWith(github.head_ref, 'renovate/') || startsWith(github.head_ref, 'dependabot/') + with: + chart-search-root: "helm/defectdojo" + git-push: true + # Documentation provided in the README file needs to contain the latest information from `values.yaml` and all other related assets. # If this step fails, install https://github.com/norwoodj/helm-docs and run locally `helm-docs --chart-search-root helm/defectdojo` before committing your changes. 
# The helm-docs documentation will be generated for you. - - name: Run helm-docs + - name: Run helm-docs (check) uses: losisin/helm-docs-github-action@a57fae5676e4c55a228ea654a1bcaec8dd3cf5b5 # v1.6.2 + if: ${{ !(startsWith(github.head_ref, 'renovate/') || startsWith(github.head_ref, 'dependabot/')) }} with: fail-on-diff: true chart-search-root: "helm/defectdojo" @@ -121,7 +142,7 @@ # If this step fails, install https://github.com/losisin/helm-values-schema-json and run locally `helm schema --use-helm-docs` in `helm/defectdojo` before committing your changes. # The helm schema will be generated for you. - name: Generate values schema json - uses: losisin/helm-values-schema-json-action@d5847286fa04322702c4f8d45031974798c83ac7 # v2.3.0 + uses: losisin/helm-values-schema-json-action@660c441a4a507436a294fc55227e1df54aca5407 # v2.3.1 with: fail-on-diff: true working-directory: "helm/defectdojo" diff --git a/.github/workflows/validate_docs_build.yml b/.github/workflows/validate_docs_build.yml index 0a972a6f1f8..01e2371bec3 100644 --- a/.github/workflows/validate_docs_build.yml +++ b/.github/workflows/validate_docs_build.yml @@ -16,9 +16,9 @@ jobs: extended: true - name: Setup Node - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: - node-version: '22.20.0' + node-version: '24.11.0' # TODO: Renovate helper might not be needed here - needs to be fully tested - name: Cache dependencies uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 diff --git a/Dockerfile.django-alpine b/Dockerfile.django-alpine index 010017b0f50..bcca856298a 100644 --- a/Dockerfile.django-alpine +++ b/Dockerfile.django-alpine @@ -5,7 +5,7 @@ # Dockerfile.nginx to use the caching mechanism of Docker. 
# Ref: https://devguide.python.org/#branchstatus -FROM python:3.12.11-alpine3.22@sha256:02a73ead8397e904cea6d17e18516f1df3590e05dc8823bd5b1c7f849227d272 AS base +FROM python:3.13.7-alpine3.22@sha256:9ba6d8cbebf0fb6546ae71f2a1c14f6ffd2fdab83af7fa5669734ef30ad48844 AS base FROM base AS build WORKDIR /app RUN \ diff --git a/Dockerfile.django-debian b/Dockerfile.django-debian index b8077bb0b77..e816d204e05 100644 --- a/Dockerfile.django-debian +++ b/Dockerfile.django-debian @@ -5,7 +5,7 @@ # Dockerfile.nginx to use the caching mechanism of Docker. # Ref: https://devguide.python.org/#branchstatus -FROM python:3.12.11-slim-trixie@sha256:d67a7b66b989ad6b6d6b10d428dcc5e0bfc3e5f88906e67d490c4d3daac57047 AS base +FROM python:3.13.7-slim-trixie@sha256:5f55cdf0c5d9dc1a415637a5ccc4a9e18663ad203673173b8cda8f8dcacef689 AS base FROM base AS build WORKDIR /app RUN \ diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index 95398cb6e8e..06cf3b7c435 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -3,7 +3,7 @@ FROM openapitools/openapi-generator-cli:v7.16.0@sha256:e56372add5e038753fb91aa1bbb470724ef58382fdfc35082bf1b3e079ce353c AS openapitools # currently only supports x64, no arm yet due to chrome and selenium dependencies -FROM python:3.12.11-slim-trixie@sha256:d67a7b66b989ad6b6d6b10d428dcc5e0bfc3e5f88906e67d490c4d3daac57047 AS build +FROM python:3.13.7-slim-trixie@sha256:5f55cdf0c5d9dc1a415637a5ccc4a9e18663ad203673173b8cda8f8dcacef689 AS build WORKDIR /app RUN \ apt-get -y update && \ diff --git a/Dockerfile.nginx-alpine b/Dockerfile.nginx-alpine index fd50cb9e472..7c608d08444 100644 --- a/Dockerfile.nginx-alpine +++ b/Dockerfile.nginx-alpine @@ -5,7 +5,7 @@ # Dockerfile.django-alpine to use the caching mechanism of Docker. 
# Ref: https://devguide.python.org/#branchstatus -FROM python:3.12.11-alpine3.22@sha256:02a73ead8397e904cea6d17e18516f1df3590e05dc8823bd5b1c7f849227d272 AS base +FROM python:3.13.7-alpine3.22@sha256:9ba6d8cbebf0fb6546ae71f2a1c14f6ffd2fdab83af7fa5669734ef30ad48844 AS base FROM base AS build WORKDIR /app RUN \ @@ -63,7 +63,7 @@ COPY dojo/ ./dojo/ # always collect static for debug toolbar as we can't make it dependant on env variables or build arguments without breaking docker layer caching RUN env DD_SECRET_KEY='.' DD_DJANGO_DEBUG_TOOLBAR_ENABLED=True python3 manage.py collectstatic --noinput --verbosity=2 && true -FROM nginx:1.29.1-alpine3.22@sha256:42a516af16b852e33b7682d5ef8acbd5d13fe08fecadc7ed98605ba5e3b26ab8 +FROM nginx:1.29.2-alpine3.22@sha256:61e01287e546aac28a3f56839c136b31f590273f3b41187a36f46f6a03bbfe22 ARG uid=1001 ARG appuser=defectdojo COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/ diff --git a/README.md b/README.md index e239a7f6baf..f9d2511b07c 100644 --- a/README.md +++ b/README.md @@ -56,8 +56,8 @@ cd django-DefectDojo # Building Docker images docker compose build -# Run the application (for other profiles besides postgres-redis see -# https://github.com/DefectDojo/django-DefectDojo/blob/dev/readme-docs/DOCKER.md) +# Run the application +# (see https://github.com/DefectDojo/django-DefectDojo/blob/dev/readme-docs/DOCKER.md for more info) docker compose up -d # Obtain admin credentials. The initializer can take up to 3 minutes to run. @@ -67,7 +67,7 @@ docker compose logs initializer | grep "Admin password:" ## For Docker Compose V1 -You can run Compose V1 by calling `docker-compose` (by adding the hyphen (-) between `docker compose`). +You can run Compose V1 by calling `docker-compose` (by adding the hyphen (-) between `docker compose`). 
Following commands are using original version so you might need to adjust them: ```sh @@ -132,8 +132,8 @@ Moderators can help you with pull requests or feedback on dev ideas: * Blake Owens ([@blakeaowens](https://github.com/blakeaowens)) ## Hall of Fame -* Jannik Jürgens ([@alles-klar](https://github.com/alles-klar)) - Jannik was a long time contributor and moderator for - DefectDojo and made significant contributions to many areas of the platform. Jannik was instrumental in pioneering +* Jannik Jürgens ([@alles-klar](https://github.com/alles-klar)) - Jannik was a long time contributor and moderator for + DefectDojo and made significant contributions to many areas of the platform. Jannik was instrumental in pioneering and optimizing deployment methods. * Valentijn Scholten ([@valentijnscholten](https://github.com/valentijnscholten) | [Sponsor](https://github.com/sponsors/valentijnscholten) | diff --git a/components/package.json b/components/package.json index 34a3610dbbc..bf6b25cf39d 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.51.3", + "version": "2.52.0", "license" : "BSD-3-Clause", "private": true, "dependencies": { @@ -14,7 +14,7 @@ "clipboard": "^2.0.11", "datatables.net": "^2.3.4", "datatables.net-buttons-bs": "^3.2.5", - "datatables.net-colreorder": "^2.1.1", + "datatables.net-colreorder": "^2.1.2", "drmonty-datatables-plugins": "^1.0.0", "drmonty-datatables-responsive": "^1.0.0", "easymde": "^2.20.0", diff --git a/components/yarn.lock b/components/yarn.lock index 78aa6e5e86e..9df054d62d4 100644 --- a/components/yarn.lock +++ b/components/yarn.lock @@ -204,10 +204,10 @@ datatables.net-buttons@3.2.5: datatables.net "^2" jquery ">=1.7" -datatables.net-colreorder@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/datatables.net-colreorder/-/datatables.net-colreorder-2.1.1.tgz#ddcbfb27d5e2b97fe8ce4acdb8ca35442a801fe5" - integrity 
sha512-alhSZYEYmxsXujl43nIHh2+Ym8o/CBm/2kPIExcUz7sOB8FOw2Q614KztqRYh46V5IA+RUuGSxzodjakZ63wAQ== +datatables.net-colreorder@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/datatables.net-colreorder/-/datatables.net-colreorder-2.1.2.tgz#cf45eae93f4afd0bbe2f34d47105b312defa8cc7" + integrity sha512-lIsUyOt2nBm4sD2cSzDKZcIVrGgrZkh90Z2f03s8p7DYcZSfXMHAhFBrDYf9/eAK6wJnODN8EDMsrtPHfgoSXA== dependencies: datatables.net "^2" jquery ">=1.7" diff --git a/docker-compose.override.unit_tests.yml b/docker-compose.override.unit_tests.yml index d1b90f57fdd..439abea2d3f 100644 --- a/docker-compose.override.unit_tests.yml +++ b/docker-compose.override.unit_tests.yml @@ -42,7 +42,7 @@ services: POSTGRES_DB: ${DD_TEST_DATABASE_NAME:-test_defectdojo} volumes: - defectdojo_postgres_unit_tests:/var/lib/postgresql/data - redis: !reset + valkey: !reset "webhook.endpoint": image: mccutchen/go-httpbin:2.18.3@sha256:3992f3763e9ce5a4307eae0a869a78b4df3931dc8feba74ab823dd2444af6a6b volumes: diff --git a/docker-compose.override.unit_tests_cicd.yml b/docker-compose.override.unit_tests_cicd.yml index 8d6eec1701c..0acd340ce4c 100644 --- a/docker-compose.override.unit_tests_cicd.yml +++ b/docker-compose.override.unit_tests_cicd.yml @@ -41,7 +41,7 @@ services: POSTGRES_DB: ${DD_TEST_DATABASE_NAME:-test_defectdojo} volumes: - defectdojo_postgres_unit_tests:/var/lib/postgresql/data - redis: !reset + valkey: !reset "webhook.endpoint": image: mccutchen/go-httpbin:2.18.3@sha256:3992f3763e9ce5a4307eae0a869a78b4df3931dc8feba74ab823dd2444af6a6b volumes: diff --git a/docker-compose.yml b/docker-compose.yml index f18651fa52e..24832c74e3e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,7 +41,7 @@ services: condition: service_completed_successfully postgres: condition: service_started - redis: + valkey: condition: service_started entrypoint: ['/wait-for-it.sh', '${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}', '-t', '30', '--', '/entrypoint-uwsgi.sh'] environment: @@ 
-49,7 +49,7 @@ services: DD_DJANGO_METRICS_ENABLED: "${DD_DJANGO_METRICS_ENABLED:-False}" DD_ALLOWED_HOSTS: "${DD_ALLOWED_HOSTS:-*}" DD_DATABASE_URL: ${DD_DATABASE_URL:-postgresql://defectdojo:defectdojo@postgres:5432/defectdojo} - DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://redis:6379/0} + DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://valkey:6379/0} DD_SECRET_KEY: "${DD_SECRET_KEY:-hhZCp@D28z!n@NED*yB!ROMt+WzsY*iq}" DD_CREDENTIAL_AES_256_KEY: "${DD_CREDENTIAL_AES_256_KEY:-&91a*agLqesc*0DJ+2*bAbsUZfR*4nLw}" DD_DATABASE_READINESS_TIMEOUT: "${DD_DATABASE_READINESS_TIMEOUT:-30}" @@ -65,12 +65,12 @@ services: condition: service_completed_successfully postgres: condition: service_started - redis: + valkey: condition: service_started entrypoint: ['/wait-for-it.sh', '${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}', '-t', '30', '--', '/entrypoint-celery-beat.sh'] environment: DD_DATABASE_URL: ${DD_DATABASE_URL:-postgresql://defectdojo:defectdojo@postgres:5432/defectdojo} - DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://redis:6379/0} + DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://valkey:6379/0} DD_SECRET_KEY: "${DD_SECRET_KEY:-hhZCp@D28z!n@NED*yB!ROMt+WzsY*iq}" DD_CREDENTIAL_AES_256_KEY: "${DD_CREDENTIAL_AES_256_KEY:-&91a*agLqesc*0DJ+2*bAbsUZfR*4nLw}" DD_DATABASE_READINESS_TIMEOUT: "${DD_DATABASE_READINESS_TIMEOUT:-30}" @@ -85,12 +85,12 @@ services: condition: service_completed_successfully postgres: condition: service_started - redis: + valkey: condition: service_started entrypoint: ['/wait-for-it.sh', '${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}', '-t', '30', '--', '/entrypoint-celery-worker.sh'] environment: DD_DATABASE_URL: ${DD_DATABASE_URL:-postgresql://defectdojo:defectdojo@postgres:5432/defectdojo} - DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://redis:6379/0} + DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://valkey:6379/0} DD_SECRET_KEY: "${DD_SECRET_KEY:-hhZCp@D28z!n@NED*yB!ROMt+WzsY*iq}" 
DD_CREDENTIAL_AES_256_KEY: "${DD_CREDENTIAL_AES_256_KEY:-&91a*agLqesc*0DJ+2*bAbsUZfR*4nLw}" DD_DATABASE_READINESS_TIMEOUT: "${DD_DATABASE_READINESS_TIMEOUT:-30}" @@ -120,19 +120,21 @@ services: source: ./docker/extra_settings target: /app/docker/extra_settings postgres: - image: postgres:18.0-alpine@sha256:70b32afe0c274b4d93098fd724fcdaab3aba47270a4f1e63cbf9cc69d7bf1be4 + image: postgres:18.0-alpine@sha256:48c8ad3a7284b82be4482a52076d47d879fd6fb084a1cbfccbd551f9331b0e40 environment: POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo} POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo} POSTGRES_PASSWORD: ${DD_DATABASE_PASSWORD:-defectdojo} volumes: - defectdojo_postgres:/var/lib/postgresql/data - redis: - # Pinning to this version due to licensing constraints - image: redis:7.2.11-alpine@sha256:7632e82373929f39cdbead93f2e45d8b3cd295072c4755e00e7e6b19d56cc512 + valkey: + image: valkey/valkey:7.2.11-alpine@sha256:7b2019b47ad58be661fa6eba5ea66106eadde03459387113aaed29a464a5876b volumes: + # we keep using the redis volume as renaming is not possible and copying data over + # would require steps during downtime or complex commands in the initializer - defectdojo_redis:/data volumes: defectdojo_postgres: {} defectdojo_media: {} defectdojo_redis: {} + diff --git a/docs/assets/images/priority_chooseengine.png b/docs/assets/images/priority_chooseengine.png new file mode 100644 index 00000000000..c3211fa0e92 Binary files /dev/null and b/docs/assets/images/priority_chooseengine.png differ diff --git a/docs/assets/images/priority_default.png b/docs/assets/images/priority_default.png new file mode 100644 index 00000000000..65443684094 Binary files /dev/null and b/docs/assets/images/priority_default.png differ diff --git a/docs/assets/images/priority_engine_new.png b/docs/assets/images/priority_engine_new.png new file mode 100644 index 00000000000..9021231e76d Binary files /dev/null and b/docs/assets/images/priority_engine_new.png differ diff --git a/docs/assets/images/priority_sliders.png
b/docs/assets/images/priority_sliders.png new file mode 100644 index 00000000000..4fc720ab8a0 Binary files /dev/null and b/docs/assets/images/priority_sliders.png differ diff --git a/docs/assets/images/risk_threshold.png b/docs/assets/images/risk_threshold.png new file mode 100644 index 00000000000..0e0a6215f72 Binary files /dev/null and b/docs/assets/images/risk_threshold.png differ diff --git a/docs/content/en/connecting_your_tools/connectors/connectors_tool_reference.md b/docs/content/en/connecting_your_tools/connectors/connectors_tool_reference.md index 5ef730db7d1..67c6b892e0c 100644 --- a/docs/content/en/connecting_your_tools/connectors/connectors_tool_reference.md +++ b/docs/content/en/connecting_your_tools/connectors/connectors_tool_reference.md @@ -172,6 +172,8 @@ The SonarQube Connector can fetch data from either a SonarCloud account or from 1. Enter the base url of your SonarQube instance in the Location field: for example `https://my.sonarqube.com/` 2. Enter a valid **API key** in the Secret field. This will need to be a **[User](https://docs.sonarsource.com/sonarqube/latest/user-guide/user-account/generating-and-using-tokens/)** [API Token Type](https://docs.sonarsource.com/sonarqube/latest/user-guide/user-account/generating-and-using-tokens/). +The token will need to have access to Projects, Vulnerabilities and Hotspots within Sonar. + API tokens can be found and generated via **My Account \-\> Security \-\> Generate Token** in the SonarQube app. For more information, [see SonarQube documentation](https://docs.sonarsource.com/sonarqube/latest/user-guide/user-account/generating-and-using-tokens/). ## **Snyk** @@ -187,7 +189,7 @@ See the [Snyk API documentation](https://docs.snyk.io/snyk-api) for more info. ## Tenable -The Tenable connector uses the **Tenable.io** REST API to fetch data. +The Tenable connector uses the **Tenable.io** REST API to fetch data. 
Currently, only vulnerability scans are imported - Web App Scans cannot be imported with the Connector. On\-premise Tenable Connectors are not available at this time. diff --git a/docs/content/en/connecting_your_tools/parsers/file/github_sast.md b/docs/content/en/connecting_your_tools/parsers/file/github_sast.md new file mode 100644 index 00000000000..a551d9ea0ef --- /dev/null +++ b/docs/content/en/connecting_your_tools/parsers/file/github_sast.md @@ -0,0 +1,9 @@ +--- +title: "Github SAST Scan" +toc_hide: true +--- +Import findings in JSON format from Github Code Scanning REST API: + + +### Sample Scan Data +Sample Github SAST scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/github_sast). \ No newline at end of file diff --git a/docs/content/en/connecting_your_tools/parsers/file/github_vulnerability.md b/docs/content/en/connecting_your_tools/parsers/file/github_vulnerability.md index 4d92f546685..5705165913a 100644 --- a/docs/content/en/connecting_your_tools/parsers/file/github_vulnerability.md +++ b/docs/content/en/connecting_your_tools/parsers/file/github_vulnerability.md @@ -1,5 +1,5 @@ --- -title: "Github Vulnerability" +title: "Github Vulnerability Scan" toc_hide: true --- Import findings from Github vulnerability scan (GraphQL Query): @@ -15,6 +15,8 @@ vulnerabilityAlerts (RepositoryVulnerabilityAlert object) + createdAt (optional) + vulnerableManifestPath + state (optional) + + dependabotUpdate (DependabotUpdate object) (optional) + + pullRequest (PullRequest object) (optional) + securityVulnerability (SecurityVulnerability object) + severity (CRITICAL/HIGH/LOW/MODERATE) + package (optional) @@ -27,10 +29,17 @@ vulnerabilityAlerts (RepositoryVulnerabilityAlert object) + value + references (optional) + url (optional) - + cvss (optional) + + cvss (optional - deprecated, use cvssSeverities instead) + score (optional) + vectorString (optional) + + cvssSeverities (optional) + + cvssV3 (CVSS object) (optional) + 
+ score (optional) + + vectorString (optional) + cwes (optional) + + epss (EPSS object) (optional) + + percentage (optional) + + percentile (optional) ``` References: diff --git a/docs/content/en/connecting_your_tools/parsers/file/mobsf.md b/docs/content/en/connecting_your_tools/parsers/file/mobsf.md index 7bbbf564a0c..caac14fbf14 100644 --- a/docs/content/en/connecting_your_tools/parsers/file/mobsf.md +++ b/docs/content/en/connecting_your_tools/parsers/file/mobsf.md @@ -2,7 +2,9 @@ title: "MobSF Scanner" toc_hide: true --- -Export a JSON file using the API, api/v1/report\_json. +"Mobsfscan Scan" has been merged into the "MobSF Scan" parser. The "Mobsfscan Scan" scan_type has been retained to keep deduplication working for existing Tests, but users are encouraged to move to the "MobSF Scan" scan_type. + +Export a JSON file using the API, api/v1/report\_json and import it to Defectdojo or import a JSON report from ### Sample Scan Data Sample MobSF Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/mobsf). diff --git a/docs/content/en/connecting_your_tools/parsers/file/mobsfscan.md b/docs/content/en/connecting_your_tools/parsers/file/mobsfscan.md deleted file mode 100644 index 2c39d114287..00000000000 --- a/docs/content/en/connecting_your_tools/parsers/file/mobsfscan.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Mobsfscan" -toc_hide: true ---- -Import JSON report from - -### Sample Scan Data -Sample Mobsfscan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/mobsfscan). 
- -### Default Deduplication Hashcode Fields -By default, DefectDojo identifies duplicate Findings using these [hashcode fields](https://docs.defectdojo.com/en/working_with_findings/finding_deduplication/about_deduplication/): - -- title -- severity -- cwe -- file path -- description diff --git a/docs/content/en/open_source/installation/architecture.md b/docs/content/en/open_source/installation/architecture.md index cd3d70710d6..d3085609844 100644 --- a/docs/content/en/open_source/installation/architecture.md +++ b/docs/content/en/open_source/installation/architecture.md @@ -21,7 +21,9 @@ dynamic content. ## Message Broker The application server sends tasks to a [Message Broker](https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html) -for asynchronous execution. Currently, only [Redis](https://github.com/redis/redis) is supported as a broker. +for asynchronous execution. Currently, only [Valkey](https://valkey.io/) is supported as a broker in the docker compose setup. +The Helm chart still uses [Redis](https://github.com/redis/redis) as a broker, but will be migrated to Valkey shortly. + ## Celery Worker diff --git a/docs/content/en/open_source/ldap-authentication.md b/docs/content/en/open_source/ldap-authentication.md index ba246ae8aa1..e8db98ff232 100644 --- a/docs/content/en/open_source/ldap-authentication.md +++ b/docs/content/en/open_source/ldap-authentication.md @@ -7,12 +7,12 @@ weight: 4 ## LDAP Authentication -Out of the box Defect Dojo does not support LDAP authentication. +Out of the box DefectDojo does not support LDAP authentication. -*However*, since Defect Dojo is built using Django, it isn't too difficult to add support for LDAP. +*However*, since DefectDojo is built using Django, it isn't too difficult to add support for LDAP. So long as you don't mind building your own Docker images... -We will need to modify a grand total of 4-5 files, depending on how you want to pass Dojo your LDAP secrets.
+We will need to modify a grand total of 4-5 files, depending on how you want to pass DefectDojo your LDAP secrets. - Dockerfile.django-* - Dockerfile.nginx-* @@ -23,7 +23,14 @@ We will need to modify a grand total of 4-5 files, depending on how you want to #### Dockerfile modifications -In both Dockerfile.django and Dockerfile.nginx, you want to add the following lines to the apt-get install layers: +In both `Dockerfile.django-alpine` and `Dockerfile.nginx-alpine`, you need to add the following lines to the `apk add` layers: + +```bash +openldap-dev \ +cyrus-sasl-dev \ +``` + +Also, in `Dockerfile.django-debian`, you need to add the following lines to the `apt-get install` layers: ```bash libldap2-dev \ @@ -42,8 +49,8 @@ Please check for the latest version of these requirements at the time of impleme Otherwise add the following to requirements.txt: ```python -python-ldap==3.4.2 -django-auth-ldap==4.1.0 +python-ldap==3.4.5 +django-auth-ldap==5.2.0 ``` @@ -55,14 +62,17 @@ At the top of the file: ```python import ldap from django_auth_ldap.config import LDAPSearch, GroupOfNamesType +import environ ``` Then further down add LDAP settings to the env dict: ```python # LDAP -DD_LDAP_SERVER_URI=(str, 'ldap://ldap.example.com'), -DD_LDAP_BIND_DN=(str, ''), -DD_LDAP_BIND_PASSWORD=(str, ''), +env = environ.FileAwareEnv( + DD_LDAP_SERVER_URI=(str, 'ldap://ldap.example.com'), + DD_LDAP_BIND_DN=(str, ''), + DD_LDAP_BIND_PASSWORD=(str, ''), +) ``` Then under the env dict add: @@ -70,6 +80,7 @@ Then under the env dict add: AUTH_LDAP_SERVER_URI = env('DD_LDAP_SERVER_URI') AUTH_LDAP_BIND_DN = env('DD_LDAP_BIND_DN') AUTH_LDAP_BIND_PASSWORD = env('DD_LDAP_BIND_PASSWORD') + AUTH_LDAP_USER_SEARCH = LDAPSearch( "ou=Groups,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(uid=%(user)s)" ) @@ -116,7 +127,7 @@ Read the docs for Django Authentication with LDAP here: https://django-auth-ldap #### docker-compose.yml -In order to pass the variables to the local_settings.py file via docker, it's a good 
idea to add these to the docker compose file. +In order to pass the variables to the `local_settings.py` file via docker, it's a good idea to add these to the `docker-compose.yml` file. You can do this by adding the following variables to the environment section for the uwsgi image: ```yaml @@ -125,4 +136,4 @@ DD_LDAP_BIND_DN: "${DD_LDAP_BIND_DN:-}" DD_LDAP_BIND_PASSWORD: "${DD_LDAP_BIND_PASSWORD:-}" ``` -Alternatively you can set these values in a local_settings.py file. +Alternatively you can set these values in a `local_settings.py` file. diff --git a/docs/content/en/open_source/upgrading/2.51.md b/docs/content/en/open_source/upgrading/2.51.md index 3ce5c95a6f5..6c796b103d7 100644 --- a/docs/content/en/open_source/upgrading/2.51.md +++ b/docs/content/en/open_source/upgrading/2.51.md @@ -48,6 +48,13 @@ The following Helm chart values have been modified in this release: - **Enhanced probe configuration for Celery**: Added support for customizing liveness, readiness, and startup probes in both Celery beat and worker deployments. - **Enhanced environment variable management**: All deployments now include `extraEnv` support for adding custom environment variables. For backwards compatibility, `.Values.extraEnv` can be used to inject common environment variables to all workloads. +## GitHub Scan Type and Parser Updates +The Github Vulnerability scan type and parser has been split into two distinct scan types: +- [Github Vulnerability](https://github.com/DefectDojo/django-DefectDojo/blob/master/docs/content/en/connecting_your_tools/parsers/file/github_vulnerability.md) (original) +- [Github SAST](https://github.com/DefectDojo/django-DefectDojo/blob/master/docs/content/en/connecting_your_tools/parsers/file/github_sast.md) + +The original Github Vulnerability scan type will continue to accept SCA vulnerabilities uploaded in GitHub's GraphQL format, as it has always done.
It will also continue to accept SAST uploads, however we recommend upgrading to the new Github SAST scan type for uploading these types of vulnerabilities going forward. This new scan type will accept the raw JSON response from [GitHub's REST API for code scanning alerts](https://docs.github.com/en/rest/code-scanning/code-scanning). Sample Github SAST scan data can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/github_sast). + +### Other changes - **Celery pod annotations**: Now we can add annotations to Celery beat/worker pods separately. diff --git a/docs/content/en/open_source/upgrading/2.52.md b/docs/content/en/open_source/upgrading/2.52.md new file mode 100644 index 00000000000..20eef3fb214 --- /dev/null +++ b/docs/content/en/open_source/upgrading/2.52.md @@ -0,0 +1,96 @@ +--- +title: 'Upgrading to DefectDojo Version 2.52.x' +toc_hide: true +weight: -20251006 +description: Replaced Redis with Valkey & Helm chart changes & MobSF parser merge +--- + +## Fix UI overwriting service field from parsers + +The web form in the UI by default sends an empty string, which ended up overwriting the service value provided by parsers. + +Only a few parsers do this, so the impact of this fix is low: + +- Trivy Scan +- Trivy Operator Scan +- Hydra Scan +- JFrog Xray API Summary Artifact Scan +- StackHawk HawkScan + +See [PR 13517](https://github.com/DefectDojo/django-DefectDojo/pull/13517) for more details. + +## Deduplication fix of `UNIQUE_ID_OR_HASH_CODE` +A bug was fixed in the `UNIQUE_ID_OR_HASH_CODE` algorithm where it stopped processing candidate findings with equal `unique_id_from_tool` or `hash_code` value. +Strictly speaking this is not a breaking change, but we wanted to make you aware that you can see more (better) deduplication for parsers using this algorithm.
+ +## Valkey in `docker compose` + +Since the license change at Redis the fork Valkey has become widely popular and is backed by industry giants such as AWS. AWS is advising to use Valkey over Redis and offers lower prices for Valkey compared to Redis. + +Defect Dojo 2.52 now uses Valkey as a message broker. The existing redis volume can be used by Valkey, so this is just a drop-in replacement. + +If you want to know more or have a setup where you cannot just re-use the existing volume, please visit https://valkey.io/topics/migration/. + +When you shut down Defect Dojo to perform the upgrade, the celery tasks that are in the queue are stored to disk. After the upgrade, the celery workers will process these tasks as normal. + +If you want to be 110% sure no tasks will be lost you could perform the upgrade in two steps: + +1) Stop nginx, uwsgi, celerybeat to prevent new tasks from being created: + +`docker compose down nginx uwsgi celerybeat` + +2) Observe the Redis queue and/or the logs of the celeryworker(s) and wait until all tasks are finished: + +`docker compose exec redis redis-cli llen celery` -- should output 0 +`docker compose logs celeryworker` -- should stop outputting new task logs + +3) Stop the remaining services: + +`docker compose down` + +4) Continue the upgrade as normal per the [upgrade guide](upgrading_guide) +`docker compose pull` +`docker compose up -d` + +## Helm Chart Changes + +This release introduces more important changes to the Helm chart configuration: + +### Breaking changes + +#### Tags + +`tag` and `repositoryPrefix` fields have been deprecated. Currently, image tags used in containers are derived by default from the `appVersion` defined in the Chart. +This behavior can be overridden by setting the `tag` value in `images.django` and `images.nginx`.
+If fine-tuning is necessary, each container’s image value can also be customized individually (`celery.beat.image`, `celery.worker.image`, `django.nginx.image`, `django.uwsgi.image`, `initializer.image`, and `dbMigrationChecker.image`). +Digest pinning is now supported as well. + +#### Security context + +This Helm chart extends security context capabilities to all deployed pods and containers. +You can define a default pod and container security context globally using `securityContext.podSecurityContext` and `securityContext.containerSecurityContext` keys. +Additionally, each deployment can specify its own pod and container security contexts, which will override or merge with the global ones. + +#### Fine-grained resources + +Now each container can specify the resource requests and limits. + +#### Moved values + +The following Helm chart values have been modified in this release: + +- `securityContext.djangoSecurityContext` → deprecated in favor of container-specific security contexts (`celery.beat.containerSecurityContext`, `celery.worker.containerSecurityContext`, `django.uwsgi.containerSecurityContext` and `dbMigrationChecker.containerSecurityContext`) +- `securityContext.nginxSecurityContext` → deprecated in favor of container-specific security contexts (`django.nginx.containerSecurityContext`) + +### Other changes + +- **Extra annotations**: Now we can add common annotations to all resources. + +There are other instructions for upgrading to 2.52.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.52.0) for the contents of the release. + +## Merge of MobSF parsers + +"Mobsfscan Scan" has been merged into the "MobSF Scan" parser. The "Mobsfscan Scan" scan_type has been retained to keep deduplication working for existing Tests, but users are encouraged to move to the "MobSF Scan" scan_type.
+ +## Release notes +Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.52.0) for the contents of the release. diff --git a/docs/content/en/share_your_findings/integrations.md b/docs/content/en/share_your_findings/integrations.md index ea18f545b02..e2dd663a9f4 100644 --- a/docs/content/en/share_your_findings/integrations.md +++ b/docs/content/en/share_your_findings/integrations.md @@ -9,7 +9,7 @@ Supported Integrations: - [Azure Devops](/en/share_your_findings/integrations_toolreference/#azure-devops-boards) - [GitHub](/en/share_your_findings/integrations_toolreference/#github) - [GitLab Boards](/en/share_your_findings/integrations_toolreference/#gitlab) -- ServiceNow (Coming Soon) +- [ServiceNow](/en/share_your_findings/integrations_toolreference/#servicenow) ## Opening the Integrations page diff --git a/docs/content/en/share_your_findings/integrations_toolreference.md b/docs/content/en/share_your_findings/integrations_toolreference.md index 68799f6bdca..e8c36e4b51c 100644 --- a/docs/content/en/share_your_findings/integrations_toolreference.md +++ b/docs/content/en/share_your_findings/integrations_toolreference.md @@ -1,6 +1,6 @@ --- title: "Integrators Tool Reference" -description: "Beta Feature" +description: "Detailed setup guides for Integrators" weight: 1 --- @@ -101,7 +101,7 @@ The GitLab integration allows you to add issues to a [GitLab Project](https://do ### Issue Tracker Mapping -- **Project Name**: The name of the project in GitLab that you want to send issues to +- **Project Name**: The name of the project in GitLab that you want to send issues to. ### Severity Mapping Details @@ -122,3 +122,62 @@ By default, GitLab has statuses of 'opened' and 'closed'. Additional status lab - **Closed Mapping**: `closed` - **False Positive Mapping**: `closed` - **Risk Accepted Mapping**: `closed` + +## ServiceNow + +The ServiceNow Integration allows you to push DefectDojo Findings as ServiceNow Incidents. 
+ +### Instance Setup + +Your ServiceNow instance will require you to obtain a Refresh Token, associated with the User or Service account that will push Incidents to ServiceNow. + +You'll need to start by creating an OAuth registration on your ServiceNow instance for DefectDojo: + +1. In the left-hand navigation bar, search for “Application Registry” and select it. +2. Click “New”. +3. Choose “Create an OAuth API endpoint for external clients”. +4. Fill in the required fields: + * Name: Provide a meaningful name for your application (e.g., Vulnerability Integration Client). + * (Optional) Adjust the Token Lifespan: + * Access Token Lifespan: Default is 1800 seconds (30 minutes). + * Refresh Token Lifespan: The default is 8640000 seconds (approximately 100 days). +5. Click Submit to create the application record. +6. After submission, select the application from the list and take note of the **Client ID and Client Secret** fields. + +You will then need to use this registration to obtain a Refresh Token, which can only be obtained through the ServiceNow API. Open a terminal window and paste the following (substituting the variables wrapped in `{{}}` with your user's actual information) + +``` +curl --request POST \ + --url {{INSTANCE_HOST}}/oauth_token.do \ + --header 'content-type: application/x-www-form-urlencoded' \ + --data grant_type=password \ + --data 'client_id={{CLIENT_ID}}' \ + --data 'client_secret={{CLIENT_SECRET}}' \ + --data 'username={{USERNAME}}' \ + --data 'password={{PASSWORD}}' + ``` + +If your ServiceNow credentials are correct, and allow for admin level-access to ServiceNow, you should receive a response with a RefreshToken. You'll need that token to complete integration with DefectDojo. + +- **Instance Label** should be the label that you want to use to identify this integration. +- **Location** should be set to the URL for your ServiceNow server, for example `https://your-organization.service-now.com/`. 
+- **Refresh Token** is where the Refresh Token should be entered. +- **Client ID** should be the Client ID set in the OAuth App Registration. +- **Client Secret** should be the Client Secret set in the OAuth App Registration. + +### Severity Mapping Details + +This maps to the ServiceNow Impact field. +- **Info Mapping**: `1` +- **Low Mapping**: `1` +- **Medium Mapping**: `2` +- **High Mapping**: `3` +- **Critical Mapping**: `3` + +### Status Mapping Details + +- **Status Field Name**: `State` +- **Active Mapping**: `New` +- **Closed Mapping**: `Closed` +- **False Positive Mapping**: `Resolved` +- **Risk Accepted Mapping**: `Resolved` diff --git a/docs/content/en/working_with_findings/finding_priority.md b/docs/content/en/working_with_findings/finding_priority.md index 646d4e83765..caff9c56bae 100644 --- a/docs/content/en/working_with_findings/finding_priority.md +++ b/docs/content/en/working_with_findings/finding_priority.md @@ -58,8 +58,9 @@ High. Criticality is a subjective field, so when assigning this field, consider Product compares to other Products in your organization. * **User Records** is a numerical estimation of user records in a database (or a system that can access that database). -* **Revenue** is a numerical estimation of annual revenue for the Product. It is not -possible to set a currency type in DefectDojo, so make sure that all of your Revenue +* **Revenue** is a numerical estimation of annual revenue for the Product. To calculate Priority, DefectDojo will calculate a percentage by comparing this Product's revenue to the sum of all Products within the Product Type. + +It is not possible to set a currency type in DefectDojo, so make sure that all of your Revenue estimations have the same currency denomination. (“50000” could mean $50,000 US Dollars or ¥50,000 Japanese Yen - the denomination does not matter as long as all of your Products have revenue calculated in the same currency).
@@ -85,9 +86,6 @@ Findings within a Product can have additional metadata which can further adjust * Whether the Finding is in the KEV (Known Exploited Vulnerabilities) database, which is checked by DefectDojo on a regular basis * The tool-reported Severity of a Finding (Info, Low, Medium, High, Critical) -Currently, Priority calculation and the underlying formula cannot be adjusted. These -numbers are meant as a reference only - your team’s actual priority for remediation -may vary from the DefectDojo calculation. ## Finding Risk Calculation diff --git a/docs/content/en/working_with_findings/priority_adjustments.md b/docs/content/en/working_with_findings/priority_adjustments.md new file mode 100644 index 00000000000..2ea030b61c2 --- /dev/null +++ b/docs/content/en/working_with_findings/priority_adjustments.md @@ -0,0 +1,62 @@ +--- +title: "Adjusting Priority and Risk (Pro)" +description: "Change weighting of Priority and Risk calculations" +weight: 2 +--- + +DefectDojo Pro's Priority and Risk calculations can be adjusted, allowing you to tailor DefectDojo Pro to match your internal standards for Finding Priority and Risk. + +## Prioritization Engines + +Similar to SLA configurations, Prioritization Engines allow you to set the rules governing how Priority and Risk are calculated. + +![image](images/priority_default.png) + +DefectDojo comes with a built-in Prioritization Engine, which is applied to all Products. However, you can edit this Prioritization Engine to change the weighting of **Finding** and **Product** multipliers, which will adjust how Finding Priority and Risk are assigned. + +### Finding Multipliers + +Eight contextual factors impact the Priority score of a Finding. Three of these are Finding-specific, and the other five are assigned based on the Product that holds the Finding. + +You can tune your Prioritization Engine by adjusting how these factors are applied to the final calculation. 
+ +![image](images/priority_sliders.png) + +Select a factor by clicking its button; adjusting the slider allows you to control the percentage at which a particular factor is applied. As you adjust the slider, you'll see the Risk thresholds change as a result. + +#### Finding-Level Multipliers + +* **Severity** - a Finding's Severity level +* **Exploitability** - a Finding's KEV and/or EPSS score +* **Endpoints** - the amount of Endpoints associated with a Finding + +#### Product-Level Multipliers + +* **Business Criticality** - the related Product's Business Criticality (None, Very Low, Low, Medium, High, or Very +High) +* **User Records** - the related Product's User Records count +* **Revenue** - the related Product's revenue, relative to the total revenue of the Product Type +* **External Audience** - whether or not the related Product has an external audience +* **Internet Accessible** - whether or not the related Product is internet accessible + +### Risk Thresholds + +Based on the tuning of the Priority Engine, DefectDojo will automatically recommend Risk Thresholds. However, these thresholds can be adjusted as well and set to whatever values you deem appropriate. + +![image](images/risk_threshold.png) + +## Creating New Prioritization Engines + +You can use multiple Prioritization Engines, which can each be assigned to different Products. + +![image](images/priority_engine_new.png) + +Creating a new Prioritization Engine will open the Prioritization Engine form. Once this form is submitted, a new Prioritization Engine will be added to the table. + +## Assigning Prioritization Engines to Products + +Each Product's Prioritization Engine can be assigned via the **Edit Product** form for that Product.
+ +![image](images/priority_chooseengine.png) + +Note that when a Product's Prioritization Engine is changed, or a Prioritization Engine is updated, the Product's Prioritization Engine or the Prioritization Engine itself will be "Locked" until the prioritization calculation has completed. \ No newline at end of file diff --git a/docs/package-lock.json b/docs/package-lock.json index f2025f915ba..26c62b5a377 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -9,18 +9,18 @@ "version": "1.8.0", "license": "MIT", "dependencies": { - "@docsearch/css": "^4.2.0", - "@docsearch/js": "^4.2.0", - "@tabler/icons": "^3.34.1", - "@thulite/doks-core": "^1.8.3", - "@thulite/images": "^3.3.1", - "@thulite/inline-svg": "^1.2.0", - "@thulite/seo": "^2.4.1", - "thulite": "^2.6.3" + "@docsearch/css": "4.2.0", + "@docsearch/js": "4.2.0", + "@tabler/icons": "3.35.0", + "@thulite/doks-core": "1.8.3", + "@thulite/images": "3.3.3", + "@thulite/inline-svg": "1.2.1", + "@thulite/seo": "2.4.2", + "thulite": "2.6.3" }, "devDependencies": { - "prettier": "^3.6.2", - "vite": "^7.0.6" + "prettier": "3.6.2", + "vite": "7.1.11" }, "engines": { "node": ">=20.11.0" @@ -2120,7 +2120,6 @@ "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", "license": "MIT", - "peer": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/popperjs" @@ -2727,7 +2726,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "baseline-browser-mapping": "^2.8.19", "caniuse-lite": "^1.0.30001751", @@ -3799,7 +3797,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -4456,7 +4453,6 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", - 
"peer": true, "engines": { "node": ">=12" }, @@ -4660,7 +4656,6 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, diff --git a/docs/package.json b/docs/package.json index 6bbc6290c89..69785ab15ee 100644 --- a/docs/package.json +++ b/docs/package.json @@ -16,18 +16,18 @@ "preview": "vite preview --outDir public" }, "dependencies": { - "@docsearch/css": "^4.2.0", - "@docsearch/js": "^4.2.0", - "@tabler/icons": "^3.34.1", - "@thulite/doks-core": "^1.8.3", - "@thulite/images": "^3.3.1", - "@thulite/inline-svg": "^1.2.0", - "@thulite/seo": "^2.4.1", - "thulite": "^2.6.3" + "@docsearch/css": "4.2.0", + "@docsearch/js": "4.2.0", + "@tabler/icons": "3.35.0", + "@thulite/doks-core": "1.8.3", + "@thulite/images": "3.3.3", + "@thulite/inline-svg": "1.2.1", + "@thulite/seo": "2.4.2", + "thulite": "2.6.3" }, "devDependencies": { - "prettier": "^3.6.2", - "vite": "^7.0.6" + "prettier": "3.6.2", + "vite": "7.1.11" }, "engines": { "node": ">=20.11.0" diff --git a/dojo/__init__.py b/dojo/__init__.py index 0347bb9b284..784b90d2773 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa: F401 -__version__ = "2.51.3" +__version__ = "2.52.0" __url__ = "https://github.com/DefectDojo/django-DefectDojo" __docs__ = "https://documentation.defectdojo.com" diff --git a/dojo/api_v2/prefetch/schema.py b/dojo/api_v2/prefetch/schema.py index 21791f8daab..86078a86317 100644 --- a/dojo/api_v2/prefetch/schema.py +++ b/dojo/api_v2/prefetch/schema.py @@ -1,5 +1,5 @@ from .prefetcher import _Prefetcher -from .utils import _get_prefetchable_fields +from .utils import get_prefetchable_fields def _get_path_to_GET_serializer_map(generator): @@ -53,7 +53,7 @@ def prefetch_postprocessing_hook(result, generator, request, public): if parameter["name"] == "prefetch": prefetcher = _Prefetcher() - fields = _get_prefetchable_fields( + fields = get_prefetchable_fields( serializer_classes[path](), ) diff --git a/dojo/api_v2/prefetch/utils.py b/dojo/api_v2/prefetch/utils.py index eefb1b642ec..2c2546f9e03 100644 --- a/dojo/api_v2/prefetch/utils.py +++ b/dojo/api_v2/prefetch/utils.py @@ -33,7 +33,7 @@ def _is_one_to_one_relation(field): return isinstance(field, related.ForwardManyToOneDescriptor) -def _get_prefetchable_fields(serializer): +def get_prefetchable_fields(serializer): """ Get the fields that are prefetchable according to the serializer description. Method mainly used by for automatic schema generation. 
diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 5de0698edee..806a8a1453a 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -1692,6 +1692,9 @@ class FindingSerializer(serializers.ModelSerializer): many=True, read_only=True, source="risk_acceptance_set", ) push_to_jira = serializers.BooleanField(default=False) + found_by = serializers.PrimaryKeyRelatedField( + queryset=Test_Type.objects.all(), many=True, + ) age = serializers.IntegerField(read_only=True) sla_days_remaining = serializers.IntegerField(read_only=True, allow_null=True) finding_meta = FindingMetaSerializer(read_only=True, many=True) @@ -1774,6 +1777,16 @@ def update(self, instance, validated_data): if parsed_vulnerability_ids: save_vulnerability_ids(instance, parsed_vulnerability_ids) + # Get found_by from validated_data + found_by = validated_data.pop("found_by", None) + # Handle updates to found_by data + if found_by: + instance.found_by.set(found_by) + # If there is no argument entered for found_by, the user would like to clear out the values on the Finding's found_by field + # Findings still maintain original found_by value associated with their test + # In the event the user does not supply the found_by field at all, we do not modify it + elif isinstance(found_by, list) and len(found_by) == 0: + instance.found_by.clear() instance = super().update( instance, validated_data, ) diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index 126ac2dee56..2b2c7a36d2e 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -399,7 +399,8 @@ def get_queryset(self): # @extend_schema_view(**schema_with_prefetch()) # Nested models with prefetch make the response schema too long for Swagger UI class EngagementViewSet( - PrefetchDojoModelViewSet, + # PrefetchDojoModelViewSet, + DojoModelViewSet, ra_api.AcceptedRisksMixin, ): serializer_class = serializers.EngagementSerializer @@ -933,6 +934,8 @@ def close(self, request, pk=None): context={"request": 
request}, ) if finding_close.is_valid(): + # Remove the prefetched tags to avoid issues with delegating to celery + finding.tags._remove_prefetched_objects() # Use shared helper to perform close operations finding_helper.close_finding( finding=finding, diff --git a/dojo/apps.py b/dojo/apps.py index f47eb5184f2..f1b2769f760 100644 --- a/dojo/apps.py +++ b/dojo/apps.py @@ -72,21 +72,21 @@ def ready(self): # Load any signals here that will be ready for runtime # Importing the signals file is good enough if using the reciever decorator - import dojo.announcement.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.benchmark.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.cred.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.endpoint.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.engagement.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.file_uploads.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.finding_group.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.notes.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.product.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.product_type.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.risk_acceptance.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.sla_config.helpers # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.tags_signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.test.signals # noqa: PLC0415 raised: AppRegistryNotReady - import dojo.tool_product.signals # noqa: F401,PLC0415 raised: AppRegistryNotReady + import dojo.announcement.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.benchmark.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.cred.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.endpoint.signals # noqa: PLC0415, F401 raised: 
AppRegistryNotReady + import dojo.engagement.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.file_uploads.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.finding_group.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.notes.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.product.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.product_type.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.risk_acceptance.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.sla_config.helpers # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.tags_signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.test.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady + import dojo.tool_product.signals # noqa: PLC0415, F401 raised: AppRegistryNotReady # Configure audit system after all models are loaded # This must be done in ready() to avoid "Models aren't loaded yet" errors diff --git a/dojo/db_migrations/0245_alter_jira_instance_accepted_mapping_resolution.py b/dojo/db_migrations/0245_alter_jira_instance_accepted_mapping_resolution.py new file mode 100644 index 00000000000..3596368327f --- /dev/null +++ b/dojo/db_migrations/0245_alter_jira_instance_accepted_mapping_resolution.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.13 on 2025-10-21 10:25 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0244_pghistory_indices'), + ] + + operations = [ + migrations.AlterField( + model_name='jira_instance', + name='accepted_mapping_resolution', + field=models.CharField(blank=True, help_text='JIRA issues that are closed in JIRA with one of these resolutions will result in the Finding becoming Risk Accepted in Defect Dojo. 
JIRA issues that are closed in JIRA with one of these resolutions will result in the Finding becoming Risk Accepted in Defect Dojo. The expiration time for this Risk Acceptance will be determined by the "Risk acceptance form default days" in "System Settings". This mapping is not used when Findings are pushed to JIRA. In that case the Risk Accepted Findings are closed in JIRA and JIRA sets the default resolution.', max_length=300, null=True, verbose_name='Risk Accepted resolution mapping'), + ), + ] diff --git a/dojo/db_migrations/0246_endpoint_idx_ep_product_lower_host_and_more.py b/dojo/db_migrations/0246_endpoint_idx_ep_product_lower_host_and_more.py new file mode 100644 index 00000000000..70ae2bd5fe1 --- /dev/null +++ b/dojo/db_migrations/0246_endpoint_idx_ep_product_lower_host_and_more.py @@ -0,0 +1,26 @@ +# Generated by Django 5.1.13 on 2025-10-23 22:01 + +import django.db.models.functions.text +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0245_alter_jira_instance_accepted_mapping_resolution'), + ] + + operations = [ + migrations.AddIndex( + model_name='endpoint', + index=models.Index(models.F('product'), django.db.models.functions.text.Lower('host'), name='idx_ep_product_lower_host'), + ), + migrations.AddIndex( + model_name='endpoint_status', + index=models.Index(condition=models.Q(('false_positive', False), ('mitigated', False), ('out_of_scope', False), ('risk_accepted', False)), fields=['endpoint'], name='idx_eps_active_by_endpoint'), + ), + migrations.AddIndex( + model_name='endpoint_status', + index=models.Index(condition=models.Q(('false_positive', False), ('mitigated', False), ('out_of_scope', False), ('risk_accepted', False)), fields=['finding'], name='idx_eps_active_by_finding'), + ), + ] diff --git a/dojo/decorators.py b/dojo/decorators.py index b7b84d59430..bba9efe234c 100644 --- a/dojo/decorators.py +++ b/dojo/decorators.py @@ -222,7 +222,7 @@ def _wrapped(request, *args, 
**kw): if username: dojo_user = Dojo_User.objects.filter(username=username).first() if dojo_user: - Dojo_User.enable_force_password_reset(dojo_user) + dojo_user.enable_force_password_reset() raise Ratelimited return fn(request, *args, **kw) return _wrapped diff --git a/dojo/endpoint/utils.py b/dojo/endpoint/utils.py index 10646ba265c..75f81e60827 100644 --- a/dojo/endpoint/utils.py +++ b/dojo/endpoint/utils.py @@ -6,11 +6,10 @@ from django.contrib import messages from django.core.exceptions import ValidationError from django.core.validators import validate_ipv46_address -from django.db import transaction from django.db.models import Count, Q from django.http import HttpResponseRedirect from django.urls import reverse -from hyperlink._url import SCHEME_PORT_MAP +from hyperlink._url import SCHEME_PORT_MAP # noqa: PLC2701 from dojo.models import DojoMeta, Endpoint @@ -55,21 +54,27 @@ def endpoint_filter(**kwargs): def endpoint_get_or_create(**kwargs): - with transaction.atomic(): - qs = endpoint_filter(**kwargs) - count = qs.count() - if count == 0: - return Endpoint.objects.get_or_create(**kwargs) - if count == 1: - return qs.order_by("id").first(), False - logger.warning( - f"Endpoints in your database are broken. " - f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", - ) - # Get the oldest endpoint first, and return that instead - # a datetime is not captured on the endpoint model, so ID - # will have to work here instead - return qs.order_by("id").first(), False + # This code looks a bit ugly/complicated. + # But this method is called so frequently that we need to optimize it. + # It executes at most one SELECT and one optional INSERT. + qs = endpoint_filter(**kwargs) + # Fetch up to two matches in a single round-trip. This covers + # the common cases efficiently: zero (create) or one (reuse). 
+ matches = list(qs.order_by("id")[:2]) + if not matches: + # Most common case: nothing exists yet + return Endpoint.objects.create(**kwargs), True + if len(matches) == 1: + # Common case: exactly one existing endpoint + return matches[0], False + logger.warning( + f"Endpoints in your database are broken. " + f"Please access {reverse('endpoint_migrate')} and migrate them to new format or remove them.", + ) + # Get the oldest endpoint first, and return that instead + # a datetime is not captured on the endpoint model, so ID + # will have to work here instead + return matches[0], False def clean_hosts_run(apps, change): diff --git a/dojo/engagement/urls.py b/dojo/engagement/urls.py index c70bb56a95e..0f33c3aa697 100644 --- a/dojo/engagement/urls.py +++ b/dojo/engagement/urls.py @@ -30,6 +30,8 @@ name="close_engagement"), re_path(r"^engagement/(?P\d+)/reopen$", views.reopen_eng, name="reopen_engagement"), + re_path(r"^engagement/(?P\d+)/jira/unlink$", views.unlink_jira, + name="engagement_unlink_jira"), re_path(r"^engagement/(?P\d+)/complete_checklist$", views.complete_checklist, name="complete_checklist"), re_path(r"^engagement/(?P\d+)/risk_acceptance/add$", diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index 7ae3e758ead..b45b417e39c 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -19,13 +19,14 @@ from django.db.models import OuterRef, Q, Value from django.db.models.functions import Coalesce from django.db.models.query import Prefetch, QuerySet -from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, QueryDict, StreamingHttpResponse +from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict, StreamingHttpResponse from django.shortcuts import get_object_or_404, render from django.urls import Resolver404, reverse from django.utils import timezone from django.utils.translation import gettext as _ from django.views import View from django.views.decorators.cache import 
cache_page +from django.views.decorators.http import require_POST from django.views.decorators.vary import vary_on_cookie from openpyxl import Workbook from openpyxl.styles import Font @@ -961,20 +962,20 @@ def process_form( "active": None, "verified": None, "scan_type": request.POST.get("scan_type"), - "test_title": form.cleaned_data.get("test_title"), + "test_title": form.cleaned_data.get("test_title") or None, "tags": form.cleaned_data.get("tags"), - "version": form.cleaned_data.get("version"), - "branch_tag": form.cleaned_data.get("branch_tag", None), - "build_id": form.cleaned_data.get("build_id", None), - "commit_hash": form.cleaned_data.get("commit_hash", None), - "api_scan_configuration": form.cleaned_data.get("api_scan_configuration", None), - "service": form.cleaned_data.get("service", None), + "version": form.cleaned_data.get("version") or None, + "branch_tag": form.cleaned_data.get("branch_tag") or None, + "build_id": form.cleaned_data.get("build_id") or None, + "commit_hash": form.cleaned_data.get("commit_hash") or None, + "api_scan_configuration": form.cleaned_data.get("api_scan_configuration") or None, + "service": form.cleaned_data.get("service") or None, "close_old_findings": form.cleaned_data.get("close_old_findings", None), "apply_tags_to_findings": form.cleaned_data.get("apply_tags_to_findings", False), "apply_tags_to_endpoints": form.cleaned_data.get("apply_tags_to_endpoints", False), "close_old_findings_product_scope": form.cleaned_data.get("close_old_findings_product_scope", None), - "group_by": form.cleaned_data.get("group_by", None), - "create_finding_groups_for_all_findings": form.cleaned_data.get("create_finding_groups_for_all_findings"), + "group_by": form.cleaned_data.get("group_by") or None, + "create_finding_groups_for_all_findings": form.cleaned_data.get("create_finding_groups_for_all_findings", None), "environment": self.get_development_environment(environment_name=form.cleaned_data.get("environment")), }) # Create the engagement if 
necessary @@ -1134,6 +1135,40 @@ def close_eng(request, eid): return HttpResponseRedirect(reverse("view_engagements", args=(eng.product.id, ))) +@user_is_authorized(Engagement, Permissions.Engagement_Edit, "eid") +@require_POST +def unlink_jira(request, eid): + eng = get_object_or_404(Engagement, id=eid) + logger.info("trying to unlink a linked jira epic from engagement %d:%s", eng.id, eng.name) + if eng.has_jira_issue: + try: + jira_helper.unlink_jira(request, eng) + messages.add_message( + request, + messages.SUCCESS, + "Link to JIRA epic successfully deleted", + extra_tags="alert-success", + ) + return JsonResponse({"result": "OK"}) + except Exception: + logger.exception("Link to JIRA epic could not be deleted") + messages.add_message( + request, + messages.ERROR, + "Link to JIRA epic could not be deleted, see alerts for details", + extra_tags="alert-danger", + ) + return HttpResponse(status=500) + else: + messages.add_message( + request, + messages.ERROR, + "Link to JIRA epic not found", + extra_tags="alert-danger", + ) + return HttpResponse(status=400) + + @user_is_authorized(Engagement, Permissions.Engagement_Edit, "eid") def reopen_eng(request, eid): eng = Engagement.objects.get(id=eid) diff --git a/dojo/importers/auto_create_context.py b/dojo/importers/auto_create_context.py index bf4d16cee92..26d37ae65b0 100644 --- a/dojo/importers/auto_create_context.py +++ b/dojo/importers/auto_create_context.py @@ -318,10 +318,16 @@ def get_or_create_engagement( target_end = (timezone.now() + timedelta(days=365)).date() # Create the engagement with transaction.atomic(): - return Engagement.objects.select_for_update().create( + # Lock the parent product row to serialize engagement creation per product + locked_product = Product.objects.select_for_update().get(pk=product.pk) + # Re-check for an existing engagement now that we hold the lock + existing = get_last_object_or_none(Engagement, product=locked_product, name=engagement_name) + if existing: + return existing + 
return Engagement.objects.create( engagement_type="CI/CD", name=engagement_name, - product=product, + product=locked_product, lead=get_current_user(), target_start=target_start, target_end=target_end, diff --git a/dojo/importers/base_importer.py b/dojo/importers/base_importer.py index f6d754ba929..212c976dc33 100644 --- a/dojo/importers/base_importer.py +++ b/dojo/importers/base_importer.py @@ -49,6 +49,7 @@ class Parser: and is purely for the sake of type hinting """ + @staticmethod def get_findings(scan_type: str, test: Test) -> list[Finding]: """ Stub function to make the hinting happier. The actual class diff --git a/dojo/importers/default_importer.py b/dojo/importers/default_importer.py index d127ed33f6a..726e55717eb 100644 --- a/dojo/importers/default_importer.py +++ b/dojo/importers/default_importer.py @@ -108,7 +108,7 @@ def process_scan( parser = self.get_parser() # Get the findings from the parser based on what methods the parser supplies # This could either mean traditional file parsing, or API pull parsing - parsed_findings = self.parse_findings(scan, parser) + parsed_findings = self.parse_findings(scan, parser) or [] # process the findings in the foreground or background new_findings = self.determine_process_method(parsed_findings, **kwargs) # Close any old findings in the processed list if the the user specified for that diff --git a/dojo/importers/default_reimporter.py b/dojo/importers/default_reimporter.py index 7adb2c65c48..a1625a85f33 100644 --- a/dojo/importers/default_reimporter.py +++ b/dojo/importers/default_reimporter.py @@ -93,7 +93,7 @@ def process_scan( parser = self.get_parser() # Get the findings from the parser based on what methods the parser supplies # This could either mean traditional file parsing, or API pull parsing - parsed_findings = self.parse_findings(scan, parser) + parsed_findings = self.parse_findings(scan, parser) or [] # process the findings in the foreground or background ( new_findings, @@ -170,7 +170,11 @@ def 
process_findings( # we need to make sure there are no side effects such as closing findings # for findings with a different service value # https://github.com/DefectDojo/django-DefectDojo/issues/12754 - original_findings = self.test.finding_set.all().filter(service=self.service) + if self.service is not None: + original_findings = self.test.finding_set.all().filter(service=self.service) + else: + original_findings = self.test.finding_set.all().filter(Q(service__isnull=True) | Q(service__exact="")) + logger.debug(f"original_findings_qyer: {original_findings.query}") self.original_items = list(original_findings) logger.debug(f"original_items: {[(item.id, item.hash_code) for item in self.original_items]}") diff --git a/dojo/importers/endpoint_manager.py b/dojo/importers/endpoint_manager.py index f733d5c9e5a..ccfff345c40 100644 --- a/dojo/importers/endpoint_manager.py +++ b/dojo/importers/endpoint_manager.py @@ -31,6 +31,7 @@ def add_endpoints_to_unsaved_finding( self.clean_unsaved_endpoints(endpoints) for endpoint in endpoints: ep = None + eps = [] try: ep, _ = endpoint_get_or_create( protocol=endpoint.protocol, @@ -41,6 +42,7 @@ def add_endpoints_to_unsaved_finding( query=endpoint.query, fragment=endpoint.fragment, product=finding.test.engagement.product) + eps.append(ep) except (MultipleObjectsReturned): msg = ( f"Endpoints in your database are broken. 
" @@ -48,10 +50,12 @@ def add_endpoints_to_unsaved_finding( ) raise Exception(msg) - Endpoint_Status.objects.get_or_create( - finding=finding, - endpoint=ep, - defaults={"date": finding.date}) + # bulk_create will translate to INSERT WITH IGNORE CONFLICTS + # much faster than get_or_create which issues two queries per endpoint + # bulk_create will not trigger endpoint_status.save and signals which is fine for now + rows = [Endpoint_Status(finding=finding, endpoint=e, date=finding.date) for e in eps] + Endpoint_Status.objects.bulk_create(rows, ignore_conflicts=True, batch_size=1000) + logger.debug(f"IMPORT_SCAN: {len(endpoints)} endpoints imported") @dojo_async_task diff --git a/dojo/importers/options.py b/dojo/importers/options.py index b83a8b8597c..3b7c624235d 100644 --- a/dojo/importers/options.py +++ b/dojo/importers/options.py @@ -96,6 +96,7 @@ def log_translation( for field in self.field_names: logger.debug(f"{field}: {getattr(self, field)}") + @staticmethod def _compress_decorator(function): @wraps(function) def inner_compress_function(*args, **kwargs): @@ -103,6 +104,7 @@ def inner_compress_function(*args, **kwargs): return function(*args, **kwargs) return inner_compress_function + @staticmethod def _decompress_decorator(function): @wraps(function) def inner_decompress_function(*args, **kwargs): diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 9dbbd6deeee..bf2b0101fed 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -6,6 +6,7 @@ from typing import Any import requests +from dateutil.relativedelta import relativedelta from django.conf import settings from django.contrib import messages from django.template import TemplateDoesNotExist @@ -1802,9 +1803,14 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign if finding.test.engagement.product.enable_full_risk_acceptance: logger.debug(f"Creating risk acceptance for finding linked to {jira_issue.jira_key}.") + # loads the expiration from 
the system setting "Risk acceptance form default days" as otherwise + # the acceptance will never expire + risk_acceptance_form_default_days = get_system_setting("risk_acceptance_form_default_days", 90) + expiration_date_from_system_settings = timezone.now() + relativedelta(days=risk_acceptance_form_default_days) ra = Risk_Acceptance.objects.create( accepted_by=assignee_name, owner=finding.reporter, + expiration_date=expiration_date_from_system_settings, decision_details=f"Risk Acceptance automatically created from JIRA issue {jira_issue.jira_key} with resolution {resolution_name}", ) finding.test.engagement.risk_acceptance.add(ra) diff --git a/dojo/middleware.py b/dojo/middleware.py index 5d63b1a35a0..5b50f3cc987 100644 --- a/dojo/middleware.py +++ b/dojo/middleware.py @@ -6,13 +6,18 @@ from urllib.parse import quote import pghistory.middleware +import requests from auditlog.context import set_actor from auditlog.middleware import AuditlogMiddleware as _AuditlogMiddleware from django.conf import settings +from django.contrib import messages from django.db import models from django.http import HttpResponseRedirect +from django.shortcuts import redirect from django.urls import reverse from django.utils.functional import SimpleLazyObject +from social_core.exceptions import AuthCanceled, AuthFailed, AuthForbidden +from social_django.middleware import SocialAuthExceptionMiddleware from watson.middleware import SearchContextMiddleware from watson.search import search_context_manager @@ -75,6 +80,28 @@ def __call__(self, request): return self.get_response(request) +class CustomSocialAuthExceptionMiddleware(SocialAuthExceptionMiddleware): + def process_exception(self, request, exception): + if isinstance(exception, requests.exceptions.RequestException): + messages.error(request, "Please use the standard login below.") + return redirect("/login?force_login_form") + if isinstance(exception, AuthCanceled): + messages.warning(request, "Social login was canceled. 
Please try again or use the standard login.") + return redirect("/login?force_login_form") + if isinstance(exception, AuthFailed): + messages.error(request, "Social login failed. Please try again or use the standard login.") + return redirect("/login?force_login_form") + if isinstance(exception, AuthForbidden): + messages.error(request, "You are not authorized to log in via this method. Please contact support or use the standard login.") + return redirect("/login?force_login_form") + if isinstance(exception, TypeError) and "'NoneType' object is not iterable" in str(exception): + logger.warning("OIDC login error: NoneType is not iterable") + messages.error(request, "An unexpected error occurred during social login. Please use the standard login.") + return redirect("/login?force_login_form") + logger.error(f"Unhandled exception during social login: {exception}") + return super().process_exception(request, exception) + + class DojoSytemSettingsMiddleware: _thread_local = local() diff --git a/dojo/models.py b/dojo/models.py index 8eb0f45f719..396e851f9b4 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -25,7 +25,7 @@ from django.core.files.base import ContentFile from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator, validate_ipv46_address from django.db import connection, models -from django.db.models import Count, JSONField, Q +from django.db.models import Count, F, JSONField, Q from django.db.models.expressions import Case, When from django.db.models.functions import Lower from django.urls import reverse @@ -129,7 +129,7 @@ def _manage_inherited_tags(obj, incoming_inherited_tags, potentially_existing_ta obj.tags.set(cleaned_tag_list) -def _copy_model_util(model_in_database, exclude_fields: list[str] | None = None): +def copy_model_util(model_in_database, exclude_fields: list[str] | None = None): if exclude_fields is None: exclude_fields = [] new_model_instance = model_in_database.__class__() @@ -231,15 +231,15 @@ def 
wants_block_execution(user): def force_password_reset(user): return hasattr(user, "usercontactinfo") and user.usercontactinfo.force_password_reset - def disable_force_password_reset(user): - if hasattr(user, "usercontactinfo"): - user.usercontactinfo.force_password_reset = False - user.usercontactinfo.save() + def disable_force_password_reset(self): + if hasattr(self, "usercontactinfo"): + self.usercontactinfo.force_password_reset = False + self.usercontactinfo.save() - def enable_force_password_reset(user): - if hasattr(user, "usercontactinfo"): - user.usercontactinfo.force_password_reset = True - user.usercontactinfo.save() + def enable_force_password_reset(self): + if hasattr(self, "usercontactinfo"): + self.usercontactinfo.force_password_reset = True + self.usercontactinfo.save() @staticmethod def generate_full_name(user): @@ -750,7 +750,7 @@ class NoteHistory(models.Model): current_editor = models.ForeignKey(Dojo_User, editable=False, null=True, on_delete=models.CASCADE) def copy(self): - copy = _copy_model_util(self) + copy = copy_model_util(self) copy.save() return copy @@ -776,7 +776,7 @@ def __str__(self): return self.entry def copy(self): - copy = _copy_model_util(self) + copy = copy_model_util(self) # Save the necessary ManyToMany relationships old_history = list(self.history.all()) # Save the object before setting any ManyToMany relationships @@ -801,7 +801,7 @@ def delete(self, *args, **kwargs): storage.delete(path) def copy(self): - copy = _copy_model_util(self) + copy = copy_model_util(self) # Add unique modifier to file name copy.title = f"{self.title} - clone-{str(uuid4())[:8]}" # Create new unique file name @@ -1581,7 +1581,7 @@ def get_absolute_url(self): return reverse("view_engagement", args=[str(self.id)]) def copy(self): - copy = _copy_model_util(self) + copy = copy_model_util(self) # Save the necessary ManyToMany relationships old_notes = list(self.notes.all()) old_files = list(self.files.all()) @@ -1690,6 +1690,17 @@ class Meta: indexes = [ 
models.Index(fields=["finding", "mitigated"]), models.Index(fields=["endpoint", "mitigated"]), + # Optimize frequent lookups of "active" statuses (mitigated/flags all False) + models.Index( + name="idx_eps_active_by_endpoint", + fields=["endpoint"], + condition=Q(mitigated=False, false_positive=False, out_of_scope=False, risk_accepted=False), + ), + models.Index( + name="idx_eps_active_by_finding", + fields=["finding"], + condition=Q(mitigated=False, false_positive=False, out_of_scope=False, risk_accepted=False), + ), ] constraints = [ models.UniqueConstraint(fields=["finding", "endpoint"], name="endpoint-finding relation"), @@ -1699,7 +1710,7 @@ def __str__(self): return f"'{self.finding}' on '{self.endpoint}'" def copy(self, finding=None): - copy = _copy_model_util(self) + copy = copy_model_util(self) current_endpoint = self.endpoint if finding: copy.finding = finding @@ -1749,6 +1760,12 @@ class Meta: ordering = ["product", "host", "protocol", "port", "userinfo", "path", "query", "fragment"] indexes = [ models.Index(fields=["product"]), + # Fast case-insensitive equality on host within product scope + models.Index( + F("product"), + Lower("host"), + name="idx_ep_product_lower_host", + ), ] def __hash__(self): @@ -2161,7 +2178,7 @@ def get_breadcrumbs(self): return bc def copy(self, engagement=None): - copy = _copy_model_util(self) + copy = copy_model_util(self) # Save the necessary ManyToMany relationships old_notes = list(self.notes.all()) old_files = list(self.files.all()) @@ -2829,7 +2846,7 @@ def get_absolute_url(self): return reverse("view_finding", args=[str(self.id)]) def copy(self, test=None): - copy = _copy_model_util(self) + copy = copy_model_util(self) # Save the necessary ManyToMany relationships old_notes = list(self.notes.all()) old_files = list(self.files.all()) @@ -3010,6 +3027,7 @@ def hash_fields(self, fields_to_hash): if hasattr(settings, "HASH_CODE_FIELDS_ALWAYS"): for field in settings.HASH_CODE_FIELDS_ALWAYS: if getattr(self, field): + 
deduplicationLogger.debug("adding HASH_CODE_FIELDS_ALWAYSfield %s to hash_fields: %s", field, getattr(self, field)) fields_to_hash += str(getattr(self, field)) logger.debug("fields_to_hash : %s", fields_to_hash) @@ -3813,7 +3831,7 @@ def engagement(self): return None def copy(self, engagement=None): - copy = _copy_model_util(self) + copy = copy_model_util(self) # Save the necessary ManyToMany relationships old_notes = list(self.notes.all()) old_accepted_findings_hash_codes = [finding.hash_code for finding in self.accepted_findings.all()] @@ -3962,7 +3980,7 @@ class JIRA_Instance(models.Model): high_mapping_severity = models.CharField(max_length=200, help_text=_("Maps to the 'Priority' field in Jira. For example: High")) critical_mapping_severity = models.CharField(max_length=200, help_text=_("Maps to the 'Priority' field in Jira. For example: Critical")) finding_text = models.TextField(null=True, blank=True, help_text=_("Additional text that will be added to the finding in Jira. For example including how the finding was created or who to contact for more information.")) - accepted_mapping_resolution = models.CharField(null=True, blank=True, max_length=300, verbose_name="Risk Accepted resolution mapping", help_text=_("JIRA issues that are closed in JIRA with one of these resolutions will result in the Finding becoming Risk Accepted in Defect Dojo. This Risk Acceptance will not have an expiration date. This mapping is not used when Findings are pushed to JIRA. In that case the Risk Accepted Findings are closed in JIRA and JIRA sets the default resolution.")) + accepted_mapping_resolution = models.CharField(null=True, blank=True, max_length=300, verbose_name="Risk Accepted resolution mapping", help_text=_('JIRA issues that are closed in JIRA with one of these resolutions will result in the Finding becoming Risk Accepted in Defect Dojo. JIRA issues that are closed in JIRA with one of these resolutions will result in the Finding becoming Risk Accepted in Defect Dojo. 
The expiration time for this Risk Acceptance will be determined by the "Risk acceptance form default days" in "System Settings". This mapping is not used when Findings are pushed to JIRA. In that case the Risk Accepted Findings are closed in JIRA and JIRA sets the default resolution.')) false_positive_mapping_resolution = models.CharField(null=True, blank=True, verbose_name="False Positive resolution mapping", max_length=300, help_text=_("JIRA issues that are closed in JIRA with one of these resolutions will result in the Finding being marked as False Positive Defect Dojo. This mapping is not used when Findings are pushed to JIRA. In that case the Finding is closed in JIRA and JIRA sets the default resolution.")) global_jira_sla_notification = models.BooleanField(default=True, blank=False, verbose_name=_("Globally send SLA notifications as comment?"), help_text=_("This setting can be overidden at the Product level")) finding_jira_sync = models.BooleanField(default=False, blank=False, verbose_name=_("Automatically sync Findings with JIRA?"), help_text=_("If enabled, this will sync changes to a Finding automatically to JIRA")) diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py index f59060331d1..c4458daec01 100644 --- a/dojo/notifications/helper.py +++ b/dojo/notifications/helper.py @@ -627,6 +627,10 @@ def __init__(self, *args: list, **kwargs: dict) -> None: def create_notification(self, event: str | None = None, **kwargs: dict) -> None: # Process the notifications for a given list of recipients if kwargs.get("recipients") is not None: + recipients = kwargs.get("recipients", []) + if not recipients: + logger.debug("No recipients provided for event: %s", event) + return self._process_recipients(event=event, **kwargs) else: logger.debug("creating system notifications for event: %s", event) diff --git a/dojo/pipeline.py b/dojo/pipeline.py index 888cce0ba06..8aaea4079bb 100644 --- a/dojo/pipeline.py +++ b/dojo/pipeline.py @@ -183,5 +183,6 @@ def 
sanitize_username(username): def create_user(strategy, details, backend, user=None, *args, **kwargs): if not settings.SOCIAL_AUTH_CREATE_USER: return None - details["username"] = sanitize_username(details.get("username")) + username = details.get(settings.SOCIAL_AUTH_CREATE_USER_MAPPING) + details["username"] = sanitize_username(username) return social_core.pipeline.user.create_user(strategy, details, backend, user, args, kwargs) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 6243e44a690..97fdd706ea4 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -93,7 +93,7 @@ DD_CELERY_LOG_LEVEL=(str, "INFO"), DD_TAG_BULK_ADD_BATCH_SIZE=(int, 1000), # Minimum number of model updated instances before search index updates as performaed asynchronously. Set to -1 to disable async updates. - DD_WATSON_ASYNC_INDEX_UPDATE_THRESHOLD=(int, 100), + DD_WATSON_ASYNC_INDEX_UPDATE_THRESHOLD=(int, 10), DD_WATSON_ASYNC_INDEX_UPDATE_BATCH_SIZE=(int, 1000), DD_FOOTER_VERSION=(str, ""), # models should be passed to celery by ID, default is False (for now) @@ -113,6 +113,7 @@ DD_FORGOT_USERNAME=(bool, True), # do we show link "I forgot my username" on login screen DD_SOCIAL_AUTH_SHOW_LOGIN_FORM=(bool, True), # do we show user/pass input DD_SOCIAL_AUTH_CREATE_USER=(bool, True), # if True creates user at first login + DD_SOCIAL_AUTH_CREATE_USER_MAPPING=(str, "username"), # could also be email or fullname DD_SOCIAL_LOGIN_AUTO_REDIRECT=(bool, False), # auto-redirect if there is only one social login method DD_SOCIAL_AUTH_TRAILING_SLASH=(bool, True), DD_SOCIAL_AUTH_OIDC_AUTH_ENABLED=(bool, False), @@ -214,6 +215,8 @@ # `RemoteUser` is usually used behind AuthN proxy and users should not know about this mechanism from Swagger because it is not usable by users. # It should be hidden by default. 
DD_AUTH_REMOTEUSER_VISIBLE_IN_SWAGGER=(bool, False), + # Some security policies require allowing users to have only one active session + DD_SINGLE_USER_SESSION=(bool, False), # if somebody is using own documentation how to use DefectDojo in his own company DD_DOCUMENTATION_URL=(str, "https://documentation.defectdojo.com"), # merging findings doesn't always work well with dedupe and reimport etc. @@ -574,6 +577,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param SHOW_LOGIN_FORM = env("DD_SOCIAL_AUTH_SHOW_LOGIN_FORM") SOCIAL_LOGIN_AUTO_REDIRECT = env("DD_SOCIAL_LOGIN_AUTO_REDIRECT") SOCIAL_AUTH_CREATE_USER = env("DD_SOCIAL_AUTH_CREATE_USER") +SOCIAL_AUTH_CREATE_USER_MAPPING = env("DD_SOCIAL_AUTH_CREATE_USER_MAPPING") SOCIAL_AUTH_STRATEGY = "social_django.strategy.DjangoStrategy" SOCIAL_AUTH_STORAGE = "social_django.models.DjangoStorage" @@ -622,6 +626,8 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param SOCIAL_AUTH_OIDC_KEY = env("DD_SOCIAL_AUTH_OIDC_KEY") SOCIAL_AUTH_OIDC_SECRET = env("DD_SOCIAL_AUTH_OIDC_SECRET") # Optional settings +if value := env("DD_LOGIN_REDIRECT_URL"): + SOCIAL_AUTH_LOGIN_REDIRECT_URL = value if value := env("DD_SOCIAL_AUTH_OIDC_ID_KEY"): SOCIAL_AUTH_OIDC_ID_KEY = value if value := env("DD_SOCIAL_AUTH_OIDC_USERNAME_KEY"): @@ -919,6 +925,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "auditlog", "pgtrigger", "pghistory", + "single_session", ) # ------------------------------------------------------------------------------ @@ -936,7 +943,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "django.middleware.clickjacking.XFrameOptionsMiddleware", "dojo.middleware.LoginRequiredMiddleware", "dojo.middleware.AdditionalHeaderMiddleware", - "social_django.middleware.SocialAuthExceptionMiddleware", + "dojo.middleware.CustomSocialAuthExceptionMiddleware", "crum.CurrentRequestUserMiddleware", 
"dojo.middleware.AuditlogMiddleware", "dojo.middleware.AsyncSearchContextMiddleware", @@ -1149,6 +1156,13 @@ def saml2_attrib_map_format(din): ("dojo.remote_user.RemoteUserAuthentication",) + \ REST_FRAMEWORK["DEFAULT_AUTHENTICATION_CLASSES"] +# ------------------------------------------------------------------------------ +# SINGLE_USER_SESSION +# ------------------------------------------------------------------------------ + +SESSION_ENGINE = "django.contrib.sessions.backends.db" +SINGLE_USER_SESSION = env("DD_SINGLE_USER_SESSION") + # ------------------------------------------------------------------------------ # CELERY # ------------------------------------------------------------------------------ @@ -1325,6 +1339,7 @@ def saml2_attrib_map_format(din): "JFrog Xray On Demand Binary Scan": ["title", "component_name", "component_version"], "Scout Suite Scan": ["file_path", "vuln_id_from_tool"], # for now we use file_path as there is no attribute for "service" "Meterian Scan": ["cwe", "component_name", "component_version", "description", "severity"], + "Github SAST Scan": ["vuln_id_from_tool", "severity", "file_path", "line"], "Github Vulnerability Scan": ["title", "severity", "component_name", "vulnerability_ids", "file_path"], "Github Secrets Detection Report": ["title", "file_path", "line"], "Solar Appscreener Scan": ["title", "file_path", "line", "severity"], @@ -1357,7 +1372,7 @@ def saml2_attrib_map_format(din): "HCLAppScan XML": ["title", "description"], "HCL AppScan on Cloud SAST XML": ["title", "file_path", "line", "severity"], "KICS Scan": ["file_path", "line", "severity", "description", "title"], - "MobSF Scan": ["title", "description", "severity"], + "MobSF Scan": ["title", "description", "severity", "file_path"], "MobSF Scorecard Scan": ["title", "description", "severity"], "OSV Scan": ["title", "description", "severity"], "Snyk Code Scan": ["vuln_id_from_tool", "file_path"], @@ -1571,6 +1586,7 @@ def saml2_attrib_map_format(din): "Scout Suite 
Scan": DEDUPE_ALGO_HASH_CODE, "AWS Security Hub Scan": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, "Meterian Scan": DEDUPE_ALGO_HASH_CODE, + "Github SAST Scan": DEDUPE_ALGO_HASH_CODE, "Github Vulnerability Scan": DEDUPE_ALGO_HASH_CODE, "Github Secrets Detection Report": DEDUPE_ALGO_HASH_CODE, "Cloudsploit Scan": DEDUPE_ALGO_HASH_CODE, @@ -1881,6 +1897,7 @@ def saml2_attrib_map_format(din): "KB": "https://support.hcl-software.com/csm?id=kb_article&sysparm_article=", # e.g. https://support.hcl-software.com/csm?id=kb_article&sysparm_article=KB0108401 "KHV": "https://avd.aquasec.com/misconfig/kubernetes/", # e.g. https://avd.aquasec.com/misconfig/kubernetes/khv045 "LEN-": "https://support.lenovo.com/cl/de/product_security/", # e.g. https://support.lenovo.com/cl/de/product_security/LEN-94953 + "MAL-": "https://cvepremium.circl.lu/vuln/", # e.g. https://cvepremium.circl.lu/vuln/mal-2025-49305 "MGAA-": "https://advisories.mageia.org/&&.html", # e.g. https://advisories.mageia.org/MGAA-2013-0054.html "MGASA-": "https://advisories.mageia.org/&&.html", # e.g. https://advisories.mageia.org/MGASA-2025-0023.html "MSRC_": "https://cvepremium.circl.lu/vuln/", # e.g. https://cvepremium.circl.lu/vuln/msrc_cve-2025-59200 diff --git a/dojo/templates/dojo/finding_related_row.html b/dojo/templates/dojo/finding_related_row.html index ba5336570ab..d02e884100b 100644 --- a/dojo/templates/dojo/finding_related_row.html +++ b/dojo/templates/dojo/finding_related_row.html @@ -13,8 +13,8 @@ {% else %} Similar {% endif %} - - + + {{ similar_finding.severity_display }} diff --git a/dojo/templates/dojo/view_eng.html b/dojo/templates/dojo/view_eng.html index 728b8867f7d..ab09dadb7c5 100644 --- a/dojo/templates/dojo/view_eng.html +++ b/dojo/templates/dojo/view_eng.html @@ -826,13 +826,18 @@

{% if jissue and jira_project %} - - Jira - {{ eng | jira_key }} + + Jira + + {{ eng | jira_key }} (epic) - - - + + {% if eng|has_object_permission:"Engagement_Edit" %} +   + + {% endif %} + + {% elif jira_project %} JIRA @@ -1088,6 +1093,28 @@

var host = slashes.concat(window.location.host); modal.find('p#questionnaireURL').text('Questionnaire URL: ' + host + path) }) + + function jira_action(elem, url) { + $(elem).removeClass().addClass('fa-solid fa-spin fa-spinner') + + $.ajax({ + type: "post", + dataType:'json', + data: '', + context: this, + url: url, + beforeSend: function (jqXHR, settings) { + jqXHR.setRequestHeader('X-CSRFToken', '{{ csrf_token }}'); + }, + complete: function(e) { + location.reload() + } + }); + } + + $("#unlink_eng_jira").on('click', function(e) { + jira_action(this,'{% url 'engagement_unlink_jira' eng.id %}') + }); }); {% include 'dojo/snippets/risk_acceptance_actions_snippet_js.html' %} diff --git a/dojo/templates/dojo/view_finding.html b/dojo/templates/dojo/view_finding.html index a992a22d401..c8f79b63b25 100755 --- a/dojo/templates/dojo/view_finding.html +++ b/dojo/templates/dojo/view_finding.html @@ -538,9 +538,7 @@

- {% if finding.service %} - {% endif %} {% if finding.file_path %} {% endif %} @@ -571,13 +569,11 @@

{% endif %}

- {% if finding.service %} - {% endif %} {% if finding.file_path %}
ServiceLocation
{{ finding.service }} diff --git a/dojo/templates/notifications/alert/scan_added_empty.tpl b/dojo/templates/notifications/alert/scan_added_empty.tpl deleted file mode 120000 index 03390a2d58d..00000000000 --- a/dojo/templates/notifications/alert/scan_added_empty.tpl +++ /dev/null @@ -1 +0,0 @@ -{% include "notifications/alert/scan_added.tpl" %} \ No newline at end of file diff --git a/dojo/templates/notifications/alert/scan_added_empty.tpl b/dojo/templates/notifications/alert/scan_added_empty.tpl new file mode 100644 index 00000000000..6d749556aa2 --- /dev/null +++ b/dojo/templates/notifications/alert/scan_added_empty.tpl @@ -0,0 +1 @@ +{% include "notifications/alert/scan_added.tpl" %} diff --git a/dojo/templates/notifications/alert/user_mentioned.tpl b/dojo/templates/notifications/alert/user_mentioned.tpl index 1fc741ee2d7..9a0b35c0470 100644 --- a/dojo/templates/notifications/alert/user_mentioned.tpl +++ b/dojo/templates/notifications/alert/user_mentioned.tpl @@ -1,4 +1,4 @@ {% load i18n %}{% blocktranslate trimmed %} -User {{ user }} jotted a note on {{ section }}{% endblocktranslate %}: +User {{ requested_by }} jotted a note on {{ section }}{% endblocktranslate %}: {{ note }} \ No newline at end of file diff --git a/dojo/templates/notifications/mail/user_mentioned.tpl b/dojo/templates/notifications/mail/user_mentioned.tpl index 9601da3c9a5..d828940400d 100644 --- a/dojo/templates/notifications/mail/user_mentioned.tpl +++ b/dojo/templates/notifications/mail/user_mentioned.tpl @@ -9,7 +9,7 @@

{% blocktranslate trimmed %} - User {{ user }} jotted a note on {{ section }}:
+ User {{ requested_by }} jotted a note on {{ section }}:

{{ note }}

diff --git a/dojo/templates/notifications/msteams/user_mentioned.tpl b/dojo/templates/notifications/msteams/user_mentioned.tpl index ed8f38ee80c..aba4d11c089 100644 --- a/dojo/templates/notifications/msteams/user_mentioned.tpl +++ b/dojo/templates/notifications/msteams/user_mentioned.tpl @@ -54,7 +54,7 @@ NOTE: This template is currently NOT USED in practice because: }, { "type": "TextBlock", - "text": "{% trans 'User' %} {{ user }} {% trans 'mentioned you in' %} {{ section }}.", + "text": "{% trans 'User' %} {{ requested_by }} {% trans 'mentioned you in' %} {{ section }}.", "wrap": true, "spacing": "Medium" }, @@ -63,7 +63,7 @@ NOTE: This template is currently NOT USED in practice because: "facts": [ { "title": "{% trans 'User' %}:", - "value": "{{ user }}" + "value": "{{ requested_by }}" }, { "title": "{% trans 'Section' %}:", diff --git a/dojo/templates/notifications/slack/user_mentioned.tpl b/dojo/templates/notifications/slack/user_mentioned.tpl index aba6c9aed6a..9131de845a8 100644 --- a/dojo/templates/notifications/slack/user_mentioned.tpl +++ b/dojo/templates/notifications/slack/user_mentioned.tpl @@ -1,12 +1,12 @@ {% load i18n %}{% blocktranslate trimmed %} -User {{ user }} jotted a note on {{ section }}: +User {{ requested_by }} jotted a note on {{ section }}: {{ note }} Full details of the note can be reviewed at {{ url }} {% endblocktranslate %} {% if system_settings.disclaimer_notifications and system_settings.disclaimer_notifications.strip %} - + {% trans "Disclaimer" %}: {{ system_settings.disclaimer_notifications }} {% endif %} diff --git a/dojo/test/views.py b/dojo/test/views.py index 06301d20813..b5777f15cac 100644 --- a/dojo/test/views.py +++ b/dojo/test/views.py @@ -905,17 +905,17 @@ def process_form( "minimum_severity": form.cleaned_data.get("minimum_severity"), "do_not_reactivate": form.cleaned_data.get("do_not_reactivate"), "tags": form.cleaned_data.get("tags"), - "version": form.cleaned_data.get("version"), - "branch_tag": 
form.cleaned_data.get("branch_tag", None), - "build_id": form.cleaned_data.get("build_id", None), - "commit_hash": form.cleaned_data.get("commit_hash", None), - "api_scan_configuration": form.cleaned_data.get("api_scan_configuration", None), - "service": form.cleaned_data.get("service", None), + "version": form.cleaned_data.get("version") or None, + "branch_tag": form.cleaned_data.get("branch_tag") or None, + "build_id": form.cleaned_data.get("build_id") or None, + "commit_hash": form.cleaned_data.get("commit_hash") or None, + "api_scan_configuration": form.cleaned_data.get("api_scan_configuration") or None, + "service": form.cleaned_data.get("service") or None, "apply_tags_to_findings": form.cleaned_data.get("apply_tags_to_findings", False), "apply_tags_to_endpoints": form.cleaned_data.get("apply_tags_to_endpoints", False), - "group_by": form.cleaned_data.get("group_by", None), + "group_by": form.cleaned_data.get("group_by") or None, "close_old_findings": form.cleaned_data.get("close_old_findings", None), - "create_finding_groups_for_all_findings": form.cleaned_data.get("create_finding_groups_for_all_findings"), + "create_finding_groups_for_all_findings": form.cleaned_data.get("create_finding_groups_for_all_findings", None), }) # Override the form values of active and verified if activeChoice := form.cleaned_data.get("active", None): diff --git a/dojo/tools/cyclonedx/json_parser.py b/dojo/tools/cyclonedx/json_parser.py index a53b9dd799d..73cda102c1f 100644 --- a/dojo/tools/cyclonedx/json_parser.py +++ b/dojo/tools/cyclonedx/json_parser.py @@ -36,7 +36,10 @@ def _get_findings_json(self, file, test): # better than always 'Medium' ratings = vulnerability.get("ratings") if ratings: - severity = ratings[0]["severity"] + # Determine if we can use the severity field + # In some cases, the severity field is missing, so we can rely on either the Medium severity + # or the CVSS vector (retrieved further down below) to determine the severity: + severity = 
ratings[0].get("severity", "Medium") severity = Cyclonedxhelper().fix_severity(severity) else: severity = "Medium" diff --git a/dojo/tools/mobsfscan/__init__.py b/dojo/tools/github_sast/__init__.py similarity index 100% rename from dojo/tools/mobsfscan/__init__.py rename to dojo/tools/github_sast/__init__.py diff --git a/dojo/tools/github_sast/parser.py b/dojo/tools/github_sast/parser.py new file mode 100644 index 00000000000..4d20e71623a --- /dev/null +++ b/dojo/tools/github_sast/parser.py @@ -0,0 +1,84 @@ +import json +from urllib.parse import urlparse + +from dojo.models import Finding + + +class GithubSASTParser: + def get_scan_types(self): + return ["Github SAST Scan"] + + def get_label_for_scan_types(self, scan_type): + return scan_type + + def get_description_for_scan_types(self, scan_type): + return "GitHub SAST report file can be imported in JSON format." + + def get_findings(self, filename, test): + data = json.load(filename) + if not isinstance(data, list): + error_msg = "Invalid SAST report format, expected a JSON list of alerts." + raise TypeError(error_msg) + + findings = [] + for vuln in data: + rule = vuln.get("rule", {}) + inst = vuln.get("most_recent_instance", {}) + loc = inst.get("location", {}) + html_url = vuln.get("html_url") + rule_id = rule.get("id") + title = f"{rule.get('description')} ({rule_id})" + severity = rule.get("security_severity_level", "Info").title() + active = vuln.get("state") == "open" + + # Build description with context + desc_lines = [] + if html_url: + desc_lines.append(f"GitHub Alert: [{html_url}]({html_url})") + owner = repo = None + commit_sha = inst.get("commit_sha") + if html_url: + parsed = urlparse(html_url) + parts = parsed.path.strip("/").split("/") + # URL is ///security/... 
so parts[0]=owner, parts[1]=repo + if len(parts) >= 2: + owner, repo = parts[0], parts[1] + if owner and repo and commit_sha and loc.get("path") and loc.get("start_line"): + file_link = ( + f"{parsed.scheme}://{parsed.netloc}/" + f"{owner}/{repo}/blob/{commit_sha}/" + f"{loc['path']}#L{loc['start_line']}" + ) + desc_lines.append(f"Location: [{loc['path']}:{loc['start_line']}]({file_link})") + elif loc.get("path") and loc.get("start_line"): + # fallback if something is missing + desc_lines.append(f"Location: {loc['path']}:{loc['start_line']}") + msg = inst.get("message", {}).get("text") + if msg: + desc_lines.append(f"Message: {msg}") + if severity: + desc_lines.append(f"Rule Severity: {severity}") + if rule.get("full_description"): + desc_lines.append(f"Description: {rule.get('full_description')}") + description = "\n".join(desc_lines) + + finding = Finding( + title=title, + test=test, + description=description, + severity=severity, + active=active, + static_finding=True, + dynamic_finding=False, + vuln_id_from_tool=rule_id, + ) + + # File path & line + finding.file_path = loc.get("path") + finding.line = loc.get("start_line") + + if html_url: + finding.url = html_url + + findings.append(finding) + return findings diff --git a/dojo/tools/github_vulnerability/parser.py b/dojo/tools/github_vulnerability/parser.py index aa958934cc7..5c646086aeb 100644 --- a/dojo/tools/github_vulnerability/parser.py +++ b/dojo/tools/github_vulnerability/parser.py @@ -18,114 +18,128 @@ def get_description_for_scan_types(self, scan_type): def get_findings(self, filename, test): data = json.load(filename) - if "data" in data: - vulnerabilityAlerts = self._search_vulnerability_alerts(data["data"]) - if not vulnerabilityAlerts: - msg = "Invalid report, no 'vulnerabilityAlerts' node found" - raise ValueError(msg) - repository_url = None - if "repository" in data["data"]: - if "nameWithOwner" in data["data"]["repository"]: - repository_url = "https://github.com/{}".format( - 
data["data"]["repository"]["nameWithOwner"], - ) - if "url" in data["data"]["repository"]: - repository_url = data["data"]["repository"]["url"] + + if isinstance(data, dict): + if "data" not in data: + error_msg = ( + "Invalid report format, expected a GitHub RepositoryVulnerabilityAlert GraphQL query response." + ) + raise ValueError(error_msg) + + alerts = self._search_vulnerability_alerts(data.get("data")) + if not alerts: + error_msg = "Invalid report, no 'vulnerabilityAlerts' node found" + raise ValueError(error_msg) + + repo = data.get("data").get("repository", {}) + repo_url = repo.get("url") + dupes = {} - for alert in vulnerabilityAlerts["nodes"]: - description = alert["securityVulnerability"]["advisory"][ - "description" - ] - if "number" in alert and repository_url is not None: - dependabot_url = ( - repository_url - + "/security/dependabot/{}".format(alert["number"]) - ) - description = ( - f"[{dependabot_url}]({dependabot_url})\n" - + description - ) + for alert in alerts.get("nodes", []): + vuln = alert.get("securityVulnerability", {}) + advisory = vuln.get("advisory", {}) + summary = advisory.get("summary", "") + desc = advisory.get("description", "") + + pr_link = None + dependabot_update = alert.get("dependabotUpdate", {}) + if dependabot_update: + pr = dependabot_update.get("pullRequest", {}) + if pr: + pr_link = pr.get("permalink") + desc = f"Fix PR: [{pr_link}]({pr_link})\n" + desc + + alert_num = alert.get("number") + if alert_num and repo_url: + alert_link = f"{repo_url}/security/dependabot/{alert_num}" + desc = f"Repo Alert: [{alert_link}]({alert_link})\n" + desc + finding = Finding( - title=alert["securityVulnerability"]["advisory"]["summary"], + title=summary, test=test, - description=description, - severity=self._convert_security( - alert["securityVulnerability"].get("severity", "MODERATE"), - ), + description=desc, + severity=self._convert_security(vuln.get("severity", "MODERATE")), static_finding=True, dynamic_finding=False, - 
unique_id_from_tool=alert["id"], + unique_id_from_tool=alert.get("id"), ) - if "vulnerableManifestPath" in alert: - finding.file_path = alert["vulnerableManifestPath"] - if "vulnerableRequirements" in alert and alert["vulnerableRequirements"].startswith("= "): - finding.component_version = alert["vulnerableRequirements"][2:] - if "createdAt" in alert: - finding.date = dateutil.parser.parse(alert["createdAt"]) - if "state" in alert and ( - alert["state"] == "FIXED" or alert["state"] == "DISMISSED" - ): + + if alert_num and repo_url: + finding.url = alert_link + + cwes = advisory.get("cwes", {}).get("nodes", []) + if cwes: + cwe_id = cwes[0].get("cweId", "")[4:] + if cwe_id.isdigit(): + finding.cwe = int(cwe_id) + + if alert.get("vulnerableManifestPath"): + finding.file_path = alert.get("vulnerableManifestPath") + req = alert.get("vulnerableRequirements", "") + if req.startswith("= "): + finding.component_version = req[2:] + elif req: + finding.component_version = req + pkg = vuln.get("package", {}) + finding.component_name = pkg.get("name") + + if alert.get("createdAt"): + finding.date = dateutil.parser.parse(alert.get("createdAt")) + if alert.get("state") in {"FIXED", "DISMISSED"}: finding.active = False finding.is_mitigated = True - # if the package is present - if "package" in alert["securityVulnerability"]: - finding.component_name = alert["securityVulnerability"][ - "package" - ].get("name") - if "references" in alert["securityVulnerability"]["advisory"]: - finding.references = "" - for ref in alert["securityVulnerability"]["advisory"][ - "references" - ]: - finding.references += ref["url"] + "\r\n" - if "identifiers" in alert["securityVulnerability"]["advisory"]: - unsaved_vulnerability_ids = [identifier.get("value") for identifier in alert["securityVulnerability"]["advisory"][ - "identifiers" - ] if identifier.get("value")] - if unsaved_vulnerability_ids: - finding.unsaved_vulnerability_ids = ( - unsaved_vulnerability_ids - ) - if "cvss" in 
alert["securityVulnerability"]["advisory"]: - if ( - "score" - in alert["securityVulnerability"]["advisory"]["cvss"] - ): - score = alert["securityVulnerability"]["advisory"]["cvss"][ - "score" - ] + + ref_urls = [r.get("url") for r in advisory.get("references", []) if r.get("url")] + if alert_num and repo_url: + ref_urls.append(alert_link) + if pr_link: + ref_urls.append(pr_link) + if ref_urls: + finding.references = "\r\n".join(ref_urls) + + ids = [i.get("value") for i in advisory.get("identifiers", []) if i.get("value")] + if ids: + for identifier in ids: + if identifier.startswith("CVE-"): + finding.cve = identifier + elif identifier.startswith("GHSA-"): + finding.vuln_id_from_tool = identifier + if not finding.vuln_id_from_tool: + finding.vuln_id_from_tool = ids[0] + finding.unsaved_vulnerability_ids = ids + + # cvss is deprecated, so we favor cvssSeverities if it exists + for key in ("cvssSeverities", "cvss"): + cvss = advisory.get(key, {}) + if key == "cvssSeverities" and cvss: + cvss = cvss.get("cvssV3", {}) + if cvss: + score = cvss.get("score") if score is not None: finding.cvssv3_score = score - if ( - "vectorString" - in alert["securityVulnerability"]["advisory"]["cvss"] - ): - cvss_vector_string = alert["securityVulnerability"][ - "advisory" - ]["cvss"]["vectorString"] - if cvss_vector_string is not None: - cvss_objects = cvss_parser.parse_cvss_from_text( - cvss_vector_string, - ) - if len(cvss_objects) > 0: - finding.cvssv3 = cvss_objects[0].clean_vector() - if ( - "cwes" in alert["securityVulnerability"]["advisory"] - and "nodes" - in alert["securityVulnerability"]["advisory"]["cwes"] - ): - cwe_nodes = alert["securityVulnerability"]["advisory"]["cwes"][ - "nodes" - ] - if cwe_nodes and len(cwe_nodes) > 0: - finding.cwe = int(cwe_nodes[0].get("cweId")[4:]) + vec = cvss.get("vectorString") + if vec: + parsed = cvss_parser.parse_cvss_from_text(vec) + if parsed: + finding.cvssv3 = parsed[0].clean_vector() + break + + epss = advisory.get("epss", {}) + 
percentage = epss.get("percentage") + percentile = epss.get("percentile") + if percentage is not None: + finding.epss_score = percentage + if percentile is not None: + finding.epss_percentile = percentile + dupe_key = finding.unique_id_from_tool if dupe_key in dupes: - find = dupes[dupe_key] - find.nb_occurences += 1 + dupes[dupe_key].nb_occurences += 1 else: dupes[dupe_key] = finding + return list(dupes.values()) + if isinstance(data, list): findings = [] for vuln in data: @@ -177,24 +191,25 @@ def get_findings(self, filename, test): ) findings.append(finding) return findings - return None + error_msg = ( + "Invalid report format, expected a GitHub RepositoryVulnerabilityAlert GraphQL query response." + ) + raise TypeError(error_msg) def _search_vulnerability_alerts(self, data): - if isinstance(data, list): + if isinstance(data, dict): + if "vulnerabilityAlerts" in data: + return data["vulnerabilityAlerts"] + for v in data.values(): + res = self._search_vulnerability_alerts(v) + if res: + return res + elif isinstance(data, list): for item in data: - result = self._search_vulnerability_alerts(item) - if result: - return result - elif isinstance(data, dict): - for key in data: - if key == "vulnerabilityAlerts": - return data[key] - result = self._search_vulnerability_alerts(data[key]) - if result: - return result + res = self._search_vulnerability_alerts(item) + if res: + return res return None def _convert_security(self, val): - if val.lower() == "moderate": - return "Medium" - return val.title() + return "Medium" if val.lower() == "moderate" else val.title() diff --git a/dojo/tools/mobsf/api_report_json.py b/dojo/tools/mobsf/api_report_json.py new file mode 100644 index 00000000000..6f5bd1c6c75 --- /dev/null +++ b/dojo/tools/mobsf/api_report_json.py @@ -0,0 +1,388 @@ +from datetime import datetime + +from html2text import html2text + +from dojo.models import Finding + + +class MobSFapireport: + def get_findings(self, data, test): + dupes = {} + find_date = 
datetime.now() + + test_description = "" + if "name" in data: + test_description = "**Info:**\n" + if "packagename" in data: + test_description = "{} **Package Name:** {}\n".format(test_description, data["packagename"]) + + if "mainactivity" in data: + test_description = "{} **Main Activity:** {}\n".format(test_description, data["mainactivity"]) + + if "pltfm" in data: + test_description = "{} **Platform:** {}\n".format(test_description, data["pltfm"]) + + if "sdk" in data: + test_description = "{} **SDK:** {}\n".format(test_description, data["sdk"]) + + if "min" in data: + test_description = "{} **Min SDK:** {}\n".format(test_description, data["min"]) + + if "targetsdk" in data: + test_description = "{} **Target SDK:** {}\n".format(test_description, data["targetsdk"]) + + if "minsdk" in data: + test_description = "{} **Min SDK:** {}\n".format(test_description, data["minsdk"]) + + if "maxsdk" in data: + test_description = "{} **Max SDK:** {}\n".format(test_description, data["maxsdk"]) + + test_description = f"{test_description}\n**File Information:**\n" + + if "name" in data: + test_description = "{} **Name:** {}\n".format(test_description, data["name"]) + + if "md5" in data: + test_description = "{} **MD5:** {}\n".format(test_description, data["md5"]) + + if "sha1" in data: + test_description = "{} **SHA-1:** {}\n".format(test_description, data["sha1"]) + + if "sha256" in data: + test_description = "{} **SHA-256:** {}\n".format(test_description, data["sha256"]) + + if "size" in data: + test_description = "{} **Size:** {}\n".format(test_description, data["size"]) + + if "urls" in data: + curl = "" + for url in data["urls"]: + for durl in url["urls"]: + curl = f"{durl}\n" + + if curl: + test_description = f"{test_description}\n**URL's:**\n {curl}\n" + + if "bin_anal" in data: + test_description = "{} \n**Binary Analysis:** {}\n".format(test_description, data["bin_anal"]) + + test.description = html2text(test_description) + + mobsf_findings = [] + # Mobile 
Permissions + if "permissions" in data: + # for permission, details in data["permissions"].items(): + if isinstance(data["permissions"], list): + for details in data["permissions"]: + mobsf_item = { + "category": "Mobile Permissions", + "title": details.get("name", ""), + "severity": self.getSeverityForPermission(details.get("status")), + "description": "**Permission Type:** " + details.get("name", "") + " (" + details.get("status", "") + ")\n\n**Description:** " + details.get("description", "") + "\n\n**Reason:** " + details.get("reason", ""), + "file_path": None, + } + mobsf_findings.append(mobsf_item) + else: + for permission, details in list(data["permissions"].items()): + mobsf_item = { + "category": "Mobile Permissions", + "title": permission, + "severity": self.getSeverityForPermission(details.get("status", "")), + "description": "**Permission Type:** " + permission + "\n\n**Description:** " + details.get("description", ""), + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # Insecure Connections + if "insecure_connections" in data: + for details in data["insecure_connections"]: + insecure_urls = "" + for url in details.split(","): + insecure_urls = insecure_urls + url + "\n" + + mobsf_item = { + "category": None, + "title": "Insecure Connections", + "severity": "Low", + "description": insecure_urls, + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # Certificate Analysis + if "certificate_analysis" in data: + if data["certificate_analysis"] != {}: + certificate_info = data["certificate_analysis"]["certificate_info"] + for details in data["certificate_analysis"]["certificate_findings"]: + if len(details) == 3: + mobsf_item = { + "category": "Certificate Analysis", + "title": details[2], + "severity": details[0].title(), + "description": details[1] + "\n\n**Certificate Info:** " + certificate_info, + "file_path": None, + } + mobsf_findings.append(mobsf_item) + elif len(details) == 2: + mobsf_item = { + "category": "Certificate 
Analysis", + "title": details[1], + "severity": details[0].title(), + "description": details[1] + "\n\n**Certificate Info:** " + certificate_info, + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # Manifest Analysis + if "manifest_analysis" in data: + if data["manifest_analysis"] != {} and isinstance(data["manifest_analysis"], dict): + if data["manifest_analysis"]["manifest_findings"]: + for details in data["manifest_analysis"]["manifest_findings"]: + mobsf_item = { + "category": "Manifest Analysis", + "title": details["title"], + "severity": details["severity"].title(), + "description": details["description"] + "\n\n " + details["name"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + else: + for details in data["manifest_analysis"]: + mobsf_item = { + "category": "Manifest Analysis", + "title": details["title"], + "severity": details["stat"].title(), + "description": details["desc"] + "\n\n " + details["name"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # Code Analysis + if "code_analysis" in data: + if data["code_analysis"] != {}: + if data["code_analysis"].get("findings"): + for details in data["code_analysis"]["findings"]: + metadata = data["code_analysis"]["findings"][details] + mobsf_item = { + "category": "Code Analysis", + "title": details, + "severity": metadata["metadata"]["severity"].title(), + "description": metadata["metadata"]["description"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + else: + for details in data["code_analysis"]: + metadata = data["code_analysis"][details] + if metadata.get("metadata"): + mobsf_item = { + "category": "Code Analysis", + "title": details, + "severity": metadata["metadata"]["severity"].title(), + "description": metadata["metadata"]["description"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # Binary Analysis + if "binary_analysis" in data: + if isinstance(data["binary_analysis"], list): + for details in data["binary_analysis"]: 
+ for binary_analysis_type in details: + if binary_analysis_type != "name": + mobsf_item = { + "category": "Binary Analysis", + "title": details[binary_analysis_type]["description"].split(".")[0], + "severity": details[binary_analysis_type]["severity"].title(), + "description": details[binary_analysis_type]["description"], + "file_path": details["name"], + } + mobsf_findings.append(mobsf_item) + elif data["binary_analysis"].get("findings"): + for details in data["binary_analysis"]["findings"].values(): + # "findings":{ + # "Binary makes use of insecure API(s)":{ + # "detailed_desc":"The binary may contain the following insecure API(s) _memcpy\n, _strlen\n", + # "severity":"high", + # "cvss":6, + # "cwe":"CWE-676: Use of Potentially Dangerous Function", + # "owasp-mobile":"M7: Client Code Quality", + # "masvs":"MSTG-CODE-8" + # }, + mobsf_item = { + "category": "Binary Analysis", + "title": details["detailed_desc"], + "severity": details["severity"].title(), + "description": details["detailed_desc"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + else: + for details in data["binary_analysis"].values(): + # "Binary makes use of insecure API(s)":{ + # "detailed_desc":"The binary may contain the following insecure API(s) _vsprintf.", + # "severity":"high", + # "cvss":6, + # "cwe":"CWE-676 - Use of Potentially Dangerous Function", + # "owasp-mobile":"M7: Client Code Quality", + # "masvs":"MSTG-CODE-8" + # } + mobsf_item = { + "category": "Binary Analysis", + "title": details["detailed_desc"], + "severity": details["severity"].title(), + "description": details["detailed_desc"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # specific node for Android reports + if "android_api" in data: + # "android_insecure_random": { + # "files": { + # "u/c/a/b/a/c.java": "9", + # "kotlinx/coroutines/repackaged/net/bytebuddy/utility/RandomString.java": "3", + # ... 
+ # "hu/mycompany/vbnmqweq/gateway/msg/Response.java": "13" + # }, + # "metadata": { + # "id": "android_insecure_random", + # "description": "The App uses an insecure Random Number Generator.", + # "type": "Regex", + # "pattern": "java\\.util\\.Random;", + # "severity": "high", + # "input_case": "exact", + # "cvss": 7.5, + # "cwe": "CWE-330 Use of Insufficiently Random Values", + # "owasp-mobile": "M5: Insufficient Cryptography", + # "masvs": "MSTG-CRYPTO-6" + # } + # }, + for api, details in list(data["android_api"].items()): + mobsf_item = { + "category": "Android API", + "title": details["metadata"]["description"], + "severity": details["metadata"]["severity"].title(), + "description": "**API:** " + api + "\n\n**Description:** " + details["metadata"]["description"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # Manifest + if "manifest" in data: + for details in data["manifest"]: + mobsf_item = { + "category": "Manifest", + "title": details["title"], + "severity": details["stat"], + "description": details["desc"], + "file_path": None, + } + mobsf_findings.append(mobsf_item) + + # MobSF Findings + if "findings" in data: + for title, finding in list(data["findings"].items()): + description = title + file_path = None + + if "path" in finding: + description += "\n\n**Files:**\n" + for path in finding["path"]: + if file_path is None: + file_path = path + description = description + " * " + path + "\n" + + mobsf_item = { + "category": "Findings", + "title": title, + "severity": finding["level"], + "description": description, + "file_path": file_path, + } + + mobsf_findings.append(mobsf_item) + if isinstance(data, list): + for finding in data: + mobsf_item = { + "category": finding["category"], + "title": finding["name"], + "severity": finding["severity"], + "description": finding["description"] + "\n" + "**apk_exploit_dict:** " + str(finding["apk_exploit_dict"]) + "\n" + "**line_number:** " + str(finding["line_number"]), + "file_path": 
finding["file_object"], + } + mobsf_findings.append(mobsf_item) + for mobsf_finding in mobsf_findings: + title = mobsf_finding["title"] + sev = self.getCriticalityRating(mobsf_finding["severity"]) + description = "" + file_path = None + if mobsf_finding["category"]: + description += "**Category:** " + mobsf_finding["category"] + "\n\n" + description += html2text(mobsf_finding["description"]) + finding = Finding( + title=title, + cwe=919, # Weaknesses in Mobile Applications + test=test, + description=description, + severity=sev, + references=None, + date=find_date, + static_finding=True, + dynamic_finding=False, + nb_occurences=1, + ) + if mobsf_finding["file_path"]: + finding.file_path = mobsf_finding["file_path"] + dupe_key = sev + title + description + mobsf_finding["file_path"] + else: + dupe_key = sev + title + description + if mobsf_finding["category"]: + dupe_key += mobsf_finding["category"] + if dupe_key in dupes: + find = dupes[dupe_key] + if description is not None: + find.description += description + find.nb_occurences += 1 + else: + dupes[dupe_key] = finding + return list(dupes.values()) + + def getSeverityForPermission(self, status): + """ + Convert status for permission detection to severity + + In MobSF there is only 4 know values for permission, + we map them as this: + dangerous => High (Critical?) + normal => Info + signature => Info (it's positive so... Info) + signatureOrSystem => Info (it's positive so... 
Info) + """ + if status == "dangerous": + return "High" + return "Info" + + # Criticality rating + def getCriticalityRating(self, rating): + criticality = "Info" + if rating.lower() == "good": + criticality = "Info" + elif rating.lower() == "warning": + criticality = "Low" + elif rating.lower() == "vulnerability": + criticality = "Medium" + else: + criticality = rating.lower().capitalize() + return criticality + + def suite_data(self, suites): + suite_info = "" + suite_info += suites["name"] + "\n" + suite_info += "Cipher Strength: " + str(suites["cipherStrength"]) + "\n" + if "ecdhBits" in suites: + suite_info += "ecdhBits: " + str(suites["ecdhBits"]) + "\n" + if "ecdhStrength" in suites: + suite_info += "ecdhStrength: " + str(suites["ecdhStrength"]) + suite_info += "\n\n" + return suite_info diff --git a/dojo/tools/mobsf/parser.py b/dojo/tools/mobsf/parser.py index c61065ea892..ff5a7122655 100644 --- a/dojo/tools/mobsf/parser.py +++ b/dojo/tools/mobsf/parser.py @@ -1,22 +1,20 @@ import json -from datetime import datetime -from html2text import html2text - -from dojo.models import Finding +from dojo.tools.mobsf.api_report_json import MobSFapireport +from dojo.tools.mobsf.report import MobSFjsonreport class MobSFParser: def get_scan_types(self): - return ["MobSF Scan"] + return ["MobSF Scan", "Mobsfscan Scan"] def get_label_for_scan_types(self, scan_type): return "MobSF Scan" def get_description_for_scan_types(self, scan_type): - return "Export a JSON file using the API, api/v1/report_json." 
+ return "Import JSON report from mobsfscan report file or from api/v1/report_json" def get_findings(self, filename, test): tree = filename.read() @@ -24,381 +22,8 @@ def get_findings(self, filename, test): data = json.loads(str(tree, "utf-8")) except: data = json.loads(tree) - find_date = datetime.now() - dupes = {} - test_description = "" - if "name" in data: - test_description = "**Info:**\n" - if "packagename" in data: - test_description = "{} **Package Name:** {}\n".format(test_description, data["packagename"]) - - if "mainactivity" in data: - test_description = "{} **Main Activity:** {}\n".format(test_description, data["mainactivity"]) - - if "pltfm" in data: - test_description = "{} **Platform:** {}\n".format(test_description, data["pltfm"]) - - if "sdk" in data: - test_description = "{} **SDK:** {}\n".format(test_description, data["sdk"]) - - if "min" in data: - test_description = "{} **Min SDK:** {}\n".format(test_description, data["min"]) - - if "targetsdk" in data: - test_description = "{} **Target SDK:** {}\n".format(test_description, data["targetsdk"]) - - if "minsdk" in data: - test_description = "{} **Min SDK:** {}\n".format(test_description, data["minsdk"]) - - if "maxsdk" in data: - test_description = "{} **Max SDK:** {}\n".format(test_description, data["maxsdk"]) - - test_description = f"{test_description}\n**File Information:**\n" - - if "name" in data: - test_description = "{} **Name:** {}\n".format(test_description, data["name"]) - - if "md5" in data: - test_description = "{} **MD5:** {}\n".format(test_description, data["md5"]) - - if "sha1" in data: - test_description = "{} **SHA-1:** {}\n".format(test_description, data["sha1"]) - - if "sha256" in data: - test_description = "{} **SHA-256:** {}\n".format(test_description, data["sha256"]) - - if "size" in data: - test_description = "{} **Size:** {}\n".format(test_description, data["size"]) - - if "urls" in data: - curl = "" - for url in data["urls"]: - for durl in url["urls"]: - curl = 
f"{durl}\n" - - if curl: - test_description = f"{test_description}\n**URL's:**\n {curl}\n" - - if "bin_anal" in data: - test_description = "{} \n**Binary Analysis:** {}\n".format(test_description, data["bin_anal"]) - - test.description = html2text(test_description) - - mobsf_findings = [] - # Mobile Permissions - if "permissions" in data: - # for permission, details in data["permissions"].items(): - if isinstance(data["permissions"], list): - for details in data["permissions"]: - mobsf_item = { - "category": "Mobile Permissions", - "title": details.get("name", ""), - "severity": self.getSeverityForPermission(details.get("status")), - "description": "**Permission Type:** " + details.get("name", "") + " (" + details.get("status", "") + ")\n\n**Description:** " + details.get("description", "") + "\n\n**Reason:** " + details.get("reason", ""), - "file_path": None, - } - mobsf_findings.append(mobsf_item) - else: - for permission, details in list(data["permissions"].items()): - mobsf_item = { - "category": "Mobile Permissions", - "title": permission, - "severity": self.getSeverityForPermission(details.get("status", "")), - "description": "**Permission Type:** " + permission + "\n\n**Description:** " + details.get("description", ""), - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # Insecure Connections - if "insecure_connections" in data: - for details in data["insecure_connections"]: - insecure_urls = "" - for url in details.split(","): - insecure_urls = insecure_urls + url + "\n" - - mobsf_item = { - "category": None, - "title": "Insecure Connections", - "severity": "Low", - "description": insecure_urls, - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # Certificate Analysis - if "certificate_analysis" in data: - if data["certificate_analysis"] != {}: - certificate_info = data["certificate_analysis"]["certificate_info"] - for details in data["certificate_analysis"]["certificate_findings"]: - if len(details) == 3: - mobsf_item = { - 
"category": "Certificate Analysis", - "title": details[2], - "severity": details[0].title(), - "description": details[1] + "\n\n**Certificate Info:** " + certificate_info, - "file_path": None, - } - mobsf_findings.append(mobsf_item) - elif len(details) == 2: - mobsf_item = { - "category": "Certificate Analysis", - "title": details[1], - "severity": details[0].title(), - "description": details[1] + "\n\n**Certificate Info:** " + certificate_info, - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # Manifest Analysis - if "manifest_analysis" in data: - if data["manifest_analysis"] != {} and isinstance(data["manifest_analysis"], dict): - if data["manifest_analysis"]["manifest_findings"]: - for details in data["manifest_analysis"]["manifest_findings"]: - mobsf_item = { - "category": "Manifest Analysis", - "title": details["title"], - "severity": details["severity"].title(), - "description": details["description"] + "\n\n " + details["name"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - else: - for details in data["manifest_analysis"]: - mobsf_item = { - "category": "Manifest Analysis", - "title": details["title"], - "severity": details["stat"].title(), - "description": details["desc"] + "\n\n " + details["name"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # Code Analysis - if "code_analysis" in data: - if data["code_analysis"] != {}: - if data["code_analysis"].get("findings"): - for details in data["code_analysis"]["findings"]: - metadata = data["code_analysis"]["findings"][details] - mobsf_item = { - "category": "Code Analysis", - "title": details, - "severity": metadata["metadata"]["severity"].title(), - "description": metadata["metadata"]["description"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - else: - for details in data["code_analysis"]: - metadata = data["code_analysis"][details] - if metadata.get("metadata"): - mobsf_item = { - "category": "Code Analysis", - "title": details, - 
"severity": metadata["metadata"]["severity"].title(), - "description": metadata["metadata"]["description"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # Binary Analysis - if "binary_analysis" in data: - if isinstance(data["binary_analysis"], list): - for details in data["binary_analysis"]: - for binary_analysis_type in details: - if binary_analysis_type != "name": - mobsf_item = { - "category": "Binary Analysis", - "title": details[binary_analysis_type]["description"].split(".")[0], - "severity": details[binary_analysis_type]["severity"].title(), - "description": details[binary_analysis_type]["description"], - "file_path": details["name"], - } - mobsf_findings.append(mobsf_item) - elif data["binary_analysis"].get("findings"): - for details in data["binary_analysis"]["findings"].values(): - # "findings":{ - # "Binary makes use of insecure API(s)":{ - # "detailed_desc":"The binary may contain the following insecure API(s) _memcpy\n, _strlen\n", - # "severity":"high", - # "cvss":6, - # "cwe":"CWE-676: Use of Potentially Dangerous Function", - # "owasp-mobile":"M7: Client Code Quality", - # "masvs":"MSTG-CODE-8" - # }, - mobsf_item = { - "category": "Binary Analysis", - "title": details["detailed_desc"], - "severity": details["severity"].title(), - "description": details["detailed_desc"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - else: - for details in data["binary_analysis"].values(): - # "Binary makes use of insecure API(s)":{ - # "detailed_desc":"The binary may contain the following insecure API(s) _vsprintf.", - # "severity":"high", - # "cvss":6, - # "cwe":"CWE-676 - Use of Potentially Dangerous Function", - # "owasp-mobile":"M7: Client Code Quality", - # "masvs":"MSTG-CODE-8" - # } - mobsf_item = { - "category": "Binary Analysis", - "title": details["detailed_desc"], - "severity": details["severity"].title(), - "description": details["detailed_desc"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # 
specific node for Android reports - if "android_api" in data: - # "android_insecure_random": { - # "files": { - # "u/c/a/b/a/c.java": "9", - # "kotlinx/coroutines/repackaged/net/bytebuddy/utility/RandomString.java": "3", - # ... - # "hu/mycompany/vbnmqweq/gateway/msg/Response.java": "13" - # }, - # "metadata": { - # "id": "android_insecure_random", - # "description": "The App uses an insecure Random Number Generator.", - # "type": "Regex", - # "pattern": "java\\.util\\.Random;", - # "severity": "high", - # "input_case": "exact", - # "cvss": 7.5, - # "cwe": "CWE-330 Use of Insufficiently Random Values", - # "owasp-mobile": "M5: Insufficient Cryptography", - # "masvs": "MSTG-CRYPTO-6" - # } - # }, - for api, details in list(data["android_api"].items()): - mobsf_item = { - "category": "Android API", - "title": details["metadata"]["description"], - "severity": details["metadata"]["severity"].title(), - "description": "**API:** " + api + "\n\n**Description:** " + details["metadata"]["description"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # Manifest - if "manifest" in data: - for details in data["manifest"]: - mobsf_item = { - "category": "Manifest", - "title": details["title"], - "severity": details["stat"], - "description": details["desc"], - "file_path": None, - } - mobsf_findings.append(mobsf_item) - - # MobSF Findings - if "findings" in data: - for title, finding in list(data["findings"].items()): - description = title - file_path = None - - if "path" in finding: - description += "\n\n**Files:**\n" - for path in finding["path"]: - if file_path is None: - file_path = path - description = description + " * " + path + "\n" - - mobsf_item = { - "category": "Findings", - "title": title, - "severity": finding["level"], - "description": description, - "file_path": file_path, - } - - mobsf_findings.append(mobsf_item) - if isinstance(data, list): - for finding in data: - mobsf_item = { - "category": finding["category"], - "title": finding["name"], - 
"severity": finding["severity"], - "description": finding["description"] + "\n" + "**apk_exploit_dict:** " + str(finding["apk_exploit_dict"]) + "\n" + "**line_number:** " + str(finding["line_number"]), - "file_path": finding["file_object"], - } - mobsf_findings.append(mobsf_item) - for mobsf_finding in mobsf_findings: - title = mobsf_finding["title"] - sev = self.getCriticalityRating(mobsf_finding["severity"]) - description = "" - file_path = None - if mobsf_finding["category"]: - description += "**Category:** " + mobsf_finding["category"] + "\n\n" - description += html2text(mobsf_finding["description"]) - finding = Finding( - title=title, - cwe=919, # Weaknesses in Mobile Applications - test=test, - description=description, - severity=sev, - references=None, - date=find_date, - static_finding=True, - dynamic_finding=False, - nb_occurences=1, - ) - if mobsf_finding["file_path"]: - finding.file_path = mobsf_finding["file_path"] - dupe_key = sev + title + description + mobsf_finding["file_path"] - else: - dupe_key = sev + title + description - if mobsf_finding["category"]: - dupe_key += mobsf_finding["category"] - if dupe_key in dupes: - find = dupes[dupe_key] - if description is not None: - find.description += description - find.nb_occurences += 1 - else: - dupes[dupe_key] = finding - return list(dupes.values()) - - def getSeverityForPermission(self, status): - """ - Convert status for permission detection to severity - - In MobSF there is only 4 know values for permission, - we map them as this: - dangerous => High (Critical?) - normal => Info - signature => Info (it's positive so... Info) - signatureOrSystem => Info (it's positive so... 
Info) - """ - if status == "dangerous": - return "High" - return "Info" - - # Criticality rating - def getCriticalityRating(self, rating): - criticality = "Info" - if rating.lower() == "good": - criticality = "Info" - elif rating.lower() == "warning": - criticality = "Low" - elif rating.lower() == "vulnerability": - criticality = "Medium" - else: - criticality = rating.lower().capitalize() - return criticality - - def suite_data(self, suites): - suite_info = "" - suite_info += suites["name"] + "\n" - suite_info += "Cipher Strength: " + str(suites["cipherStrength"]) + "\n" - if "ecdhBits" in suites: - suite_info += "ecdhBits: " + str(suites["ecdhBits"]) + "\n" - if "ecdhStrength" in suites: - suite_info += "ecdhStrength: " + str(suites["ecdhStrength"]) - suite_info += "\n\n" - return suite_info + if isinstance(data, list) or data.get("results") is None: + return MobSFapireport().get_findings(data, test) + if len(data.get("results")) == 0: + return [] + return MobSFjsonreport().get_findings(data, test) diff --git a/dojo/tools/mobsfscan/parser.py b/dojo/tools/mobsf/report.py similarity index 84% rename from dojo/tools/mobsfscan/parser.py rename to dojo/tools/mobsf/report.py index 49995720acb..3f076e2f8a5 100644 --- a/dojo/tools/mobsfscan/parser.py +++ b/dojo/tools/mobsf/report.py @@ -1,11 +1,10 @@ import hashlib -import json import re from dojo.models import Finding -class MobsfscanParser: +class MobSFjsonreport: """A class that can be used to parse the mobsfscan (https://github.com/MobSF/mobsfscan) JSON report file.""" @@ -15,19 +14,7 @@ class MobsfscanParser: "INFO": "Low", } - def get_scan_types(self): - return ["Mobsfscan Scan"] - - def get_label_for_scan_types(self, scan_type): - return "Mobsfscan Scan" - - def get_description_for_scan_types(self, scan_type): - return "Import JSON report for mobsfscan report file." 
- - def get_findings(self, filename, test): - data = json.load(filename) - if len(data.get("results")) == 0: - return [] + def get_findings(self, data, test): dupes = {} for key, item in data.get("results").items(): metadata = item.get("metadata") diff --git a/dojo/tools/nexpose/parser.py b/dojo/tools/nexpose/parser.py index d7f197d2b21..9c03ba8f277 100644 --- a/dojo/tools/nexpose/parser.py +++ b/dojo/tools/nexpose/parser.py @@ -4,7 +4,7 @@ import html2text from defusedxml import ElementTree from django.conf import settings -from hyperlink._url import SCHEME_PORT_MAP +from hyperlink._url import SCHEME_PORT_MAP # noqa: PLC2701 from dojo.models import Endpoint, Finding diff --git a/dojo/tools/tenable/xml_format.py b/dojo/tools/tenable/xml_format.py index 53f82a440ac..7bbf36baa66 100644 --- a/dojo/tools/tenable/xml_format.py +++ b/dojo/tools/tenable/xml_format.py @@ -3,7 +3,7 @@ from cvss import CVSS3 from defusedxml import ElementTree -from hyperlink._url import SCHEME_PORT_MAP +from hyperlink._url import SCHEME_PORT_MAP # noqa: PLC2701 from dojo.models import Endpoint, Finding, Test diff --git a/dojo/tools/wazuh/v4_7.py b/dojo/tools/wazuh/v4_7.py index 1357571d0d5..661dfd3c5a7 100644 --- a/dojo/tools/wazuh/v4_7.py +++ b/dojo/tools/wazuh/v4_7.py @@ -25,6 +25,19 @@ def parse_findings(self, test, data): agent_ip = item.get("agent_ip") detection_time = item.get("detection_time").split("T")[0] + # Map Wazuh severity to its equivalent in DefectDojo + SEVERITY_MAP = { + "Critical": "Critical", + "High": "High", + "Medium": "Medium", + "Low": "Low", + "Info": "Info", + "Informational": "Info", + "Untriaged": "Info", + } + # Get DefectDojo severity and default to "Info" if severity is not in the mapping + severity = SEVERITY_MAP.get(severity, "Info") + references = "\n".join(links) if links else None title = ( diff --git a/dojo/tools/wazuh/v4_8.py b/dojo/tools/wazuh/v4_8.py index 636ee0210d5..2031c759986 100644 --- a/dojo/tools/wazuh/v4_8.py +++ b/dojo/tools/wazuh/v4_8.py @@ 
-25,6 +25,19 @@ def parse_findings(self, test, data): detection_time = vuln.get("detected_at").split("T")[0] references = vuln.get("reference") + # Map Wazuh severity to its equivalent in DefectDojo + SEVERITY_MAP = { + "Critical": "Critical", + "High": "High", + "Medium": "Medium", + "Low": "Low", + "Info": "Info", + "Informational": "Info", + "Untriaged": "Info", + } + # Get DefectDojo severity and default to "Info" if severity is not in the mapping + severity = SEVERITY_MAP.get(severity, "Info") + title = ( cve + " affects (version: " + item.get("package").get("version") + ")" ) diff --git a/dojo/user/views.py b/dojo/user/views.py index 603eb2e0db4..f4e2539d659 100644 --- a/dojo/user/views.py +++ b/dojo/user/views.py @@ -287,7 +287,7 @@ def change_password(request): new_password = form.cleaned_data["new_password"] user.set_password(new_password) - Dojo_User.disable_force_password_reset(user) + user.disable_force_password_reset() user.save() messages.add_message(request, diff --git a/dojo/utils.py b/dojo/utils.py index 1d609bd4a13..fc676e8d2cf 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -510,7 +510,7 @@ def deduplicate_uid_or_hash_code(new_finding): id=new_finding.id).exclude( duplicate=True).order_by("id") deduplicationLogger.debug("Found " - + str(len(existing_findings)) + " findings with either the same unique_id_from_tool or hash_code") + + str(len(existing_findings)) + " findings with either the same unique_id_from_tool or hash_code: " + str([find.id for find in existing_findings])) for find in existing_findings: if is_deduplication_on_engagement_mismatch(new_finding, find): deduplicationLogger.debug( @@ -519,10 +519,10 @@ def deduplicate_uid_or_hash_code(new_finding): try: if are_endpoints_duplicates(new_finding, find): set_duplicate(new_finding, find) + break except Exception as e: deduplicationLogger.debug(str(e)) continue - break def set_duplicate(new_finding, existing_finding): @@ -1463,7 +1463,8 @@ def process_tag_notifications(request, note, 
parent_url, parent_title): title=f"{request.user} jotted a note", url=parent_url, icon="commenting", - recipients=users_to_notify) + recipients=users_to_notify, + requested_by=get_current_user()) def encrypt(key, iv, plaintext): diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index a783af5f4e6..68abf43f6de 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.51.3" +appVersion: "2.52.0" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.7.3 +version: 1.8.0 icon: https://defectdojo.com/hubfs/DefectDojo_favicon.png maintainers: - name: madchap @@ -34,4 +34,16 @@ dependencies: # description: Critical bug annotations: artifacthub.io/prerelease: "false" - artifacthub.io/changes: "- kind: changed\n description: Bump DefectDojo to 2.51.3\n" + artifacthub.io/changes: | + - kind: changed + description: DRY cloudsql-proxy + - kind: changed + description: Each component allow to specific image + allow digest pinning + allow different tags for Django and Nginx + - kind: added + description: Convert existing comments to descriptors + - kind: added + description: Testing on the oldest officially supported k8s + - kind: added + description: Checker for maximal number of celery beats + - kind: changed + description: Bump DefectDojo to 2.52.0 diff --git a/helm/defectdojo/README.md b/helm/defectdojo/README.md index b5cccadbd8f..456011dab3e 100644 --- a/helm/defectdojo/README.md +++ b/helm/defectdojo/README.md @@ -11,7 +11,7 @@ this [guide](https://helm.sh/docs/using_helm/#installing-helm). ## Supported Kubernetes Versions -The tests cover the deployment on the lastest [kubernetes version](https://kubernetes.io/releases/) and the oldest supported [version from AWS](https://docs.aws.amazon.com/eks/latest/userguide/kubernetes-versions.html#available-versions). The assumption is that version in between do not have significant differences. 
Current tested versions can looks up in the [github k8s workflow](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/k8s-tests.yml). +The tests cover the deployment on the lastest [kubernetes version](https://kubernetes.io/releases/) and [the oldest officially supported version](https://kubernetes.io/releases/). The assumption is that version in between do not have significant differences. Current tested versions can looks up in the [github k8s workflow](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/k8s-tests.yml). ## Helm chart @@ -495,7 +495,7 @@ kubectl delete pvc data-defectdojo-redis-0 data-defectdojo-postgresql-0 # General information about chart values -![Version: 1.7.3](https://img.shields.io/badge/Version-1.7.3-informational?style=flat-square) ![AppVersion: 2.51.3](https://img.shields.io/badge/AppVersion-2.51.3-informational?style=flat-square) +![Version: 1.8.0](https://img.shields.io/badge/Version-1.8.0-informational?style=flat-square) ![AppVersion: 2.52.0](https://img.shields.io/badge/AppVersion-2.52.0-informational?style=flat-square) A Helm chart for Kubernetes to install DefectDojo @@ -524,85 +524,97 @@ A Helm chart for Kubernetes to install DefectDojo | admin.password | string | `""` | | | admin.secretKey | string | `""` | | | admin.user | string | `"admin"` | | -| annotations | object | `{}` | | -| celery.annotations | object | `{}` | | +| alternativeHosts | list | `[]` | optional list of alternative hostnames to use that gets appended to DD_ALLOWED_HOSTS. This is necessary when your local hostname does not match the global hostname. | +| celery.annotations | object | `{}` | Common annotations to worker and beat deployments and pods. | | celery.beat.affinity | object | `{}` | | -| celery.beat.annotations | object | `{}` | | +| celery.beat.annotations | object | `{}` | Annotations for the Celery beat deployment. 
| | celery.beat.automountServiceAccountToken | bool | `false` | | -| celery.beat.extraEnv | list | `[]` | | -| celery.beat.extraInitContainers | list | `[]` | | -| celery.beat.extraVolumeMounts | list | `[]` | | -| celery.beat.extraVolumes | list | `[]` | | -| celery.beat.livenessProbe | object | `{}` | | +| celery.beat.containerSecurityContext | object | `{}` | Container security context for the Celery beat containers. | +| celery.beat.extraEnv | list | `[]` | Additional environment variables injected to Celery beat containers. | +| celery.beat.extraInitContainers | list | `[]` | A list of additional initContainers to run before celery beat containers. | +| celery.beat.extraVolumeMounts | list | `[]` | Array of additional volume mount points for the celery beat containers. | +| celery.beat.extraVolumes | list | `[]` | A list of extra volumes to mount @type: array | +| celery.beat.image | object | `{"digest":"","registry":"","repository":"","tag":""}` | If empty, uses values from images.django.image | +| celery.beat.livenessProbe | object | `{}` | Enable liveness probe for Celery beat container. ``` exec: command: - bash - -c - celery -A dojo inspect ping -t 5 initialDelaySeconds: 30 periodSeconds: 60 timeoutSeconds: 10 ``` | | celery.beat.nodeSelector | object | `{}` | | -| celery.beat.podAnnotations | object | `{}` | | -| celery.beat.readinessProbe | object | `{}` | | -| celery.beat.replicas | int | `1` | | +| celery.beat.podAnnotations | object | `{}` | Annotations for the Celery beat pods. | +| celery.beat.podSecurityContext | object | `{}` | Pod security context for the Celery beat pods. | +| celery.beat.readinessProbe | object | `{}` | Enable readiness probe for Celery beat container. 
| +| celery.beat.replicas | int | `1` | Multiple replicas are not allowed (Beat is intended to be a singleton) because scaling to >1 will double-run schedules | | celery.beat.resources.limits.cpu | string | `"2000m"` | | | celery.beat.resources.limits.memory | string | `"256Mi"` | | | celery.beat.resources.requests.cpu | string | `"100m"` | | | celery.beat.resources.requests.memory | string | `"128Mi"` | | -| celery.beat.startupProbe | object | `{}` | | +| celery.beat.startupProbe | object | `{}` | Enable startup probe for Celery beat container. | | celery.beat.tolerations | list | `[]` | | | celery.broker | string | `"redis"` | | | celery.logLevel | string | `"INFO"` | | | celery.worker.affinity | object | `{}` | | -| celery.worker.annotations | object | `{}` | | -| celery.worker.appSettings.poolType | string | `"solo"` | | +| celery.worker.annotations | object | `{}` | Annotations for the Celery worker deployment. | +| celery.worker.appSettings.poolType | string | `"solo"` | Performance improved celery worker config when needing to deal with a lot of findings (e.g deduplication ops) poolType: prefork autoscaleMin: 2 autoscaleMax: 8 concurrency: 8 prefetchMultiplier: 128 | | celery.worker.automountServiceAccountToken | bool | `false` | | -| celery.worker.extraEnv | list | `[]` | | -| celery.worker.extraInitContainers | list | `[]` | | -| celery.worker.extraVolumeMounts | list | `[]` | | -| celery.worker.extraVolumes | list | `[]` | | -| celery.worker.livenessProbe | object | `{}` | | +| celery.worker.containerSecurityContext | object | `{}` | Container security context for the Celery worker containers. | +| celery.worker.extraEnv | list | `[]` | Additional environment variables injected to Celery worker containers. | +| celery.worker.extraInitContainers | list | `[]` | A list of additional initContainers to run before celery worker containers. 
| +| celery.worker.extraVolumeMounts | list | `[]` | Array of additional volume mount points for the celery worker containers. | +| celery.worker.extraVolumes | list | `[]` | A list of extra volumes to mount. @type: array | +| celery.worker.image | object | `{"digest":"","registry":"","repository":"","tag":""}` | If empty, uses values from images.django.image | +| celery.worker.livenessProbe | object | `{}` | Enable liveness probe for Celery worker containers. ``` exec: command: - bash - -c - celery -A dojo inspect ping -t 5 initialDelaySeconds: 30 periodSeconds: 60 timeoutSeconds: 10 ``` | | celery.worker.nodeSelector | object | `{}` | | -| celery.worker.podAnnotations | object | `{}` | | -| celery.worker.readinessProbe | object | `{}` | | +| celery.worker.podAnnotations | object | `{}` | Annotations for the Celery worker pods. | +| celery.worker.podSecurityContext | object | `{}` | Pod security context for the Celery worker pods. | +| celery.worker.readinessProbe | object | `{}` | Enable readiness probe for Celery worker container. | | celery.worker.replicas | int | `1` | | | celery.worker.resources.limits.cpu | string | `"2000m"` | | | celery.worker.resources.limits.memory | string | `"512Mi"` | | | celery.worker.resources.requests.cpu | string | `"100m"` | | | celery.worker.resources.requests.memory | string | `"128Mi"` | | -| celery.worker.startupProbe | object | `{}` | | +| celery.worker.startupProbe | object | `{}` | Enable startup probe for Celery worker container. 
| | celery.worker.tolerations | list | `[]` | | -| cloudsql.enable_iam_login | bool | `false` | | -| cloudsql.enabled | bool | `false` | | -| cloudsql.image.pullPolicy | string | `"IfNotPresent"` | | -| cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | | -| cloudsql.image.tag | string | `"1.37.9"` | | -| cloudsql.instance | string | `""` | | -| cloudsql.use_private_ip | bool | `false` | | -| cloudsql.verbose | bool | `true` | | -| createPostgresqlSecret | bool | `false` | | -| createRedisSecret | bool | `false` | | -| createSecret | bool | `false` | | -| dbMigrationChecker.enabled | bool | `true` | | -| dbMigrationChecker.resources.limits.cpu | string | `"200m"` | | -| dbMigrationChecker.resources.limits.memory | string | `"200Mi"` | | -| dbMigrationChecker.resources.requests.cpu | string | `"100m"` | | -| dbMigrationChecker.resources.requests.memory | string | `"100Mi"` | | -| disableHooks | bool | `false` | | +| cloudsql | object | `{"containerSecurityContext":{},"enable_iam_login":false,"enabled":false,"extraEnv":[],"extraVolumeMounts":[],"image":{"pullPolicy":"IfNotPresent","repository":"gcr.io/cloudsql-docker/gce-proxy","tag":"1.37.9"},"instance":"","resources":{},"use_private_ip":false,"verbose":true}` | Google CloudSQL support in GKE via gce-proxy | +| cloudsql.containerSecurityContext | object | `{}` | Optional: security context for the CloudSQL proxy container. | +| cloudsql.enable_iam_login | bool | `false` | use IAM database authentication | +| cloudsql.enabled | bool | `false` | To use CloudSQL in GKE set 'enabled: true' | +| cloudsql.extraEnv | list | `[]` | Additional environment variables for the CloudSQL proxy container. 
| +| cloudsql.extraVolumeMounts | list | `[]` | Array of additional volume mount points for the CloudSQL proxy container | +| cloudsql.image | object | `{"pullPolicy":"IfNotPresent","repository":"gcr.io/cloudsql-docker/gce-proxy","tag":"1.37.9"}` | set repo and image tag of gce-proxy | +| cloudsql.instance | string | `""` | set CloudSQL instance: 'project:zone:instancename' | +| cloudsql.resources | object | `{}` | Optional: add resource requests/limits for the CloudSQL proxy container. | +| cloudsql.use_private_ip | bool | `false` | whether to use a private IP to connect to the database | +| cloudsql.verbose | bool | `true` | By default, the proxy has verbose logging. Set this to false to make it less verbose | +| createPostgresqlSecret | bool | `false` | create postgresql secret in defectdojo chart, outside of postgresql chart | +| createRedisSecret | bool | `false` | create redis secret in defectdojo chart, outside of redis chart | +| createSecret | bool | `false` | create defectdojo specific secret | +| dbMigrationChecker.containerSecurityContext | object | `{}` | Container security context for the DB migration checker. | +| dbMigrationChecker.enabled | bool | `true` | Enable/disable the DB migration checker. | +| dbMigrationChecker.extraEnv | list | `[]` | Additional environment variables for DB migration checker. | +| dbMigrationChecker.extraVolumeMounts | list | `[]` | Array of additional volume mount points for DB migration checker. | +| dbMigrationChecker.image | object | `{"digest":"","registry":"","repository":"","tag":""}` | If empty, uses values from images.django.image | +| dbMigrationChecker.resources | object | `{"limits":{"cpu":"200m","memory":"200Mi"},"requests":{"cpu":"100m","memory":"100Mi"}}` | Resource requests/limits for the DB migration checker. 
| +| disableHooks | bool | `false` | Avoid using pre-install hooks, which might cause issues with ArgoCD | | django.affinity | object | `{}` | | | django.annotations | object | `{}` | | | django.automountServiceAccountToken | bool | `false` | | -| django.extraInitContainers | list | `[]` | | -| django.extraVolumes | list | `[]` | | +| django.extraEnv | list | `[]` | Additional environment variables injected to all Django containers and initContainers. | +| django.extraInitContainers | list | `[]` | A list of additional initContainers to run before the uwsgi and nginx containers. | +| django.extraVolumeMounts | list | `[]` | Array of additional volume mount points common to all containers and initContainers. | +| django.extraVolumes | list | `[]` | A list of extra volumes to mount. | | django.ingress.activateTLS | bool | `true` | | -| django.ingress.annotations | object | `{}` | | +| django.ingress.annotations | object | `{}` | Restricts the type of ingress controller that can interact with our chart (nginx, traefik, ...) 
`kubernetes.io/ingress.class: nginx` Depending on the size and complexity of your scans, you might want to increase the default ingress timeouts if you see repeated 504 Gateway Timeouts `nginx.ingress.kubernetes.io/proxy-read-timeout: "1800"` `nginx.ingress.kubernetes.io/proxy-send-timeout: "1800"` | | django.ingress.enabled | bool | `true` | | | django.ingress.ingressClassName | string | `""` | | | django.ingress.secretName | string | `"defectdojo-tls"` | | -| django.mediaPersistentVolume.enabled | bool | `true` | | -| django.mediaPersistentVolume.fsGroup | int | `1001` | | -| django.mediaPersistentVolume.name | string | `"media"` | | -| django.mediaPersistentVolume.persistentVolumeClaim.accessModes[0] | string | `"ReadWriteMany"` | | -| django.mediaPersistentVolume.persistentVolumeClaim.create | bool | `false` | | -| django.mediaPersistentVolume.persistentVolumeClaim.name | string | `""` | | -| django.mediaPersistentVolume.persistentVolumeClaim.size | string | `"5Gi"` | | -| django.mediaPersistentVolume.persistentVolumeClaim.storageClassName | string | `""` | | -| django.mediaPersistentVolume.type | string | `"emptyDir"` | | -| django.nginx.extraEnv | list | `[]` | | -| django.nginx.extraVolumeMounts | list | `[]` | | +| django.mediaPersistentVolume | object | `{"enabled":true,"fsGroup":1001,"name":"media","persistentVolumeClaim":{"accessModes":["ReadWriteMany"],"create":false,"name":"","size":"5Gi","storageClassName":""},"type":"emptyDir"}` | This feature needs more preparation before can be enabled, please visit KUBERNETES.md#media-persistent-volume | +| django.mediaPersistentVolume.name | string | `"media"` | any name | +| django.mediaPersistentVolume.persistentVolumeClaim | object | `{"accessModes":["ReadWriteMany"],"create":false,"name":"","size":"5Gi","storageClassName":""}` | in case if pvc specified, should point to the already existing pvc | +| django.mediaPersistentVolume.persistentVolumeClaim.accessModes | list | `["ReadWriteMany"]` | check 
KUBERNETES.md doc first for option to choose | +| django.mediaPersistentVolume.persistentVolumeClaim.create | bool | `false` | set to true to create a new pvc and if django.mediaPersistentVolume.type is set to pvc | +| django.mediaPersistentVolume.type | string | `"emptyDir"` | could be emptyDir (not for production) or pvc | +| django.nginx.containerSecurityContext | object | `{"runAsUser":1001}` | Container security context for the nginx containers. | +| django.nginx.containerSecurityContext.runAsUser | int | `1001` | nginx dockerfile sets USER=1001 | +| django.nginx.extraEnv | list | `[]` | To extra environment variables to the nginx container, you can use extraEnv. For example: extraEnv: - name: FOO valueFrom: configMapKeyRef: name: foo key: bar | +| django.nginx.extraVolumeMounts | list | `[]` | Array of additional volume mount points for nginx containers. | +| django.nginx.image | object | `{"digest":"","registry":"","repository":"","tag":""}` | If empty, uses values from images.nginx.image | | django.nginx.resources.limits.cpu | string | `"2000m"` | | | django.nginx.resources.limits.memory | string | `"256Mi"` | | | django.nginx.resources.requests.cpu | string | `"100m"` | | @@ -610,28 +622,31 @@ A Helm chart for Kubernetes to install DefectDojo | django.nginx.tls.enabled | bool | `false` | | | django.nginx.tls.generateCertificate | bool | `false` | | | django.nodeSelector | object | `{}` | | +| django.podSecurityContext | object | `{"fsGroup":1001}` | Pod security context for the Django pods. | | django.replicas | int | `1` | | | django.service.annotations | object | `{}` | | | django.service.type | string | `""` | | | django.strategy | object | `{}` | | | django.tolerations | list | `[]` | | -| django.uwsgi.appSettings.maxFd | int | `0` | | +| django.uwsgi.appSettings.maxFd | int | `0` | Use this value to set the maximum number of file descriptors. If set to 0 will be detected by uwsgi e.g. 
102400 | | django.uwsgi.appSettings.processes | int | `4` | | | django.uwsgi.appSettings.threads | int | `4` | | | django.uwsgi.certificates.certFileName | string | `"ca.crt"` | | | django.uwsgi.certificates.certMountPath | string | `"/certs/"` | | | django.uwsgi.certificates.configName | string | `"defectdojo-ca-certs"` | | -| django.uwsgi.certificates.enabled | bool | `false` | | -| django.uwsgi.enableDebug | bool | `false` | | -| django.uwsgi.extraEnv | list | `[]` | | -| django.uwsgi.extraVolumeMounts | list | `[]` | | -| django.uwsgi.livenessProbe.enabled | bool | `true` | | +| django.uwsgi.certificates.enabled | bool | `false` | includes additional CA certificate as volume, it references REQUESTS_CA_BUNDLE env variable to create configMap `kubectl create cm defectdojo-ca-certs --from-file=ca.crt` NOTE: it reflects REQUESTS_CA_BUNDLE for celery workers, beats as well | +| django.uwsgi.containerSecurityContext.runAsUser | int | `1001` | django dockerfile sets USER=1001 | +| django.uwsgi.enableDebug | bool | `false` | this also requires DD_DEBUG to be set to True | +| django.uwsgi.extraEnv | list | `[]` | To add (or override) extra variables which need to be pulled from another configMap, you can use extraEnv. For example: extraEnv: - name: DD_DATABASE_HOST valueFrom: configMapKeyRef: name: my-other-postgres-configmap key: cluster_endpoint | +| django.uwsgi.extraVolumeMounts | list | `[]` | Array of additional volume mount points for uwsgi containers. | +| django.uwsgi.image | object | `{"digest":"","registry":"","repository":"","tag":""}` | If empty, uses values from images.django.image | +| django.uwsgi.livenessProbe.enabled | bool | `true` | Enable liveness checks on uwsgi container. 
| | django.uwsgi.livenessProbe.failureThreshold | int | `6` | | | django.uwsgi.livenessProbe.initialDelaySeconds | int | `0` | | | django.uwsgi.livenessProbe.periodSeconds | int | `10` | | | django.uwsgi.livenessProbe.successThreshold | int | `1` | | | django.uwsgi.livenessProbe.timeoutSeconds | int | `5` | | -| django.uwsgi.readinessProbe.enabled | bool | `true` | | +| django.uwsgi.readinessProbe.enabled | bool | `true` | Enable readiness checks on uwsgi container. | | django.uwsgi.readinessProbe.failureThreshold | int | `6` | | | django.uwsgi.readinessProbe.initialDelaySeconds | int | `0` | | | django.uwsgi.readinessProbe.periodSeconds | int | `10` | | @@ -641,97 +656,97 @@ A Helm chart for Kubernetes to install DefectDojo | django.uwsgi.resources.limits.memory | string | `"512Mi"` | | | django.uwsgi.resources.requests.cpu | string | `"100m"` | | | django.uwsgi.resources.requests.memory | string | `"256Mi"` | | -| django.uwsgi.startupProbe.enabled | bool | `true` | | +| django.uwsgi.startupProbe.enabled | bool | `true` | Enable startup checks on uwsgi container. | | django.uwsgi.startupProbe.failureThreshold | int | `30` | | | django.uwsgi.startupProbe.initialDelaySeconds | int | `0` | | | django.uwsgi.startupProbe.periodSeconds | int | `5` | | | django.uwsgi.startupProbe.successThreshold | int | `1` | | | django.uwsgi.startupProbe.timeoutSeconds | int | `1` | | -| extraConfigs | object | `{}` | | -| extraEnv | list | `[]` | | -| extraLabels | object | `{}` | | -| extraSecrets | object | `{}` | | -| gke.useGKEIngress | bool | `false` | | -| gke.useManagedCertificate | bool | `false` | | -| gke.workloadIdentityEmail | string | `""` | | -| host | string | `"defectdojo.default.minikube.local"` | | +| extraAnnotations | object | `{}` | Annotations globally added to all resources | +| extraConfigs | object | `{}` | To add extra variables not predefined by helm config it is possible to define in extraConfigs block, e.g. 
below: NOTE Do not store any kind of sensitive information inside of it ``` DD_SOCIAL_AUTH_AUTH0_OAUTH2_ENABLED: 'true' DD_SOCIAL_AUTH_AUTH0_KEY: 'dev' DD_SOCIAL_AUTH_AUTH0_DOMAIN: 'xxxxx' ``` | +| extraEnv | list | `[]` | To add (or override) extra variables which need to be pulled from another configMap, you can use extraEnv. For example: ``` - name: DD_DATABASE_HOST valueFrom: configMapKeyRef: name: my-other-postgres-configmap key: cluster_endpoint ``` | +| extraLabels | object | `{}` | Labels globally added to all resources | +| extraSecrets | object | `{}` | Extra secrets can be created inside of extraSecrets block: NOTE This is just an example, do not store sensitive data in plain text form, better inject it during the deployment/upgrade by --set extraSecrets.secret=someSecret ``` DD_SOCIAL_AUTH_AUTH0_SECRET: 'xxx' ``` | +| gke | object | `{"useGKEIngress":false,"useManagedCertificate":false,"workloadIdentityEmail":""}` | Settings to make running the chart on GKE simpler | +| gke.useGKEIngress | bool | `false` | Set to true to configure the Ingress to use the GKE provided ingress controller | +| gke.useManagedCertificate | bool | `false` | Set to true to have GKE automatically provision a TLS certificate for the host specified Requires useGKEIngress to be set to true When using this option, be sure to set django.ingress.activateTLS to false | +| gke.workloadIdentityEmail | string | `""` | Workload Identity allows the K8s service account to assume the IAM access of a GCP service account to interact with other GCP services Only works with serviceAccount.create = true | +| host | string | `"defectdojo.default.minikube.local"` | Primary hostname of instance | | imagePullPolicy | string | `"Always"` | | -| imagePullSecrets | string | `nil` | | +| imagePullSecrets | string | `nil` | When using a private registry, name of the secret that holds the registry secret (eg deploy token from gitlab-ci project) Create secrets as: kubectl create secret docker-registry 
defectdojoregistrykey --docker-username=registry_username --docker-password=registry_password --docker-server='https://index.docker.io/v1/' | +| images.django.image.digest | string | `""` | Prefix "sha@" is expected in this place | +| images.django.image.registry | string | `""` | | +| images.django.image.repository | string | `"defectdojo/defectdojo-django"` | | +| images.django.image.tag | string | `""` | If empty, use appVersion. Another possible values are: latest, X.X.X, X.X.X-debian, X.X.X-alpine (where X.X.X is version of DD). For dev builds (only for testing purposes): nightly-dev, nightly-dev-debian, nightly-dev-alpine. To see all, check https://hub.docker.com/r/defectdojo/defectdojo-django/tags. | +| images.nginx.image.digest | string | `""` | Prefix "sha@" is expected in this place | +| images.nginx.image.registry | string | `""` | | +| images.nginx.image.repository | string | `"defectdojo/defectdojo-nginx"` | | +| images.nginx.image.tag | string | `""` | If empty, use appVersion. Another possible values are: latest, X.X.X, X.X.X-alpine (where X.X.X is version of DD). For dev builds (only for testing purposes): nightly-dev, nightly-dev-alpine. To see all, check https://hub.docker.com/r/defectdojo/defectdojo-nginx/tags. | | initializer.affinity | object | `{}` | | | initializer.annotations | object | `{}` | | | initializer.automountServiceAccountToken | bool | `false` | | -| initializer.extraEnv | list | `[]` | | -| initializer.extraVolumeMounts | list | `[]` | | -| initializer.extraVolumes | list | `[]` | | +| initializer.containerSecurityContext | object | `{}` | Container security context for the initializer Job container | +| initializer.extraEnv | list | `[]` | Additional environment variables injected to the initializer job pods. | +| initializer.extraVolumeMounts | list | `[]` | Array of additional volume mount points for the initializer job (init)containers. 
| +| initializer.extraVolumes | list | `[]` | A list of extra volumes to attach to the initializer job pods. | +| initializer.image | object | `{"digest":"","registry":"","repository":"","tag":""}` | If empty, uses values from images.django.image | | initializer.jobAnnotations | object | `{}` | | -| initializer.keepSeconds | int | `60` | | +| initializer.keepSeconds | int | `60` | A positive integer will keep this Job and Pod deployed for the specified number of seconds, after which they will be removed. For all other values, the Job and Pod will remain deployed. | | initializer.labels | object | `{}` | | | initializer.nodeSelector | object | `{}` | | +| initializer.podSecurityContext | object | `{}` | Pod security context for the initializer Job | | initializer.resources.limits.cpu | string | `"2000m"` | | | initializer.resources.limits.memory | string | `"512Mi"` | | | initializer.resources.requests.cpu | string | `"100m"` | | | initializer.resources.requests.memory | string | `"256Mi"` | | | initializer.run | bool | `true` | | -| initializer.staticName | bool | `false` | | +| initializer.staticName | bool | `false` | staticName defines whether name of the job will be the same (e.g., "defectdojo-initializer") or different every time - generated based on current time (e.g., "defectdojo-initializer-2024-11-11-18-57") This might be handy for ArgoCD deployments | | initializer.tolerations | list | `[]` | | -| localsettingspy | string | `""` | | +| localsettingspy | string | `""` | To add code snippet which would extend setting functionality, you might add it here It will be stored as ConfigMap and mounted `dojo/settings/local_settings.py`. 
For more see: https://documentation.defectdojo.com/getting_started/configuration/ For example: ``` localsettingspy: | INSTALLED_APPS += ( 'debug_toolbar', ) MIDDLEWARE = [ 'debug_toolbar.middleware.DebugToolbarMiddleware', ] + MIDDLEWARE ``` | | monitoring.enabled | bool | `false` | | -| monitoring.prometheus.enabled | bool | `false` | | -| monitoring.prometheus.image | string | `"nginx/nginx-prometheus-exporter:1.4.2"` | | +| monitoring.prometheus.containerSecurityContext | object | `{}` | Optional: container security context for nginx prometheus exporter | +| monitoring.prometheus.enabled | bool | `false` | Add the nginx prometheus exporter sidecar | +| monitoring.prometheus.extraEnv | list | `[]` | Optional: additional environment variables injected to the nginx prometheus exporter container | +| monitoring.prometheus.extraVolumeMounts | list | `[]` | Array of additional volume mount points for the nginx prometheus exporter | +| monitoring.prometheus.image.digest | string | `""` | | +| monitoring.prometheus.image.registry | string | `""` | | +| monitoring.prometheus.image.repository | string | `"nginx/nginx-prometheus-exporter"` | | +| monitoring.prometheus.image.tag | string | `"1.4.2"` | | | monitoring.prometheus.imagePullPolicy | string | `"IfNotPresent"` | | -| networkPolicy.annotations | object | `{}` | | -| networkPolicy.egress | list | `[]` | | -| networkPolicy.enabled | bool | `false` | | -| networkPolicy.ingress | list | `[]` | | -| networkPolicy.ingressExtend | list | `[]` | | -| podLabels | object | `{}` | | -| postgresServer | string | `nil` | | -| postgresql.architecture | string | `"standalone"` | | -| postgresql.auth.database | string | `"defectdojo"` | | -| postgresql.auth.existingSecret | string | `"defectdojo-postgresql-specific"` | | -| postgresql.auth.password | string | `""` | | -| postgresql.auth.secretKeys.adminPasswordKey | string | `"postgresql-postgres-password"` | | -| postgresql.auth.secretKeys.replicationPasswordKey | string | 
`"postgresql-replication-password"` | | -| postgresql.auth.secretKeys.userPasswordKey | string | `"postgresql-password"` | | -| postgresql.auth.username | string | `"defectdojo"` | | -| postgresql.enabled | bool | `true` | | -| postgresql.primary.affinity | object | `{}` | | -| postgresql.primary.containerSecurityContext.enabled | bool | `true` | | -| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | | -| postgresql.primary.name | string | `"primary"` | | -| postgresql.primary.nodeSelector | object | `{}` | | -| postgresql.primary.persistence.enabled | bool | `true` | | -| postgresql.primary.podSecurityContext.enabled | bool | `true` | | -| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | | -| postgresql.primary.service.ports.postgresql | int | `5432` | | -| postgresql.shmVolume.chmod.enabled | bool | `false` | | -| postgresql.volumePermissions.containerSecurityContext.runAsUser | int | `1001` | | -| postgresql.volumePermissions.enabled | bool | `false` | | -| redis.architecture | string | `"standalone"` | | -| redis.auth.existingSecret | string | `"defectdojo-redis-specific"` | | -| redis.auth.existingSecretPasswordKey | string | `"redis-password"` | | -| redis.auth.password | string | `""` | | -| redis.enabled | bool | `true` | | -| redis.sentinel.enabled | bool | `false` | | -| redis.tls.enabled | bool | `false` | | -| redisParams | string | `""` | | -| redisServer | string | `nil` | | -| repositoryPrefix | string | `"defectdojo"` | | -| revisionHistoryLimit | int | `10` | | -| secrets.annotations | object | `{}` | | -| securityContext.djangoSecurityContext.runAsUser | int | `1001` | | -| securityContext.enabled | bool | `true` | | -| securityContext.nginxSecurityContext.runAsUser | int | `1001` | | -| serviceAccount.annotations | object | `{}` | | -| serviceAccount.create | bool | `true` | | -| serviceAccount.labels | object | `{}` | | -| tag | string | `"latest"` | | +| monitoring.prometheus.resources | object | `{}` | 
Optional: add resource requests/limits for the nginx prometheus exporter container | +| networkPolicy | object | `{"annotations":{},"egress":[],"enabled":false,"ingress":[],"ingressExtend":[]}` | Enables application network policy For more info follow https://kubernetes.io/docs/concepts/services-networking/network-policies/ | +| networkPolicy.egress | list | `[]` | ``` egress: - to: - ipBlock: cidr: 10.0.0.0/24 ports: - protocol: TCP port: 443 ``` | +| networkPolicy.ingress | list | `[]` | For more detailed configuration with ports and peers. It will ignore ingressExtend ``` ingress: - from: - podSelector: matchLabels: app.kubernetes.io/instance: defectdojo - podSelector: matchLabels: app.kubernetes.io/instance: defectdojo-prometheus ports: - protocol: TCP port: 8443 ``` | +| networkPolicy.ingressExtend | list | `[]` | if additional labels need to be allowed (e.g. prometheus scraper) ``` ingressExtend: - podSelector: matchLabels: app.kubernetes.io/instance: defectdojo-prometheus ``` | +| podLabels | object | `{}` | Additional labels to add to the pods: ``` podLabels: key: value ``` | +| postgresServer | string | `nil` | To use an external PostgreSQL instance (like CloudSQL), set `postgresql.enabled` to false, set items in `postgresql.auth` part for authentication, and set the address here: | +| postgresql | object | 
`{"architecture":"standalone","auth":{"database":"defectdojo","existingSecret":"defectdojo-postgresql-specific","password":"","secretKeys":{"adminPasswordKey":"postgresql-postgres-password","replicationPasswordKey":"postgresql-replication-password","userPasswordKey":"postgresql-password"},"username":"defectdojo"},"enabled":true,"primary":{"affinity":{},"containerSecurityContext":{"enabled":true,"runAsUser":1001},"name":"primary","nodeSelector":{},"persistence":{"enabled":true},"podSecurityContext":{"enabled":true,"fsGroup":1001},"service":{"ports":{"postgresql":5432}}},"shmVolume":{"chmod":{"enabled":false}},"volumePermissions":{"containerSecurityContext":{"runAsUser":1001},"enabled":false}}` | For more advance options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/postgresql | +| postgresql.enabled | bool | `true` | To use an external instance, switch enabled to `false` and set the address in `postgresServer` below | +| postgresql.primary.containerSecurityContext.enabled | bool | `true` | Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC | +| postgresql.primary.containerSecurityContext.runAsUser | int | `1001` | runAsUser specification below is not applied if enabled=false. enabled=false is the required setting for OpenShift "restricted SCC" to work successfully. | +| postgresql.primary.podSecurityContext.enabled | bool | `true` | Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC | +| postgresql.primary.podSecurityContext.fsGroup | int | `1001` | fsGroup specification below is not applied if enabled=false. enabled=false is the required setting for OpenShift "restricted SCC" to work successfully. 
| +| postgresql.volumePermissions.containerSecurityContext | object | `{"runAsUser":1001}` | if using restricted SCC set runAsUser: "auto" and if running under anyuid SCC - runAsUser needs to match the line above | +| redis | object | `{"architecture":"standalone","auth":{"existingSecret":"defectdojo-redis-specific","existingSecretPasswordKey":"redis-password","password":""},"enabled":true,"sentinel":{"enabled":false},"tls":{"enabled":false}}` | For more advanced options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/redis | +| redis.enabled | bool | `true` | To use an external instance, switch enabled to `false` and set the address in `redisServer` below | +| redis.tls.enabled | bool | `false` | If TLS is enabled, the Redis broker will use the redis:// and optionally mount the certificates from an existing secret. | +| redisParams | string | `""` | Parameters attached to the redis connection string, defaults to "ssl_cert_reqs=optional" if `redis.tls.enabled` | +| redisServer | string | `nil` | To use an external Redis instance, set `redis.enabled` to false and set the address here: | +| revisionHistoryLimit | int | `10` | Allow overriding of revisionHistoryLimit across all deployments. | +| secrets.annotations | object | `{}` | Add annotations for secret resources | +| securityContext | object | `{"containerSecurityContext":{"runAsNonRoot":true},"enabled":true,"podSecurityContext":{"runAsNonRoot":true}}` | Security context settings | +| serviceAccount.annotations | object | `{}` | Optional additional annotations to add to the DefectDojo's Service Account. | +| serviceAccount.create | bool | `true` | Specifies whether a service account should be created. | +| serviceAccount.labels | object | `{}` | Optional additional labels to add to the DefectDojo's Service Account. | +| serviceAccount.name | string | `""` | The name of the service account to use. 
If not set and create is true, a name is generated using the fullname template | +| siteUrl | string | `""` | The full URL to your defectdojo instance, depends on the domain where DD is deployed, it also affects links in Jira. Use syntax: `siteUrl: 'https://'` | | tests.unitTests.automountServiceAccountToken | bool | `false` | | +| tests.unitTests.image | object | `{"digest":"","registry":"","repository":"","tag":""}` | If empty, uses values from images.django.image | | tests.unitTests.resources.limits.cpu | string | `"500m"` | | | tests.unitTests.resources.limits.memory | string | `"512Mi"` | | | tests.unitTests.resources.requests.cpu | string | `"100m"` | | | tests.unitTests.resources.requests.memory | string | `"128Mi"` | | -| trackConfig | string | `"disabled"` | | +| trackConfig | string | `"disabled"` | Track configuration (trackConfig): will automatically respin application pods in case of config changes detection can be: 1. disabled (default) 2. enabled, enables tracking configuration changes based on SHA256 | ---------------------------------------------- Autogenerated from chart metadata using [helm-docs v1.14.2](https://github.com/norwoodj/helm-docs/releases/v1.14.2) diff --git a/helm/defectdojo/README.md.gotmpl b/helm/defectdojo/README.md.gotmpl index 9583a95d167..e4ab067a647 100644 --- a/helm/defectdojo/README.md.gotmpl +++ b/helm/defectdojo/README.md.gotmpl @@ -11,7 +11,7 @@ this [guide](https://helm.sh/docs/using_helm/#installing-helm). ## Supported Kubernetes Versions -The tests cover the deployment on the lastest [kubernetes version](https://kubernetes.io/releases/) and the oldest supported [version from AWS](https://docs.aws.amazon.com/eks/latest/userguide/kubernetes-versions.html#available-versions). The assumption is that version in between do not have significant differences. Current tested versions can looks up in the [github k8s workflow](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/k8s-tests.yml). 
+The tests cover the deployment on the latest [kubernetes version](https://kubernetes.io/releases/) and [the oldest officially supported version](https://kubernetes.io/releases/). The assumption is that versions in between do not have significant differences. Current tested versions can be looked up in the [github k8s workflow](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/k8s-tests.yml). ## Helm chart diff --git a/helm/defectdojo/templates/_helpers.tpl b/helm/defectdojo/templates/_helpers.tpl index 025b35078db..b6243d6ac19 100644 --- a/helm/defectdojo/templates/_helpers.tpl +++ b/helm/defectdojo/templates/_helpers.tpl @@ -1,15 +1,15 @@ -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. +{{- /* vim: set filetype=mustache: */}} +{{- /* +  Expand the name of the chart. */}} {{- define "defectdojo.name" -}} {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} {{- end -}} -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. +{{- /* +  Create a default fully qualified app name. +  We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +  If release name contains chart name it will be used as a full name. */}} {{- define "defectdojo.fullname" -}} {{- if .Values.fullnameOverride -}} @@ -24,15 +24,15 @@ If release name contains chart name it will be used as a full name. {{- end -}} {{- end -}} -{{/* -Create chart name and version as used by the chart label. +{{- /* +  Create chart name and version as used by the chart label. 
*/}} {{- define "defectdojo.chart" -}} {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} {{- end -}} -{{/* -Create the name of the service account to use +{{- /* + Create the name of the service account to use */}} {{- define "defectdojo.serviceAccountName" -}} {{- if .Values.serviceAccount.create -}} @@ -42,7 +42,7 @@ Create the name of the service account to use {{- end -}} {{- end -}} -{{/* +{{- /* Determine the hostname to use for PostgreSQL/Redis. */}} {{- define "postgresql.hostname" -}} @@ -67,7 +67,7 @@ Create the name of the service account to use {{- end -}} {{- end -}} -{{/* +{{- /* Determine the protocol to use for Redis. */}} {{- define "redis.scheme" -}} @@ -82,23 +82,67 @@ Create the name of the service account to use {{- end -}} {{- end -}} -{{/* +{{- /* Builds the repository names for use with local or private registries */}} -{{- define "celery.repository" -}} -{{- printf "%s" .Values.repositoryPrefix -}}/defectdojo-django +{{- define "celery.beat.image" -}} +{{ include "images.image" (dict "imageRoot" (merge .Values.celery.beat.image .Values.images.django.image) "global" .Values.global "chart" .Chart ) }} {{- end -}} -{{- define "django.nginx.repository" -}} -{{- printf "%s" .Values.repositoryPrefix -}}/defectdojo-nginx +{{- define "celery.worker.image" -}} +{{ include "images.image" (dict "imageRoot" (merge .Values.celery.worker.image .Values.images.django.image) "global" .Values.global "chart" .Chart ) }} {{- end -}} -{{- define "django.uwsgi.repository" -}} -{{- printf "%s" .Values.repositoryPrefix -}}/defectdojo-django +{{- define "django.nginx.image" -}} +{{ include "images.image" (dict "imageRoot" (merge .Values.django.nginx.image .Values.images.nginx.image) "global" .Values.global "chart" .Chart ) }} {{- end -}} -{{- define "initializer.repository" -}} -{{- printf "%s" .Values.repositoryPrefix -}}/defectdojo-django +{{- define "django.uwsgi.image" -}} +{{ include "images.image" (dict 
"imageRoot" (merge .Values.django.uwsgi.image .Values.images.django.image) "global" .Values.global "chart" .Chart ) }} +{{- end -}} + +{{- define "initializer.image" -}} +{{ include "images.image" (dict "imageRoot" (merge .Values.initializer.image .Values.images.django.image) "global" .Values.global "chart" .Chart ) }} +{{- end -}} + +{{- define "dbMigrationChecker.image" -}} +{{ include "images.image" (dict "imageRoot" (merge .Values.dbMigrationChecker.image .Values.images.django.image) "global" .Values.global "chart" .Chart ) }} +{{- end -}} + +{{- define "unitTests.image" -}} +{{ include "images.image" (dict "imageRoot" (merge .Values.tests.unitTests.image .Values.images.django.image) "global" .Values.global "chart" .Chart ) }} +{{- end -}} + +{{- define "monitoring.prometheus.image" -}} +{{ include "images.image" (dict "imageRoot" .Values.monitoring.prometheus.image "global" .Values.global ) }} +{{- end -}} + +{{- /* +Return the proper image name. +If image tag and digest are not defined, termination fallbacks to chart appVersion. 
+{{ include "images.image" ( dict "imageRoot" .Values.path.to.the.image "global" .Values.global "chart" .Chart ) }} +Inspired by Bitnami Common Chart v2.31.7 +*/}} +{{- define "images.image" -}} +{{- $registryName := default .imageRoot.registry ((.global).imageRegistry) -}} +{{- $repositoryName := .imageRoot.repository -}} +{{- $separator := ":" -}} +{{- $termination := .imageRoot.tag | toString -}} + +{{- if not .imageRoot.tag }} + {{- if .chart }} + {{- $termination = .chart.AppVersion | toString -}} + {{- end -}} +{{- end -}} +{{- if .imageRoot.digest }} + {{- $separator = "@" -}} + {{- $termination = .imageRoot.digest | toString -}} +{{- end -}} +{{- if $registryName }} + {{- printf "%s/%s%s%s" $registryName $repositoryName $separator $termination -}} +{{- else -}} + {{- printf "%s%s%s" $repositoryName $separator $termination -}} +{{- end -}} {{- end -}} {{- define "initializer.jobname" -}} @@ -109,7 +153,7 @@ Create the name of the service account to use {{- end -}} {{- end -}} -{{/* +{{- /* Creates the array for DD_ALLOWED_HOSTS in configmap */}} {{- define "django.allowed_hosts" -}} @@ -121,7 +165,7 @@ Create the name of the service account to use {{- end -}} {{- end -}} -{{/* +{{- /* Creates the persistentVolumeName */}} {{- define "django.pvc_name" -}} @@ -132,7 +176,7 @@ Create the name of the service account to use {{- end -}} {{- end -}} -{{/* +{{- /* Define db-migration-checker */}} {{- define "dbMigrationChecker" -}} @@ -141,11 +185,15 @@ Create the name of the service account to use - sh - -c - while ! /app/manage.py migrate --check; do echo "Database is not migrated to the latest state yet"; sleep 5; done; echo "Database is migrated to the latest state"; - image: '{{ template "django.uwsgi.repository" . }}:{{ .Values.tag }}' + image: '{{ template "dbMigrationChecker.image" . 
}}' imagePullPolicy: {{ .Values.imagePullPolicy }} {{- if .Values.securityContext.enabled }} securityContext: - {{- toYaml .Values.securityContext.djangoSecurityContext | nindent 4 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "dbMigrationChecker.containerSecurityContext" + ) | nindent 4 }} {{- end }} envFrom: - configMapRef: @@ -163,9 +211,101 @@ Create the name of the service account to use secretKeyRef: name: {{ .Values.postgresql.auth.existingSecret | default "defectdojo-postgresql-specific" }} key: {{ .Values.postgresql.auth.secretKeys.userPasswordKey | default "postgresql-password" }} - {{- if .Values.extraEnv }} - {{- toYaml .Values.extraEnv | nindent 2 }} + {{- with .Values.extraEnv }} + {{- toYaml . | nindent 2 }} + {{- end }} + {{- with.Values.dbMigrationChecker.extraEnv }} + {{- toYaml . | nindent 2 }} {{- end }} resources: {{- toYaml .Values.dbMigrationChecker.resources | nindent 4 }} + {{- with .Values.dbMigrationChecker.extraVolumeMounts }} + volumeMounts: + {{- . | toYaml | nindent 4 }} + {{- end }} +{{- end -}} + +{{- /* + Define cloudsql-proxy +*/}} +{{- define "cloudsqlProxy" -}} +- name: cloudsql-proxy + image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} + imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} + {{- with .Values.cloudsql.extraEnv }} + env: {{- . | toYaml | nindent 4 }} + {{- end }} + {{- with .Values.cloudsql.resources }} + resources: {{- . 
| toYaml | nindent 4 }} + {{- end }} + restartPolicy: Always + {{- if .Values.securityContext.enabled }} + securityContext: + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "cloudsql.containerSecurityContext" + ) | nindent 4 }} + {{- end }} + command: ["/cloud_sql_proxy"] + args: + - "-verbose={{ .Values.cloudsql.verbose }}" + - "-instances={{ .Values.cloudsql.instance }}=tcp:{{ .Values.postgresql.primary.service.ports.postgresql }}" + {{- if .Values.cloudsql.enable_iam_login }} + - "-enable_iam_login" + {{- end }} + {{- if .Values.cloudsql.use_private_ip }} + - "-ip_address_types=PRIVATE" + {{- end }} + {{- with .Values.cloudsql.extraVolumeMounts }} + volumeMounts: {{ . | toYaml | nindent 4 }} + {{- end }} +{{- end -}} + +{{- /* +Returns the JSON representation of the value for a dot-notation path +from a given context. + Args: + 0: context (e.g., .Values) + 1: path (e.g., "foo.bar") +*/}} +{{- define "helpers.getValue" -}} + {{- $ctx := merge dict (index . 0) -}} + {{- $path := index . 1 -}} + {{- $parts := splitList "." $path -}} + {{- $value := $ctx -}} + {{- range $idx, $part := $parts -}} + {{- if kindIs "map" $value -}} + {{- $value = index $value $part -}} + {{- else -}} + {{- $value = "" -}} + {{- /* Exit early by setting to last iteration */}} + {{- $idx = sub (len $parts) 1 -}} + {{- end -}} + {{- end -}} + {{- toJson $value -}} +{{- end -}} + +{{- /* + Build the security context. + Args: + 0: values context (.Values) + 1: the default security context key (e.g. "securityContext.containerSecurityContext") + 2: the key under the context with security context (e.g., "foo.bar") +*/}} +{{- define "helpers.securityContext" -}} +{{- $values := merge dict (index . 0) -}} +{{- $defaultSecurityContextKey := index . 1 -}} +{{- $securityContextKey := index . 
2 -}} +{{- $securityContext := dict -}} +{{- with $values }} + {{- $securityContext = (merge + $securityContext + (include "helpers.getValue" (list $values $defaultSecurityContextKey) | fromJson) + (include "helpers.getValue" (list $values $securityContextKey) | fromJson) + ) -}} +{{- end -}} +{{- with $securityContext -}} +{{- . | toYaml | nindent 2 -}} +{{- end -}} {{- end -}} diff --git a/helm/defectdojo/templates/celery-beat-deployment.yaml b/helm/defectdojo/templates/celery-beat-deployment.yaml index 4e5b4833331..b1832f71e29 100644 --- a/helm/defectdojo/templates/celery-beat-deployment.yaml +++ b/helm/defectdojo/templates/celery-beat-deployment.yaml @@ -2,7 +2,12 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: {{ $fullName }}-celery-beat + {{- with mergeOverwrite dict .Values.extraAnnotations .Values.celery.annotations .Values.celery.beat.annotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: defectdojo.org/component: celery defectdojo.org/subcomponent: beat @@ -10,13 +15,11 @@ metadata: app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} - {{- with mergeOverwrite .Values.celery.annotations .Values.celery.beat.annotations }} - annotations: - {{- toYaml . | nindent 4 }} - {{- end }} + name: {{ $fullName }}-celery-beat + namespace: {{ .Release.Namespace }} spec: replicas: {{ .Values.celery.beat.replicas }} {{- with .Values.revisionHistoryLimit }} @@ -35,15 +38,12 @@ spec: defectdojo.org/subcomponent: beat app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} - {{- with .Values.extraLabels }} - {{- toYaml . 
| nindent 8 }} - {{- end }} - {{- with .Values.podLabels }} - {{- toYaml . | nindent 8 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraLabels .Values.podLabels }} + {{ $key }}: {{ quote $value }} {{- end }} annotations: - {{- with mergeOverwrite .Values.celery.annotations .Values.celery.beat.podAnnotations }} - {{- toYaml . | nindent 8 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraAnnotations .Values.celery.annotations .Values.celery.beat.podAnnotations }} + {{ $key }}: {{ quote $value }} {{- end }} {{- if eq (.Values.trackConfig | default "disabled") "enabled" }} checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} @@ -51,6 +51,14 @@ spec: checksum/esecret: {{ include (print $.Template.BasePath "/extra-secret.yaml") . | sha256sum }} {{- end }} spec: + {{- if .Values.securityContext.enabled }} + securityContext: + {{- include "helpers.securityContext" (list + .Values + "securityContext.podSecurityContext" + "celery.beat.podSecurityContext" + ) | nindent 8 }} + {{- end }} serviceAccountName: {{ include "defectdojo.serviceAccountName" . 
}} automountServiceAccountToken: {{ .Values.celery.beat.automountServiceAccountToken }} {{- with .Values.imagePullSecrets }} @@ -60,12 +68,12 @@ spec: volumes: - name: run emptyDir: {} - {{- if .Values.localsettingspy }} + {{- if .Values.localsettingspy }} - name: localsettingspy configMap: name: {{ $fullName }}-localsettingspy {{- end }} - {{- if .Values.django.uwsgi.certificates.enabled }} + {{- if .Values.django.uwsgi.certificates.enabled }} - name: cert-mount configMap: name: {{ .Values.django.uwsgi.certificates.configName }} @@ -80,22 +88,7 @@ spec: {{- end }} {{- end }} {{- if .Values.cloudsql.enabled }} - - name: cloudsql-proxy - image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} - imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} - restartPolicy: Always - securityContext: - runAsNonRoot: true - command: ["/cloud_sql_proxy"] - args: - - "-verbose={{ .Values.cloudsql.verbose }}" - - "-instances={{ .Values.cloudsql.instance }}=tcp:{{ .Values.postgresql.primary.service.ports.postgresql }}" - {{- if .Values.cloudsql.enable_iam_login }} - - "-enable_iam_login" - {{- end }} - {{- if .Values.cloudsql.use_private_ip }} - - "-ip_address_types=PRIVATE" - {{- end }} + {{- include "cloudsqlProxy" . | nindent 6 }} {{- end }} {{- if .Values.dbMigrationChecker.enabled }} {{$data := dict "fullName" $fullName }} @@ -106,7 +99,7 @@ spec: - command: - /entrypoint-celery-beat.sh name: celery - image: "{{ template "celery.repository" . }}:{{ .Values.tag }}" + image: "{{ template "celery.beat.image" . }}" imagePullPolicy: {{ .Values.imagePullPolicy }} {{- with .Values.celery.beat.livenessProbe }} livenessProbe: {{ toYaml . 
| nindent 10 }} @@ -119,12 +112,16 @@ spec: {{- end }} {{- if .Values.securityContext.enabled }} securityContext: - {{- toYaml .Values.securityContext.djangoSecurityContext | nindent 10 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "celery.beat.containerSecurityContext" + ) | nindent 10 }} {{- end }} volumeMounts: - name: run mountPath: /run/defectdojo - {{- if .Values.localsettingspy }} + {{- if .Values.localsettingspy }} - name: localsettingspy readOnly: true mountPath: /app/dojo/settings/local_settings.py diff --git a/helm/defectdojo/templates/celery-worker-deployment.yaml b/helm/defectdojo/templates/celery-worker-deployment.yaml index 68a9cfdf077..14ddcf79f4b 100644 --- a/helm/defectdojo/templates/celery-worker-deployment.yaml +++ b/helm/defectdojo/templates/celery-worker-deployment.yaml @@ -2,7 +2,12 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: {{ $fullName }}-celery-worker + {{- with mergeOverwrite dict .Values.extraAnnotations .Values.celery.annotations .Values.celery.worker.annotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: defectdojo.org/component: celery defectdojo.org/subcomponent: worker @@ -10,13 +15,11 @@ metadata: app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} - {{- with mergeOverwrite .Values.celery.annotations .Values.celery.worker.annotations }} - annotations: - {{- toYaml . 
| nindent 4 }} - {{- end }} + name: {{ $fullName }}-celery-worker + namespace: {{ .Release.Namespace }} spec: replicas: {{ .Values.celery.worker.replicas }} {{- with .Values.revisionHistoryLimit }} @@ -35,15 +38,12 @@ spec: defectdojo.org/subcomponent: worker app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.podLabels }} - {{- toYaml . | nindent 8 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraLabels .Values.podLabels }} + {{ $key }}: {{ quote $value }} {{- end }} annotations: - {{- with mergeOverwrite .Values.celery.annotations .Values.celery.worker.podAnnotations }} - {{- toYaml . | nindent 8 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraAnnotations .Values.celery.annotations .Values.celery.worker.podAnnotations }} + {{ $key }}: {{ quote $value }} {{- end }} {{- if eq (.Values.trackConfig | default "disabled") "enabled" }} checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} @@ -51,6 +51,14 @@ spec: checksum/esecret: {{ include (print $.Template.BasePath "/extra-secret.yaml") . | sha256sum }} {{- end }} spec: + {{- if .Values.securityContext.enabled }} + securityContext: + {{- include "helpers.securityContext" (list + .Values + "securityContext.podSecurityContext" + "celery.worker.podSecurityContext" + ) | nindent 8 }} + {{- end }} serviceAccountName: {{ include "defectdojo.serviceAccountName" . }} automountServiceAccountToken: {{ .Values.celery.worker.automountServiceAccountToken }} {{- with .Values.imagePullSecrets }} @@ -58,12 +66,12 @@ spec: - name: {{ . 
}} {{- end }} volumes: - {{- if .Values.localsettingspy }} + {{- if .Values.localsettingspy }} - name: localsettingspy configMap: name: {{ $fullName }}-localsettingspy {{- end }} - {{- if .Values.django.uwsgi.certificates.enabled }} + {{- if .Values.django.uwsgi.certificates.enabled }} - name: cert-mount configMap: name: {{ .Values.django.uwsgi.certificates.configName }} @@ -78,22 +86,7 @@ spec: {{- end }} {{- end }} {{- if .Values.cloudsql.enabled }} - - name: cloudsql-proxy - image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} - imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} - restartPolicy: Always - securityContext: - runAsNonRoot: true - command: ["/cloud_sql_proxy"] - args: - - "-verbose={{ .Values.cloudsql.verbose }}" - - "-instances={{ .Values.cloudsql.instance }}=tcp:{{ .Values.postgresql.primary.service.ports.postgresql }}" - {{- if .Values.cloudsql.enable_iam_login }} - - "-enable_iam_login" - {{- end }} - {{- if .Values.cloudsql.use_private_ip }} - - "-ip_address_types=PRIVATE" - {{- end }} + {{- include "cloudsqlProxy" . | nindent 6 }} {{- end }} {{- if .Values.dbMigrationChecker.enabled }} {{$data := dict "fullName" $fullName }} @@ -102,7 +95,7 @@ spec: {{- end }} containers: - name: celery - image: "{{ template "celery.repository" . }}:{{ .Values.tag }}" + image: "{{ template "celery.worker.image" . }}" imagePullPolicy: {{ .Values.imagePullPolicy }} {{- with .Values.celery.worker.livenessProbe }} livenessProbe: {{ toYaml . 
| nindent 10 }} @@ -115,7 +108,11 @@ spec: {{- end }} {{- if .Values.securityContext.enabled }} securityContext: - {{- toYaml .Values.securityContext.djangoSecurityContext | nindent 10 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "celery.worker.containerSecurityContext" + ) | nindent 10 }} {{- end }} command: ['/entrypoint-celery-worker.sh'] volumeMounts: @@ -125,7 +122,7 @@ spec: mountPath: /app/dojo/settings/local_settings.py subPath: file {{- end }} - {{- if .Values.django.uwsgi.certificates.enabled }} + {{- if .Values.django.uwsgi.certificates.enabled }} - name: cert-mount mountPath: {{ .Values.django.uwsgi.certificates.certMountPath }} {{- end }} diff --git a/helm/defectdojo/templates/configmap-local-settings-py.yaml b/helm/defectdojo/templates/configmap-local-settings-py.yaml index dc75942fbc0..30c42244251 100644 --- a/helm/defectdojo/templates/configmap-local-settings-py.yaml +++ b/helm/defectdojo/templates/configmap-local-settings-py.yaml @@ -1,14 +1,24 @@ -{{- if .Values.localsettingspy }} +{{- if .Values.localsettingspy }} {{- $fullName := include "defectdojo.fullname" . -}} apiVersion: v1 kind: ConfigMap metadata: - name: {{ $fullName }}-localsettingspy + {{- with .Values.extraAnnotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} + {{- with .Values.extraLabels }} + {{- toYaml . 
| nindent 4 }} + {{- end }} + name: {{ $fullName }}-localsettingspy + namespace: {{ .Release.Namespace }} data: file: {{ toYaml .Values.localsettingspy | indent 4 }} diff --git a/helm/defectdojo/templates/configmap.yaml b/helm/defectdojo/templates/configmap.yaml index e5078f57903..d25926c2c3f 100644 --- a/helm/defectdojo/templates/configmap.yaml +++ b/helm/defectdojo/templates/configmap.yaml @@ -3,21 +3,22 @@ apiVersion: v1 kind: ConfigMap metadata: - name: {{ $fullName }} + {{- with .Values.extraAnnotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} -{{- if .Values.annotations }} - annotations: -{{- with .Values.annotations }} - {{- toYaml . | nindent 4 }} -{{- end }} -{{- end }} + name: {{ $fullName }} + namespace: {{ .Release.Namespace }} data: DD_ADMIN_USER: {{ .Values.admin.user | default "admin" }} DD_ADMIN_MAIL: {{ .Values.admin.Mail | default "admin@defectdojo.local" }} diff --git a/helm/defectdojo/templates/django-deployment.yaml b/helm/defectdojo/templates/django-deployment.yaml index 986c8898fc9..b4eee529383 100644 --- a/helm/defectdojo/templates/django-deployment.yaml +++ b/helm/defectdojo/templates/django-deployment.yaml @@ -2,20 +2,23 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: {{ $fullName }}-django + {{- with mergeOverwrite dict .Values.extraAnnotations .Values.django.annotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: defectdojo.org/component: django app.kubernetes.io/name: {{ include "defectdojo.name" . 
}} app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} - {{- with .Values.django.annotations }} - annotations: - {{- toYaml . | nindent 4 }} - {{- end }} + name: {{ $fullName }}-django + namespace: {{ .Release.Namespace }} spec: replicas: {{ .Values.django.replicas }} {{- with .Values.django.strategy }} @@ -36,15 +39,12 @@ spec: defectdojo.org/component: django app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.podLabels }} - {{- toYaml . | nindent 8 }} - {{- end }} + {{- range $key, $value := mergeOverwrite dict .Values.extraLabels .Values.podLabels }} + {{ $key }}: {{ quote $value }} + {{- end }} annotations: - {{- with .Values.django.annotations }} - {{- toYaml . | nindent 8 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraAnnotations .Values.django.annotations }} + {{ $key }}: {{ quote $value }} {{- end }} {{- if and .Values.monitoring.enabled .Values.monitoring.prometheus.enabled }} prometheus.io/path: /metrics @@ -65,8 +65,14 @@ spec: - name: {{ quote . }} {{- end }} {{- if .Values.django.mediaPersistentVolume.enabled }} + {{- if .Values.securityContext.enabled }} securityContext: - fsGroup: {{ .Values.django.mediaPersistentVolume.fsGroup | default 1001 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.podSecurityContext" + "django.podSecurityContext" + ) | nindent 8 }} + {{- end }} {{- end }} volumes: - name: run @@ -99,25 +105,10 @@ spec: - {{- . 
| toYaml | nindent 8 }} {{- end }} {{- if .Values.cloudsql.enabled }} - - name: cloudsql-proxy - image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} - imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} - restartPolicy: Always - securityContext: - runAsNonRoot: true - command: ["/cloud_sql_proxy"] - args: - - "-verbose={{ .Values.cloudsql.verbose }}" - - "-instances={{ .Values.cloudsql.instance }}=tcp:{{ .Values.postgresql.primary.service.ports.postgresql }}" - {{- if .Values.cloudsql.enable_iam_login }} - - "-enable_iam_login" - {{- end }} - {{- if .Values.cloudsql.use_private_ip }} - - "-ip_address_types=PRIVATE" - {{- end }} + {{- include "cloudsqlProxy" . | nindent 6 }} {{- end }} {{- if .Values.dbMigrationChecker.enabled }} - {{$data := dict "fullName" $fullName }} + {{- $data := dict "fullName" $fullName }} {{- $newContext := merge . (dict "fullName" $fullName) }} {{- include "dbMigrationChecker" $newContext | nindent 6 }} {{- end }} @@ -125,9 +116,15 @@ spec: containers: {{- if and .Values.monitoring.enabled .Values.monitoring.prometheus.enabled }} - name: metrics - image: {{ .Values.monitoring.prometheus.image }} + image: '{{ template "monitoring.prometheus.image" . }}' imagePullPolicy: {{ .Values.monitoring.prometheus.imagePullPolicy }} - command: [ '/usr/bin/nginx-prometheus-exporter', '--nginx.scrape-uri', 'http://127.0.0.1:8080/nginx_status'] + command: + - /usr/bin/nginx-prometheus-exporter + - --nginx.scrape-uri + - http://127.0.0.1:8080/nginx_status + {{- with .Values.monitoring.prometheus.extraEnv }} + env: {{- . | toYaml | nindent 8 }} + {{- end }} ports: - name: http-metrics protocol: TCP @@ -139,13 +136,31 @@ spec: periodSeconds: 20 initialDelaySeconds: 15 timeoutSeconds: 5 + {{- with .Values.monitoring.prometheus.resources }} + resources: {{- . 
| toYaml | nindent 10 }} + {{- end }} + {{- if .Values.securityContext.enabled }} + securityContext: + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "monitoring.prometheus.containerSecurityContext" + ) | nindent 10 }} + {{- end }} + {{- with .Values.monitoring.prometheus.extraVolumeMounts }} + volumeMounts: {{ . | toYaml | nindent 10 }} + {{- end }} {{- end }} - name: uwsgi - image: '{{ template "django.uwsgi.repository" . }}:{{ .Values.tag }}' + image: '{{ template "django.uwsgi.image" . }}' imagePullPolicy: {{ .Values.imagePullPolicy }} {{- if .Values.securityContext.enabled }} securityContext: - {{- toYaml .Values.securityContext.djangoSecurityContext | nindent 10 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "django.uwsgi.containerSecurityContext" + ) | nindent 10 }} {{- end }} volumeMounts: - name: run @@ -160,6 +175,9 @@ spec: - name: cert-mount mountPath: {{ .Values.django.uwsgi.certificates.certMountPath }} {{- end }} + {{- with .Values.django.extraVolumeMounts }} + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with .Values.django.uwsgi.extraVolumeMounts }} {{- . | toYaml | nindent 8 }} {{- end }} @@ -213,6 +231,9 @@ spec: {{- with .Values.extraEnv }} {{- . | toYaml | nindent 8 }} {{- end }} + {{- with .Values.django.extraEnv }} + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with .Values.django.uwsgi.extraEnv }} {{- . | toYaml | nindent 8 }} {{- end }} @@ -235,15 +256,22 @@ spec: resources: {{- toYaml .Values.django.uwsgi.resources | nindent 10 }} - name: nginx - image: '{{ template "django.nginx.repository" . }}:{{ .Values.tag }}' + image: '{{ template "django.nginx.image" . 
}}' imagePullPolicy: {{ .Values.imagePullPolicy }} {{- if .Values.securityContext.enabled }} securityContext: - {{- toYaml .Values.securityContext.nginxSecurityContext | nindent 10 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "django.nginx.containerSecurityContext" + ) | nindent 10 }} {{- end }} volumeMounts: - name: run mountPath: /run/defectdojo + {{- with .Values.django.extraVolumeMounts }} + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with .Values.django.nginx.extraVolumeMounts }} {{- . | toYaml | nindent 8 }} {{- end }} @@ -271,6 +299,9 @@ spec: {{- with .Values.extraEnv }} {{- . | toYaml | nindent 8 }} {{- end }} + {{- with .Values.django.extraEnv }} + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with .Values.django.nginx.extraEnv }} {{- . | toYaml | nindent 8 }} {{- end }} diff --git a/helm/defectdojo/templates/django-ingress.yaml b/helm/defectdojo/templates/django-ingress.yaml index 4a0209d15a2..aee880f23d9 100644 --- a/helm/defectdojo/templates/django-ingress.yaml +++ b/helm/defectdojo/templates/django-ingress.yaml @@ -3,28 +3,32 @@ apiVersion: networking.k8s.io/v1 kind: Ingress metadata: - name: {{ $fullName }} - labels: - defectdojo.org/component: django - app.kubernetes.io/name: {{ include "defectdojo.name" . }} - app.kubernetes.io/instance: {{ .Release.Name }} - app.kubernetes.io/managed-by: {{ .Release.Service }} - helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} - {{- end }} -{{- if or .Values.django.ingress.annotations .Values.gke.useGKEIngress }} + {{- if or .Values.extraAnnotations .Values.django.ingress.annotations .Values.gke.useGKEIngress }} annotations: -{{- with .Values.django.ingress.annotations }} - {{- toYaml . 
| nindent 4 }} -{{- end }} + {{- range $key, $value := .Values.extraAnnotations }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- range $key, $value := .Values.django.ingress.annotations }} + {{ $key }}: {{ quote $value }} + {{- end }} {{- if .Values.gke.useGKEIngress }} {{- if .Values.gke.useManagedCertificate }} kubernetes.io/ingress.allow-http: "false" networking.gke.io/managed-certificates: {{ $fullName }}-django {{- end }} {{- end }} -{{- end }} + {{- end }} + labels: + defectdojo.org/component: django + app.kubernetes.io/name: {{ include "defectdojo.name" . }} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + helm.sh/chart: {{ include "defectdojo.chart" . }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} + {{- end }} + name: {{ $fullName }} + namespace: {{ .Release.Namespace }} spec: {{- if .Values.django.ingress.ingressClassName }} ingressClassName: {{ .Values.django.ingress.ingressClassName }} diff --git a/helm/defectdojo/templates/django-service.yaml b/helm/defectdojo/templates/django-service.yaml index f8c20aa092f..5f966c15edc 100644 --- a/helm/defectdojo/templates/django-service.yaml +++ b/helm/defectdojo/templates/django-service.yaml @@ -2,22 +2,23 @@ apiVersion: v1 kind: Service metadata: - name: {{ $fullName }}-django + {{- with mergeOverwrite dict .Values.extraAnnotations .Values.django.service.annotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ $value | quote }} + {{- end }} + {{- end }} labels: defectdojo.org/component: django app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . 
| nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} -{{- if .Values.django.service.annotations }} - annotations: - {{- range $key, $value := .Values.django.service.annotations }} - {{ $key }}: {{ $value | quote }} - {{- end }} -{{- end }} + name: {{ $fullName }}-django + namespace: {{ .Release.Namespace }} spec: selector: defectdojo.org/component: django diff --git a/helm/defectdojo/templates/extra-secret.yaml b/helm/defectdojo/templates/extra-secret.yaml index d97800283a6..caa5b1fcbfa 100644 --- a/helm/defectdojo/templates/extra-secret.yaml +++ b/helm/defectdojo/templates/extra-secret.yaml @@ -3,24 +3,22 @@ apiVersion: v1 kind: Secret metadata: - name: {{ $fullName }}-extrasecrets + {{- with mergeOverwrite dict .Values.secrets.annotations .Values.extraAnnotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ $value | quote }} + {{- end }} + {{- end }} labels: app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} - {{- if or .Values.secrets.annotations .Values.annotations }} - annotations: - {{- with .Values.secrets.annotations }} - {{- toYaml . | nindent 4 }} - {{- end }} - {{- with .Values.annotations }} - {{- toYaml . 
| nindent 4 }} - {{- end }} - {{- end }} + name: {{ $fullName }}-extrasecrets + namespace: {{ .Release.Namespace }} type: Opaque data: {{- range $key, $value := .Values.extraSecrets }} diff --git a/helm/defectdojo/templates/gke-managed-certificate.yaml b/helm/defectdojo/templates/gke-managed-certificate.yaml index 43399626310..14dc539e6b7 100644 --- a/helm/defectdojo/templates/gke-managed-certificate.yaml +++ b/helm/defectdojo/templates/gke-managed-certificate.yaml @@ -1,9 +1,22 @@ -{{- if .Values.gke.useManagedCertificate }} +{{- if .Values.gke.useManagedCertificate | and (.Capabilities.APIVersions.Has "networking.gke.io/v1") }} {{- $fullName := include "defectdojo.fullname" . -}} apiVersion: networking.gke.io/v1 kind: ManagedCertificate metadata: + {{- with .Values.extraAnnotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} + {{- with .Values.extraLabels }} + labels: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} name: {{ $fullName }}-django + namespace: {{ .Release.Namespace }} spec: domains: - {{ .Values.host }} diff --git a/helm/defectdojo/templates/initializer-job.yaml b/helm/defectdojo/templates/initializer-job.yaml index aa4bff0cbd7..43dcd269d8f 100644 --- a/helm/defectdojo/templates/initializer-job.yaml +++ b/helm/defectdojo/templates/initializer-job.yaml @@ -3,20 +3,23 @@ apiVersion: batch/v1 kind: Job metadata: - name: {{ template "initializer.jobname" . }} + {{- with mergeOverwrite dict .Values.extraAnnotations .Values.initializer.jobAnnotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: defectdojo.org/component: initializer app.kubernetes.io/name: {{ include "defectdojo.name" . }} app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . 
}} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} - {{- end }} - annotations: - {{- with .Values.initializer.jobAnnotations }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} + name: {{ template "initializer.jobname" . }} + namespace: {{ .Release.Namespace }} spec: {{- if and (int .Values.initializer.keepSeconds) (gt (int .Values.initializer.keepSeconds) 0) }} ttlSecondsAfterFinished: {{ .Values.initializer.keepSeconds }} @@ -38,6 +41,14 @@ spec: {{- toYaml . | nindent 8 }} {{- end }} spec: + {{- if .Values.securityContext.enabled }} + securityContext: + {{- include "helpers.securityContext" (list + .Values + "securityContext.podSecurityContext" + "initializer.podSecurityContext" + ) | nindent 8 }} + {{- end }} serviceAccountName: {{ include "defectdojo.serviceAccountName" . }} automountServiceAccountToken: {{ .Values.initializer.automountServiceAccountToken }} {{- with .Values.imagePullSecrets }} @@ -64,40 +75,22 @@ spec: {{- end }} initContainers: {{- if .Values.cloudsql.enabled }} - - name: cloudsql-proxy - image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} - imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} - restartPolicy: Always - securityContext: - runAsNonRoot: true - command: ["/cloud_sql_proxy"] - args: - - "-verbose={{ .Values.cloudsql.verbose }}" - - "-instances={{ .Values.cloudsql.instance }}=tcp:{{ .Values.postgresql.primary.service.ports.postgresql }}" - {{- if .Values.cloudsql.enable_iam_login }} - - "-enable_iam_login" - {{- end }} - {{- if .Values.cloudsql.use_private_ip }} - - "-ip_address_types=PRIVATE" - {{- end }} - volumeMounts: - {{- range .Values.initializer.extraVolumes }} - - name: userconfig-{{ .name }} - readOnly: true - mountPath: {{ .path }} - subPath: {{ .subPath }} - {{- end }} + {{- include "cloudsqlProxy" . 
| nindent 6 }} {{- end }} - name: wait-for-db command: - '/bin/bash' - '-c' - '/wait-for-it.sh ${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432} -t 300 -s -- /bin/echo Database is up' - image: '{{ template "django.uwsgi.repository" . }}:{{ .Values.tag }}' + image: "{{ template "initializer.image" . }}" imagePullPolicy: {{ .Values.imagePullPolicy }} {{- if .Values.securityContext.enabled }} securityContext: - {{- toYaml .Values.securityContext.djangoSecurityContext | nindent 10 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "django.uwsgi.containerSecurityContext" + ) | nindent 10 }} {{- end }} envFrom: - configMapRef: @@ -120,11 +113,15 @@ spec: {{- end }} containers: - name: initializer - image: "{{ template "initializer.repository" . }}:{{ .Values.tag }}" + image: "{{ template "initializer.image" . }}" imagePullPolicy: {{ .Values.imagePullPolicy }} {{- if .Values.securityContext.enabled }} securityContext: - {{- toYaml .Values.securityContext.djangoSecurityContext | nindent 10 }} + {{- include "helpers.securityContext" (list + .Values + "securityContext.containerSecurityContext" + "initializer.containerSecurityContext" + ) | nindent 10 }} {{- end }} volumeMounts: {{- if .Values.localsettingspy }} diff --git a/helm/defectdojo/templates/media-pvc.yaml b/helm/defectdojo/templates/media-pvc.yaml index d31d3251b44..57fcae8e0c7 100644 --- a/helm/defectdojo/templates/media-pvc.yaml +++ b/helm/defectdojo/templates/media-pvc.yaml @@ -1,22 +1,29 @@ {{- $fullName := include "django.pvc_name" $ -}} {{ with .Values.django.mediaPersistentVolume }} -{{- if and .enabled (eq .type "pvc") .persistentVolumeClaim.create }} +{{- if and .enabled (eq .type "pvc") .persistentVolumeClaim.create }} apiVersion: v1 kind: PersistentVolumeClaim metadata: + {{- with .Values.extraAnnotations }} + annotations: + {{- range $key, $value := . 
}} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: defectdojo.org/component: django app.kubernetes.io/name: {{ include "defectdojo.name" $ }} app.kubernetes.io/instance: {{ $.Release.Name }} app.kubernetes.io/managed-by: {{ $.Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" $ }} - {{- with $.Values.extraLabels }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} name: {{ $fullName }} + namespace: {{ .Release.Namespace }} spec: accessModes: - {{- toYaml .persistentVolumeClaim.accessModes |nindent 4 }} + {{- toYaml .persistentVolumeClaim.accessModes | nindent 4 }} resources: requests: storage: {{ .persistentVolumeClaim.size }} diff --git a/helm/defectdojo/templates/network-policy.yaml b/helm/defectdojo/templates/network-policy.yaml index e580a0df80c..333b58da3e6 100644 --- a/helm/defectdojo/templates/network-policy.yaml +++ b/helm/defectdojo/templates/network-policy.yaml @@ -3,21 +3,22 @@ apiVersion: networking.k8s.io/v1 kind: NetworkPolicy metadata: - name: {{ $fullName }}-networkpolicy + {{- with mergeOverwrite dict .Values.extraAnnotations .Values.networkPolicy.annotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} app.kubernetes.io/name: {{ include "defectdojo.name" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} -{{- if .Values.networkPolicy.annotations }} - annotations: -{{- with .Values.networkPolicy.annotations }} - {{- toYaml . 
| nindent 4 }} -{{- end }} -{{- end }} + name: {{ $fullName }}-networkpolicy + namespace: {{ .Release.Namespace }} spec: podSelector: matchLabels: @@ -43,15 +44,22 @@ spec: apiVersion: networking.k8s.io/v1 kind: NetworkPolicy metadata: - name: {{ $fullName }}-networkpolicy-django + {{- with mergeOverwrite dict .Values.extraAnnotations .Values.networkPolicy.annotations }} + annotations: + {{- range $key, $value := . }} + {{ $key }}: {{ quote $value }} + {{- end }} + {{- end }} labels: app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "defectdojo.chart" . }} app.kubernetes.io/name: {{ include "defectdojo.name" . }} -{{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} -{{- end }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} + {{- end }} + name: {{ $fullName }}-networkpolicy-django + namespace: {{ .Release.Namespace }} spec: podSelector: matchLabels: diff --git a/helm/defectdojo/templates/sa.yaml b/helm/defectdojo/templates/sa.yaml index 4345da6360a..1394f077945 100644 --- a/helm/defectdojo/templates/sa.yaml +++ b/helm/defectdojo/templates/sa.yaml @@ -2,31 +2,26 @@ kind: ServiceAccount apiVersion: v1 metadata: - name: {{ include "defectdojo.serviceAccountName" . }} - labels: - app.kubernetes.io/name: {{ include "defectdojo.name" . }} - app.kubernetes.io/instance: {{ .Release.Name }} - app.kubernetes.io/managed-by: {{ .Release.Service }} - helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} - {{- end }} - {{- with .Values.serviceAccount.labels }} - {{- toYaml . | nindent 4 }} - {{- end }} annotations: {{- if (not .Values.disableHooks) }} helm.sh/resource-policy: keep helm.sh/hook: "pre-install" helm.sh/hook-delete-policy: "before-hook-creation" {{- end }} - {{- with .Values.annotations }} - {{ toYaml . 
| nindent 4 }} - {{- end }} - {{- with .Values.serviceAccount.annotations }} - {{ toYaml . | nindent 4 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraAnnotations .Values.serviceAccount.annotations }} + {{ $key }}: {{ quote $value }} {{- end }} {{- if ne .Values.gke.workloadIdentityEmail "" }} iam.gke.io/gcp-service-account: {{ .Values.gke.workloadIdentityEmail }} {{- end }} + labels: + app.kubernetes.io/name: {{ include "defectdojo.name" . }} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + helm.sh/chart: {{ include "defectdojo.chart" . }} + {{- range $key, $value := mergeOverwrite dict .Values.extraLabels .Values.serviceAccount.labels }} + {{ $key }}: {{ quote $value }} + {{- end }} + name: {{ include "defectdojo.serviceAccountName" . }} + namespace: {{ .Release.Namespace }} {{- end }} \ No newline at end of file diff --git a/helm/defectdojo/templates/secret-postgresql.yaml b/helm/defectdojo/templates/secret-postgresql.yaml index 12924bb29c5..57f38a0e883 100644 --- a/helm/defectdojo/templates/secret-postgresql.yaml +++ b/helm/defectdojo/templates/secret-postgresql.yaml @@ -2,27 +2,25 @@ apiVersion: v1 kind: Secret metadata: - name: {{ .Values.postgresql.auth.existingSecret }} - labels: - app.kubernetes.io/name: {{ include "defectdojo.name" . }} - app.kubernetes.io/instance: {{ .Release.Name }} - app.kubernetes.io/managed-by: {{ .Release.Service }} - helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} - {{- end }} annotations: {{- if (not .Values.disableHooks) }} helm.sh/resource-policy: keep helm.sh/hook: "pre-install" helm.sh/hook-delete-policy: "before-hook-creation" {{- end }} - {{- with .Values.secrets.annotations }} - {{- toYaml . 
| nindent 4 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraAnnotations .Values.secrets.annotations }} + {{ $key }}: {{ quote $value }} {{- end }} - {{- with .Values.annotations }} - {{- toYaml . | nindent 4 }} + labels: + app.kubernetes.io/name: {{ include "defectdojo.name" . }} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + helm.sh/chart: {{ include "defectdojo.chart" . }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} + name: {{ .Values.postgresql.auth.existingSecret }} + namespace: {{ .Release.Namespace }} type: Opaque data: {{- if .Values.postgresql.auth.password }} diff --git a/helm/defectdojo/templates/secret-redis.yaml b/helm/defectdojo/templates/secret-redis.yaml index f6d102c2513..b2a5a3a84c2 100644 --- a/helm/defectdojo/templates/secret-redis.yaml +++ b/helm/defectdojo/templates/secret-redis.yaml @@ -2,27 +2,25 @@ apiVersion: v1 kind: Secret metadata: - name: {{ .Values.redis.auth.existingSecret }} - labels: - app.kubernetes.io/name: {{ include "defectdojo.name" . }} - app.kubernetes.io/instance: {{ .Release.Name }} - app.kubernetes.io/managed-by: {{ .Release.Service }} - helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} - {{- end }} annotations: {{- if (not .Values.disableHooks) }} helm.sh/resource-policy: keep helm.sh/hook: "pre-install" helm.sh/hook-delete-policy: "before-hook-creation" {{- end }} - {{- with .Values.secrets.annotations }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraAnnotations .Values.secrets.annotations }} + {{ $key }}: {{ quote $value }} {{- end }} - {{- with .Values.annotations }} - {{- toYaml . | nindent 4 }} + labels: + app.kubernetes.io/name: {{ include "defectdojo.name" . 
}} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + helm.sh/chart: {{ include "defectdojo.chart" . }} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} + name: {{ .Values.redis.auth.existingSecret }} + namespace: {{ .Release.Namespace }} type: Opaque data: {{- if .Values.redis.auth.password }} diff --git a/helm/defectdojo/templates/secret.yaml b/helm/defectdojo/templates/secret.yaml index c3a3c56f6c4..3a4a5299d64 100644 --- a/helm/defectdojo/templates/secret.yaml +++ b/helm/defectdojo/templates/secret.yaml @@ -3,47 +3,45 @@ apiVersion: v1 kind: Secret metadata: - name: {{ $fullName }} - labels: - app.kubernetes.io/name: {{ include "defectdojo.name" . }} - app.kubernetes.io/instance: {{ .Release.Name }} - app.kubernetes.io/managed-by: {{ .Release.Service }} - helm.sh/chart: {{ include "defectdojo.chart" . }} - {{- with .Values.extraLabels }} - {{- toYaml . | nindent 4 }} - {{- end }} annotations: {{- if (not .Values.disableHooks) }} helm.sh/resource-policy: keep helm.sh/hook: "pre-install" helm.sh/hook-delete-policy: "before-hook-creation" {{- end }} - {{- with .Values.secrets.annotations }} - {{- toYaml . | nindent 4 }} + {{- range $key, $value := mergeOverwrite dict .Values.extraAnnotations .Values.secrets.annotations }} + {{ $key }}: {{ quote $value }} {{- end }} - {{- with .Values.annotations }} - {{- toYaml . | nindent 4 }} + labels: + app.kubernetes.io/name: {{ include "defectdojo.name" . }} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + helm.sh/chart: {{ include "defectdojo.chart" . 
}} + {{- range $key, $value := .Values.extraLabels }} + {{ $key }}: {{ quote $value }} {{- end }} + name: {{ $fullName }} + namespace: {{ .Release.Namespace }} type: Opaque data: {{- if .Values.admin.password }} DD_ADMIN_PASSWORD: {{ .Values.admin.password | b64enc | quote }} -{{- else}} +{{- else }} DD_ADMIN_PASSWORD: {{ randAlphaNum 22 | b64enc | quote }} -{{- end}} +{{- end }} {{- if .Values.admin.secretKey }} DD_SECRET_KEY: {{ .Values.admin.secretKey | b64enc | quote }} -{{- else}} +{{- else }} DD_SECRET_KEY: {{ randAlphaNum 128 | b64enc | quote }} -{{- end}} +{{- end }} {{- if .Values.admin.credentialAes256Key }} DD_CREDENTIAL_AES_256_KEY: {{ .Values.admin.credentialAes256Key | b64enc | quote }} -{{- else}} +{{- else }} DD_CREDENTIAL_AES_256_KEY: {{ randAlphaNum 128 | b64enc | quote }} -{{- end}} +{{- end }} {{- if .Values.admin.metricsHttpAuthPassword }} METRICS_HTTP_AUTH_PASSWORD: {{ .Values.admin.metricsHttpAuthPassword | b64enc | quote }} -{{- else}} +{{- else }} METRICS_HTTP_AUTH_PASSWORD: {{ randAlphaNum 32 | b64enc | quote }} -{{- end}} +{{- end }} {{- end }} diff --git a/helm/defectdojo/templates/tests/unit-tests.yaml b/helm/defectdojo/templates/tests/unit-tests.yaml index 08939429008..01fa4cf1041 100644 --- a/helm/defectdojo/templates/tests/unit-tests.yaml +++ b/helm/defectdojo/templates/tests/unit-tests.yaml @@ -19,7 +19,7 @@ spec: {{- end }} containers: - name: unit-tests - image: '{{ .Values.repositoryPrefix }}/defectdojo-django:{{ .Values.tag }}' + image: '{{ template "unitTests.image" . 
}}' imagePullPolicy: {{ .Values.imagePullPolicy }} {{- if .Values.securityContext.enabled }} securityContext: diff --git a/helm/defectdojo/values.schema.json b/helm/defectdojo/values.schema.json index 3d899e176e0..76b1411877d 100644 --- a/helm/defectdojo/values.schema.json +++ b/helm/defectdojo/values.schema.json @@ -31,13 +31,15 @@ } } }, - "annotations": { - "type": "object" + "alternativeHosts": { + "description": "optional list of alternative hostnames to use that gets appended to DD_ALLOWED_HOSTS. This is necessary when your local hostname does not match the global hostname.", + "type": "array" }, "celery": { "type": "object", "properties": { "annotations": { + "description": "Common annotations to worker and beat deployments and pods.", "type": "object" }, "beat": { @@ -47,37 +49,73 @@ "type": "object" }, "annotations": { + "description": "Annotations for the Celery beat deployment.", "type": "object" }, "automountServiceAccountToken": { "type": "boolean" }, + "containerSecurityContext": { + "description": "Container security context for the Celery beat containers.", + "type": "object" + }, "extraEnv": { + "description": "Additional environment variables injected to Celery beat containers.", "type": "array" }, "extraInitContainers": { + "description": "A list of additional initContainers to run before celery beat containers.", "type": "array" }, "extraVolumeMounts": { + "description": "Array of additional volume mount points for the celery beat containers.", "type": "array" }, "extraVolumes": { + "description": "A list of extra volumes to mount @type: array\u003cmap\u003e", "type": "array" }, + "image": { + "description": "If empty, uses values from images.django.image", + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, "livenessProbe": { + "description": "Enable liveness probe for Celery beat container. 
``` exec: command: - bash - -c - celery -A dojo inspect ping -t 5 initialDelaySeconds: 30 periodSeconds: 60 timeoutSeconds: 10 ```", "type": "object" }, "nodeSelector": { "type": "object" }, "podAnnotations": { + "description": "Annotations for the Celery beat pods.", + "type": "object" + }, + "podSecurityContext": { + "description": "Pod security context for the Celery beat pods.", "type": "object" }, "readinessProbe": { + "description": "Enable readiness probe for Celery beat container.", "type": "object" }, "replicas": { - "type": "integer" + "description": "Multiple replicas are not allowed (Beat is intended to be a singleton) because scaling to \u003e1 will double-run schedules", + "type": "integer", + "maximum": 1 }, "resources": { "type": "object", @@ -107,6 +145,7 @@ } }, "startupProbe": { + "description": "Enable startup probe for Celery beat container.", "type": "object" }, "tolerations": { @@ -127,12 +166,14 @@ "type": "object" }, "annotations": { + "description": "Annotations for the Celery worker deployment.", "type": "object" }, "appSettings": { "type": "object", "properties": { "poolType": { + "description": "Performance improved celery worker config when needing to deal with a lot of findings (e.g deduplication ops) poolType: prefork autoscaleMin: 2 autoscaleMax: 8 concurrency: 8 prefetchMultiplier: 128", "type": "string" } } @@ -140,28 +181,61 @@ "automountServiceAccountToken": { "type": "boolean" }, + "containerSecurityContext": { + "description": "Container security context for the Celery worker containers.", + "type": "object" + }, "extraEnv": { + "description": "Additional environment variables injected to Celery worker containers.", "type": "array" }, "extraInitContainers": { + "description": "A list of additional initContainers to run before celery worker containers.", "type": "array" }, "extraVolumeMounts": { + "description": "Array of additional volume mount points for the celery worker containers.", "type": "array" }, "extraVolumes": { + 
"description": "A list of extra volumes to mount. @type: array\u003cmap\u003e", "type": "array" }, + "image": { + "description": "If empty, uses values from images.django.image", + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, "livenessProbe": { + "description": "Enable liveness probe for Celery worker containers. ``` exec: command: - bash - -c - celery -A dojo inspect ping -t 5 initialDelaySeconds: 30 periodSeconds: 60 timeoutSeconds: 10 ```", "type": "object" }, "nodeSelector": { "type": "object" }, "podAnnotations": { + "description": "Annotations for the Celery beat pods.", + "type": "object" + }, + "podSecurityContext": { + "description": "Pod security context for the Celery worker pods.", "type": "object" }, "readinessProbe": { + "description": "Enable readiness probe for Celery worker container.", "type": "object" }, "replicas": { @@ -195,6 +269,7 @@ } }, "startupProbe": { + "description": "Enable startup probe for Celery worker container.", "type": "object" }, "tolerations": { @@ -205,15 +280,31 @@ } }, "cloudsql": { + "description": "Google CloudSQL support in GKE via gce-proxy", "type": "object", "properties": { + "containerSecurityContext": { + "description": "Optional: security context for the CloudSQL proxy container.", + "type": "object" + }, "enable_iam_login": { + "description": "use IAM database authentication", "type": "boolean" }, "enabled": { + "description": "To use CloudSQL in GKE set 'enable: true'", "type": "boolean" }, + "extraEnv": { + "description": "Additional environment variables for the CloudSQL proxy container.", + "type": "array" + }, + "extraVolumeMounts": { + "description": "Array of additional volume mount points for the CloudSQL proxy container", + "type": "array" + }, "image": { + "description": "set repo and image tag of gce-proxy", "type": "object", "properties": { 
"pullPolicy": { @@ -228,32 +319,74 @@ } }, "instance": { + "description": "set CloudSQL instance: 'project:zone:instancename'", "type": "string" }, + "resources": { + "description": "Optional: add resource requests/limits for the CloudSQL proxy container.", + "type": "object" + }, "use_private_ip": { + "description": "whether to use a private IP to connect to the database", "type": "boolean" }, "verbose": { + "description": "By default, the proxy has verbose logging. Set this to false to make it less verbose", "type": "boolean" } } }, "createPostgresqlSecret": { + "description": "create postgresql secret in defectdojo chart, outside of postgresql chart", "type": "boolean" }, "createRedisSecret": { + "description": "create redis secret in defectdojo chart, outside of redis chart", "type": "boolean" }, "createSecret": { + "description": "create defectdojo specific secret", "type": "boolean" }, "dbMigrationChecker": { "type": "object", "properties": { + "containerSecurityContext": { + "description": "Container security context for the DB migration checker.", + "type": "object" + }, "enabled": { + "description": "Enable/disable the DB migration checker.", "type": "boolean" }, + "extraEnv": { + "description": "Additional environment variables for DB migration checker.", + "type": "array" + }, + "extraVolumeMounts": { + "description": "Array of additional volume mount points for DB migration checker.", + "type": "array" + }, + "image": { + "description": "If empty, uses values from images.django.image", + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, "resources": { + "description": "Resource requests/limits for the DB migration checker.", "type": "object", "properties": { "limits": { @@ -283,6 +416,7 @@ } }, "disableHooks": { + "description": "Avoid using pre-install hooks, which might cause issues with ArgoCD", "type": 
"boolean" }, "django": { @@ -297,10 +431,20 @@ "automountServiceAccountToken": { "type": "boolean" }, + "extraEnv": { + "description": "Additional environment variables injected to all Django containers and initContainers.", + "type": "array" + }, "extraInitContainers": { + "description": "A list of additional initContainers to run before the uwsgi and nginx containers.", + "type": "array" + }, + "extraVolumeMounts": { + "description": "Array of additional volume mount points common to all containers and initContainers.", "type": "array" }, "extraVolumes": { + "description": "A list of extra volumes to mount.", "type": "array" }, "ingress": { @@ -310,6 +454,7 @@ "type": "boolean" }, "annotations": { + "description": "Restricts the type of ingress controller that can interact with our chart (nginx, traefik, ...) `kubernetes.io/ingress.class: nginx` Depending on the size and complexity of your scans, you might want to increase the default ingress timeouts if you see repeated 504 Gateway Timeouts `nginx.ingress.kubernetes.io/proxy-read-timeout: \"1800\"` `nginx.ingress.kubernetes.io/proxy-send-timeout: \"1800\"`", "type": "object" }, "enabled": { @@ -324,6 +469,7 @@ } }, "mediaPersistentVolume": { + "description": "This feature needs more preparation before can be enabled, please visit KUBERNETES.md#media-persistent-volume", "type": "object", "properties": { "enabled": { @@ -333,18 +479,22 @@ "type": "integer" }, "name": { + "description": "any name", "type": "string" }, "persistentVolumeClaim": { + "description": "in case if pvc specified, should point to the already existing pvc", "type": "object", "properties": { "accessModes": { + "description": "check KUBERNETES.md doc first for option to choose", "type": "array", "items": { "type": "string" } }, "create": { + "description": "set to true to create a new pvc and if django.mediaPersistentVolume.type is set to pvc", "type": "boolean" }, "name": { @@ -359,6 +509,7 @@ } }, "type": { + "description": "could be emptyDir 
(not for production) or pvc", "type": "string" } } @@ -366,12 +517,42 @@ "nginx": { "type": "object", "properties": { + "containerSecurityContext": { + "description": "Container security context for the nginx containers.", + "type": "object", + "properties": { + "runAsUser": { + "description": "nginx dockerfile sets USER=1001", + "type": "integer" + } + } + }, "extraEnv": { + "description": "To extra environment variables to the nginx container, you can use extraEnv. For example: extraEnv: - name: FOO valueFrom: configMapKeyRef: name: foo key: bar", "type": "array" }, "extraVolumeMounts": { + "description": "Array of additional volume mount points for nginx containers.", "type": "array" }, + "image": { + "description": "If empty, uses values from images.nginx.image", + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, "resources": { "type": "object", "properties": { @@ -415,6 +596,15 @@ "nodeSelector": { "type": "object" }, + "podSecurityContext": { + "description": "Pod security context for the Django pods.", + "type": "object", + "properties": { + "fsGroup": { + "type": "integer" + } + } + }, "replicas": { "type": "integer" }, @@ -442,6 +632,7 @@ "type": "object", "properties": { "maxFd": { + "description": "Use this value to set the maximum number of file descriptors. If set to 0 will be detected by uwsgi e.g. 
102400", "type": "integer" }, "processes": { @@ -465,23 +656,55 @@ "type": "string" }, "enabled": { + "description": "includes additional CA certificate as volume, it refrences REQUESTS_CA_BUNDLE env varible NOTE: it reflects REQUESTS_CA_BUNDLE for celery workers, beats as well", "type": "boolean" } } }, + "containerSecurityContext": { + "type": "object", + "properties": { + "runAsUser": { + "description": "django dockerfile sets USER=1001", + "type": "integer" + } + } + }, "enableDebug": { + "description": "this also requires DD_DEBUG to be set to True", "type": "boolean" }, "extraEnv": { + "description": "To add (or override) extra variables which need to be pulled from another configMap, you can use extraEnv. For example: extraEnv: - name: DD_DATABASE_HOST valueFrom: configMapKeyRef: name: my-other-postgres-configmap key: cluster_endpoint", "type": "array" }, "extraVolumeMounts": { + "description": "Array of additional volume mount points for uwsgi containers.", "type": "array" }, + "image": { + "description": "If empty, uses values from images.django.image", + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, "livenessProbe": { "type": "object", "properties": { "enabled": { + "description": "Enable liveness checks on uwsgi container.", "type": "boolean" }, "failureThreshold": { @@ -505,6 +728,7 @@ "type": "object", "properties": { "enabled": { + "description": "Enable readiness checks on uwsgi container.", "type": "boolean" }, "failureThreshold": { @@ -555,6 +779,7 @@ "type": "object", "properties": { "enabled": { + "description": "Enable startup checks on uwsgi container.", "type": "boolean" }, "failureThreshold": { @@ -578,44 +803,111 @@ } } }, + "extraAnnotations": { + "description": "Annotations globally added to all resources", + "type": "object" + }, "extraConfigs": { + "description": "To add extra variables 
not predefined by helm config it is possible to define in extraConfigs block, e.g. below: NOTE Do not store any kind of sensitive information inside of it ``` DD_SOCIAL_AUTH_AUTH0_OAUTH2_ENABLED: 'true' DD_SOCIAL_AUTH_AUTH0_KEY: 'dev' DD_SOCIAL_AUTH_AUTH0_DOMAIN: 'xxxxx' ```", "type": "object" }, "extraEnv": { + "description": "To add (or override) extra variables which need to be pulled from another configMap, you can use extraEnv. For example: ``` - name: DD_DATABASE_HOST valueFrom: configMapKeyRef: name: my-other-postgres-configmap key: cluster_endpoint ```", "type": "array" }, "extraLabels": { + "description": "Labels globally added to all resources", "type": "object" }, "extraSecrets": { + "description": "Extra secrets can be created inside of extraSecrets block: ``` DD_SOCIAL_AUTH_AUTH0_SECRET: 'xxx' ```", "type": "object" }, "gke": { + "description": "Settings to make running the chart on GKE simpler", "type": "object", "properties": { "useGKEIngress": { + "description": "Set to true to configure the Ingress to use the GKE provided ingress controller", "type": "boolean" }, "useManagedCertificate": { + "description": "Set to true to have GKE automatically provision a TLS certificate for the host specified Requires useGKEIngress to be set to true When using this option, be sure to set django.ingress.activateTLS to false", "type": "boolean" }, "workloadIdentityEmail": { + "description": "Workload Identity allows the K8s service account to assume the IAM access of a GCP service account to interact with other GCP services Only works with serviceAccount.create = true", "type": "string" } } }, "host": { + "description": "Primary hostname of instance", "type": "string" }, "imagePullPolicy": { "type": "string" }, "imagePullSecrets": { + "description": "When using a private registry, name of the secret that holds the registry secret (eg deploy token from gitlab-ci project)", "type": [ "string", "null" ] }, + "images": { + "type": "object", + "properties": { + 
"django": { + "type": "object", + "properties": { + "image": { + "type": "object", + "properties": { + "digest": { + "description": "Prefix \"sha@\" is expected in this place", + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "description": "If empty, use appVersion. Another possible values are: latest, X.X.X, X.X.X-debian, X.X.X-alpine (where X.X.X is version of DD). For dev builds (only for testing purposes): nightly-dev, nightly-dev-debian, nightly-dev-alpine. To see all, check https://hub.docker.com/r/defectdojo/defectdojo-django/tags.", + "type": "string" + } + } + } + } + }, + "nginx": { + "type": "object", + "properties": { + "image": { + "type": "object", + "properties": { + "digest": { + "description": "Prefix \"sha@\" is expected in this place", + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "description": "If empty, use appVersion. Another possible values are: latest, X.X.X, X.X.X-alpine (where X.X.X is version of DD). For dev builds (only for testing purposes): nightly-dev, nightly-dev-alpine. 
To see all, check https://hub.docker.com/r/defectdojo/defectdojo-nginx/tags.", + "type": "string" + } + } + } + } + } + } + }, "initializer": { "type": "object", "properties": { @@ -628,19 +920,45 @@ "automountServiceAccountToken": { "type": "boolean" }, + "containerSecurityContext": { + "description": "Container security context for the initializer Job container", + "type": "object" + }, "extraEnv": { + "description": "Additional environment variables injected to the initializer job pods.", "type": "array" }, "extraVolumeMounts": { + "description": "Array of additional volume mount points for the initializer job (init)containers.", "type": "array" }, "extraVolumes": { + "description": "A list of extra volumes to attach to the initializer job pods.", "type": "array" }, + "image": { + "description": "If empty, uses values from images.django.image", + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, "jobAnnotations": { "type": "object" }, "keepSeconds": { + "description": "A positive integer will keep this Job and Pod deployed for the specified number of seconds, after which they will be removed. 
For all other values, the Job and Pod will remain deployed.", "type": "integer" }, "labels": { @@ -649,6 +967,10 @@ "nodeSelector": { "type": "object" }, + "podSecurityContext": { + "description": "Pod security context for the initializer Job", + "type": "object" + }, "resources": { "type": "object", "properties": { @@ -680,6 +1002,7 @@ "type": "boolean" }, "staticName": { + "description": "staticName defines whether name of the job will be the same (e.g., \"defectdojo-initializer\") or different every time - generated based on current time (e.g., \"defectdojo-initializer-2024-11-11-18-57\") This might be handy for ArgoCD deployments", "type": "boolean" }, "tolerations": { @@ -688,6 +1011,7 @@ } }, "localsettingspy": { + "description": "To add code snippet which would extend setting functionality, you might add it here It will be stored as ConfigMap and mounted `dojo/settings/local_settings.py`. For more see: https://documentation.defectdojo.com/getting_started/configuration/ For example: ``` localsettingspy: | INSTALLED_APPS += ( 'debug_toolbar', ) MIDDLEWARE = [ 'debug_toolbar.middleware.DebugToolbarMiddleware', ] + MIDDLEWARE ```", "type": "string" }, "monitoring": { @@ -699,49 +1023,87 @@ "prometheus": { "type": "object", "properties": { + "containerSecurityContext": { + "description": "Optional: container security context for nginx prometheus exporter", + "type": "object" + }, "enabled": { + "description": "Add the nginx prometheus exporter sidecar", "type": "boolean" }, + "extraEnv": { + "description": "Optional: additional environment variables injected to the nginx prometheus exporter container", + "type": "array" + }, + "extraVolumeMounts": { + "description": "Array of additional volume mount points for the nginx prometheus exporter", + "type": "array" + }, "image": { - "type": "string" + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + 
"type": "string" + } + } }, "imagePullPolicy": { "type": "string" + }, + "resources": { + "description": "Optional: add resource requests/limits for the nginx prometheus exporter container", + "type": "object" } } } } }, "networkPolicy": { + "description": "Enables application network policy For more info follow https://kubernetes.io/docs/concepts/services-networking/network-policies/", "type": "object", "properties": { "annotations": { "type": "object" }, "egress": { + "description": " ``` egress: - to: - ipBlock: cidr: 10.0.0.0/24 ports: - protocol: TCP port: 443 ```", "type": "array" }, "enabled": { "type": "boolean" }, "ingress": { + "description": "For more detailed configuration with ports and peers. It will ignore ingressExtend ``` ingress: - from: - podSelector: matchLabels: app.kubernetes.io/instance: defectdojo - podSelector: matchLabels: app.kubernetes.io/instance: defectdojo-prometheus ports: - protocol: TCP port: 8443 ```", "type": "array" }, "ingressExtend": { + "description": "if additional labels need to be allowed (e.g. 
prometheus scraper) ``` ingressExtend: - podSelector: matchLabels: app.kubernetes.io/instance: defectdojo-prometheus ```", "type": "array" } } }, "podLabels": { + "description": "Additional labels to add to the pods: ``` podLabels: key: value ```", "type": "object" }, "postgresServer": { + "description": "To use an external PostgreSQL instance (like CloudSQL), set `postgresql.enabled` to false, set items in `postgresql.auth` part for authentication, and set the address here:", "type": [ "string", "null" ] }, "postgresql": { + "description": "For more advance options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/postgresql", "type": "object", "properties": { "architecture": { @@ -779,6 +1141,7 @@ } }, "enabled": { + "description": "To use an external instance, switch enabled to `false` and set the address in `postgresServer` below", "type": "boolean" }, "primary": { @@ -791,9 +1154,11 @@ "type": "object", "properties": { "enabled": { + "description": "Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC", "type": "boolean" }, "runAsUser": { + "description": "runAsUser specification below is not applied if enabled=false. enabled=false is the required setting for OpenShift \"restricted SCC\" to work successfully.", "type": "integer" } } @@ -816,9 +1181,11 @@ "type": "object", "properties": { "enabled": { + "description": "Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC", "type": "boolean" }, "fsGroup": { + "description": "fsGroup specification below is not applied if enabled=false. 
enabled=false is the required setting for OpenShift \"restricted SCC\" to work successfully.", "type": "integer" } } @@ -855,6 +1222,7 @@ "type": "object", "properties": { "containerSecurityContext": { + "description": "if using restricted SCC set runAsUser: \"auto\" and if running under anyuid SCC - runAsUser needs to match the line above", "type": "object", "properties": { "runAsUser": { @@ -870,6 +1238,7 @@ } }, "redis": { + "description": "For more advance options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/redis", "type": "object", "properties": { "architecture": { @@ -890,6 +1259,7 @@ } }, "enabled": { + "description": "To use an external instance, switch enabled to `false`` and set the address in `redisServer` below", "type": "boolean" }, "sentinel": { @@ -904,6 +1274,7 @@ "type": "object", "properties": { "enabled": { + "description": "If TLS is enabled, the Redis broker will use the redis:// and optionally mount the certificates from an existing secret.", "type": "boolean" } } @@ -911,47 +1282,49 @@ } }, "redisParams": { + "description": "Parameters attached to the redis connection string, defaults to \"ssl_cert_reqs=optional\" if `redis.tls.enabled`", "type": "string" }, "redisServer": { + "description": "To use an external Redis instance, set `redis.enabled` to false and set the address here:", "type": [ "string", "null" ] }, - "repositoryPrefix": { - "type": "string" - }, "revisionHistoryLimit": { + "description": "Allow overriding of revisionHistoryLimit across all deployments.", "type": "integer" }, "secrets": { "type": "object", "properties": { "annotations": { + "description": "Add annotations for secret resources", "type": "object" } } }, "securityContext": { + "description": "Security context settings", "type": "object", "properties": { - "djangoSecurityContext": { + "containerSecurityContext": { "type": "object", "properties": { - "runAsUser": { - "type": "integer" + "runAsNonRoot": { + "type": 
"boolean" } } }, "enabled": { "type": "boolean" }, - "nginxSecurityContext": { + "podSecurityContext": { "type": "object", "properties": { - "runAsUser": { - "type": "integer" + "runAsNonRoot": { + "type": "boolean" } } } @@ -961,17 +1334,25 @@ "type": "object", "properties": { "annotations": { + "description": "Optional additional annotations to add to the DefectDojo's Service Account.", "type": "object" }, "create": { + "description": "Specifies whether a service account should be created.", "type": "boolean" }, "labels": { + "description": "Optional additional labels to add to the DefectDojo's Service Account.", "type": "object" + }, + "name": { + "description": "The name of the service account to use. If not set and create is true, a name is generated using the fullname template", + "type": "string" } } }, - "tag": { + "siteUrl": { + "description": "The full URL to your defectdojo instance, depends on the domain where DD is deployed, it also affects links in Jira. Use syntax: `siteUrl: 'https://\u003cyourdomain\u003e'`", "type": "string" }, "tests": { @@ -983,6 +1364,24 @@ "automountServiceAccountToken": { "type": "boolean" }, + "image": { + "description": "If empty, uses values from images.django.image", + "type": "object", + "properties": { + "digest": { + "type": "string" + }, + "registry": { + "type": "string" + }, + "repository": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, "resources": { "type": "object", "properties": { @@ -1015,6 +1414,7 @@ } }, "trackConfig": { + "description": "Track configuration (trackConfig): will automatically respin application pods in case of config changes detection can be: 1. disabled (default) 2. 
enabled, enables tracking configuration changes based on SHA256", "type": "string" } } diff --git a/helm/defectdojo/values.yaml b/helm/defectdojo/values.yaml index dd47f65eea4..cd850ace3c1 100644 --- a/helm/defectdojo/values.yaml +++ b/helm/defectdojo/values.yaml @@ -1,35 +1,70 @@ --- -# Global settings -# create defectdojo specific secret +# -- Security context settings +securityContext: + enabled: true + containerSecurityContext: + runAsNonRoot: true + podSecurityContext: + runAsNonRoot: true + +# -- create defectdojo specific secret createSecret: false -# create redis secret in defectdojo chart, outside of redis chart +# -- create redis secret in defectdojo chart, outside of redis chart createRedisSecret: false -# create postgresql secret in defectdojo chart, outside of postgresql chart +# -- create postgresql secret in defectdojo chart, outside of postgresql chart createPostgresqlSecret: false -# Track configuration (trackConfig): will automatically respin application pods in case of config changes detection +# -- Track configuration (trackConfig): will automatically respin application pods in case of config changes detection # can be: -# - disabled, default -# - enabled, enables tracking configuration changes based on SHA256 +# 1. disabled (default) +# 2. enabled, enables tracking configuration changes based on SHA256 trackConfig: disabled -# Avoid using pre-install hooks, which might cause issues with ArgoCD +# -- Avoid using pre-install hooks, which might cause issues with ArgoCD disableHooks: false +# -- Annotations globally added to all resources +extraAnnotations: {} +# -- Labels globally added to all resources extraLabels: {} -# Add extra labels for k8s -# Enables application network policy +images: + django: + image: + registry: "" + repository: defectdojo/defectdojo-django + # -- If empty, use appVersion. + # Another possible values are: latest, X.X.X, X.X.X-debian, X.X.X-alpine (where X.X.X is version of DD). 
+ # For dev builds (only for testing purposes): nightly-dev, nightly-dev-debian, nightly-dev-alpine. + # To see all, check https://hub.docker.com/r/defectdojo/defectdojo-django/tags. + tag: "" + # -- Prefix "sha@" is expected in this place + digest: "" + nginx: + image: + registry: "" + repository: defectdojo/defectdojo-nginx + # -- If empty, use appVersion. + # Another possible values are: latest, X.X.X, X.X.X-alpine (where X.X.X is version of DD). + # For dev builds (only for testing purposes): nightly-dev, nightly-dev-alpine. + # To see all, check https://hub.docker.com/r/defectdojo/defectdojo-nginx/tags. + tag: "" + # -- Prefix "sha@" is expected in this place + digest: "" + +# -- Enables application network policy # For more info follow https://kubernetes.io/docs/concepts/services-networking/network-policies/ networkPolicy: enabled: false - # if additional labels need to be allowed (e.g. prometheus scraper) - ingressExtend: [] + # -- if additional labels need to be allowed (e.g. prometheus scraper) + # ``` # ingressExtend: # - podSelector: # matchLabels: # app.kubernetes.io/instance: defectdojo-prometheus - # For more detailed configuration with ports and peers. It will ignore ingressExtend - ingress: [] + # ``` + ingressExtend: [] + # -- For more detailed configuration with ports and peers. 
It will ignore ingressExtend + # ``` # ingress: # - from: # - podSelector: @@ -41,7 +76,10 @@ networkPolicy: # ports: # - protocol: TCP # port: 8443 - egress: [] + # ``` + ingress: [] + # -- + # ``` # egress: # - to: # - ipBlock: @@ -49,61 +87,68 @@ networkPolicy: # ports: # - protocol: TCP # port: 443 + # ``` + egress: [] annotations: {} -# Primary hostname of instance +# -- Primary hostname of instance host: defectdojo.default.minikube.local -# The full URL to your defectdojo instance, depends on the domain where DD is deployed, it also affects links in Jira -# siteUrl: 'https://' +# -- The full URL to your defectdojo instance, depends on the domain where DD is deployed, it also affects links in Jira. +# Use syntax: `siteUrl: 'https://'` +siteUrl: "" -# optional list of alternative hostnames to use that gets appended to +# -- optional list of alternative hostnames to use that gets appended to # DD_ALLOWED_HOSTS. This is necessary when your local hostname does not match # the global hostname. -# alternativeHosts: +alternativeHosts: [] # - defectdojo.example.com imagePullPolicy: Always -# Where to pull the defectDojo images from. 
Defaults to "defectdojo/*" repositories on hub.docker.com -repositoryPrefix: defectdojo -# When using a private registry, name of the secret that holds the registry secret (eg deploy token from gitlab-ci project) -# Create secrets as: kubectl create secret docker-registry defectdojoregistrykey --docker-username=registry_username --docker-password=registry_password --docker-server='https://index.docker.io/v1/' # @schema type:[string, null] +# -- When using a private registry, name of the secret that holds the registry secret (eg deploy token from gitlab-ci project) +# Create secrets as: kubectl create secret docker-registry defectdojoregistrykey --docker-username=registry_username --docker-password=registry_password --docker-server='https://index.docker.io/v1/' imagePullSecrets: ~ -tag: latest -# Additional labels to add to the pods: +# -- Additional labels to add to the pods: +# ``` # podLabels: # key: value +# ``` podLabels: {} -# Allow overriding of revisionHistoryLimit across all deployments. +# -- Allow overriding of revisionHistoryLimit across all deployments. revisionHistoryLimit: 10 -securityContext: - enabled: true - djangoSecurityContext: - # django dockerfile sets USER=1001 - runAsUser: 1001 - nginxSecurityContext: - # nginx dockerfile sets USER=1001 - runAsUser: 1001 - serviceAccount: - # Specifies whether a service account should be created. + # -- Specifies whether a service account should be created. create: true - # The name of the service account to use. + # -- The name of the service account to use. # If not set and create is true, a name is generated using the fullname template - # name: "" + name: "" - # Optional additional annotations to add to the DefectDojo's Service Account. + # -- Optional additional annotations to add to the DefectDojo's Service Account. annotations: {} - # Optional additional labels to add to the DefectDojo's Service Account. + # -- Optional additional labels to add to the DefectDojo's Service Account. 
labels: {} dbMigrationChecker: + # -- If empty, uses values from images.django.image + image: + registry: "" + repository: "" + tag: "" + digest: "" + # -- Enable/disable the DB migration checker. enabled: true + # -- Container security context for the DB migration checker. + containerSecurityContext: {} + # -- Additional environment variables for DB migration checker. + extraEnv: [] + # -- Array of additional volume mount points for DB migration checker. + extraVolumeMounts: [] + # -- Resource requests/limits for the DB migration checker. resources: requests: cpu: 100m @@ -114,6 +159,12 @@ dbMigrationChecker: tests: unitTests: + # -- If empty, uses values from images.django.image + image: + registry: "" + repository: "" + tag: "" + digest: "" automountServiceAccountToken: false resources: requests: @@ -135,53 +186,77 @@ admin: monitoring: enabled: false - # Add the nginx prometheus exporter sidecar prometheus: + # -- Add the nginx prometheus exporter sidecar enabled: false - image: nginx/nginx-prometheus-exporter:1.4.2 + image: + registry: "" + repository: nginx/nginx-prometheus-exporter + tag: "1.4.2" + digest: "" imagePullPolicy: IfNotPresent - -annotations: {} + # -- Optional: container security context for nginx prometheus exporter + containerSecurityContext: {} + # -- Optional: additional environment variables injected to the nginx prometheus exporter container + extraEnv: [] + # -- Array of additional volume mount points for the nginx prometheus exporter + extraVolumeMounts: [] + # -- Optional: add resource requests/limits for the nginx prometheus exporter container + resources: {} secrets: - # Add annotations for secret resources + # -- Add annotations for secret resources annotations: {} # Components celery: broker: redis logLevel: INFO - # Common annotations to worker and beat deployments and pods. + # -- Common annotations to worker and beat deployments and pods. 
annotations: {} beat: + # -- If empty, uses values from images.django.image + image: + registry: "" + repository: "" + tag: "" + digest: "" automountServiceAccountToken: false - # Annotations for the Celery beat deployment. + # -- Annotations for the Celery beat deployment. annotations: {} affinity: {} - # Additional environment variables injected to Celery beat containers. + # -- Container security context for the Celery beat containers. + containerSecurityContext: {} + # -- Additional environment variables injected to Celery beat containers. extraEnv: [] - # A list of additional initContainers to run before celery beat containers. + # -- A list of additional initContainers to run before celery beat containers. extraInitContainers: [] - # Array of additional volume mount points for the celery beat containers. + # -- Array of additional volume mount points for the celery beat containers. extraVolumeMounts: [] - # A list of extra volumes to mount + # -- A list of extra volumes to mount # @type: array extraVolumes: [] - # Enable liveness probe for Celery beat container. + # -- Enable liveness probe for Celery beat container. + # ``` + # exec: + # command: + # - bash + # - -c + # - celery -A dojo inspect ping -t 5 + # initialDelaySeconds: 30 + # periodSeconds: 60 + # timeoutSeconds: 10 + # ``` livenessProbe: {} - # exec: - # command: - # - bash - # - -c - # - celery -A dojo inspect ping -t 5 - # initialDelaySeconds: 30 - # periodSeconds: 60 - # timeoutSeconds: 10 nodeSelector: {} - # Annotations for the Celery beat pods. + # -- Annotations for the Celery beat pods. podAnnotations: {} - # Enable readiness probe for Celery beat container. + # -- Pod security context for the Celery beat pods. + podSecurityContext: {} + # -- Enable readiness probe for Celery beat container. 
readinessProbe: {} + # @schema maximum:1 + # -- Multiple replicas are not allowed (Beat is intended to be a singleton) because scaling to >1 will double-run schedules replicas: 1 resources: requests: @@ -190,37 +265,49 @@ celery: limits: cpu: 2000m memory: 256Mi - # Enable startup probe for Celery beat container. + # -- Enable startup probe for Celery beat container. startupProbe: {} tolerations: [] worker: + # -- If empty, uses values from images.django.image + image: + registry: "" + repository: "" + tag: "" + digest: "" automountServiceAccountToken: false - # Annotations for the Celery worker deployment. + # -- Annotations for the Celery worker deployment. annotations: {} affinity: {} - # Additional environment variables injected to Celery worker containers. + # -- Container security context for the Celery worker containers. + containerSecurityContext: {} + # -- Additional environment variables injected to Celery worker containers. extraEnv: [] - # A list of additional initContainers to run before celery worker containers. + # -- A list of additional initContainers to run before celery worker containers. extraInitContainers: [] - # Array of additional volume mount points for the celery worker containers. + # -- Array of additional volume mount points for the celery worker containers. extraVolumeMounts: [] - # A list of extra volumes to mount. + # -- A list of extra volumes to mount. # @type: array extraVolumes: [] - # Enable liveness probe for Celery worker containers. + # -- Enable liveness probe for Celery worker containers. + # ``` + # exec: + # command: + # - bash + # - -c + # - celery -A dojo inspect ping -t 5 + # initialDelaySeconds: 30 + # periodSeconds: 60 + # timeoutSeconds: 10 + # ``` livenessProbe: {} - # exec: - # command: - # - bash - # - -c - # - celery -A dojo inspect ping -t 5 - # initialDelaySeconds: 30 - # periodSeconds: 60 - # timeoutSeconds: 10 nodeSelector: {} - # Annotations for the Celery beat pods. 
+ # -- Annotations for the Celery beat pods. podAnnotations: {} - # Enable readiness probe for Celery worker container. + # -- Pod security context for the Celery worker pods. + podSecurityContext: {} + # -- Enable readiness probe for Celery worker container. readinessProbe: {} replicas: 1 resources: @@ -230,18 +317,17 @@ celery: limits: cpu: 2000m memory: 512Mi - # Enable startup probe for Celery worker container. + # -- Enable startup probe for Celery worker container. startupProbe: {} tolerations: [] appSettings: - poolType: solo - # Performance improved celery worker config when needing to deal with a lot of findings (e.g deduplication ops) - # Comment out the "solo" line, and uncomment the following lines. + # -- Performance improved celery worker config when needing to deal with a lot of findings (e.g deduplication ops) # poolType: prefork # autoscaleMin: 2 # autoscaleMax: 8 # concurrency: 8 # prefetchMultiplier: 128 + poolType: solo django: automountServiceAccountToken: false @@ -250,19 +336,32 @@ django: annotations: {} type: "" affinity: {} + # -- Pod security context for the Django pods. + podSecurityContext: + fsGroup: 1001 ingress: enabled: true ingressClassName: "" activateTLS: true secretName: defectdojo-tls + # -- Restricts the type of ingress controller that can interact with our chart (nginx, traefik, ...) + # `kubernetes.io/ingress.class: nginx` + # Depending on the size and complexity of your scans, you might want to increase the default ingress timeouts if you see repeated 504 Gateway Timeouts + # `nginx.ingress.kubernetes.io/proxy-read-timeout: "1800"` + # `nginx.ingress.kubernetes.io/proxy-send-timeout: "1800"` annotations: {} - # Restricts the type of ingress controller that can interact with our chart (nginx, traefik, ...) 
- # kubernetes.io/ingress.class: nginx - # Depending on the size and complexity of your scans, you might want to increase the default ingress timeouts if you see repeated 504 Gateway Timeouts - # nginx.ingress.kubernetes.io/proxy-read-timeout: "1800" - # nginx.ingress.kubernetes.io/proxy-send-timeout: "1800" nginx: - # To extra environment variables to the nginx container, you can use extraEnv. For example: + # -- If empty, uses values from images.nginx.image + image: + registry: "" + repository: "" + tag: "" + digest: "" + # -- Container security context for the nginx containers. + containerSecurityContext: + # -- nginx dockerfile sets USER=1001 + runAsUser: 1001 + # -- To extra environment variables to the nginx container, you can use extraEnv. For example: # extraEnv: # - name: FOO # valueFrom: @@ -270,7 +369,7 @@ django: # name: foo # key: bar extraEnv: [] - # Array of additional volume mount points for nginx containers. + # -- Array of additional volume mount points for nginx containers. extraVolumeMounts: [] tls: enabled: false @@ -287,7 +386,16 @@ django: strategy: {} tolerations: [] uwsgi: - # To add (or override) extra variables which need to be pulled from another configMap, you can + # -- If empty, uses values from images.django.image + image: + registry: "" + repository: "" + tag: "" + digest: "" + containerSecurityContext: + # -- django dockerfile sets USER=1001 + runAsUser: 1001 + # -- To add (or override) extra variables which need to be pulled from another configMap, you can # use extraEnv. For example: # extraEnv: # - name: DD_DATABASE_HOST @@ -296,10 +404,10 @@ django: # name: my-other-postgres-configmap # key: cluster_endpoint extraEnv: [] - # Array of additional volume mount points for uwsgi containers. + # -- Array of additional volume mount points for uwsgi containers. extraVolumeMounts: [] livenessProbe: - # Enable liveness checks on uwsgi container. + # -- Enable liveness checks on uwsgi container. 
enabled: true failureThreshold: 6 initialDelaySeconds: 0 @@ -307,7 +415,7 @@ django: successThreshold: 1 timeoutSeconds: 5 readinessProbe: - # Enable readiness checks on uwsgi container. + # -- Enable readiness checks on uwsgi container. enabled: true failureThreshold: 6 initialDelaySeconds: 0 @@ -315,7 +423,7 @@ django: successThreshold: 1 timeoutSeconds: 5 startupProbe: - # Enable startup checks on uwsgi container. + # -- Enable startup checks on uwsgi container. enabled: true failureThreshold: 30 initialDelaySeconds: 0 @@ -332,10 +440,13 @@ django: appSettings: processes: 4 threads: 4 - maxFd: 0 # 102400 # Use this value to set the maximum number of file descriptors. If set to 0 will be detected by uwsgi - enableDebug: false # this also requires DD_DEBUG to be set to True + # -- Use this value to set the maximum number of file descriptors. If set to 0 will be detected by uwsgi + # e.g. 102400 + maxFd: 0 + # -- this also requires DD_DEBUG to be set to True + enableDebug: false certificates: - # includes additional CA certificate as volume, it refrences REQUESTS_CA_BUNDLE env varible + # -- includes additional CA certificate as volume, it refrences REQUESTS_CA_BUNDLE env varible # to create configMap `kubectl create cm defectdojo-ca-certs --from-file=ca.crt` # NOTE: it reflects REQUESTS_CA_BUNDLE for celery workers, beats as well enabled: false @@ -343,27 +454,32 @@ django: certMountPath: /certs/ certFileName: ca.crt - # A list of additional initContainers to run before the uwsgi and nginx containers. + # -- Additional environment variables injected to all Django containers and initContainers. + extraEnv: [] + # -- A list of additional initContainers to run before the uwsgi and nginx containers. extraInitContainers: [] - # A list of extra volumes to mount. + # -- Array of additional volume mount points common to all containers and initContainers. + extraVolumeMounts: [] + # -- A list of extra volumes to mount. 
extraVolumes: [] - # This feature needs more preparation before can be enabled, please visit KUBERNETES.md#media-persistent-volume + # -- This feature needs more preparation before can be enabled, please visit KUBERNETES.md#media-persistent-volume mediaPersistentVolume: enabled: true fsGroup: 1001 - # any name + # -- any name name: media - # could be emptyDir (not for production) or pvc + # -- could be emptyDir (not for production) or pvc type: emptyDir - # in case if pvc specified, should point to the already existing pvc + # -- in case if pvc specified, should point to the already existing pvc persistentVolumeClaim: - # set to true to create a new pvc and if django.mediaPersistentVolume.type is set to pvc + # -- set to true to create a new pvc and if django.mediaPersistentVolume.type is set to pvc create: false name: "" size: 5Gi + # -- check KUBERNETES.md doc first for option to choose accessModes: - - ReadWriteMany # check KUBERNETES.md doc first for option to choose + - ReadWriteMany storageClassName: "" initializer: @@ -372,10 +488,17 @@ initializer: jobAnnotations: {} annotations: {} labels: {} - keepSeconds: 60 # A positive integer will keep this Job and Pod deployed for the specified number of seconds, after which they will be removed. For all other values, the Job and Pod will remain deployed. + # -- A positive integer will keep this Job and Pod deployed for the specified number of seconds, after which they will be removed. For all other values, the Job and Pod will remain deployed. + keepSeconds: 60 affinity: {} nodeSelector: {} tolerations: [] + # -- If empty, uses values from images.django.image + image: + registry: "" + repository: "" + tag: "" + digest: "" resources: requests: cpu: 100m @@ -383,21 +506,25 @@ initializer: limits: cpu: 2000m memory: 512Mi - # Additional environment variables injected to the initializer job pods. 
+ # -- Container security context for the initializer Job container + containerSecurityContext: {} + # -- Additional environment variables injected to the initializer job pods. extraEnv: [] - # Array of additional volume mount points for the initializer job (init)containers. + # -- Array of additional volume mount points for the initializer job (init)containers. extraVolumeMounts: [] - # A list of extra volumes to attach to the initializer job pods. + # -- A list of extra volumes to attach to the initializer job pods. extraVolumes: [] + # -- Pod security context for the initializer Job + podSecurityContext: {} - # staticName defines whether name of the job will be the same (e.g., "defectdojo-initializer") + # -- staticName defines whether name of the job will be the same (e.g., "defectdojo-initializer") # or different every time - generated based on current time (e.g., "defectdojo-initializer-2024-11-11-18-57") # This might be handy for ArgoCD deployments staticName: false -# For more advance options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/postgresql +# -- For more advance options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/postgresql postgresql: - # To use an external instance, switch enabled to `false` and set the address in `postgresServer` below + # -- To use an external instance, switch enabled to `false` and set the address in `postgresServer` below enabled: true auth: username: defectdojo @@ -417,59 +544,67 @@ postgresql: ports: postgresql: 5432 podSecurityContext: - # Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC + # -- Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC enabled: true - # fsGroup specification below is not applied if enabled=false. enabled=false is the required setting for OpenShift "restricted SCC" to work successfully. 
+ # -- fsGroup specification below is not applied if enabled=false. enabled=false is the required setting for OpenShift "restricted SCC" to work successfully. fsGroup: 1001 containerSecurityContext: - # Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC + # -- Default is true for K8s. Enabled needs to false for OpenShift restricted SCC and true for anyuid SCC enabled: true - # runAsUser specification below is not applied if enabled=false. enabled=false is the required setting for OpenShift "restricted SCC" to work successfully. + # -- runAsUser specification below is not applied if enabled=false. enabled=false is the required setting for OpenShift "restricted SCC" to work successfully. runAsUser: 1001 affinity: {} nodeSelector: {} volumePermissions: enabled: false - # if using restricted SCC set runAsUser: "auto" and if running under anyuid SCC - runAsUser needs to match the line above + # -- if using restricted SCC set runAsUser: "auto" and if running under anyuid SCC - runAsUser needs to match the line above containerSecurityContext: runAsUser: 1001 shmVolume: chmod: enabled: false -# Google CloudSQL support in GKE via gce-proxy +# -- Google CloudSQL support in GKE via gce-proxy cloudsql: - # To use CloudSQL in GKE set 'enable: true' + # -- To use CloudSQL in GKE set 'enable: true' enabled: false - # By default, the proxy has verbose logging. Set this to false to make it less verbose + # -- By default, the proxy has verbose logging. 
Set this to false to make it less verbose verbose: true + # -- set repo and image tag of gce-proxy image: - # set repo and image tag of gce-proxy repository: gcr.io/cloudsql-docker/gce-proxy tag: 1.37.9 pullPolicy: IfNotPresent - # set CloudSQL instance: 'project:zone:instancename' + # -- set CloudSQL instance: 'project:zone:instancename' instance: "" - # use IAM database authentication + # -- use IAM database authentication enable_iam_login: false - # whether to use a private IP to connect to the database + # -- whether to use a private IP to connect to the database use_private_ip: false + # -- Optional: security context for the CloudSQL proxy container. + containerSecurityContext: {} + # -- Additional environment variables for the CloudSQL proxy container. + extraEnv: [] + # -- Array of additional volume mount points for the CloudSQL proxy container + extraVolumeMounts: [] + # -- Optional: add resource requests/limits for the CloudSQL proxy container. + resources: {} -# Settings to make running the chart on GKE simpler +# -- Settings to make running the chart on GKE simpler gke: - # Set to true to configure the Ingress to use the GKE provided ingress controller + # -- Set to true to configure the Ingress to use the GKE provided ingress controller useGKEIngress: false - # Set to true to have GKE automatically provision a TLS certificate for the host specified + # -- Set to true to have GKE automatically provision a TLS certificate for the host specified # Requires useGKEIngress to be set to true # When using this option, be sure to set django.ingress.activateTLS to false useManagedCertificate: false - # Workload Identity allows the K8s service account to assume the IAM access of a GCP service account to interact with other GCP services + # -- Workload Identity allows the K8s service account to assume the IAM access of a GCP service account to interact with other GCP services # Only works with serviceAccount.create = true workloadIdentityEmail: "" -# For more 
advance options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/redis +# -- For more advance options check the bitnami chart documentation: https://github.com/bitnami/charts/tree/main/bitnami/redis redis: - # To use an external instance, switch enabled to `false`` and set the address in `redisServer` below + # -- To use an external instance, switch enabled to `false`` and set the address in `redisServer` below enabled: true auth: existingSecret: defectdojo-redis-specific @@ -484,41 +619,47 @@ redis: # Sentinel configuration parameters sentinel: enabled: false - # If TLS is enabled, the Redis broker will use the redis:// and optionally mount the certificates - # from an existing secret. tls: + # -- If TLS is enabled, the Redis broker will use the redis:// and optionally mount the certificates + # from an existing secret. enabled: false # existingSecret: redis-tls # certFilename: tls.crt # certKeyFilename: tls.key # certCAFilename: ca.crt -# To add extra variables not predefined by helm config it is possible to define in extraConfigs block, e.g. below: +# -- To add extra variables not predefined by helm config it is possible to define in extraConfigs block, e.g. 
below: # NOTE Do not store any kind of sensitive information inside of it +# ``` +# DD_SOCIAL_AUTH_AUTH0_OAUTH2_ENABLED: 'true' +# DD_SOCIAL_AUTH_AUTH0_KEY: 'dev' +# DD_SOCIAL_AUTH_AUTH0_DOMAIN: 'xxxxx' +# ``` extraConfigs: {} -# DD_SOCIAL_AUTH_AUTH0_OAUTH2_ENABLED: 'true' -# DD_SOCIAL_AUTH_AUTH0_KEY: 'dev' -# DD_SOCIAL_AUTH_AUTH0_DOMAIN: 'xxxxx' -# Extra secrets can be created inside of extraSecrets block: +# -- Extra secrets can be created inside of extraSecrets block: # NOTE This is just an exmaple, do not store sensitive data in plain text form, better inject it during the deployment/upgrade by --set extraSecrets.secret=someSecret +# ``` +# DD_SOCIAL_AUTH_AUTH0_SECRET: 'xxx' +# ``` extraSecrets: {} -# DD_SOCIAL_AUTH_AUTH0_SECRET: 'xxx' -# To add (or override) extra variables which need to be pulled from another configMap, you can +# -- To add (or override) extra variables which need to be pulled from another configMap, you can # use extraEnv. For example: -extraEnv: [] +# ``` # - name: DD_DATABASE_HOST # valueFrom: # configMapKeyRef: # name: my-other-postgres-configmap # key: cluster_endpoint +# ``` +extraEnv: [] -# To add code snippet which would extend setting functionality, you might add it here +# -- To add code snippet which would extend setting functionality, you might add it here # It will be stored as ConfigMap and mounted `dojo/settings/local_settings.py`. # For more see: https://documentation.defectdojo.com/getting_started/configuration/ -localsettingspy: "" # For example: +# ``` # localsettingspy: | # INSTALLED_APPS += ( # 'debug_toolbar', @@ -526,16 +667,19 @@ localsettingspy: "" # MIDDLEWARE = [ # 'debug_toolbar.middleware.DebugToolbarMiddleware', # ] + MIDDLEWARE +# ``` +localsettingspy: "" + # # External database support. 
# -# To use an external Redis instance, set `redis.enabled` to false and set the address here: # @schema type:[string, null] +# -- To use an external Redis instance, set `redis.enabled` to false and set the address here: redisServer: ~ -# Parameters attached to the redis connection string, defaults to "ssl_cert_reqs=optional" if `redis.tls.enabled` +# -- Parameters attached to the redis connection string, defaults to "ssl_cert_reqs=optional" if `redis.tls.enabled` redisParams: "" # -# To use an external PostgreSQL instance (like CloudSQL), set `postgresql.enabled` to false, -# set items in `postgresql.auth` part for authentication, and set the address here: # @schema type:[string, null] +# -- To use an external PostgreSQL instance (like CloudSQL), set `postgresql.enabled` to false, +# set items in `postgresql.auth` part for authentication, and set the address here: postgresServer: ~ diff --git a/readme-docs/CONTRIBUTING.md b/readme-docs/CONTRIBUTING.md index e440f498c07..27f8093355e 100644 --- a/readme-docs/CONTRIBUTING.md +++ b/readme-docs/CONTRIBUTING.md @@ -56,7 +56,7 @@ Please use [these test scripts](../tests) to test your changes. These are the sc For changes that require additional settings, you can now use local_settings.py file. See the logging section below for more information. ## Python3 Version -For compatibility reasons, the code in dev branch should be python3.12 compliant. +For compatibility reasons, the code in dev branch should be python3.13 compliant. ## Database migrations When changes are made to the database model, a database migration is needed. This migration can be generated using something like @@ -82,7 +82,7 @@ DefectDojo. 0. Pull requests should be submitted to the `dev` or `bugfix` branch. -0. In dev branch, the code should be python 3.12 compliant. +0. In dev branch, the code should be python 3.13 compliant. 
[dojo_settings]: /dojo/settings/settings.dist.py "DefectDojo settings file" [pep8]: https://www.python.org/dev/peps/pep-0008/ "PEP8" diff --git a/requirements-dev.txt b/requirements-dev.txt index 25fe9b22226..4e8b5cd1fd5 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # These are only needed during development and testing # Debug toolbar for development -django-debug-toolbar==6.0.0 +django-debug-toolbar==6.1.0 django-debug-toolbar-request-history==0.1.4 # Testing dependencies diff --git a/requirements-lint.txt b/requirements-lint.txt index 6a0ba23ce92..fcefb6c9a0f 100644 --- a/requirements-lint.txt +++ b/requirements-lint.txt @@ -1 +1 @@ -ruff==0.13.2 \ No newline at end of file +ruff==0.14.3 diff --git a/requirements.txt b/requirements.txt index 5063cff6e11..82bc08c4176 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,16 @@ # requirements.txt for DefectDojo using Python 3.x asteval==1.0.6 -bleach==6.2.0 +bleach==6.3.0 bleach[css] celery==5.5.3 defusedxml==0.7.1 django_celery_results==2.6.0 django-auditlog==3.2.1 -django-pghistory==3.7.0 +django-pghistory==3.8.3 django-dbbackup==5.0.0 django-environ==0.12.0 django-filter==25.1 -django-imagekit==5.0.0 +django-imagekit==6.0.0 django-multiselectfield==1.0.1 django-polymorphic==4.1.0 django-crispy-forms==2.4 @@ -19,30 +19,31 @@ django-slack==5.19.0 django-watson==1.6.3 django-prometheus==2.4.1 Django==5.1.13 +django-single-session==0.2.0 djangorestframework==3.16.1 html2text==2025.4.15 -humanize==4.13.0 -jira==3.8.0 +humanize==4.14.0 +jira==3.10.5 PyGithub==2.8.1 lxml==6.0.2 Markdown==3.9 openpyxl==3.1.5 -Pillow==11.3.0 # required by django-imagekit -psycopg[c]==3.2.10 -cryptography==46.0.2 +Pillow==12.0.0 # required by django-imagekit +psycopg[c]==3.2.12 +cryptography==46.0.3 python-dateutil==2.9.0.post0 -redis==6.4.0 +redis==7.0.1 requests==2.32.5 -sqlalchemy==2.0.43 # Required by Celery broker transport +sqlalchemy==2.0.44 # Required by Celery broker transport 
urllib3==2.5.0 -uWSGI==2.0.30 +uWSGI==2.0.31 vobject==0.9.9 whitenoise==5.2.0 titlecase==2.4.1 -social-auth-app-django==5.4.3 -social-auth-core==4.7.0 +social-auth-app-django==5.6.0 +social-auth-core==4.8.1 gitpython==3.1.45 -python-gitlab==6.4.0 +python-gitlab==7.0.0 cpe==1.3.1 packageurl-python==0.17.5 django-crum==0.7.9 @@ -61,9 +62,9 @@ django-ratelimit==4.1.0 argon2-cffi==25.1.0 blackduck==1.1.3 pycurl==7.45.7 # Required for Celery Broker AWS (SQS) support -boto3==1.40.44 # Required for Celery Broker AWS (SQS) support +boto3==1.40.63 # Required for Celery Broker AWS (SQS) support netaddr==1.3.0 -vulners==2.3.7 +vulners==3.1.1 fontawesomefree==6.6.0 PyYAML==6.0.3 pyopenssl==25.3.0 diff --git a/ruff.toml b/ruff.toml index 598517dd435..670a95b7b99 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,5 +1,5 @@ -# Always generate Python 3.12-compatible code. -target-version = "py312" +# Always generate Python 3.13-compatible code. +target-version = "py313" # Same as Black. line-length = 120 @@ -36,9 +36,9 @@ select = [ "FAST", "YTT", "ASYNC", - "S1", "S2", "S302", "S303", "S304", "S305", "S306", "S307", "S31", "S321", "S323", "S324", "S401", "S402", "S406", "S407", "S408", "S409", "S41", "S5", "S601", "S602", "S604", "S605", "S606", "S607", "S609", "S61", "S7", + "S1", "S2", "S302", "S303", "S304", "S305", "S306", "S307", "S31", "S32", "S401", "S402", "S406", "S407", "S408", "S409", "S41", "S5", "S601", "S602", "S604", "S605", "S606", "S607", "S609", "S61", "S7", "FBT", - "B00", "B010", "B011", "B012", "B013", "B014", "B015", "B016", "B017", "B018", "B019", "B020", "B021", "B022", "B023", "B025", "B028", "B029", "B03", "B901", "B903", "B905", "B911", + "B00", "B01", "B020", "B021", "B022", "B023", "B025", "B027", "B028", "B029", "B03", "B901", "B903", "B905", "B911", "A", "COM", "C4", @@ -58,7 +58,7 @@ select = [ "PIE", "T20", "PYI", - "PT001", "PT002", "PT003", "PT006", "PT007", "PT008", "PT01", "PT020", "PT021", "PT022", "PT023", "PT024", "PT025", "PT026", "PT028", 
"PT029", "PT03", + "PT", "Q", "RSE", "RET", @@ -74,7 +74,7 @@ select = [ "C90", "NPY", "PD", - "N803", "N804", "N811", "N812", "N813", "N814", "N817", "N818", "N999", + "N803", "N804", "N805", "N811", "N812", "N813", "N814", "N817", "N818", "N999", "PERF1", "PERF2", "PERF401", "PERF403", "E", "W", @@ -82,7 +82,7 @@ select = [ "D2", "D3", "D402", "D403", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D412", "D413", "D414", "D416", "F", "PGH", - "PLC0", "PLC1", "PLC24", "PLC28", "PLC3", + "PLC", "PLE", "PLR01", "PLR02", "PLR04", "PLR0915", "PLR1711", "PLR1704", "PLR1714", "PLR1716", "PLR172", "PLR173", "PLR2044", "PLR5", "PLR6104", "PLR6201", "PLW", @@ -101,6 +101,8 @@ ignore = [ "FIX002", # TODOs need some love but we will probably not get of them "D211", # `one-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible. "D212", # `multi-line-summary-first-line` (D212) and `multi-line-summary-second-line` (D213) are incompatible. + "PT009", # We are using a different style of tests (official Django tests), so it does not make sense to try to fix it + "PT027", # Same ^ ] # Allow autofix for all enabled rules (when `--fix`) is provided. diff --git a/run-integration-tests.sh b/run-integration-tests.sh index 953fbbab31f..a07fe15e629 100755 --- a/run-integration-tests.sh +++ b/run-integration-tests.sh @@ -41,7 +41,7 @@ while [[ $# -gt 0 ]]; do esac done -echo "Running docker compose unit tests with profile postgres-redis and test case $TEST_CASE ..." +echo "Running docker compose unit tests and test case $TEST_CASE ..." # Compose V2 integrates compose functions into the Docker platform, # continuing to support most of the previous docker-compose features @@ -50,8 +50,8 @@ echo "Running docker compose unit tests with profile postgres-redis and test cas echo "Building images..." ./docker/setEnv.sh integration_tests docker compose build -echo "Setting up DefectDojo with Postgres and Redis..." 
-DD_INTEGRATION_TEST_FILENAME="$TEST_CASE" docker compose -d postgres nginx celerybeat celeryworker mailhog uwsgi redis +echo "Setting up DefectDojo" +DD_INTEGRATION_TEST_FILENAME="$TEST_CASE" docker compose -d postgres nginx celerybeat celeryworker mailhog uwsgi valkey echo "Initializing DefectDojo..." DD_INTEGRATION_TEST_FILENAME="$TEST_CASE" docker compose --exit-code-from initializer initializer echo "Running the integration tests..." diff --git a/run-unittest.sh b/run-unittest.sh index d771285c9ee..6073d4ff582 100755 --- a/run-unittest.sh +++ b/run-unittest.sh @@ -17,6 +17,14 @@ usage() { echo "You must specify a test case (arg)!" echo "Any additional arguments will be passed to the test command." echo + echo "Make sure you run this script in dev mode." + echo "You can enter dev mode using the following command:" + echo "./docker/setEnv.sh dev" + echo + echo "Lastly, make sure the application is running by using the following docker commands:" + echo "docker compose build" + echo "docker compose up" + echo echo "Example commands:" echo "./run-unittest.sh --test-case unittests.tools.test_stackhawk_parser.TestStackHawkParser" echo "./run-unittest.sh --test-case unittests.tools.test_stackhawk_parser.TestStackHawkParser -v3 --failfast" diff --git a/unittests/scans/cyclonedx/no-severity.json b/unittests/scans/cyclonedx/no-severity.json new file mode 100644 index 00000000000..ed12833bc5c --- /dev/null +++ b/unittests/scans/cyclonedx/no-severity.json @@ -0,0 +1,35 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.4", + "version": 1, + "metadata": { + "timestamp": "2025-10-28T14:38:10Z" + }, + "vulnerabilities": [ + { + "id": "CVE-2021-44228", + "source": { + "name": "NVD", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-44228" + }, + "ratings": [ + { + "source": { + "name": "NVD", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-44228" + }, + "score": 10.0, + "method": "CVSSv3", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H" + } + ], + 
"created": "2025-09-05T05:05:47Z", + "updated": "2025-03-03T16:51:00Z", + "affects": [ + { + "ref": "gerbwetbqt" + } + ] + } + ] +} diff --git a/unittests/scans/github_sast/github_sast_many_vul.json b/unittests/scans/github_sast/github_sast_many_vul.json new file mode 100644 index 00000000000..ca03ebf4094 --- /dev/null +++ b/unittests/scans/github_sast/github_sast_many_vul.json @@ -0,0 +1,107 @@ +[ + { + "number":35, + "created_at":"2024-01-19T14:11:18Z", + "updated_at":"2024-01-19T14:11:20Z", + "url":"https://api.github.com/repos/OWASP/test-repository/code-scanning/alerts/35", + "html_url":"https://github.com/OWASP/test-repository/security/code-scanning/35", + "state":"open", + "fixed_at":"None", + "dismissed_by":"None", + "dismissed_at":"None", + "dismissed_reason":"None", + "dismissed_comment":"None", + "rule":{ + "id":"py/clear-text-storage-sensitive-data", + "severity":"error", + "description":"Clear-text storage of sensitive information", + "name":"py/clear-text-storage-sensitive-data", + "tags":[ + "external/cwe/cwe-312", + "external/cwe/cwe-315", + "external/cwe/cwe-359", + "security" + ], + "security_severity_level":"high" + }, + "tool":{ + "name":"CodeQL", + "guid":"None", + "version":"2.16.2" + }, + "most_recent_instance":{ + "ref":"refs/OWASP/test-repository", + "analysis_key":"dynamic/github-code-scanning/codeql:analyze", + "environment":"{\"language\":\"python\"}", + "category":"/language:python", + "state":"open", + "commit_sha":"XXX", + "message":{ + "text":"This expression stores sensitive data (secret) as clear text." 
+ }, + "location":{ + "path":"src/file.py", + "start_line":42, + "end_line":42, + "start_column":17, + "end_column":23 + }, + "classifications":[] + }, + "instances_url":"https://api.github.com/repos/OWASP/test-repository/code-scanning/alerts/35/instances" + }, + { + "number":34, + "created_at":"2024-01-19T14:11:18Z", + "updated_at":"2024-01-19T14:11:20Z", + "url":"https://api.github.com/repos/OWASP/test-repository/code-scanning/alerts/34", + "html_url":"https://github.com/OWASP/test-repository/security/code-scanning/34", + "state":"open", + "fixed_at":"None", + "dismissed_by":"None", + "dismissed_at":"None", + "dismissed_reason":"None", + "dismissed_comment":"None", + "rule":{ + "id":"py/path-injection", + "severity":"error", + "description":"Uncontrolled data used in path expression", + "name":"py/path-injection", + "tags":[ + "correctness", + "external/cwe/cwe-022", + "external/cwe/cwe-023", + "external/cwe/cwe-036", + "external/cwe/cwe-073", + "external/cwe/cwe-099", + "security" + ], + "security_severity_level":"high" + }, + "tool":{ + "name":"CodeQL", + "guid":"None", + "version":"2.16.2" + }, + "most_recent_instance":{ + "ref":"refs/OWASP/test-repository", + "analysis_key":"dynamic/github-code-scanning/codeql:analyze", + "environment":"{\"language\":\"python\"}", + "category":"/language:python", + "state":"open", + "commit_sha":"XXX", + "message":{ + "text":"This path depends on a user-provided value." 
+ }, + "location":{ + "path":"src/file2.py", + "start_line":78, + "end_line":78, + "start_column":25, + "end_column":63 + }, + "classifications":[] + }, + "instances_url":"https://api.github.com/repos/OWASP/test-repository/code-scanning/alerts/34/instances" + } + ] \ No newline at end of file diff --git a/unittests/scans/github_sast/github_sast_one_vul.json b/unittests/scans/github_sast/github_sast_one_vul.json new file mode 100644 index 00000000000..cd598f7077e --- /dev/null +++ b/unittests/scans/github_sast/github_sast_one_vul.json @@ -0,0 +1,53 @@ +[ + { + "number":35, + "created_at":"2024-01-19T14:11:18Z", + "updated_at":"2024-01-19T14:11:20Z", + "url":"https://api.github.com/repos/OWASP/test-repository/code-scanning/alerts/35", + "html_url":"https://github.com/OWASP/test-repository/security/code-scanning/35", + "state":"open", + "fixed_at":"None", + "dismissed_by":"None", + "dismissed_at":"None", + "dismissed_reason":"None", + "dismissed_comment":"None", + "rule":{ + "id":"py/clear-text-storage-sensitive-data", + "severity":"error", + "description":"Clear-text storage of sensitive information", + "name":"py/clear-text-storage-sensitive-data", + "tags":[ + "external/cwe/cwe-312", + "external/cwe/cwe-315", + "external/cwe/cwe-359", + "security" + ], + "security_severity_level":"high" + }, + "tool":{ + "name":"CodeQL", + "guid":"None", + "version":"2.16.2" + }, + "most_recent_instance":{ + "ref":"refs/OWASP/test-repository", + "analysis_key":"dynamic/github-code-scanning/codeql:analyze", + "environment":"{\"language\":\"python\"}", + "category":"/language:python", + "state":"open", + "commit_sha":"XXX", + "message":{ + "text":"This expression stores sensitive data (secret) as clear text." 
+ }, + "location":{ + "path":"src/file.py", + "start_line":42, + "end_line":42, + "start_column":17, + "end_column":23 + }, + "classifications":[] + }, + "instances_url":"https://api.github.com/repos/OWASP/test-repository/code-scanning/alerts/35/instances" + } + ] \ No newline at end of file diff --git a/unittests/scans/github_sast/github_sast_zero_vul.json b/unittests/scans/github_sast/github_sast_zero_vul.json new file mode 100644 index 00000000000..0637a088a01 --- /dev/null +++ b/unittests/scans/github_sast/github_sast_zero_vul.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/unittests/scans/github_vulnerability/github-1-vuln-repo-dependabot-link.json b/unittests/scans/github_vulnerability/github-1-vuln-repo-dependabot-link.json index 4c493c8360a..bdb216d707a 100644 --- a/unittests/scans/github_vulnerability/github-1-vuln-repo-dependabot-link.json +++ b/unittests/scans/github_vulnerability/github-1-vuln-repo-dependabot-link.json @@ -2,6 +2,7 @@ "data": { "repository": { "nameWithOwner": "OWASP/test-repository", + "url": "https://github.com/OWASP/test-repository", "search": { "nodes": [ { @@ -10,6 +11,11 @@ { "id": "aabbccddeeff1122334401", "number": "1", + "dependabotUpdate": { + "pullRequest": { + "permalink": "https://github.com/OWASP/test-repository/pull/1" + } + }, "securityVulnerability": { "severity": "CRITICAL", "package": { diff --git a/unittests/scans/github_vulnerability/github-vuln-version.json b/unittests/scans/github_vulnerability/github-vuln-version.json index e80afe7e583..7b1ac5e0037 100644 --- a/unittests/scans/github_vulnerability/github-vuln-version.json +++ b/unittests/scans/github_vulnerability/github-vuln-version.json @@ -7,6 +7,11 @@ "id": "RVA_kwDOLJyUo88AAAABQUWapw", "createdAt": "2024-01-26T02:42:32Z", "vulnerableManifestPath": "sompath/pom.xml", + "dependabotUpdate": { + "pullRequest": { + "permalink": "https://github.com/OWASP/test-repository/pull/1" + } + }, "securityVulnerability": { "severity": "CRITICAL", 
"updatedAt": "2022-12-09T22:02:22Z", @@ -21,6 +26,10 @@ "advisory": { "description": "Pivotal Spring Framework before 6.0.0 suffers from a potential remote code execution (RCE) issue if used for Java deserialization of untrusted data. Depending on how the library is implemented within a product, this issue may or not occur, and authentication may be required.\n\nMaintainers recommend investigating alternative components or a potential mitigating control. Version 4.2.6 and 3.2.17 contain [enhanced documentation](https://github.com/spring-projects/spring-framework/commit/5cbe90b2cd91b866a5a9586e460f311860e11cfa) advising users to take precautions against unsafe Java deserialization, version 5.3.0 [deprecate the impacted classes](https://github.com/spring-projects/spring-framework/issues/25379) and version 6.0.0 [removed it entirely](https://github.com/spring-projects/spring-framework/issues/27422).", "summary": "Pivotal Spring Framework contains unsafe Java deserialization methods", + "epss": { + "percentage": 0.00212, + "percentile": 0.44035 + }, "identifiers": [ { "value": "GHSA-4wrc-f8pq-fpqp", diff --git a/unittests/scans/github_vulnerability/issue_9582.json b/unittests/scans/github_vulnerability/issue_9582.json index 7e297d8f1b2..4500ffcd6e7 100644 --- a/unittests/scans/github_vulnerability/issue_9582.json +++ b/unittests/scans/github_vulnerability/issue_9582.json @@ -1,111 +1,111 @@ -[ - { - "number":35, - "created_at":"2024-01-19T14:11:18Z", - "updated_at":"2024-01-19T14:11:20Z", - "url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/35", - "html_url":"https://github.com/XX/YY/security/code-scanning/35", - "state":"open", - "fixed_at":"None", - "dismissed_by":"None", - "dismissed_at":"None", - "dismissed_reason":"None", - "dismissed_comment":"None", - "rule":{ - "id":"py/clear-text-storage-sensitive-data", - "severity":"error", - "description":"Clear-text storage of sensitive information", - "name":"py/clear-text-storage-sensitive-data", - "tags":[ - 
"external/cwe/cwe-312", - "external/cwe/cwe-315", - "external/cwe/cwe-359", - "security" - ], - "security_severity_level":"high" - }, - "tool":{ - "name":"CodeQL", - "guid":"None", - "version":"2.16.2" - }, - "most_recent_instance":{ - "ref":"refs/XX/YY", - "analysis_key":"dynamic/github-code-scanning/codeql:analyze", - "environment":"{\"language\":\"python\"}", - "category":"/language:python", - "state":"open", - "commit_sha":"XXX", - "message":{ - "text":"This expression stores sensitive data (secret) as clear text." - }, - "location":{ - "path":"Unsafe Deserialization/file.py", - "start_line":42, - "end_line":42, - "start_column":17, - "end_column":23 - }, - "classifications":[ - - ] - }, - "instances_url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/35/instances" - }, - { - "number":34, - "created_at":"2024-01-19T14:11:18Z", - "updated_at":"2024-01-19T14:11:20Z", - "url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/34", - "html_url":"https://github.com/XX/YY/security/code-scanning/34", - "state":"open", - "fixed_at":"None", - "dismissed_by":"None", - "dismissed_at":"None", - "dismissed_reason":"None", - "dismissed_comment":"None", - "rule":{ - "id":"py/path-injection", - "severity":"error", - "description":"Uncontrolled data used in path expression", - "name":"py/path-injection", - "tags":[ - "correctness", - "external/cwe/cwe-022", - "external/cwe/cwe-023", - "external/cwe/cwe-036", - "external/cwe/cwe-073", - "external/cwe/cwe-099", - "security" - ], - "security_severity_level":"high" - }, - "tool":{ - "name":"CodeQL", - "guid":"None", - "version":"2.16.2" - }, - "most_recent_instance":{ - "ref":"refs/XX/YY", - "analysis_key":"dynamic/github-code-scanning/codeql:analyze", - "environment":"{\"language\":\"python\"}", - "category":"/language:python", - "state":"open", - "commit_sha":"XXX", - "message":{ - "text":"This path depends on a user-provided value." 
- }, - "location":{ - "path":"Path Traversal/file2.py", - "start_line":78, - "end_line":78, - "start_column":25, - "end_column":63 - }, - "classifications":[ - - ] - }, - "instances_url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/34/instances" - } -] \ No newline at end of file +[ + { + "number":35, + "created_at":"2024-01-19T14:11:18Z", + "updated_at":"2024-01-19T14:11:20Z", + "url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/35", + "html_url":"https://github.com/XX/YY/security/code-scanning/35", + "state":"open", + "fixed_at":"None", + "dismissed_by":"None", + "dismissed_at":"None", + "dismissed_reason":"None", + "dismissed_comment":"None", + "rule":{ + "id":"py/clear-text-storage-sensitive-data", + "severity":"error", + "description":"Clear-text storage of sensitive information", + "name":"py/clear-text-storage-sensitive-data", + "tags":[ + "external/cwe/cwe-312", + "external/cwe/cwe-315", + "external/cwe/cwe-359", + "security" + ], + "security_severity_level":"high" + }, + "tool":{ + "name":"CodeQL", + "guid":"None", + "version":"2.16.2" + }, + "most_recent_instance":{ + "ref":"refs/XX/YY", + "analysis_key":"dynamic/github-code-scanning/codeql:analyze", + "environment":"{\"language\":\"python\"}", + "category":"/language:python", + "state":"open", + "commit_sha":"XXX", + "message":{ + "text":"This expression stores sensitive data (secret) as clear text." 
+ }, + "location":{ + "path":"Unsafe Deserialization/file.py", + "start_line":42, + "end_line":42, + "start_column":17, + "end_column":23 + }, + "classifications":[ + + ] + }, + "instances_url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/35/instances" + }, + { + "number":34, + "created_at":"2024-01-19T14:11:18Z", + "updated_at":"2024-01-19T14:11:20Z", + "url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/34", + "html_url":"https://github.com/XX/YY/security/code-scanning/34", + "state":"open", + "fixed_at":"None", + "dismissed_by":"None", + "dismissed_at":"None", + "dismissed_reason":"None", + "dismissed_comment":"None", + "rule":{ + "id":"py/path-injection", + "severity":"error", + "description":"Uncontrolled data used in path expression", + "name":"py/path-injection", + "tags":[ + "correctness", + "external/cwe/cwe-022", + "external/cwe/cwe-023", + "external/cwe/cwe-036", + "external/cwe/cwe-073", + "external/cwe/cwe-099", + "security" + ], + "security_severity_level":"high" + }, + "tool":{ + "name":"CodeQL", + "guid":"None", + "version":"2.16.2" + }, + "most_recent_instance":{ + "ref":"refs/XX/YY", + "analysis_key":"dynamic/github-code-scanning/codeql:analyze", + "environment":"{\"language\":\"python\"}", + "category":"/language:python", + "state":"open", + "commit_sha":"XXX", + "message":{ + "text":"This path depends on a user-provided value." 
+ }, + "location":{ + "path":"Path Traversal/file2.py", + "start_line":78, + "end_line":78, + "start_column":25, + "end_column":63 + }, + "classifications":[ + + ] + }, + "instances_url":"https://api.github.com/repos/XX/YY/code-scanning/alerts/34/instances" + } + ] \ No newline at end of file diff --git a/unittests/scans/mobsfscan/many_findings.json b/unittests/scans/mobsf/many_findings.json similarity index 100% rename from unittests/scans/mobsfscan/many_findings.json rename to unittests/scans/mobsf/many_findings.json diff --git a/unittests/scans/mobsfscan/many_findings_cwe_lower.json b/unittests/scans/mobsf/many_findings_cwe_lower.json similarity index 100% rename from unittests/scans/mobsfscan/many_findings_cwe_lower.json rename to unittests/scans/mobsf/many_findings_cwe_lower.json diff --git a/unittests/scans/mobsfscan/no_findings.json b/unittests/scans/mobsf/no_findings.json similarity index 100% rename from unittests/scans/mobsfscan/no_findings.json rename to unittests/scans/mobsf/no_findings.json diff --git a/unittests/scans/wazuh/wazuh_abnormal_severity.json b/unittests/scans/wazuh/wazuh_abnormal_severity.json new file mode 100644 index 00000000000..7a35f00c559 --- /dev/null +++ b/unittests/scans/wazuh/wazuh_abnormal_severity.json @@ -0,0 +1,80 @@ +{ + "took": 8, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 125, + "relation": "eq" + }, + "max_score": 5.596354, + "hits": [ + { + "_index": "wazuh-states-vulnerabilities-wazuh-server", + "_id": "001_c2f8c1a3b6e902b4c6d8e0g7a4b6c5d0e2b4a6n5_CVE-2025-27558", + "_score": 5.596323, + "_source": { + "agent": { + "id": "001", + "name": "myhost0", + "type": "Wazuh", + "version": "v4.11.1" + }, + "host": { + "os": { + "full": "Ubuntu 24.04.2 LTS", + "kernel": "6.8.0-62-generic", + "name": "Ubuntu", + "platform": "ubuntu", + "type": "ubuntu", + "version": "24.04.2" + } + }, + "package": { + "architecture": "amd64", + 
"description": "Signed kernel image generic", + "name": "linux-image-6.8.0-60-generic", + "size": 15025152, + "type": "deb", + "version": "6.8.0-60.63" + }, + "vulnerability": { + "category": "Packages", + "classification": "-", + "description": "IEEE P603.12-REVme D1.2 through D7.1 allows FragAttacks against meshnetworks. In mesh networks using Wi-Fi Protected Access (WPA, WPA2, orWPA3) or Wired Equivalent Privacy (WEP), an adversary can exploit thisvulnerability to inject arbitrary frames towards devices that supportreceiving non-SSP A-MSDU frames. NOTE: this issue exists because of anincorrect fix for CVE-2020-24588. P802.11-REVme, as of early 2025, is aplanned release of the 802.11 standard.", + "detected_at": "2025-05-25T17:07:15.204Z", + "enumeration": "CVE", + "id": "CVE-2025-27558", + "published_at": "2025-04-22T19:16:08Z", + "reference": "https://ubuntu.com/security/CVE-2025-27558, https://www.cve.org/CVERecord?id=CVE-2025-27558", + "scanner": { + "condition": "Package default status", + "reference": "https://cti.wazuh.com/vulnerabilities/cves/CVE-2025-27558", + "source": "Canonical Security Tracker", + "vendor": "Wazuh" + }, + "score": { + "base": 9.1, + "version": "3.1" + }, + "severity": "-", + "under_evaluation": false + }, + "wazuh": { + "cluster": { + "name": "wazuh-server" + }, + "schema": { + "version": "1.0.0" + } + } + } + } + ] + } + } \ No newline at end of file diff --git a/unittests/test_deduplication_logic.py b/unittests/test_deduplication_logic.py index 01c041146b5..f5390eaef60 100644 --- a/unittests/test_deduplication_logic.py +++ b/unittests/test_deduplication_logic.py @@ -22,7 +22,7 @@ Test_Import, Test_Import_Finding_Action, User, - _copy_model_util, + copy_model_util, ) from .dojo_test_case import DojoTestCase, get_unit_tests_scans_path @@ -1215,56 +1215,136 @@ def test_dedupe_same_id_different_test_type_unique_id_or_hash_code(self): # expect not duplicate as the mathcing finding is from another test_type, hash_code is also different 
self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) - def test_identical_different_endpoints_unique_id_or_hash_code(self): + def test_identical_different_endpoints_unique_id_or_hash_code_dynamic(self): # create identical copy, so unique id is the same - finding_new, finding_224 = self.copy_and_reset_finding(find_id=224) + finding_new1, finding_224 = self.copy_and_reset_finding(find_id=224) - finding_new.save(dedupe_option=False) - ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https") + finding_new1.save(dedupe_option=False) + ep1 = Endpoint(product=finding_new1.test.engagement.product, finding=finding_new1, host="myhost.com", protocol="https") ep1.save() - finding_new.endpoints.add(ep1) - finding_new.save() + finding_new1.endpoints.add(ep1) + finding_new1.save() if settings.DEDUPE_ALGO_ENDPOINT_FIELDS == []: # expect duplicate, as endpoints shouldn't affect dedupe and hash_code due to unique_id - self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) + self.assert_finding(finding_new1, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) else: - self.assert_finding(finding_new, not_pk=224, duplicate=False, duplicate_finding_id=None, hash_code=finding_224.hash_code) + # endpoints don't match with 224, so not a duplicate + self.assert_finding(finding_new1, not_pk=224, duplicate=False, duplicate_finding_id=None, hash_code=finding_224.hash_code) + + # remove the finding to prevent it from being duplicated by the next finding we create + finding_new1.delete() # same scenario, now with different uid. 
and different endpoints, but hash will be different due the endpoints because we set dynamic_finding to True - finding_new, finding_224 = self.copy_and_reset_finding(find_id=224) + finding_new2, finding_224 = self.copy_and_reset_finding(find_id=224) - finding_new.save(dedupe_option=False) - ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https") + finding_new2.save(dedupe_option=False) + ep1 = Endpoint(product=finding_new1.test.engagement.product, finding=finding_new2, host="myhost.com", protocol="https") ep1.save() - finding_new.endpoints.add(ep1) - finding_new.unique_id_from_tool = 1 - finding_new.dynamic_finding = True - finding_new.save() + finding_new2.endpoints.add(ep1) + finding_new2.unique_id_from_tool = 1 + finding_new2.dynamic_finding = True + finding_new2.save() if settings.DEDUPE_ALGO_ENDPOINT_FIELDS == []: # different uid. and different endpoints, but endpoints not used for hash anymore -> duplicate - self.assert_finding(finding_new, not_pk=224, duplicate=True, hash_code=finding_224.hash_code) + self.assert_finding(finding_new2, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) + else: + # endpoints do not match with 224 + self.assert_finding(finding_new1, not_pk=224, duplicate=False, duplicate_finding_id=None, hash_code=finding_224.hash_code) + + def test_identical_different_endpoints_unique_id_or_hash_code_static(self): + # create identical copy, so unique id is the same + finding_new1, finding_224 = self.copy_and_reset_finding(find_id=224) + + finding_new1.save(dedupe_option=False) + ep1 = Endpoint(product=finding_new1.test.engagement.product, finding=finding_new1, host="myhost.com", protocol="https") + ep1.save() + finding_new1.endpoints.add(ep1) + finding_new1.save() + + if settings.DEDUPE_ALGO_ENDPOINT_FIELDS == []: + # expect duplicate, as endpoints shouldn't affect dedupe and hash_code due to unique_id + self.assert_finding(finding_new1, 
not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) else: - # this only works right now because of a bug in the deduplication code: https://github.com/DefectDojo/django-DefectDojo/issues/13497 - self.assert_finding(finding_new, not_pk=224, duplicate=False, hash_code=finding_224.hash_code) + # endpoints don't match with 224, so not a duplicate + self.assert_finding(finding_new1, not_pk=224, duplicate=False, duplicate_finding_id=None, hash_code=finding_224.hash_code) + + # remove the finding to prevent it from being duplicated by the next finding we create + finding_new1.delete() # same scenario, now with different uid. and different endpoints - finding_new, finding_224 = self.copy_and_reset_finding(find_id=224) + finding_new3, finding_224 = self.copy_and_reset_finding(find_id=224) - finding_new.save(dedupe_option=False) - ep1 = Endpoint(product=finding_new.test.engagement.product, finding=finding_new, host="myhost.com", protocol="https") + finding_new3.save(dedupe_option=False) + ep1 = Endpoint(product=finding_new3.test.engagement.product, finding=finding_new3, host="myhost.com", protocol="https") ep1.save() - finding_new.endpoints.add(ep1) - finding_new.unique_id_from_tool = 1 - finding_new.dynamic_finding = False - finding_new.save() + finding_new3.endpoints.add(ep1) + finding_new3.unique_id_from_tool = 1 + finding_new3.dynamic_finding = False + finding_new3.save() + + if settings.DEDUPE_ALGO_ENDPOINT_FIELDS == []: + # different uid. 
and different endpoints, dynamic_finding is set to False hash_code still not affected by endpoints + self.assert_finding(finding_new3, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) + else: + # endpoints do not match with 224 + self.assert_finding(finding_new3, not_pk=224, duplicate=False, duplicate_finding_id=None, hash_code=finding_224.hash_code) + + def test_identical_different_endpoints_unique_id_or_hash_code_multiple(self): + # create identical copy, so unique id is the same + finding_new1, finding_224 = self.copy_and_reset_finding(find_id=224) + + finding_new1.save(dedupe_option=False) + ep1 = Endpoint(product=finding_new1.test.engagement.product, finding=finding_new1, host="myhost.com", protocol="https") + ep1.save() + finding_new1.endpoints.add(ep1) + finding_new1.save() + + if settings.DEDUPE_ALGO_ENDPOINT_FIELDS == []: + # expect duplicate, as endpoints shouldn't affect dedupe and hash_code due to unique_id + self.assert_finding(finding_new1, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) + else: + # endpoints don't match with 224, so not a duplicate + self.assert_finding(finding_new1, not_pk=224, duplicate=False, duplicate_finding_id=None, hash_code=finding_224.hash_code) + + # same scenario, now with different uid. and different endpoints, but hash will be different due to the endpoints because we set dynamic_finding to True + finding_new2, finding_224 = self.copy_and_reset_finding(find_id=224) + + finding_new2.save(dedupe_option=False) + ep1 = Endpoint(product=finding_new2.test.engagement.product, finding=finding_new2, host="myhost.com", protocol="https") + ep1.save() + finding_new2.endpoints.add(ep1) + finding_new2.unique_id_from_tool = 1 + finding_new2.dynamic_finding = True + finding_new2.save() + + if settings.DEDUPE_ALGO_ENDPOINT_FIELDS == []: + # different uid. 
and different endpoints, but endpoints not used for hash anymore -> duplicate + self.assert_finding(finding_new2, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) + else: + # endpoints do not match with 224, but they do match with the finding just created. this proves that the dedupe algo considers more than only the first + # candidate https://github.com/DefectDojo/django-DefectDojo/issues/13497 + self.assert_finding(finding_new2, not_pk=224, duplicate=True, duplicate_finding_id=finding_new1.pk, hash_code=finding_224.hash_code) + + # same scenario, now with different uid. and different endpoints + finding_new3, finding_224 = self.copy_and_reset_finding(find_id=224) + + finding_new3.save(dedupe_option=False) + ep1 = Endpoint(product=finding_new3.test.engagement.product, finding=finding_new3, host="myhost.com", protocol="https") + ep1.save() + finding_new3.endpoints.add(ep1) + finding_new3.unique_id_from_tool = 1 + finding_new3.dynamic_finding = False + finding_new3.save() if settings.DEDUPE_ALGO_ENDPOINT_FIELDS == []: # different uid. and different endpoints, dynamic_finding is set to False hash_code still not affected by endpoints - self.assert_finding(finding_new, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) + self.assert_finding(finding_new3, not_pk=224, duplicate=True, duplicate_finding_id=224, hash_code=finding_224.hash_code) else: - self.assert_finding(finding_new, not_pk=224, duplicate=False, duplicate_finding_id=None, hash_code=finding_224.hash_code) + # endpoints do not match with 224, but they do match with the finding just created. 
this proves that the dedupe algo considers more than only the first + # candidate https://github.com/DefectDojo/django-DefectDojo/issues/13497 + self.assert_finding(finding_new3, not_pk=224, duplicate=True, duplicate_finding_id=finding_new1.pk, hash_code=finding_224.hash_code) # # some extra tests @@ -1476,7 +1556,7 @@ def log_summary(self, product=None, engagement=None, test=None): def copy_and_reset_finding(self, find_id): org = Finding.objects.get(id=find_id) - new = _copy_model_util(org) + new = copy_model_util(org) new.duplicate = False new.duplicate_finding = None new.active = True @@ -1513,13 +1593,13 @@ def copy_and_reset_finding_add_endpoints(self, find_id, *, static=False, dynamic def copy_and_reset_test(self, test_id): org = Test.objects.get(id=test_id) - new = _copy_model_util(org) + new = copy_model_util(org) # return unsaved new finding and reloaded existing finding return new, Test.objects.get(id=test_id) def copy_and_reset_engagement(self, eng_id): org = Engagement.objects.get(id=eng_id) - new = _copy_model_util(org) + new = copy_model_util(org) # return unsaved new finding and reloaded existing finding return new, Engagement.objects.get(id=eng_id) diff --git a/unittests/test_duplication_loops.py b/unittests/test_duplication_loops.py index cc0d250774e..d85e52e1046 100644 --- a/unittests/test_duplication_loops.py +++ b/unittests/test_duplication_loops.py @@ -4,7 +4,7 @@ from django.test.utils import override_settings from dojo.management.commands.fix_loop_duplicates import fix_loop_duplicates -from dojo.models import Engagement, Finding, Product, User, _copy_model_util +from dojo.models import Engagement, Finding, Product, User, copy_model_util from dojo.utils import set_duplicate from .dojo_test_case import DojoTestCase @@ -27,19 +27,19 @@ def run(self, result=None): super().run(result) def setUp(self): - self.finding_a = _copy_model_util(Finding.objects.get(id=2), exclude_fields=["duplicate_finding"]) + self.finding_a = 
copy_model_util(Finding.objects.get(id=2), exclude_fields=["duplicate_finding"]) self.finding_a.title = "A: " + self.finding_a.title self.finding_a.duplicate = False self.finding_a.hash_code = None self.finding_a.save() - self.finding_b = _copy_model_util(Finding.objects.get(id=3), exclude_fields=["duplicate_finding"]) + self.finding_b = copy_model_util(Finding.objects.get(id=3), exclude_fields=["duplicate_finding"]) self.finding_b.title = "B: " + self.finding_b.title self.finding_b.duplicate = False self.finding_b.hash_code = None self.finding_b.save() - self.finding_c = _copy_model_util(Finding.objects.get(id=4), exclude_fields=["duplicate_finding"]) + self.finding_c = copy_model_util(Finding.objects.get(id=4), exclude_fields=["duplicate_finding"]) self.finding_c.pk = None self.finding_c.title = "C: " + self.finding_c.title self.finding_c.duplicate = False @@ -262,7 +262,7 @@ def test_loop_relations_for_three(self): # Another loop-test for 4 findings def test_loop_relations_for_four(self): - self.finding_d = _copy_model_util(Finding.objects.get(id=4), exclude_fields=["duplicate_finding"]) + self.finding_d = copy_model_util(Finding.objects.get(id=4), exclude_fields=["duplicate_finding"]) self.finding_d.duplicate = False self.finding_d.save() diff --git a/unittests/test_false_positive_history_logic.py b/unittests/test_false_positive_history_logic.py index 4a380383f7d..5d8f31a15de 100644 --- a/unittests/test_false_positive_history_logic.py +++ b/unittests/test_false_positive_history_logic.py @@ -12,7 +12,7 @@ System_Settings, Test, User, - _copy_model_util, + copy_model_util, ) from .dojo_test_case import DojoTestCase @@ -1719,7 +1719,7 @@ def log_summary(self, product=None, engagement=None, test=None): def copy_and_reset_finding(self, find_id): org = Finding.objects.get(id=find_id) - new = _copy_model_util(org) + new = copy_model_util(org) new.duplicate = False new.duplicate_finding = None new.false_p = False @@ -1730,19 +1730,19 @@ def copy_and_reset_finding(self, 
find_id): def copy_and_reset_test(self, test_id): org = Test.objects.get(id=test_id) - new = _copy_model_util(org) + new = copy_model_util(org) # return unsaved new test and reloaded existing test return new, Test.objects.get(id=test_id) def copy_and_reset_engagement(self, eng_id): org = Engagement.objects.get(id=eng_id) - new = _copy_model_util(org) + new = copy_model_util(org) # return unsaved new engagement and reloaded existing engagement return new, Engagement.objects.get(id=eng_id) def copy_and_reset_product(self, prod_id): org = Product.objects.get(id=prod_id) - new = _copy_model_util(org) + new = copy_model_util(org) new.name = f"{org.name} (Copy {datetime.now()})" # return unsaved new product and reloaded existing product return new, Product.objects.get(id=prod_id) diff --git a/unittests/test_importers_performance.py b/unittests/test_importers_performance.py index 7405aae9353..38d63babad1 100644 --- a/unittests/test_importers_performance.py +++ b/unittests/test_importers_performance.py @@ -177,11 +177,11 @@ def test_import_reimport_reimport_performance_async(self): configure_pghistory_triggers() self._import_reimport_performance( - expected_num_queries1=593, + expected_num_queries1=340, expected_num_async_tasks1=10, - expected_num_queries2=498, + expected_num_queries2=288, expected_num_async_tasks2=22, - expected_num_queries3=289, + expected_num_queries3=175, expected_num_async_tasks3=20, ) @@ -195,11 +195,11 @@ def test_import_reimport_reimport_performance_pghistory_async(self): configure_pghistory_triggers() self._import_reimport_performance( - expected_num_queries1=559, + expected_num_queries1=306, expected_num_async_tasks1=10, - expected_num_queries2=491, + expected_num_queries2=281, expected_num_async_tasks2=22, - expected_num_queries3=284, + expected_num_queries3=170, expected_num_async_tasks3=20, ) @@ -219,11 +219,11 @@ def test_import_reimport_reimport_performance_no_async(self): testuser.usercontactinfo.block_execution = True 
testuser.usercontactinfo.save() self._import_reimport_performance( - expected_num_queries1=603, + expected_num_queries1=350, expected_num_async_tasks1=10, - expected_num_queries2=515, + expected_num_queries2=305, expected_num_async_tasks2=22, - expected_num_queries3=304, + expected_num_queries3=190, expected_num_async_tasks3=20, ) @@ -241,11 +241,11 @@ def test_import_reimport_reimport_performance_pghistory_no_async(self): testuser.usercontactinfo.save() self._import_reimport_performance( - expected_num_queries1=569, + expected_num_queries1=316, expected_num_async_tasks1=10, - expected_num_queries2=508, + expected_num_queries2=298, expected_num_async_tasks2=22, - expected_num_queries3=299, + expected_num_queries3=185, expected_num_async_tasks3=20, ) @@ -267,11 +267,11 @@ def test_import_reimport_reimport_performance_no_async_with_product_grading(self self.system_settings(enable_product_grade=True) self._import_reimport_performance( - expected_num_queries1=604, + expected_num_queries1=351, expected_num_async_tasks1=11, - expected_num_queries2=516, + expected_num_queries2=306, expected_num_async_tasks2=23, - expected_num_queries3=305, + expected_num_queries3=191, expected_num_async_tasks3=21, ) @@ -290,11 +290,11 @@ def test_import_reimport_reimport_performance_pghistory_no_async_with_product_gr self.system_settings(enable_product_grade=True) self._import_reimport_performance( - expected_num_queries1=570, + expected_num_queries1=317, expected_num_async_tasks1=11, - expected_num_queries2=509, + expected_num_queries2=299, expected_num_async_tasks2=23, - expected_num_queries3=300, + expected_num_queries3=186, expected_num_async_tasks3=21, ) @@ -413,9 +413,9 @@ def test_deduplication_performance_async(self): self.system_settings(enable_deduplication=True) self._deduplication_performance( - expected_num_queries1=660, + expected_num_queries1=311, expected_num_async_tasks1=12, - expected_num_queries2=519, + expected_num_queries2=204, expected_num_async_tasks2=12, 
check_duplicates=False, # Async mode - deduplication happens later ) @@ -430,9 +430,9 @@ def test_deduplication_performance_pghistory_async(self): self.system_settings(enable_deduplication=True) self._deduplication_performance( - expected_num_queries1=624, + expected_num_queries1=275, expected_num_async_tasks1=12, - expected_num_queries2=500, + expected_num_queries2=185, expected_num_async_tasks2=12, check_duplicates=False, # Async mode - deduplication happens later ) @@ -451,9 +451,9 @@ def test_deduplication_performance_no_async(self): testuser.usercontactinfo.save() self._deduplication_performance( - expected_num_queries1=672, + expected_num_queries1=323, expected_num_async_tasks1=12, - expected_num_queries2=633, + expected_num_queries2=318, expected_num_async_tasks2=12, ) @@ -471,8 +471,8 @@ def test_deduplication_performance_pghistory_no_async(self): testuser.usercontactinfo.save() self._deduplication_performance( - expected_num_queries1=636, + expected_num_queries1=287, expected_num_async_tasks1=12, - expected_num_queries2=596, + expected_num_queries2=281, expected_num_async_tasks2=12, ) diff --git a/unittests/test_rest_framework.py b/unittests/test_rest_framework.py index 4f5120792d4..f32350e2e86 100644 --- a/unittests/test_rest_framework.py +++ b/unittests/test_rest_framework.py @@ -34,7 +34,7 @@ from dojo.api_v2.mixins import DeletePreviewModelMixin from dojo.api_v2.prefetch import PrefetchListMixin, PrefetchRetrieveMixin -from dojo.api_v2.prefetch.utils import _get_prefetchable_fields +from dojo.api_v2.prefetch.utils import get_prefetchable_fields from dojo.api_v2.views import ( AnnouncementViewSet, AppAnalysisViewSet, @@ -416,7 +416,7 @@ def test_detail(self): @skipIfNotSubclass(PrefetchRetrieveMixin) def test_detail_prefetch(self): # print("=======================================================") - prefetchable_fields = [x[0] for x in _get_prefetchable_fields(self.viewset.serializer_class)] + prefetchable_fields = [x[0] for x in 
get_prefetchable_fields(self.viewset.serializer_class)] current_objects = self.client.get(self.url, format="json").data relative_url = self.url + "{}/".format(current_objects["results"][0]["id"]) @@ -508,7 +508,7 @@ def test_list(self): @skipIfNotSubclass(PrefetchListMixin) def test_list_prefetch(self): - prefetchable_fields = [x[0] for x in _get_prefetchable_fields(self.viewset.serializer_class)] + prefetchable_fields = [x[0] for x in get_prefetchable_fields(self.viewset.serializer_class)] response = self.client.get(self.url, data={ "prefetch": ",".join(prefetchable_fields), diff --git a/unittests/test_social_auth_failure_handling.py b/unittests/test_social_auth_failure_handling.py new file mode 100644 index 00000000000..0cf55f8d860 --- /dev/null +++ b/unittests/test_social_auth_failure_handling.py @@ -0,0 +1,152 @@ +from unittest.mock import patch + +from django.contrib import messages +from django.contrib.auth.models import AnonymousUser +from django.contrib.messages.storage.fallback import FallbackStorage +from django.contrib.sessions.middleware import SessionMiddleware +from django.http import HttpResponse +from django.test import RequestFactory, override_settings +from requests.exceptions import ConnectionError as RequestsConnectionError +from social_core.exceptions import AuthCanceled, AuthFailed, AuthForbidden + +from dojo.middleware import CustomSocialAuthExceptionMiddleware + +from .dojo_test_case import DojoTestCase + + +class TestSocialAuthMiddlewareUnit(DojoTestCase): + + """ + Unit tests: + Directly test CustomSocialAuthExceptionMiddleware behavior + by simulating exceptions (ConnectionError, AuthCanceled, AuthFailed, AuthForbidden), + without relying on actual backend configuration or whether the + /complete// URLs are registered and accessible. 
+ """ + + def setUp(self): + self.factory = RequestFactory() + self.middleware = CustomSocialAuthExceptionMiddleware(lambda *_: HttpResponse("OK")) + + def _prepare_request(self, path): + request = self.factory.get(path) + request.user = AnonymousUser() + SessionMiddleware(lambda *_: None).process_request(request) + request.session.save() + request._messages = FallbackStorage(request) + return request + + def test_social_auth_exception_redirects_to_login(self): + login_paths = [ + "/login/oidc/", + "/login/auth0/", + "/login/google-oauth2/", + "/login/okta-oauth2/", + "/login/azuread-tenant-oauth2/", + "/login/gitlab/", + "/login/keycloak-oauth2/", + "/login/github/", + ] + exceptions = [ + (RequestsConnectionError("Host unreachable"), "Please use the standard login below."), + (AuthCanceled("User canceled login"), "Social login was canceled. Please try again or use the standard login."), + (AuthFailed("Token exchange failed"), "Social login failed. Please try again or use the standard login."), + (AuthForbidden("User not allowed"), "You are not authorized to log in via this method. 
Please contact support or use the standard login."), + ] + for path in login_paths: + for exception, expected_message in exceptions: + with self.subTest(path=path, exception=type(exception).__name__): + request = self._prepare_request(path) + response = self.middleware.process_exception(request, exception) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.url, "/login?force_login_form") + storage = list(messages.get_messages(request)) + self.assertTrue(any(expected_message in str(msg) for msg in storage)) + + def test_non_social_auth_path_still_redirects_on_auth_exception(self): + """Ensure middleware handles AuthFailed even on unrelated paths.""" + request = self._prepare_request("/some/other/path/") + exception = AuthFailed("Should be handled globally") + response = self.middleware.process_exception(request, exception) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.url, "/login?force_login_form") + storage = list(messages.get_messages(request)) + self.assertTrue(any("Social login failed. Please try again or use the standard login." in str(msg) for msg in storage)) + + def test_non_social_auth_path_redirects_on_auth_forbidden(self): + """Ensure middleware handles AuthForbidden even on unrelated paths.""" + request = self._prepare_request("/some/other/path/") + exception = AuthForbidden("User not allowed") + response = self.middleware.process_exception(request, exception) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.url, "/login?force_login_form") + storage = list(messages.get_messages(request)) + self.assertTrue(any("You are not authorized to log in via this method." 
in str(msg) for msg in storage)) + + def test_type_error_none_type_iterable_redirect(self): + """Ensure middleware catches 'NoneType' object is not iterable TypeError and redirects.""" + request = self._prepare_request("/login/oidc/") + exception = TypeError("'NoneType' object is not iterable") + response = self.middleware.process_exception(request, exception) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.url, "/login?force_login_form") + storage = list(messages.get_messages(request)) + self.assertTrue(any("An unexpected error occurred during social login." in str(msg) for msg in storage)) + + +@override_settings( + AUTHENTICATION_BACKENDS=( + "social_core.backends.github.GithubOAuth2", + "social_core.backends.gitlab.GitLabOAuth2", + "social_core.backends.keycloak.KeycloakOAuth2", + "social_core.backends.azuread_tenant.AzureADTenantOAuth2", + "social_core.backends.auth0.Auth0OAuth2", + "social_core.backends.okta.OktaOAuth2", + "social_core.backends.open_id_connect.OpenIdConnectAuth", + "django.contrib.auth.backends.ModelBackend", + ), +) +class TestSocialAuthIntegrationFailures(DojoTestCase): + + """ + Integration tests: + Simulate social login failures by calling /complete// URLs + and mocking auth_complete() to raise AuthFailed, AuthCanceled, and AuthForbidden. + Verifies that the middleware is correctly integrated and handles backend failures. 
+ """ + + BACKEND_CLASS_PATHS = { + "github": "social_core.backends.github.GithubOAuth2", + "gitlab": "social_core.backends.gitlab.GitLabOAuth2", + "keycloak": "social_core.backends.keycloak.KeycloakOAuth2", + "azuread-tenant-oauth2": "social_core.backends.azuread_tenant.AzureADTenantOAuth2", + "auth0": "social_core.backends.auth0.Auth0OAuth2", + "okta-oauth2": "social_core.backends.okta.OktaOAuth2", + "oidc": "social_core.backends.open_id_connect.OpenIdConnectAuth", + } + + def _test_backend_exception(self, backend_slug, exception, expected_message): + backend_class_path = self.BACKEND_CLASS_PATHS[backend_slug] + with patch(f"{backend_class_path}.auth_complete", side_effect=exception): + response = self.client.get(f"/complete/{backend_slug}/", follow=True) + self.assertEqual(response.status_code, 200) + self.assertContains(response, expected_message) + + def test_all_backends_auth_failed(self): + for backend in self.BACKEND_CLASS_PATHS: + with self.subTest(backend=backend): + self._test_backend_exception(backend, AuthFailed(backend=None), "Social login failed. Please try again or use the standard login.") + + def test_all_backends_auth_canceled(self): + for backend in self.BACKEND_CLASS_PATHS: + with self.subTest(backend=backend): + self._test_backend_exception(backend, AuthCanceled(backend=None), "Social login was canceled. Please try again or use the standard login.") + + def test_all_backends_auth_forbidden(self): + for backend in self.BACKEND_CLASS_PATHS: + with self.subTest(backend=backend): + self._test_backend_exception( + backend, + AuthForbidden(backend=None), + "You are not authorized to log in via this method. 
Please contact support or use the standard login.", + ) diff --git a/unittests/test_utils_deduplication_reopen.py b/unittests/test_utils_deduplication_reopen.py index 91ba2c49d12..a7e72ede118 100644 --- a/unittests/test_utils_deduplication_reopen.py +++ b/unittests/test_utils_deduplication_reopen.py @@ -2,7 +2,7 @@ import logging from dojo.management.commands.fix_loop_duplicates import fix_loop_duplicates -from dojo.models import Finding, _copy_model_util +from dojo.models import Finding, copy_model_util from dojo.utils import set_duplicate from .dojo_test_case import DojoTestCase @@ -14,7 +14,7 @@ class TestDuplicationReopen(DojoTestCase): fixtures = ["dojo_testdata.json"] def setUp(self): - self.finding_a = _copy_model_util(Finding.objects.get(id=2), exclude_fields=["duplicate_finding"]) + self.finding_a = copy_model_util(Finding.objects.get(id=2), exclude_fields=["duplicate_finding"]) self.finding_a.duplicate = False self.finding_a.mitigated = datetime.datetime(1970, 1, 1, tzinfo=datetime.UTC) self.finding_a.is_mitigated = True @@ -22,19 +22,19 @@ def setUp(self): self.finding_a.active = False self.finding_a.save() - self.finding_b = _copy_model_util(Finding.objects.get(id=3), exclude_fields=["duplicate_finding"]) + self.finding_b = copy_model_util(Finding.objects.get(id=3), exclude_fields=["duplicate_finding"]) self.finding_a.active = True self.finding_b.duplicate = False self.finding_b.save() - self.finding_c = _copy_model_util(Finding.objects.get(id=4), exclude_fields=["duplicate_finding"]) + self.finding_c = copy_model_util(Finding.objects.get(id=4), exclude_fields=["duplicate_finding"]) self.finding_c.duplicate = False self.finding_c.out_of_scope = True self.finding_c.active = False logger.debug("creating finding_c") self.finding_c.save() - self.finding_d = _copy_model_util(Finding.objects.get(id=5), exclude_fields=["duplicate_finding"]) + self.finding_d = copy_model_util(Finding.objects.get(id=5), exclude_fields=["duplicate_finding"]) 
self.finding_d.duplicate = False logger.debug("creating finding_d") self.finding_d.save() diff --git a/unittests/tools/test_cyclonedx_parser.py b/unittests/tools/test_cyclonedx_parser.py index 898d649c38b..e98b5338ff8 100644 --- a/unittests/tools/test_cyclonedx_parser.py +++ b/unittests/tools/test_cyclonedx_parser.py @@ -357,3 +357,17 @@ def test_cyclonedx_issue_8022(self): self.assertIn(finding.severity, Finding.SEVERITIES) finding.clean() self.assertEqual(1, len(findings)) + + def test_cyclonedx_no_severity(self): + """CycloneDX version 1.4 JSON format""" + with (get_unit_tests_scans_path("cyclonedx") / "no-severity.json").open(encoding="utf-8") as file: + parser = CycloneDXParser() + findings = parser.get_findings(file, Test()) + self.assertEqual(1, len(findings)) + finding = findings[0] + # There is so little information in the vulnerability, that we cannot build a proper title + self.assertEqual("None:None | CVE-2021-44228", finding.title) + self.assertEqual("Critical", finding.severity) + # The score will be evaluated when the finding save method is ran + # self.assertEqual(10.0, finding.cvssv3_score) + self.assertEqual("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", finding.cvssv3) diff --git a/unittests/tools/test_github_sast_parser.py b/unittests/tools/test_github_sast_parser.py new file mode 100644 index 00000000000..9b42a795ec5 --- /dev/null +++ b/unittests/tools/test_github_sast_parser.py @@ -0,0 +1,53 @@ +import io + +from dojo.models import Test +from dojo.tools.github_sast.parser import GithubSASTParser +from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path + + +class TestGithubSASTParser(DojoTestCase): + def test_parse_file_with_no_vuln_has_no_findings(self): + """Empty list should yield no findings""" + with (get_unit_tests_scans_path("github_sast") / "github_sast_zero_vul.json").open( + encoding="utf-8", + ) as testfile: + parser = GithubSASTParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(0, 
len(findings)) + + def test_parse_file_with_one_vuln_parsed_correctly(self): + """Single vulnerability entry parsed correctly""" + with (get_unit_tests_scans_path("github_sast") / "github_sast_one_vul.json").open(encoding="utf-8") as testfile: + parser = GithubSASTParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(1, len(findings)) + finding = findings[0] + for ep in getattr(finding, "unsaved_endpoints", []): + ep.clean() + + expected_title = "Clear-text storage of sensitive information (py/clear-text-storage-sensitive-data)" + self.assertEqual(expected_title, finding.title) + self.assertEqual("src/file.py", finding.file_path) + self.assertEqual(42, finding.line) + self.assertEqual("py/clear-text-storage-sensitive-data", finding.vuln_id_from_tool) + self.assertEqual("High", finding.severity) + self.assertEqual("https://github.com/OWASP/test-repository/security/code-scanning/35", finding.url) + self.assertIn("This expression stores sensitive data", finding.description) + + def test_parse_file_with_multiple_vulns_has_multiple_findings(self): + """Multiple entries produce corresponding findings""" + with (get_unit_tests_scans_path("github_sast") / "github_sast_many_vul.json").open( + encoding="utf-8", + ) as testfile: + parser = GithubSASTParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(2, len(findings)) + lines = sorted(f.line for f in findings) + self.assertListEqual([42, 78], lines) + + def test_parse_file_invalid_format_raises(self): + """Non-list JSON should raise""" + bad_json = io.StringIO('{"not": "a list"}') + parser = GithubSASTParser() + with self.assertRaises(TypeError): + parser.get_findings(bad_json, Test()) diff --git a/unittests/tools/test_github_vulnerability_parser.py b/unittests/tools/test_github_vulnerability_parser.py index 1065a31a2f5..2e5869476bf 100644 --- a/unittests/tools/test_github_vulnerability_parser.py +++ b/unittests/tools/test_github_vulnerability_parser.py @@ -10,14 +10,18 @@ 
class TestGithubVulnerabilityParser(DojoTestCase): def test_parse_file_with_no_vuln_has_no_findings(self): """Sample with zero vulnerability""" - with (get_unit_tests_scans_path("github_vulnerability") / "github-0-vuln.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github-0-vuln.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_parse_file_with_one_vuln_has_one_findings(self): """Sample with one vulnerability""" - with (get_unit_tests_scans_path("github_vulnerability") / "github-1-vuln.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github-1-vuln.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -36,8 +40,10 @@ def test_parse_file_with_one_vuln_has_one_findings(self): self.assertEqual(finding.unique_id_from_tool, "aabbccddeeff1122334401") def test_parse_file_with_one_vuln_has_one_finding_and_dependabot_direct_link(self): - """Sample with one vulnerability""" - with (get_unit_tests_scans_path("github_vulnerability") / "github-1-vuln-repo-dependabot-link.json").open(encoding="utf-8") as testfile: + """Sample with dependabot PR and repository alert link""" + with (get_unit_tests_scans_path("github_vulnerability") / "github-1-vuln-repo-dependabot-link.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -47,23 +53,31 @@ def test_parse_file_with_one_vuln_has_one_finding_and_dependabot_direct_link(sel with self.subTest(i=0): finding = findings[0] self.assertEqual(finding.title, "Critical severity vulnerability that affects package") - self.assertEqual( - finding.description, - 
"[https://github.com/OWASP/test-repository/security/dependabot/1](https://github.com/OWASP/test-repository/security/dependabot/1)\nThis is a sample description for sample description from Github API.", + expected_desc = ( + "Repo Alert: [https://github.com/OWASP/test-repository/security/dependabot/1]" + "(https://github.com/OWASP/test-repository/security/dependabot/1)\n" + "Fix PR: [https://github.com/OWASP/test-repository/pull/1]" + "(https://github.com/OWASP/test-repository/pull/1)\n" + "This is a sample description for sample description from Github API." ) + self.assertEqual(finding.description, expected_desc) self.assertEqual(finding.severity, "Critical") self.assertEqual(finding.component_name, "package") self.assertEqual(finding.unique_id_from_tool, "aabbccddeeff1122334401") def test_parse_file_with_multiple_vuln_has_multiple_findings(self): """Sample with five vulnerability""" - with (get_unit_tests_scans_path("github_vulnerability") / "github-5-vuln.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github-5-vuln.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(5, len(findings)) def test_parse_file_issue2984(self): - with (get_unit_tests_scans_path("github_vulnerability") / "github_issue2984.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github_issue2984.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(4, len(findings)) @@ -87,7 +101,9 @@ def test_parse_file_issue2984(self): self.assertEqual(finding.unique_id_from_tool, "DASFMMFKLNKDSAKFSDLANJKKFDSNJSAKDFNJKDFS=") def test_parse_file_search(self): - with (get_unit_tests_scans_path("github_vulnerability") / "github_search.json").open(encoding="utf-8") as testfile: + with 
(get_unit_tests_scans_path("github_vulnerability") / "github_search.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(2, len(findings)) @@ -102,7 +118,9 @@ def test_parse_file_search(self): self.assertEqual(finding.unsaved_vulnerability_ids[0], "GHSA-2qrg-x229-3v8q") self.assertEqual(finding.unsaved_vulnerability_ids[1], "CVE-2019-17571") self.assertEqual(finding.component_name, "log4j:log4j") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=", + ) with self.subTest(i=1): finding = findings[1] self.assertEqual(finding.title, "Deserialization of Untrusted Data in Log4j") @@ -111,11 +129,15 @@ def test_parse_file_search(self): self.assertEqual(finding.unsaved_vulnerability_ids[0], "GHSA-2qrg-x229-3v8q") self.assertEqual(finding.unsaved_vulnerability_ids[1], "CVE-2019-17571") self.assertEqual(finding.component_name, "log4j:log4j") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=", + ) def test_parse_file_search2(self): """Search result with more data/attributes""" - with (get_unit_tests_scans_path("github_vulnerability") / "github_search2.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github_search2.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(2, len(findings)) @@ -130,7 +152,9 @@ def test_parse_file_search2(self): self.assertEqual(finding.unsaved_vulnerability_ids[0], "GHSA-2qrg-x229-3v8q") self.assertEqual(finding.unsaved_vulnerability_ids[1], "CVE-2019-17571") 
self.assertEqual(finding.component_name, "log4j:log4j") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=", + ) with self.subTest(i=1): finding = findings[1] self.assertEqual(finding.title, "Deserialization of Untrusted Data in Log4j") @@ -139,11 +163,15 @@ def test_parse_file_search2(self): self.assertEqual(finding.unsaved_vulnerability_ids[0], "GHSA-2qrg-x229-3v8q") self.assertEqual(finding.unsaved_vulnerability_ids[1], "CVE-2019-17571") self.assertEqual(finding.component_name, "log4j:log4j") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=", + ) def test_parse_file_search3(self): """Search result with more data/attributes""" - with (get_unit_tests_scans_path("github_vulnerability") / "github_search3.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github_search3.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(2, len(findings)) @@ -160,7 +188,9 @@ def test_parse_file_search3(self): self.assertEqual(finding.component_name, "log4j:log4j") self.assertEqual(finding.cvssv3, "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H") self.assertEqual(finding.file_path, "gogoph-crawler/pom.xml") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=", + ) with self.subTest(i=1): finding = findings[1] self.assertEqual(finding.title, "Deserialization of Untrusted Data in Log4j") @@ -171,11 +201,15 @@ def test_parse_file_search3(self): 
self.assertEqual(finding.component_name, "log4j:log4j") self.assertEqual(finding.cvssv3, "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H") self.assertEqual(finding.file_path, "gogoph/pom.xml") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=", + ) def test_parse_file_search4_null_cvss_vector(self): """Search result with more data/attributes""" - with (get_unit_tests_scans_path("github_vulnerability") / "github_search4_null_cvss_vector.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github_search4_null_cvss_vector.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(2, len(findings)) @@ -192,7 +226,9 @@ def test_parse_file_search4_null_cvss_vector(self): self.assertEqual(finding.component_name, "log4j:log4j") self.assertEqual(finding.cvssv3, None) self.assertEqual(finding.file_path, "gogoph-crawler/pom.xml") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQyMDg2Nzc5NzY=", + ) with self.subTest(i=1): finding = findings[1] self.assertEqual(finding.title, "Deserialization of Untrusted Data in Log4j") @@ -203,7 +239,9 @@ def test_parse_file_search4_null_cvss_vector(self): self.assertEqual(finding.component_name, "log4j:log4j") self.assertEqual(finding.cvssv3, "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H") self.assertEqual(finding.file_path, "gogoph/pom.xml") - self.assertEqual(finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=") + self.assertEqual( + finding.unique_id_from_tool, "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1NTE5NTI2OTM=", + ) def 
test_parse_cwe_and_date(self): with (get_unit_tests_scans_path("github_vulnerability") / "github_h2.json").open(encoding="utf-8") as testfile: @@ -229,7 +267,9 @@ def test_parse_cwe_and_date(self): self.assertEqual(finding.active, True) def test_parse_state(self): - with (get_unit_tests_scans_path("github_vulnerability") / "github_shiro.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github_shiro.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -238,7 +278,10 @@ def test_parse_state(self): with self.subTest(i=0): finding = findings[0] - self.assertEqual(finding.title, "Apache Shiro vulnerable to a specially crafted HTTP request causing an authentication bypass") + self.assertEqual( + finding.title, + "Apache Shiro vulnerable to a specially crafted HTTP request causing an authentication bypass", + ) self.assertEqual(finding.severity, "Critical") self.assertEqual(len(finding.unsaved_vulnerability_ids), 2) self.assertEqual(finding.unsaved_vulnerability_ids[0], "GHSA-f6jp-j6w3-w9hm") @@ -253,7 +296,9 @@ def test_parse_state(self): self.assertEqual(finding.is_mitigated, True) def test_parser_version(self): - with (get_unit_tests_scans_path("github_vulnerability") / "github-vuln-version.json").open(encoding="utf-8") as testfile: + with (get_unit_tests_scans_path("github_vulnerability") / "github-vuln-version.json").open( + encoding="utf-8", + ) as testfile: parser = GithubVulnerabilityParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -266,6 +311,8 @@ def test_parser_version(self): self.assertEqual(finding.severity, "Critical") self.assertEqual(finding.component_name, "org.springframework:spring-web") self.assertEqual(finding.component_version, "5.3.29") + self.assertAlmostEqual(finding.epss_score, 0.00212, places=5) + 
self.assertAlmostEqual(finding.epss_percentile, 0.44035, places=5) def test_parse_file_issue_9582(self): with (get_unit_tests_scans_path("github_vulnerability") / "issue_9582.json").open(encoding="utf-8") as testfile: diff --git a/unittests/tools/test_mobsf_parser.py b/unittests/tools/test_mobsf_parser.py index 53ddbb8a3e6..88719e57d88 100644 --- a/unittests/tools/test_mobsf_parser.py +++ b/unittests/tools/test_mobsf_parser.py @@ -136,3 +136,162 @@ def test_parse_damnvulnrablebank(self): findings = parser.get_findings(testfile, test) testfile.close() self.assertEqual(80, len(findings)) + + def test_parse_no_findings(self): + with (get_unit_tests_scans_path("mobsf") / "no_findings.json").open(encoding="utf-8") as testfile: + parser = MobSFParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(0, len(findings)) + + def test_parse_many_findings(self): + with (get_unit_tests_scans_path("mobsf") / "many_findings.json").open(encoding="utf-8") as testfile: + parser = MobSFParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(8, len(findings)) + + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("android_certificate_transparency", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(295, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=1): + finding = findings[1] + self.assertEqual("android_kotlin_hardcoded", finding.title) + self.assertEqual("Medium", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(798, finding.cwe) + self.assertIsNotNone(finding.references) + self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures.kt", finding.file_path) + self.assertEqual(10, finding.line) + + with self.subTest(i=2): + finding = findings[2] + 
self.assertEqual("android_kotlin_hardcoded", finding.title) + self.assertEqual("Medium", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(798, finding.cwe) + self.assertIsNotNone(finding.references) + self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures2.kt", finding.file_path) + self.assertEqual(20, finding.line) + + with self.subTest(i=3): + finding = findings[3] + self.assertEqual("android_prevent_screenshot", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(200, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=4): + finding = findings[4] + self.assertEqual("android_root_detection", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(919, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=5): + finding = findings[5] + self.assertEqual("android_safetynet", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(353, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=6): + finding = findings[6] + self.assertEqual("android_ssl_pinning", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(295, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=7): + finding = findings[7] + self.assertEqual("android_tapjacking", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(200, finding.cwe) + 
self.assertIsNotNone(finding.references) + + def test_parse_many_findings_cwe_lower(self): + with (get_unit_tests_scans_path("mobsf") / "many_findings_cwe_lower.json").open(encoding="utf-8") as testfile: + parser = MobSFParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(7, len(findings)) + + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("android_certificate_transparency", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(295, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=1): + finding = findings[1] + self.assertEqual("android_kotlin_hardcoded", finding.title) + self.assertEqual("Medium", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(798, finding.cwe) + self.assertIsNotNone(finding.references) + self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures.kt", finding.file_path) + self.assertEqual(10, finding.line) + + with self.subTest(i=2): + finding = findings[2] + self.assertEqual("android_prevent_screenshot", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(200, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=3): + finding = findings[3] + self.assertEqual("android_root_detection", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(919, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=4): + finding = findings[4] + self.assertEqual("android_safetynet", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + 
self.assertIsNotNone(finding.description) + self.assertEqual(353, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=5): + finding = findings[5] + self.assertEqual("android_ssl_pinning", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(295, finding.cwe) + self.assertIsNotNone(finding.references) + + with self.subTest(i=6): + finding = findings[6] + self.assertEqual("android_tapjacking", finding.title) + self.assertEqual("Low", finding.severity) + self.assertEqual(1, finding.nb_occurences) + self.assertIsNotNone(finding.description) + self.assertEqual(200, finding.cwe) + self.assertIsNotNone(finding.references) diff --git a/unittests/tools/test_mobsfscan_parser.py b/unittests/tools/test_mobsfscan_parser.py deleted file mode 100644 index cbb6245c227..00000000000 --- a/unittests/tools/test_mobsfscan_parser.py +++ /dev/null @@ -1,165 +0,0 @@ -from dojo.models import Test -from dojo.tools.mobsfscan.parser import MobsfscanParser -from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path - - -class TestMobsfscanParser(DojoTestCase): - - def test_parse_no_findings(self): - with (get_unit_tests_scans_path("mobsfscan") / "no_findings.json").open(encoding="utf-8") as testfile: - parser = MobsfscanParser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(0, len(findings)) - - def test_parse_many_findings(self): - with (get_unit_tests_scans_path("mobsfscan") / "many_findings.json").open(encoding="utf-8") as testfile: - parser = MobsfscanParser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(8, len(findings)) - - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("android_certificate_transparency", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - 
self.assertEqual(295, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=1): - finding = findings[1] - self.assertEqual("android_kotlin_hardcoded", finding.title) - self.assertEqual("Medium", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(798, finding.cwe) - self.assertIsNotNone(finding.references) - self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures.kt", finding.file_path) - self.assertEqual(10, finding.line) - - with self.subTest(i=2): - finding = findings[2] - self.assertEqual("android_kotlin_hardcoded", finding.title) - self.assertEqual("Medium", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(798, finding.cwe) - self.assertIsNotNone(finding.references) - self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures2.kt", finding.file_path) - self.assertEqual(20, finding.line) - - with self.subTest(i=3): - finding = findings[3] - self.assertEqual("android_prevent_screenshot", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(200, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=4): - finding = findings[4] - self.assertEqual("android_root_detection", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(919, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=5): - finding = findings[5] - self.assertEqual("android_safetynet", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(353, finding.cwe) - 
self.assertIsNotNone(finding.references) - - with self.subTest(i=6): - finding = findings[6] - self.assertEqual("android_ssl_pinning", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(295, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=7): - finding = findings[7] - self.assertEqual("android_tapjacking", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(200, finding.cwe) - self.assertIsNotNone(finding.references) - - def test_parse_many_findings_cwe_lower(self): - with (get_unit_tests_scans_path("mobsfscan") / "many_findings_cwe_lower.json").open(encoding="utf-8") as testfile: - parser = MobsfscanParser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(7, len(findings)) - - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("android_certificate_transparency", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(295, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=1): - finding = findings[1] - self.assertEqual("android_kotlin_hardcoded", finding.title) - self.assertEqual("Medium", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(798, finding.cwe) - self.assertIsNotNone(finding.references) - self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures.kt", finding.file_path) - self.assertEqual(10, finding.line) - - with self.subTest(i=2): - finding = findings[2] - self.assertEqual("android_prevent_screenshot", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - 
self.assertIsNotNone(finding.description) - self.assertEqual(200, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=3): - finding = findings[3] - self.assertEqual("android_root_detection", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(919, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=4): - finding = findings[4] - self.assertEqual("android_safetynet", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(353, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=5): - finding = findings[5] - self.assertEqual("android_ssl_pinning", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(295, finding.cwe) - self.assertIsNotNone(finding.references) - - with self.subTest(i=6): - finding = findings[6] - self.assertEqual("android_tapjacking", finding.title) - self.assertEqual("Low", finding.severity) - self.assertEqual(1, finding.nb_occurences) - self.assertIsNotNone(finding.description) - self.assertEqual(200, finding.cwe) - self.assertIsNotNone(finding.references) diff --git a/unittests/tools/test_wazuh_parser.py b/unittests/tools/test_wazuh_parser.py index 5f73fef4f47..60d741c0b9b 100644 --- a/unittests/tools/test_wazuh_parser.py +++ b/unittests/tools/test_wazuh_parser.py @@ -60,3 +60,10 @@ def test_parse_v4_8_many_findings(self): self.assertEqual("CVE-2025-27558 affects (version: 6.8.0-60.63)", findings[0].title) self.assertEqual("Critical", findings[0].severity) self.assertEqual(9.1, findings[0].cvssv3_score) + + def test_parse_wazuh_abnormal_severity(self): + with (get_unit_tests_scans_path("wazuh") / 
"wazuh_abnormal_severity.json").open(encoding="utf-8") as testfile: + parser = WazuhParser() + findings = parser.get_findings(testfile, Test()) + for finding in findings: + self.assertEqual("Info", finding.severity)