Skip to content

Commit f266860

Browse files
Merge remote-tracking branch 'upstream/dev' into perf/tag-inheritance-phase-b
2 parents dfa3625 + 6788368 commit f266860

10 files changed

Lines changed: 5985 additions & 114 deletions

File tree

docker-compose.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ services:
129129
volumes:
130130
- defectdojo_postgres:/var/lib/postgresql/data
131131
valkey:
132-
image: valkey/valkey:9.0.3-alpine@sha256:e1095c6c76ee982cb2d1e07edbb7fb2a53606630a1d810d5a47c9f646b708bf5
132+
image: valkey/valkey:9.0.4-alpine@sha256:d1cc70645bbcef743615463a2fa4616e841407545e18f560aed0c49671a90147
133133
volumes:
134134
# we keep using the redis volume as renaming is not possible and copying data over
135135
# would require steps during downtime or complex commands in the initializer

dojo/filters.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1624,6 +1624,7 @@ class ApiFindingFilter(DojoFilter):
16241624
verified = BooleanFilter(field_name="verified")
16251625
has_jira = BooleanFilter(field_name="jira_issue", lookup_expr="isnull", exclude=True)
16261626
fix_available = BooleanFilter(field_name="fix_available")
1627+
mitigation_available = BooleanFilter(method="filter_mitigation_available", label="Mitigation Available")
16271628
# CharFilter
16281629
component_version = CharFilter(lookup_expr="icontains")
16291630
component_name = CharFilter(lookup_expr="icontains")
@@ -1796,6 +1797,11 @@ def filter_mitigated_on(self, queryset, name, value):
17961797

17971798
return queryset.filter(mitigated=value)
17981799

1800+
def filter_mitigation_available(self, queryset, name, value):
1801+
if value:
1802+
return queryset.exclude(mitigation__isnull=True).exclude(mitigation__exact="")
1803+
return queryset.filter(Q(mitigation__isnull=True) | Q(mitigation__exact=""))
1804+
17991805

18001806
class PercentageFilter(NumberFilter):
18011807
def __init__(self, *args, **kwargs):
@@ -1830,6 +1836,8 @@ class FindingFilterHelper(FilterSet):
18301836
duplicate = ReportBooleanFilter()
18311837
is_mitigated = ReportBooleanFilter()
18321838
fix_available = ReportBooleanFilter()
1839+
mitigation = CharFilter(lookup_expr="icontains")
1840+
mitigation_available = BooleanFilter(method="filter_mitigation_available", label="Mitigation Available")
18331841
mitigated = DateRangeFilter(field_name="mitigated", label="Mitigated Date")
18341842
mitigated_on = DateTimeFilter(field_name="mitigated", lookup_expr="exact", label="Mitigated On", method="filter_mitigated_on")
18351843
mitigated_before = DateTimeFilter(field_name="mitigated", lookup_expr="lt", label="Mitigated Before")
@@ -2021,6 +2029,11 @@ def filter_mitigated_on(self, queryset, name, value):
20212029

20222030
return queryset.filter(mitigated=value)
20232031

2032+
def filter_mitigation_available(self, queryset, name, value):
2033+
if value:
2034+
return queryset.exclude(mitigation__isnull=True).exclude(mitigation__exact="")
2035+
return queryset.filter(Q(mitigation__isnull=True) | Q(mitigation__exact=""))
2036+
20242037

20252038
def get_finding_group_queryset_for_context(pid=None, eid=None, tid=None):
20262039
"""
@@ -3417,6 +3430,7 @@ class ReportFindingFilterHelper(FilterSet):
34173430
out_of_scope = ReportBooleanFilter()
34183431
outside_of_sla = FindingSLAFilter(label="Outside of SLA")
34193432
file_path = CharFilter(lookup_expr="icontains")
3433+
mitigation_available = BooleanFilter(method="filter_mitigation_available", label="Mitigation Available")
34203434

34213435
o = OrderingFilter(
34223436
fields=(
@@ -3439,6 +3453,11 @@ class Meta:
34393453
"numerical_severity", "reporter", "last_reviewed",
34403454
"jira_creation", "jira_change", "files"]
34413455

3456+
def filter_mitigation_available(self, queryset, name, value):
3457+
if value:
3458+
return queryset.exclude(mitigation__isnull=True).exclude(mitigation__exact="")
3459+
return queryset.filter(Q(mitigation__isnull=True) | Q(mitigation__exact=""))
3460+
34423461
def manage_kwargs(self, kwargs):
34433462
self.prod_type = None
34443463
self.product = None

dojo/finding/helper.py

Lines changed: 24 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -611,20 +611,32 @@ def reconfigure_duplicate_cluster(original, cluster_outside):
611611
cluster_outside.exclude(id=new_original.id).update(duplicate_finding=new_original)
612612

613613

614-
def prepare_duplicates_for_delete(obj):
614+
def prepare_duplicates_for_delete(obj, *, preview_only=False):
615615
"""
616616
Prepare duplicate clusters before deleting a Test, Engagement, Product, or Product_Type.
617617
618618
Resets inside-scope duplicate FKs and reconfigures outside-scope clusters
619619
so that cascade_delete won't hit FK violations on the self-referential
620620
duplicate_finding field.
621+
622+
When preview_only=True, no data is modified. Returns the count of outside-scope
623+
findings that would be deleted (non-zero only when DUPLICATE_CLUSTER_CASCADE_DELETE=True).
621624
"""
622625
from dojo.utils import FINDING_SCOPE_FILTERS # noqa: PLC0415 circular import
623626

624627
scope_field = FINDING_SCOPE_FILTERS.get(type(obj))
625628
if scope_field is None:
626-
logger.warning("prepare_duplicates_for_delete: unsupported object type %s", type(obj).__name__)
627-
return
629+
if not preview_only:
630+
logger.warning("prepare_duplicates_for_delete: unsupported object type %s", type(obj).__name__)
631+
return 0 if preview_only else None
632+
633+
if preview_only:
634+
if not settings.DUPLICATE_CLUSTER_CASCADE_DELETE:
635+
return 0
636+
scope_ids_subquery = Finding.objects.filter(**{scope_field: obj}).values_list("id", flat=True)
637+
return Finding.objects.filter(
638+
duplicate_finding_id__in=scope_ids_subquery,
639+
).exclude(id__in=scope_ids_subquery).count()
628640

629641
logger.debug("prepare_duplicates_for_delete: %s %d", type(obj).__name__, obj.id)
630642

@@ -637,7 +649,7 @@ def prepare_duplicates_for_delete(obj):
637649

638650
if not scope_ids_subquery.exists():
639651
logger.debug("no findings in scope, nothing to prepare")
640-
return
652+
return None
641653

642654
# Bulk-reset inside-scope duplicates: single UPDATE instead of per-original mass_model_updater.
643655
# Clears the duplicate_finding FK so cascade_delete won't trip over dangling self-references.
@@ -694,6 +706,8 @@ def prepare_duplicates_for_delete(obj):
694706
outside_orphan_count,
695707
)
696708

709+
return None
710+
697711

698712
@receiver(pre_delete, sender=Test)
699713
def test_pre_delete(sender, instance, **kwargs):
@@ -830,13 +844,15 @@ def _bulk_delete_findings_internal(finding_qs, chunk_size=1000, *, order_desc=Fa
830844
)
831845

832846

833-
def bulk_delete_findings(finding_qs, chunk_size=1000, cascade_root=None, *, order_desc=False):
847+
def bulk_delete_findings(finding_qs, chunk_size=1000, cascade_root=None, *, order_desc=False, preview_only=False):
834848
"""
835849
Entry point; may delegate to Pro via settings.BULK_DELETE_FINDINGS_METHOD.
836850
837851
cascade_root: optional dict describing the top-level object whose cascade triggered
838852
this bulk delete (e.g. {"model": "dojo.engagement", "pk": 9}). Ignored by OSS
839853
when no custom method is configured.
854+
855+
preview_only: when True, return a ``{product_id: finding_count}`` dict without deleting anything.
840856
"""
841857
from dojo.utils import get_custom_method # noqa: PLC0415 circular import
842858

@@ -846,7 +862,10 @@ def bulk_delete_findings(finding_qs, chunk_size=1000, cascade_root=None, *, orde
846862
chunk_size=chunk_size,
847863
cascade_root=cascade_root,
848864
order_desc=order_desc,
865+
preview_only=preview_only,
849866
)
867+
if preview_only:
868+
return None
850869
return _bulk_delete_findings_internal(finding_qs, chunk_size=chunk_size, order_desc=order_desc)
851870

852871

0 commit comments

Comments
 (0)