Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions dojo/api_v2/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1858,8 +1858,9 @@ def update(self, instance, validated_data):
for location_ref in locations:
location_ref.location.associate_with_finding(instance)

if push_to_jira:
jira_helper.push_to_jira(instance)
if push_to_jira or finding_helper.is_keep_in_sync_with_jira(instance):
# Push synchronously so that we can see jira errors in real time
jira_helper.push_to_jira(instance, sync=True)

return instance

Expand Down
37 changes: 28 additions & 9 deletions dojo/finding/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
do_dedupe_finding_task_internal,
get_finding_models_for_deduplication,
)
from dojo.jira_link.helper import is_keep_in_sync_with_jira
from dojo.location.models import Location
from dojo.location.status import FindingLocationStatus
from dojo.location.utils import save_locations_to_add
Expand All @@ -33,6 +34,7 @@
Engagement,
Finding,
Finding_Group,
JIRA_Instance,
Notes,
System_Settings,
Test,
Expand Down Expand Up @@ -459,14 +461,24 @@ def post_process_finding_save_internal(finding, dedupe_option=True, rules_option

@dojo_async_task
@app.task
def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True,
issue_updater_option=True, push_to_jira=False, user=None, **kwargs):
def post_process_findings_batch(
finding_ids,
*args,
dedupe_option=True,
rules_option=True,
product_grading_option=True,
issue_updater_option=True,
push_to_jira=False,
jira_instance_id=None,
user=None,
**kwargs,
):

logger.debug(
f"post_process_findings_batch called: finding_ids_count={len(finding_ids) if finding_ids else 0}, "
f"args={args}, dedupe_option={dedupe_option}, rules_option={rules_option}, "
f"product_grading_option={product_grading_option}, issue_updater_option={issue_updater_option}, "
f"push_to_jira={push_to_jira}, user={user.id if user else None}, kwargs={kwargs}",
f"push_to_jira={push_to_jira}, jira_instance_id={jira_instance_id}, user={user.id if user else None}, kwargs={kwargs}",
)
if not finding_ids:
return
Expand Down Expand Up @@ -502,14 +514,21 @@ def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_op
if product_grading_option and system_settings.enable_product_grade:
calculate_grade(findings[0].test.engagement.product.id)

if push_to_jira:
# If we received the ID of a jira instance, then we need to determine the keep in sync behavior
jira_instance = None
if jira_instance_id is not None:
with suppress(JIRA_Instance.DoesNotExist):
jira_instance = JIRA_Instance.objects.get(id=jira_instance_id)
# We don't check whether finding jira sync actually applies just yet; that happens inside the loop,
# but this check at least lets us get that far
if push_to_jira or getattr(jira_instance, "finding_jira_sync", False):
for finding in findings:
if finding.has_jira_issue or not finding.finding_group:
jira_helper.push_to_jira(finding)
else:
jira_helper.push_to_jira(finding.finding_group)
object_to_push = finding if finding.has_jira_issue or not finding.finding_group else finding.finding_group
# Check the push_to_jira flag again to potentially short-circuit without checking for existing findings
if push_to_jira or is_keep_in_sync_with_jira(object_to_push, prefetched_jira_instance=jira_instance):
jira_helper.push_to_jira(object_to_push)
else:
logger.debug("push_to_jira is False, not ushing to JIRA")
logger.debug("push_to_jira is False, not pushing to JIRA")


@receiver(pre_delete, sender=Finding)
Expand Down
3 changes: 2 additions & 1 deletion dojo/importers/base_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from dojo.importers.endpoint_manager import EndpointManager
from dojo.importers.location_manager import LocationManager
from dojo.importers.options import ImporterOptions
from dojo.jira_link.helper import is_keep_in_sync_with_jira
from dojo.location.models import AbstractLocation, Location
from dojo.models import (
# Import History States
Expand Down Expand Up @@ -998,7 +999,7 @@ def mitigate_finding(
# don't try to dedupe findings that we are closing
finding.save(dedupe_option=False, product_grading_option=product_grading_option)
else:
finding.save(dedupe_option=False, push_to_jira=self.push_to_jira, product_grading_option=product_grading_option)
finding.save(dedupe_option=False, push_to_jira=(self.push_to_jira or is_keep_in_sync_with_jira(finding, prefetched_jira_instance=self.jira_instance)), product_grading_option=product_grading_option)

def notify_scan_added(
self,
Expand Down
9 changes: 7 additions & 2 deletions dojo/importers/default_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from dojo.finding import helper as finding_helper
from dojo.importers.base_importer import BaseImporter, Parser
from dojo.importers.options import ImporterOptions
from dojo.jira_link.helper import is_keep_in_sync_with_jira
from dojo.models import (
Engagement,
Finding,
Expand Down Expand Up @@ -381,9 +382,13 @@ def close_old_findings(
product_grading_option=False,
)
# push finding groups to jira since we only want to push whole groups
if self.findings_groups_enabled and self.push_to_jira:
# We don't check whether finding jira sync actually applies just yet; that happens inside the loop,
# but this check at least lets us get that far
if self.findings_groups_enabled and (self.push_to_jira or getattr(self.jira_instance, "finding_jira_sync", False)):
for finding_group in {finding.finding_group for finding in old_findings if finding.finding_group is not None}:
jira_helper.push_to_jira(finding_group)
# Check the push_to_jira flag again to potentially short-circuit without checking for existing findings
if self.push_to_jira or is_keep_in_sync_with_jira(finding_group, prefetched_jira_instance=self.jira_instance):
jira_helper.push_to_jira(finding_group)

# Calculate grade once after all findings have been closed
if old_findings:
Expand Down
32 changes: 21 additions & 11 deletions dojo/importers/default_reimporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
)
from dojo.importers.base_importer import BaseImporter, Parser
from dojo.importers.options import ImporterOptions
from dojo.jira_link.helper import is_keep_in_sync_with_jira
from dojo.location.status import FindingLocationStatus
from dojo.models import (
Development_Environment,
Expand Down Expand Up @@ -439,6 +440,7 @@ def process_findings(
product_grading_option=True,
issue_updater_option=True,
push_to_jira=push_to_jira,
jira_instance_id=getattr(self.jira_instance, "id", None),
)

# No chord: tasks are dispatched immediately above per batch
Expand Down Expand Up @@ -497,10 +499,13 @@ def close_old_findings(
)
mitigated_findings.append(finding)
# push finding groups to jira since we only want to push whole groups
if self.findings_groups_enabled and self.push_to_jira:
# We don't check whether finding jira sync actually applies just yet; that happens inside the loop,
# but this check at least lets us get that far
if self.findings_groups_enabled and (self.push_to_jira or getattr(self.jira_instance, "finding_jira_sync", False)):
for finding_group in {finding.finding_group for finding in findings if finding.finding_group is not None}:
jira_helper.push_to_jira(finding_group)

# Check the push_to_jira flag again to potentially short-circuit without checking for existing findings
if self.push_to_jira or is_keep_in_sync_with_jira(finding_group, prefetched_jira_instance=self.jira_instance):
jira_helper.push_to_jira(finding_group)
# Calculate grade once after all findings have been closed
if mitigated_findings:
perform_product_grading(self.test.engagement.product)
Expand Down Expand Up @@ -983,19 +988,24 @@ def process_groups_for_all_findings(
create_finding_groups_for_all_findings=self.create_finding_groups_for_all_findings,
**kwargs,
)
if self.push_to_jira:
if findings[0].finding_group is not None:
jira_helper.push_to_jira(findings[0].finding_group)
else:
jira_helper.push_to_jira(findings[0])

if self.findings_groups_enabled and self.push_to_jira:
# We don't check whether finding jira sync actually applies just yet; that happens below,
# but this check at least lets us get that far
if self.push_to_jira or getattr(self.jira_instance, "finding_jira_sync", False):
object_to_push = findings[0].finding_group if findings[0].finding_group is not None else findings[0]
# Check the push_to_jira flag again to potentially short-circuit without checking for existing findings
if self.push_to_jira or is_keep_in_sync_with_jira(object_to_push, prefetched_jira_instance=self.jira_instance):
jira_helper.push_to_jira(object_to_push)
# We don't check whether finding jira sync actually applies just yet; that happens inside the loop,
# but this check at least lets us get that far
if self.findings_groups_enabled and (self.push_to_jira or getattr(self.jira_instance, "finding_jira_sync", False)):
for finding_group in {
finding.finding_group
for finding in self.reactivated_items + self.unchanged_items
if finding.finding_group is not None and not finding.is_mitigated
}:
jira_helper.push_to_jira(finding_group)
# Check the push_to_jira flag again to potentially short-circuit without checking for existing findings
if self.push_to_jira or is_keep_in_sync_with_jira(finding_group, prefetched_jira_instance=self.jira_instance):
jira_helper.push_to_jira(finding_group)

def process_results(
self,
Expand Down
6 changes: 5 additions & 1 deletion dojo/importers/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,14 @@
from django.utils import timezone
from django.utils.functional import SimpleLazyObject

from dojo.jira_link.helper import get_jira_instance
from dojo.models import (
Development_Environment,
Dojo_User,
Endpoint,
Engagement,
Finding,
JIRA_Instance,
Product_API_Scan_Configuration,
Test,
Test_Import,
Expand Down Expand Up @@ -70,7 +72,6 @@ def load_base_options(
self.lead: Dojo_User | None = self.validate_lead(*args, **kwargs)
self.minimum_severity: str = self.validate_minimum_severity(*args, **kwargs)
self.parsed_findings: list[Finding] | None = self.validate_parsed_findings(*args, **kwargs)
self.push_to_jira: bool = self.validate_push_to_jira(*args, **kwargs)
self.scan_date: datetime = self.validate_scan_date(*args, **kwargs)
self.scan_type: str = self.validate_scan_type(*args, **kwargs)
self.service: str = self.validate_service(*args, **kwargs)
Expand All @@ -80,6 +81,8 @@ def load_base_options(
self.test_title: str = self.validate_test_title(*args, **kwargs)
self.verified: bool = self.validate_verified(*args, **kwargs)
self.version: str = self.validate_version(*args, **kwargs)
# Save this for last to use engagement and test for prefetching related to Jira info
self.push_to_jira: bool = self.validate_push_to_jira(*args, **kwargs)

def load_additional_options(
self,
Expand Down Expand Up @@ -478,6 +481,7 @@ def validate_push_to_jira(
*args: list,
**kwargs: dict,
) -> bool:
self.jira_instance: JIRA_Instance | None = get_jira_instance(self.engagement or self.test)
return self.validate(
"push_to_jira",
expected_types=[bool],
Expand Down
60 changes: 31 additions & 29 deletions dojo/jira_link/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,17 +145,19 @@ def _safely_get_obj_status_for_jira(obj: Finding | Finding_Group, *, isenforced:
return status or ["Inactive"]


def is_keep_in_sync_with_jira(finding):
keep_in_sync_enabled = False
# Check if there is a jira issue that needs to be updated
jira_issue_exists = finding.has_jira_issue or (finding.finding_group and finding.finding_group.has_jira_issue)
if jira_issue_exists:
# Determine if any automatic sync should occur
jira_instance = get_jira_instance(finding)
if jira_instance:
keep_in_sync_enabled = jira_instance.finding_jira_sync

return keep_in_sync_enabled
def is_keep_in_sync_with_jira(obj: Finding | Finding_Group, prefetched_jira_instance: JIRA_Instance | None = None) -> bool:
    """
    Determine if any automatic sync should occur.

    Returns the JIRA instance's ``finding_jira_sync`` flag when *obj* already has a
    JIRA issue that could need syncing; returns False when there is no linked issue
    or no JIRA instance can be resolved for *obj*.
    """
    jira_issue_exists = False
    # Check for a jira issue on each type of object: a Finding counts as linked if
    # either it or its finding group has an issue; a Finding_Group only checks itself.
    if isinstance(obj, Finding):
        jira_issue_exists = obj.has_jira_issue or (obj.finding_group and obj.finding_group.has_jira_issue)
    elif isinstance(obj, Finding_Group):
        jira_issue_exists = obj.has_jira_issue
    # Now determine if we need to pull the jira instance to check if sync is enabled,
    # but only if there is a jira issue that would need syncing. Callers may pass
    # prefetched_jira_instance to skip the get_jira_instance() lookup entirely.
    if jira_issue_exists and (jira_instance := prefetched_jira_instance or get_jira_instance(obj)) is not None:
        return jira_instance.finding_jira_sync
    return False


# checks if a finding can be pushed to JIRA
Expand Down Expand Up @@ -225,8 +227,8 @@ def can_be_pushed_to_jira(obj, form=None):


# use_inheritance=True means get jira_project config from product if engagement itself has none
def get_jira_project(obj, *, use_inheritance=True):
if not is_jira_enabled():
def get_jira_project(obj, *, use_inheritance=True, jira_enabled: bool = False):
if not jira_enabled and not (jira_enabled := is_jira_enabled()):
return None

if obj is None:
Expand All @@ -242,19 +244,19 @@ def get_jira_project(obj, *, use_inheritance=True):
return obj.jira_project
# some old jira_issue records don't have a jira_project, so try to go via the finding instead
if (hasattr(obj, "finding") and obj.finding) or (hasattr(obj, "engagement") and obj.engagement):
return get_jira_project(obj.finding, use_inheritance=use_inheritance)
return get_jira_project(obj.finding, use_inheritance=use_inheritance, jira_enabled=jira_enabled)
return None

if isinstance(obj, Finding | Stub_Finding):
finding = obj
return get_jira_project(finding.test)
return get_jira_project(finding.test, jira_enabled=jira_enabled)

if isinstance(obj, Finding_Group):
return get_jira_project(obj.test)
return get_jira_project(obj.test, jira_enabled=jira_enabled)

if isinstance(obj, Test):
test = obj
return get_jira_project(test.engagement)
return get_jira_project(test.engagement, jira_enabled=jira_enabled)

if isinstance(obj, Engagement):
engagement = obj
Expand All @@ -269,7 +271,7 @@ def get_jira_project(obj, *, use_inheritance=True):

if use_inheritance:
logger.debug("delegating to product %s for %s", engagement.product, engagement)
return get_jira_project(engagement.product)
return get_jira_project(engagement.product, jira_enabled=jira_enabled)
logger.debug("not delegating to product %s for %s", engagement.product, engagement)
return None

Expand All @@ -286,11 +288,11 @@ def get_jira_project(obj, *, use_inheritance=True):
return None


def get_jira_instance(obj):
if not is_jira_enabled():
def get_jira_instance(obj, jira_enabled: bool = False): # noqa: FBT001, FBT002
if not jira_enabled and not (jira_enabled := is_jira_enabled()):
return None

jira_project = get_jira_project(obj)
jira_project = get_jira_project(obj, jira_enabled=jira_enabled)
if jira_project:
logger.debug("found jira_instance %s for %s", jira_project.jira_instance, obj)
return jira_project.jira_instance
Expand Down Expand Up @@ -415,17 +417,17 @@ def get_jira_finding_text(jira_instance):
return None


def has_jira_issue(obj):
def has_jira_issue(obj: Finding | Engagement | Finding_Group) -> bool:
    """Return True when a JIRA issue is linked to the given object, False otherwise."""
    linked_issue = get_jira_issue(obj)
    return linked_issue is not None


def get_jira_issue(obj):
if isinstance(obj, Finding | Engagement | Finding_Group):
try:
return obj.jira_issue
except JIRA_Issue.DoesNotExist:
return None
return None
def get_jira_issue(obj: Finding | Engagement | Finding_Group) -> JIRA_Issue | None:
    """
    Return the JIRA_Issue linked to *obj*, or None when there is none.

    This pattern is "cheaper" than the try/catch handling of the DoesNotExist exception
    that would happen if we try to access obj.jira_issue when there is none, and it also
    works with prefetch_related where the related object is None instead of a RelatedManager
    """
    # NOTE(review): getattr() only swallows AttributeError; this relies on Django's
    # RelatedObjectDoesNotExist subclassing AttributeError for reverse one-to-one
    # accessors -- confirm this still holds on Django upgrades.
    return getattr(obj, "jira_issue", None)


def has_jira_configured(obj):
Expand Down
12 changes: 12 additions & 0 deletions unittests/dojo_test_case.py
Original file line number Diff line number Diff line change
Expand Up @@ -488,12 +488,24 @@ def assert_jira_updated_map_changed(self, test_id, updated_map):
logger.debug("finding!")
self.assertNotEqual(jira_helper.get_jira_updated(finding), updated_map[finding.id])

def assert_jira_status_changed(self, finding_id: int, payload: dict, current_status_name: str, expected_status_name: str, push_to_jira: bool = True):  # noqa: FBT001, FBT002
    """Patch a finding via the API and verify its JIRA issue moved from one status to another."""
    # Sanity-check the starting JIRA status before mutating anything
    status_before = self.get_jira_issue_status(finding_id)
    self.assertEqual(current_status_name, status_before.name)
    # Apply the update (with the push_to_jira flag merged into the payload), then re-read
    self.patch_finding_api(finding_id, {"push_to_jira": push_to_jira, **payload})
    status_after = self.get_jira_issue_status(finding_id)
    self.assertEqual(expected_status_name, status_after.name)

# Toggle epic mapping on jira product
def toggle_jira_project_epic_mapping(self, obj, value):
    """Flip the enable_engagement_epic_mapping flag on obj's JIRA project and persist it."""
    jira_project = jira_helper.get_jira_project(obj)
    jira_project.enable_engagement_epic_mapping = value
    jira_project.save()

def toggle_jira_finding_sync(self, obj, value):
    """Set the finding_jira_sync flag on the JIRA instance resolved for obj and persist it."""
    jira_instance = jira_helper.get_jira_instance(obj)
    jira_instance.finding_jira_sync = value
    jira_instance.save()

# Return a list of jira issue in json format.
def get_epic_issues(self, engagement):
instance = jira_helper.get_jira_instance(engagement)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"findings": [
{
"title": "High",
"description": "test",
"date": "2025-12-01",
"severity": "High",
"component_name": "Component A"
}
]
}
Loading