Skip to content

Commit ea2acc4

Browse files
push_to_jira: fix pushing to JIRA during import/reimport in asynchronous mode (DefectDojo#13916)
* push_to_jira: add logging * push_to_jira: add logging * push to jira: fix passing of parameters in async mode * push to jira: fix passing of parameters in async mode
1 parent df77d98 commit ea2acc4

7 files changed

Lines changed: 77 additions & 5 deletions

File tree

dojo/api_v2/serializers.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2281,6 +2281,7 @@ def process_scan(
22812281
Raises exceptions in the event of an error
22822282
"""
22832283
try:
2284+
logger.debug(f"process_scan called with context: {context}")
22842285
start_time = time.perf_counter()
22852286
importer = self.get_importer(**context)
22862287
context["test"], _, _, _, _, _, _ = importer.process_scan(
@@ -2558,6 +2559,7 @@ def process_scan(
25582559
"""
25592560
statistics_before, statistics_delta = None, None
25602561
try:
2562+
logger.debug(f"process_scan called with context: {context}")
25612563
start_time = time.perf_counter()
25622564
if test := context.get("test"):
25632565
statistics_before = test.statistics

dojo/api_v2/views.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2515,7 +2515,7 @@ def perform_create(self, serializer):
25152515
jira_driver = engagement or (product or None)
25162516
if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None):
25172517
push_to_jira = push_to_jira or jira_project.push_all_issues
2518-
# logger.debug(f"push_to_jira: {push_to_jira}")
2518+
25192519
serializer.save(push_to_jira=push_to_jira)
25202520

25212521
def get_queryset(self):

dojo/engagement/views.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -936,6 +936,30 @@ def import_findings(
936936
) -> str | None:
937937
"""Attempt to import with all the supplied information"""
938938
try:
939+
# Log only user-entered form values, excluding internal objects
940+
user_values = {
941+
"scan_type": context.get("scan_type"),
942+
"scan_date": context.get("scan_date"),
943+
"minimum_severity": context.get("minimum_severity"),
944+
"active": context.get("active"),
945+
"verified": context.get("verified"),
946+
"test_title": context.get("test_title"),
947+
"tags": context.get("tags"),
948+
"version": context.get("version"),
949+
"branch_tag": context.get("branch_tag"),
950+
"build_id": context.get("build_id"),
951+
"commit_hash": context.get("commit_hash"),
952+
"service": context.get("service"),
953+
"close_old_findings": context.get("close_old_findings"),
954+
"apply_tags_to_findings": context.get("apply_tags_to_findings"),
955+
"apply_tags_to_endpoints": context.get("apply_tags_to_endpoints"),
956+
"close_old_findings_product_scope": context.get("close_old_findings_product_scope"),
957+
"group_by": context.get("group_by"),
958+
"create_finding_groups_for_all_findings": context.get("create_finding_groups_for_all_findings"),
959+
"push_to_jira": context.get("push_to_jira"),
960+
"push_all_jira_issues": context.get("push_all_jira_issues"),
961+
}
962+
logger.debug(f"import_findings called with user values: {user_values}")
939963
importer_client = self.get_importer(context)
940964
context["test"], _, finding_count, closed_finding_count, _, _, _ = importer_client.process_scan(
941965
context.pop("scan", None),

dojo/finding/deduplication.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ def get_finding_models_for_deduplication(finding_ids):
2727
2828
"""
2929
if not finding_ids:
30+
logger.debug("get_finding_models_for_deduplication called with no finding_ids")
3031
return []
3132

3233
return list(
@@ -543,6 +544,7 @@ def dedupe_batch_of_findings(findings, *args, **kwargs):
543544
return batch_dedupe_method(findings, *args, **kwargs)
544545

545546
if not findings:
547+
logger.debug("dedupe_batch_of_findings called with no findings")
546548
return None
547549

548550
enabled = System_Settings.objects.get().enable_deduplication

dojo/finding/helper.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -470,22 +470,30 @@ def post_process_finding_save_internal(finding, dedupe_option=True, rules_option
470470
@app.task
471471
def post_process_findings_batch_signature(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True,
472472
issue_updater_option=True, push_to_jira=False, user=None, **kwargs):
473-
return post_process_findings_batch(finding_ids, dedupe_option, rules_option, product_grading_option,
474-
issue_updater_option, push_to_jira, user, **kwargs)
473+
return post_process_findings_batch(finding_ids, *args, dedupe_option=dedupe_option, rules_option=rules_option, product_grading_option=product_grading_option, issue_updater_option=issue_updater_option, push_to_jira=push_to_jira, user=user, **kwargs)
474+
# Pass arguments as keyword arguments to ensure Celery properly serializes them
475475

476476

477477
@dojo_async_task
478478
@app.task
479479
def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True,
480480
issue_updater_option=True, push_to_jira=False, user=None, **kwargs):
481481

482+
logger.debug(
483+
f"post_process_findings_batch called: finding_ids_count={len(finding_ids) if finding_ids else 0}, "
484+
f"args={args}, dedupe_option={dedupe_option}, rules_option={rules_option}, "
485+
f"product_grading_option={product_grading_option}, issue_updater_option={issue_updater_option}, "
486+
f"push_to_jira={push_to_jira}, user={user.id if user else None}, kwargs={kwargs}",
487+
)
482488
if not finding_ids:
483489
return
484490

485491
system_settings = System_Settings.objects.get()
486492

487493
# use list() to force a complete query execution and related objects to be loaded once
494+
logger.debug(f"getting finding models for batch deduplication with: {len(finding_ids)} findings")
488495
findings = get_finding_models_for_deduplication(finding_ids)
496+
logger.debug(f"found {len(findings)} findings for batch deduplication")
489497

490498
if not findings:
491499
logger.debug(f"no findings found for batch deduplication with IDs: {finding_ids}")
@@ -517,6 +525,8 @@ def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_op
517525
jira_helper.push_to_jira(finding)
518526
else:
519527
jira_helper.push_to_jira(finding.finding_group)
528+
else:
529+
logger.debug("push_to_jira is False, not pushing to JIRA")
520530

521531

522532
@receiver(pre_delete, sender=Finding)

dojo/importers/default_importer.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -238,22 +238,30 @@ def process_findings(
238238
# Categorize this finding as a new one
239239
new_findings.append(finding)
240240
# all data is already saved on the finding, we only need to trigger post processing in batches
241+
logger.debug("process_findings: self.push_to_jira=%s, self.findings_groups_enabled=%s, self.group_by=%s",
242+
self.push_to_jira, self.findings_groups_enabled, self.group_by)
241243
push_to_jira = self.push_to_jira and (not self.findings_groups_enabled or not self.group_by)
244+
logger.debug("process_findings: computed push_to_jira=%s", push_to_jira)
242245
batch_finding_ids.append(finding.id)
243246

244247
# If batch is full or we're at the end, dispatch one batched task
245248
if len(batch_finding_ids) >= batch_max_size or is_final_finding:
246249
finding_ids_batch = list(batch_finding_ids)
247250
batch_finding_ids.clear()
251+
logger.debug("process_findings: dispatching batch with push_to_jira=%s (batch_size=%d, is_final=%s)",
252+
push_to_jira, len(finding_ids_batch), is_final_finding)
248253
if we_want_async(async_user=self.user):
249-
finding_helper.post_process_findings_batch_signature(
254+
signature = finding_helper.post_process_findings_batch_signature(
250255
finding_ids_batch,
251256
dedupe_option=True,
252257
rules_option=True,
253258
product_grading_option=True,
254259
issue_updater_option=True,
255260
push_to_jira=push_to_jira,
256-
)()
261+
)
262+
logger.debug("process_findings: signature created with push_to_jira=%s, signature.kwargs=%s",
263+
push_to_jira, signature.kwargs)
264+
signature()
257265
else:
258266
finding_helper.post_process_findings_batch(
259267
finding_ids_batch,
@@ -279,6 +287,8 @@ def process_findings(
279287
jira_helper.push_to_jira(findings[0].finding_group)
280288
else:
281289
jira_helper.push_to_jira(findings[0])
290+
else:
291+
logger.debug("push_to_jira is False, not pushing to JIRA")
282292

283293
# Note: All chord batching is now handled within the loop above
284294

dojo/test/views.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -964,6 +964,30 @@ def reimport_findings(
964964
) -> str | None:
965965
"""Attempt to import with all the supplied information"""
966966
try:
967+
# Log only user-entered form values, excluding internal objects
968+
user_values = {
969+
"scan_type": context.get("scan_type"),
970+
"scan_date": context.get("scan_date"),
971+
"minimum_severity": context.get("minimum_severity"),
972+
"active": context.get("active"),
973+
"verified": context.get("verified"),
974+
"tags": context.get("tags"),
975+
"version": context.get("version"),
976+
"branch_tag": context.get("branch_tag"),
977+
"build_id": context.get("build_id"),
978+
"commit_hash": context.get("commit_hash"),
979+
"service": context.get("service"),
980+
"close_old_findings": context.get("close_old_findings"),
981+
"apply_tags_to_findings": context.get("apply_tags_to_findings"),
982+
"apply_tags_to_endpoints": context.get("apply_tags_to_endpoints"),
983+
"close_old_findings_product_scope": context.get("close_old_findings_product_scope"),
984+
"group_by": context.get("group_by"),
985+
"create_finding_groups_for_all_findings": context.get("create_finding_groups_for_all_findings"),
986+
"push_to_jira": context.get("push_to_jira"),
987+
"push_all_jira_issues": context.get("push_all_jira_issues"),
988+
"do_not_reactivate": context.get("do_not_reactivate"),
989+
}
990+
logger.debug(f"reimport_findings called with user values: {user_values}")
967991
importer_client = self.get_reimporter(context)
968992
(
969993
context["test"],

0 commit comments

Comments
 (0)