Skip to content

Commit 0c7adea

Browse files
Author: I (committed)
fix(importers): keep dedup stats in sync with API responses
1 parent 44ebefb commit 0c7adea

2 files changed

Lines changed: 5 additions & 6 deletions

File tree

dojo/importers/default_importer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -157,6 +157,7 @@ def process_findings(
157157
parsed_findings: list[Finding],
158158
**kwargs: dict,
159159
) -> list[Finding]:
160+
sync_requested = kwargs.get("sync", True)
160161
# Progressive batching for chord execution
161162
post_processing_task_signatures = []
162163
current_batch_number = 1
@@ -253,7 +254,7 @@ def process_findings(
253254
post_processing_task_signatures.append(post_processing_task_signature)
254255

255256
# Check if we should launch a chord (batch full or end of findings)
256-
if we_want_async(async_user=self.user) and post_processing_task_signatures:
257+
if we_want_async(async_user=self.user, sync=sync_requested) and post_processing_task_signatures:
257258
post_processing_task_signatures, current_batch_number, _ = self.maybe_launch_post_processing_chord(
258259
post_processing_task_signatures,
259260
current_batch_number,
@@ -283,8 +284,7 @@ def process_findings(
283284
# Always perform an initial grading, even though it might get overwritten later.
284285
perform_product_grading(self.test.engagement.product)
285286

286-
sync = kwargs.get("sync", True)
287-
if not sync:
287+
if not sync_requested:
288288
return [serialize("json", [finding]) for finding in new_findings]
289289
return new_findings
290290

dojo/importers/default_reimporter.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -164,6 +164,7 @@ def process_findings(
164164
the finding may be appended to a new or existing group based upon user selection
165165
at import time
166166
"""
167+
sync_requested = kwargs.get("sync", True)
167168
self.deduplication_algorithm = self.determine_deduplication_algorithm()
168169
# Only process findings with the same service value (or None)
169170
# Even though the service values is used in the hash_code calculation,
@@ -271,7 +272,7 @@ def process_findings(
271272
post_processing_task_signatures.append(post_processing_task_signature)
272273

273274
# Check if we should launch a chord (batch full or end of findings)
274-
if we_want_async(async_user=self.user) and post_processing_task_signatures:
275+
if we_want_async(async_user=self.user, sync=sync_requested) and post_processing_task_signatures:
275276
post_processing_task_signatures, current_batch_number, _ = self.maybe_launch_post_processing_chord(
276277
post_processing_task_signatures,
277278
current_batch_number,
@@ -772,6 +773,4 @@ def calculate_unsaved_finding_hash_code(
772773
self,
773774
unsaved_finding: Finding,
774775
) -> str:
775-
# this is overridden in Pro, but will still call this via super()
776-
deduplicationLogger.debug("Calculating hash code for unsaved finding")
777776
return unsaved_finding.compute_hash_code()

0 commit comments

Comments
 (0)