Skip to content

Commit d82dfdd

Browse files
Merge branch 'dev' into reimport_fix_available
2 parents 927dbc2 + d303fea commit d82dfdd

12 files changed

Lines changed: 1530 additions & 9 deletions

File tree

.github/workflows/k8s-tests.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ jobs:
1616
# databases, broker and k8s are independent, so we don't need to test each combination
1717
# latest k8s version (https://kubernetes.io/releases/) and the oldest officially supported version
1818
# are tested (https://kubernetes.io/releases/)
19-
- k8s: 'v1.34.0' # renovate: datasource=github-releases depName=kubernetes/kubernetes versioning=loose
19+
- k8s: 'v1.34.1' # renovate: datasource=github-releases depName=kubernetes/kubernetes versioning=loose
2020
os: debian
2121
- k8s: 'v1.31.13' # Do not track with renovate as we likely want to rev this manually
2222
os: debian

.github/workflows/renovate.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,4 +21,4 @@ jobs:
2121
uses: suzuki-shunsuke/github-action-renovate-config-validator@c22827f47f4f4a5364bdba19e1fe36907ef1318e # v1.1.1
2222
with:
2323
strict: "true"
24-
validator_version: 41.169.2 # renovate: datasource=github-releases depName=renovatebot/renovate
24+
validator_version: 41.170.0 # renovate: datasource=github-releases depName=renovatebot/renovate
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
---
2+
title: "n0s1 Scanner"
3+
toc_hide: true
4+
---
5+
6+
### File Types
7+
The n0s1 parser expects a JSON report file produced by the n0s1 scanner.
8+
9+
### Sample Scan Data
10+
Sample n0s1 scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/n0s1).
11+
12+
### Link To Tool
13+
See n0s1 on GitHub: https://github.com/spark1security/n0s1
14+
15+
### Default Deduplication Hashcode Fields
16+
By default, DefectDojo identifies duplicate Findings using these [hashcode fields](https://docs.defectdojo.com/en/working_with_findings/finding_deduplication/about_deduplication/):
17+
18+
- description

docs/package-lock.json

Lines changed: 4 additions & 4 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

docs/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
},
2828
"devDependencies": {
2929
"prettier": "3.6.2",
30-
"vite": "7.1.11"
30+
"vite": "7.1.12"
3131
},
3232
"engines": {
3333
"node": ">=20.11.0"

dojo/settings/settings.dist.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1397,6 +1397,7 @@ def saml2_attrib_map_format(din):
13971397
"Cycognito Scan": ["title", "severity"],
13981398
"OpenVAS Parser v2": ["title", "severity", "vuln_id_from_tool", "endpoints"],
13991399
"Snyk Issue API Scan": ["vuln_id_from_tool", "file_path"],
1400+
"n0s1 Scanner": ["description"],
14001401
}
14011402

14021403
# Override the hardcoded settings here via the env var

dojo/tools/n0s1/__init__.py

Whitespace-only changes.

dojo/tools/n0s1/parser.py

Lines changed: 86 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,86 @@
1+
2+
import json
3+
4+
from dojo.models import Finding
5+
from dojo.tools.parser_test import ParserTest
6+
7+
8+
class N0s1Parser:
    """DefectDojo parser for JSON reports produced by the n0s1 secret scanner."""

    def get_scan_types(self):
        """Return the scan-type identifiers this parser handles."""
        return ["n0s1 Scanner"]

    def get_label_for_scan_types(self, scan_type):
        """Return the UI label for *scan_type* (same as the identifier)."""
        return scan_type

    def get_description_for_scan_types(self, scan_type):
        """Return a short description of the expected input format."""
        return "JSON output from the n0s1 scanner."

    def get_tests(self, scan_type, handle):
        """Build a single ParserTest named after the detected n0s1 sub-scanner.

        *handle* is an open file-like object containing the JSON report.
        Returns a one-element list with the populated test.
        """
        data = json.load(handle)
        subscanner = self.detect_subscanner(data)
        test = ParserTest(
            name=subscanner,
            parser_type=subscanner,
            version=data.get("tool", {}).get("version", ""),
            description=f"Scan from {subscanner}",
        )
        test.findings = self.get_findings_from_data(data)
        return [test]

    def get_findings(self, scan_file, test):
        """Parse *scan_file* (JSON) and return the list of Finding objects."""
        data = json.load(scan_file)
        return self.get_findings_from_data(data)

    def detect_subscanner(self, data):
        """Infer the scanned platform from the findings' ``platform`` field.

        Checks Confluence, then GitHub, then GitLab (first match wins when
        a report mixes platforms); falls back to plain "n0s1".
        """
        platforms = {f.get("details", {}).get("platform", "") for f in data.get("findings", {}).values()}
        if "Confluence" in platforms:
            return "n0s1 Confluence"
        if "GitHub" in platforms:
            return "n0s1 GitHub"
        if "GitLab" in platforms:
            return "n0s1 GitLab"
        return "n0s1"

    def get_findings_from_data(self, data):
        """Convert a parsed n0s1 report into deduplicated Finding objects.

        Findings are keyed by the report's per-finding ``id`` (falling back
        to the dict key); only the first occurrence of each key is kept.
        """
        # Index report-level regex rules by id so per-finding regex
        # references can be enriched with keywords/tags from the config.
        regex_configs = {}
        if "regex_config" in data and "rules" in data["regex_config"]:
            regex_configs = {rule["id"]: rule for rule in data["regex_config"]["rules"]}
        dupes = {}
        for finding_id, finding_data in data.get("findings", {}).items():
            dupe_key = finding_data.get("id", finding_id)
            # Skip duplicates up front, before any description formatting
            # work is done for an entry that would be discarded anyway.
            if dupe_key in dupes:
                continue
            details = finding_data.get("details", {})
            regex_ref = details.get("matched_regex_config", {})
            regex_id = regex_ref.get("id")
            regex_info = regex_configs.get(regex_id, {})
            # Prefer values embedded in the finding itself; fall back to the
            # report-level rule definition, then to "N/A".
            merged_regex = {
                "id": regex_id,
                "description": regex_ref.get("description", regex_info.get("description", "N/A")),
                "regex": regex_ref.get("regex", regex_info.get("regex", "N/A")),
                "keywords": regex_info.get("keywords", []),
                "tags": regex_info.get("tags", []),
            }
            title = merged_regex["id"] or "n0s1 Finding"
            description = f"**URL:** {finding_data.get('url', 'N/A')}\n"
            description += f"**Secret:** {finding_data.get('secret', 'N/A')}\n"
            description += f"**Platform:** {details.get('platform', 'N/A')}\n"
            description += f"**Ticket Field:** {details.get('ticket_field', 'N/A')}\n"
            description += f"**Regex ID:** {merged_regex['id']}\n"
            description += f"**Regex Description:** {merged_regex['description']}\n"
            description += f"**Regex Pattern:** {merged_regex['regex']}\n"
            if merged_regex["keywords"]:
                description += f"**Keywords:** {', '.join(merged_regex['keywords'])}\n"
            if merged_regex["tags"]:
                description += f"**Tags:** {', '.join(merged_regex['tags'])}\n"
            # n0s1 reports leaked secrets, so severity is fixed at High.
            dupes[dupe_key] = Finding(
                title=title,
                description=description,
                severity="High",
                dynamic_finding=True,
                static_finding=False,
                unique_id_from_tool=dupe_key,
            )
        return list(dupes.values())

requirements.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ humanize==4.14.0
2626
jira==3.10.5
2727
PyGithub==2.8.1
2828
lxml==6.0.2
29-
Markdown==3.9
29+
Markdown==3.10
3030
openpyxl==3.1.5
3131
Pillow==12.0.0 # required by django-imagekit
3232
psycopg[c]==3.2.12
@@ -62,7 +62,7 @@ django-ratelimit==4.1.0
6262
argon2-cffi==25.1.0
6363
blackduck==1.1.3
6464
pycurl==7.45.7 # Required for Celery Broker AWS (SQS) support
65-
boto3==1.40.63 # Required for Celery Broker AWS (SQS) support
65+
boto3==1.40.65 # Required for Celery Broker AWS (SQS) support
6666
netaddr==1.3.0
6767
vulners==3.1.1
6868
fontawesomefree==6.6.0

0 commit comments

Comments
 (0)