From c8d1d8f7dce0ebc276042601584263099080750f Mon Sep 17 00:00:00 2001 From: Quirin Ziessler Date: Sat, 30 Dec 2023 13:02:06 +0100 Subject: [PATCH 01/13] improvement for wazuh importer --- dojo/tools/wazuh/parser.py | 130 +++++++++++++++++++------------------ 1 file changed, 66 insertions(+), 64 deletions(-) diff --git a/dojo/tools/wazuh/parser.py b/dojo/tools/wazuh/parser.py index b1ea19d836b..392e4227bc4 100644 --- a/dojo/tools/wazuh/parser.py +++ b/dojo/tools/wazuh/parser.py @@ -1,10 +1,14 @@ +import hashlib import json -from dojo.models import Finding +from dojo.models import Finding, Endpoint class WazuhParser(object): """ - Use Wazuh Vulnerability API to retrieve the findings + IMPORTANT: Please use the script available here https://github.com/quirinziessler/wazuh-findings-exporter to generate + the report for DefectDojo. This script fetches the findings from wazuh based on a single Wazuh group. + In DD please configure on engagement per group and upload the report. + The vulnerabilities with condition "Package unfixed" are skipped because there is no fix out yet. 
https://github.com/wazuh/wazuh/issues/14560 """ @@ -18,73 +22,71 @@ def get_label_for_scan_types(self, scan_type): def get_description_for_scan_types(self, scan_type): return "Wazuh" - def get_findings(self, filename, test): - data = json.load(filename) + def get_findings(self, file, test): + data = json.load(file) + + if not data: + return [] + # Detect duplications dupes = dict() - try: - vulnerability = data[next(iter(data.keys()))]["affected_items"] - except (KeyError, StopIteration): - return list() + # Loop through each element in the list + for entry in data: + vulnerabilities = entry.get("data", {}).get("affected_items", []) + for item in vulnerabilities: + if ( + item["condition"] != "Package unfixed" + and item["severity"] != "Untriaged" + ): + id = item.get("cve") + package_name = item.get("name") + package_version = item.get("version") + description = item.get("condition") + severity = item.get("severity").capitalize() + agent_ip = item.get("agent_ip") + links = item.get("external_references") + cvssv3_score = item.get("cvss3_score") + publish_date = item.get("published") + agent_name = item.get("agent_name") - if vulnerability is None: - return list() + if links: + references = "\n".join(links) + else: + references = None - for item in vulnerability: - if ( - item["condition"] != "Package unfixed" - and item["severity"] != "Untriaged" - ): - id = item.get("cve") - package_name = item.get("name") - package_version = item.get("version") - description = item.get("condition") - if item.get("severity") == "Untriaged": - severity = "Info" - else: - severity = item.get("severity") - if item.get("status") == "VALID": - active = True - else: - active = False - links = item.get("external_references") - title = ( - item.get("title") + " (version: " + package_version + ")" - ) - severity = item.get("severity", "info").capitalize() - if links: - references = "" - for link in links: - references += f"{link}\n" - else: - references = None - - if id and 
id.startswith("CVE"): - vulnerability_id = id - else: - vulnerability_id = None + title = ( + item.get("title") + " (version: " + package_version + ")" + ) + dupe_key = title + id + agent_name + package_name + package_version + dupe_key = hashlib.sha256(dupe_key.encode('utf-8')).hexdigest() - dupe_key = title + if dupe_key in dupes: + find = dupes[dupe_key] + else: + dupes[dupe_key] = True - if dupe_key in dupes: - find = dupes[dupe_key] - else: - dupes[dupe_key] = True + find = Finding( + title=title, + test=test, + description=description, + severity=severity, + mitigation="mitigation", + references=references, + static_finding=True, + component_name=package_name, + component_version=package_version, + cvssv3_score = cvssv3_score, + publish_date = publish_date, + unique_id_from_tool = dupe_key, + ) + # in some cases the agent_ip is not the perfect way on how to identify a host. Thus prefer the agent_name, if existant. + if agent_ip and agent_name: + find.unsaved_endpoints = [Endpoint(host=agent_name)] + elif agent_ip: + find.unsaved_endpoints = [Endpoint(host=agent_ip)] + elif agent_name: + find.unsaved_endpoints = [Endpoint(host=agent_name)] + dupes[dupe_key] = find - find = Finding( - title=title, - test=test, - description=description, - severity=severity, - active=active, - mitigation="mitigation", - references=references, - static_finding=True, - component_name=package_name, - component_version=package_version, - ) - if vulnerability_id: - find.unsaved_vulnerability_ids = [vulnerability_id] - dupes[dupe_key] = find - return list(dupes.values()) + return list(dupes.values()) \ No newline at end of file From f1cc6475e192a331973e3dba615cd8da8c4f3593 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Sat, 30 Dec 2023 14:36:06 +0100 Subject: [PATCH 02/13] :wrench: change on dedupe for Wazuh --- dojo/settings/settings.dist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 6c543f69c99..6a423a68ebf 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1255,7 +1255,7 @@ def saml2_attrib_map_format(dict): 'NeuVector (compliance)': ['title', 'vuln_id_from_tool', 'description'], 'Wpscan': ['title', 'description', 'severity'], 'Popeye Scan': ['title', 'description'], - 'Wazuh Scan': ['title'], + 'Wazuh Scan': ['title', 'severity', 'endpoints'], 'Nuclei Scan': ['title', 'cwe', 'severity'], 'KubeHunter Scan': ['title', 'description'], 'kube-bench Scan': ['title', 'vuln_id_from_tool', 'description'], From e15b930184ceeffc9263b072d76285ef830a22a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Sat, 30 Dec 2023 14:39:24 +0100 Subject: [PATCH 03/13] :wrench: change on dedupe for Wazuh --- dojo/settings/settings.dist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 6a423a68ebf..2db2fa21f5e 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1255,7 +1255,6 @@ def saml2_attrib_map_format(dict): 'NeuVector (compliance)': ['title', 'vuln_id_from_tool', 'description'], 'Wpscan': ['title', 'description', 'severity'], 'Popeye Scan': ['title', 'description'], - 'Wazuh Scan': ['title', 'severity', 'endpoints'], 'Nuclei Scan': ['title', 'cwe', 'severity'], 'KubeHunter Scan': ['title', 'description'], 'kube-bench Scan': ['title', 'vuln_id_from_tool', 'description'], @@ -1469,6 +1468,7 @@ def saml2_attrib_map_format(dict): 'Threagile risks report': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, 'Humble Json Importer': DEDUPE_ALGO_HASH_CODE, 'MSDefender API': DEDUPE_ALGO_HASH_CODE, + 'Wazuh Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, } # Override the hardcoded settings here via the env var From ca51d5c73edaa1dea7f740742916bdb8a659ff35 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Sat, 30 Dec 2023 14:49:09 +0100 Subject: [PATCH 04/13] :memo: --- docs/content/en/integrations/parsers/file/wazuh.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/en/integrations/parsers/file/wazuh.md b/docs/content/en/integrations/parsers/file/wazuh.md index bbf191840ab..5bb9b4b4f98 100644 --- a/docs/content/en/integrations/parsers/file/wazuh.md +++ b/docs/content/en/integrations/parsers/file/wazuh.md @@ -2,4 +2,4 @@ title: "Wazuh Scanner" toc_hide: true --- -Import JSON report. +Import findings from Wazuh. The export from wazuh should be done via the script [available here](https://github.com/quirinziessler/wazuh-findings-exporter). The script fetches the findings by Wazuh client groups and saves them as json, ready for upload. Have in mind to adjust the max file size via "DD_SCAN_FILE_MAX_SIZE" if the file is larger than the default value of 100MB. \ No newline at end of file From 800ae30a41feec644c198c8e4df45ff1d1fcdb05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Sat, 30 Dec 2023 14:50:33 +0100 Subject: [PATCH 05/13] :pencil2: --- dojo/tools/wazuh/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/tools/wazuh/parser.py b/dojo/tools/wazuh/parser.py index 392e4227bc4..ef22bafd830 100644 --- a/dojo/tools/wazuh/parser.py +++ b/dojo/tools/wazuh/parser.py @@ -7,7 +7,7 @@ class WazuhParser(object): """ IMPORTANT: Please use the script available here https://github.com/quirinziessler/wazuh-findings-exporter to generate the report for DefectDojo. This script fetches the findings from wazuh based on a single Wazuh group. - In DD please configure on engagement per group and upload the report. + In DD please configure one engagement per group and upload the report. 
The vulnerabilities with condition "Package unfixed" are skipped because there is no fix out yet. https://github.com/wazuh/wazuh/issues/14560 From 8e5048c77f0aebd03aefe9b7dec2822901bf709f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= Date: Mon, 15 Jan 2024 17:02:31 +0100 Subject: [PATCH 06/13] :memo: --- docs/content/en/integrations/parsers/file/wazuh.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/content/en/integrations/parsers/file/wazuh.md b/docs/content/en/integrations/parsers/file/wazuh.md index 5bb9b4b4f98..13920a7e7f5 100644 --- a/docs/content/en/integrations/parsers/file/wazuh.md +++ b/docs/content/en/integrations/parsers/file/wazuh.md @@ -2,4 +2,10 @@ title: "Wazuh Scanner" toc_hide: true --- -Import findings from Wazuh. The export from wazuh should be done via the script [available here](https://github.com/quirinziessler/wazuh-findings-exporter). The script fetches the findings by Wazuh client groups and saves them as json, ready for upload. Have in mind to adjust the max file size via "DD_SCAN_FILE_MAX_SIZE" if the file is larger than the default value of 100MB. \ No newline at end of file + +Import findings from Wazuh. The export from Wazuh can be done via 2 ways. Choose the one which you prefer. + +- export the Wazuh findings from API and upload them to DefectDojo as json file. This method may be the easiest one but does export all known vulnerabilities at once. It is not possible to sort them after clients or any other categories. You will receive all vulnerabilities in one engagement. +- export the findings via the script [available here](https://github.com/quirinziessler/wazuh-findings-exporter). The script fetches the findings by Wazuh client groups and saves them as json, ready for upload. You will receive one file per group allowing you to separate the clients via engagements in Wazuh. It also exports the endpoints hostname and displays them in DefectDojo UI. 
+ +Independent of your above choice: Have in mind to adjust the max file size via "DD_SCAN_FILE_MAX_SIZE" if you see files larger than the default value of 100MB. Depending on the amount and category of integrated devices, the file size jumps rapidly. \ No newline at end of file From 0bd516092801e30fdb6ac3b5947d983d91ca8278 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= Date: Mon, 15 Jan 2024 19:39:19 +0100 Subject: [PATCH 07/13] :memo: --- dojo/tools/wazuh/parser.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/dojo/tools/wazuh/parser.py b/dojo/tools/wazuh/parser.py index ef22bafd830..c550a8647c8 100644 --- a/dojo/tools/wazuh/parser.py +++ b/dojo/tools/wazuh/parser.py @@ -2,13 +2,8 @@ import json from dojo.models import Finding, Endpoint - class WazuhParser(object): """ - IMPORTANT: Please use the script available here https://github.com/quirinziessler/wazuh-findings-exporter to generate - the report for DefectDojo. This script fetches the findings from wazuh based on a single Wazuh group. - In DD please configure one engagement per group and upload the report. - The vulnerabilities with condition "Package unfixed" are skipped because there is no fix out yet. https://github.com/wazuh/wazuh/issues/14560 """ @@ -76,9 +71,9 @@ def get_findings(self, file, test): static_finding=True, component_name=package_name, component_version=package_version, - cvssv3_score = cvssv3_score, - publish_date = publish_date, - unique_id_from_tool = dupe_key, + cvssv3_score=cvssv3_score, + publish_date=publish_date, + unique_id_from_tool=dupe_key, ) # in some cases the agent_ip is not the perfect way on how to identify a host. Thus prefer the agent_name, if existant. 
if agent_ip and agent_name: From e39ecfc9ca9d71d3b9e32d447924b867c9fa314f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= Date: Mon, 15 Jan 2024 19:45:18 +0100 Subject: [PATCH 08/13] flake8 --- dojo/tools/wazuh/parser.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dojo/tools/wazuh/parser.py b/dojo/tools/wazuh/parser.py index c550a8647c8..43e715ac510 100644 --- a/dojo/tools/wazuh/parser.py +++ b/dojo/tools/wazuh/parser.py @@ -2,6 +2,7 @@ import json from dojo.models import Finding, Endpoint + class WazuhParser(object): """ The vulnerabilities with condition "Package unfixed" are skipped because there is no fix out yet. @@ -84,4 +85,4 @@ def get_findings(self, file, test): find.unsaved_endpoints = [Endpoint(host=agent_name)] dupes[dupe_key] = find - return list(dupes.values()) \ No newline at end of file + return list(dupes.values()) From 377adf2959d715cc395ecae07096979b181e436c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Wed, 17 Jan 2024 20:57:45 +0100 Subject: [PATCH 09/13] :tada: recoded wazuh importer to support endpoints --- dojo/tools/wazuh/parser.py | 118 ++++++++++++++++++++----------------- 1 file changed, 63 insertions(+), 55 deletions(-) diff --git a/dojo/tools/wazuh/parser.py b/dojo/tools/wazuh/parser.py index 43e715ac510..d70c9dbded6 100644 --- a/dojo/tools/wazuh/parser.py +++ b/dojo/tools/wazuh/parser.py @@ -28,61 +28,69 @@ def get_findings(self, file, test): dupes = dict() # Loop through each element in the list - for entry in data: - vulnerabilities = entry.get("data", {}).get("affected_items", []) - for item in vulnerabilities: - if ( - item["condition"] != "Package unfixed" - and item["severity"] != "Untriaged" - ): - id = item.get("cve") - package_name = item.get("name") - package_version = item.get("version") - description = item.get("condition") - severity = item.get("severity").capitalize() - agent_ip = item.get("agent_ip") - 
links = item.get("external_references") - cvssv3_score = item.get("cvss3_score") - publish_date = item.get("published") - agent_name = item.get("agent_name") - - if links: - references = "\n".join(links) - else: - references = None - - title = ( - item.get("title") + " (version: " + package_version + ")" + vulnerabilities = data.get("data", {}).get("affected_items", []) + for item in vulnerabilities: + if ( + item["condition"] != "Package unfixed" + and item["severity"] != "Untriaged" + ): + cve = item.get("cve") + package_name = item.get("name") + package_version = item.get("version") + description = item.get("condition") + severity = item.get("severity").capitalize() + agent_ip = item.get("agent_ip") + links = item.get("external_references") + cvssv3_score = item.get("cvss3_score") + publish_date = item.get("published") + agent_name = item.get("agent_name") + agent_ip = item.get("agent_ip") + detection_time = item.get("detection_time") + + if links: + references = "\n".join(links) + else: + references = None + + title = ( + item.get("title") + " (version: " + package_version + ")" + ) + + if agent_name: + dupe_key = title + cve + agent_name + package_name + package_version + else: + dupe_key = title + cve + package_name + package_version + dupe_key = hashlib.sha256(dupe_key.encode('utf-8')).hexdigest() + + if dupe_key in dupes: + find = dupes[dupe_key] + else: + dupes[dupe_key] = True + + find = Finding( + title=title, + test=test, + description=description, + severity=severity, + references=references, + static_finding=True, + component_name=package_name, + component_version=package_version, + cvssv3_score=cvssv3_score, + publish_date=publish_date, + unique_id_from_tool=dupe_key, + date=detection_time, ) - dupe_key = title + id + agent_name + package_name + package_version - dupe_key = hashlib.sha256(dupe_key.encode('utf-8')).hexdigest() - - if dupe_key in dupes: - find = dupes[dupe_key] - else: - dupes[dupe_key] = True - - find = Finding( - title=title, - 
test=test, - description=description, - severity=severity, - mitigation="mitigation", - references=references, - static_finding=True, - component_name=package_name, - component_version=package_version, - cvssv3_score=cvssv3_score, - publish_date=publish_date, - unique_id_from_tool=dupe_key, - ) - # in some cases the agent_ip is not the perfect way on how to identify a host. Thus prefer the agent_name, if existant. - if agent_ip and agent_name: - find.unsaved_endpoints = [Endpoint(host=agent_name)] - elif agent_ip: - find.unsaved_endpoints = [Endpoint(host=agent_ip)] - elif agent_name: - find.unsaved_endpoints = [Endpoint(host=agent_name)] - dupes[dupe_key] = find + + # in some cases the agent_ip is not the perfect way on how to identify a host. Thus prefer the agent_name, if existent. + if agent_name: + find.unsaved_endpoints = [Endpoint(host=agent_name)] + elif agent_ip: + find.unsaved_endpoints = [Endpoint(host=agent_ip)] + + if id: + find.unsaved_vulnerability_ids = cve + + dupes[dupe_key] = find return list(dupes.values()) From 9ff13dc8afe32873b56895348f21c1c5616525a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Wed, 17 Jan 2024 20:58:20 +0100 Subject: [PATCH 10/13] :white_check_mark: adjusted unittests --- .../wazuh/one_finding_with_endpoint.json | 29 +++++++++++++++++++ unittests/tools/test_wazuh_parser.py | 21 +++++++++++++- 2 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 unittests/scans/wazuh/one_finding_with_endpoint.json diff --git a/unittests/scans/wazuh/one_finding_with_endpoint.json b/unittests/scans/wazuh/one_finding_with_endpoint.json new file mode 100644 index 00000000000..5363d0f13ed --- /dev/null +++ b/unittests/scans/wazuh/one_finding_with_endpoint.json @@ -0,0 +1,29 @@ +{ + "data": { + "affected_items": [ + { + "name": "asdf", + "version": "1", + "cve": "CVE-1234-1234", + "cvss2_score": 0, + "title": "CVE-1234-1234 affects curl", + "published": 
"2023-12-07", + "architecture": "amd64", + "status": "VALID", + "cvss3_score": 6.5, + "external_references": [ + "https://nvd.nist.gov/vuln/detail/CVE-1234-1234", + "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-1234-1234" + ], + "updated": "2023-12-24", + "severity": "Medium", + "type": "PACKAGE", + "detection_time": "2023-12-13T22:11:57+00:00", + "condition": "Package less than 2", + "agent_ip": "111.111.111.111", + "agent_name": "agent-1" + } + ], + "total_affected_items": 1 + } +} \ No newline at end of file diff --git a/unittests/tools/test_wazuh_parser.py b/unittests/tools/test_wazuh_parser.py index 51c026304c4..1b64ed6dd83 100644 --- a/unittests/tools/test_wazuh_parser.py +++ b/unittests/tools/test_wazuh_parser.py @@ -20,7 +20,10 @@ def test_parse_one_finding(self): endpoint.clean() self.assertEqual(1, len(findings)) self.assertEqual("Medium", finding.severity) - self.assertEqual("CVE-1234-123123", finding.unsaved_vulnerability_ids[0]) + self.assertEqual("CVE-1234-123123", finding.unsaved_vulnerability_ids) + self.assertEqual("asdf", finding.component_name) + self.assertEqual("4.3.1", finding.component_version) + self.assertEqual(5.5, finding.cvssv3_score) def test_parse_many_finding(self): testfile = open("unittests/scans/wazuh/many_findings.json") @@ -30,3 +33,19 @@ def test_parse_many_finding(self): for endpoint in finding.unsaved_endpoints: endpoint.clean() self.assertEqual(6, len(findings)) + + def test_parse_one_finding_with_endpoint(self): + testfile = open("unittests/scans/wazuh/one_finding_with_endpoint.json") + parser = WazuhParser() + findings = parser.get_findings(testfile, Test()) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(1, len(findings)) + self.assertEqual("Medium", finding.severity) + self.assertEqual("CVE-1234-1234", finding.unsaved_vulnerability_ids) + self.assertEqual(6.5, finding.cvssv3_score) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual("agent-1", 
endpoint.host) + self.assertEqual("asdf", finding.component_name) + self.assertEqual("1", finding.component_version) From 9fb13a793f35022c47879380738f9a93429a0d3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Wed, 17 Jan 2024 21:37:50 +0100 Subject: [PATCH 11/13] :memo: --- .../en/integrations/parsers/file/wazuh.md | 45 +++++++++++++++++-- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/docs/content/en/integrations/parsers/file/wazuh.md b/docs/content/en/integrations/parsers/file/wazuh.md index 13920a7e7f5..a151ffb0ed6 100644 --- a/docs/content/en/integrations/parsers/file/wazuh.md +++ b/docs/content/en/integrations/parsers/file/wazuh.md @@ -3,9 +3,48 @@ title: "Wazuh Scanner" toc_hide: true --- -Import findings from Wazuh. The export from Wazuh can be done via 2 ways. Choose the one which you prefer. +### File Types +DefectDojo parser accepts a .json file from [Wazuh](https://wazuh.com). The export from Wazuh can be done via 2 ways. Choose the one which you prefer. -- export the Wazuh findings from API and upload them to DefectDojo as json file. This method may be the easiest one but does export all known vulnerabilities at once. It is not possible to sort them after clients or any other categories. You will receive all vulnerabilities in one engagement. +- export the Wazuh findings from API and upload them to DefectDojo. This method may be the easiest one but does export all known vulnerabilities at once. It is not possible to sort them after clients or any other categories. You will receive all vulnerabilities in one engagement. It also does not output the endpoint of a finding. - export the findings via the script [available here](https://github.com/quirinziessler/wazuh-findings-exporter). The script fetches the findings by Wazuh client groups and saves them as json, ready for upload. 
You will receive one file per group allowing you to separate the clients via engagements in Wazuh. It also exports the endpoints hostname and displays them in DefectDojo UI. -Independent of your above choice: Have in mind to adjust the max file size via "DD_SCAN_FILE_MAX_SIZE" if you see files larger than the default value of 100MB. Depending on the amount and category of integrated devices, the file size jumps rapidly. \ No newline at end of file +Independent of your above choice: Have in mind to adjust the max file size via "DD_SCAN_FILE_MAX_SIZE" if you see files larger than the default value of 100MB. Depending on the amount and category of integrated devices, the file size jumps rapidly. + +### Acceptable JSON Format + +Parser expects a .json file structured as below. + +~~~ +{ + "data": { + "affected_items": [ + { + "architecture": "amd64", + "condition": "Package less than 4.3.2", + "cve": "CVE-1234-123123", + "cvss2_score": 0, + "cvss3_score": 5.5, + "detection_time": "2023-02-08T13:55:10Z", + "external_references": [ + "https://nvd.nist.gov/vuln/detail/CVE-YYYY-XXXXX", + "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-YYYY-XXXXX" + ], + "name": "asdf", + "published": "2022-09-01", + "severity": "Medium", + "status": "VALID", + "title": "CVE-YYYY-XXXXX affects asdf", + "type": "PACKAGE", + "updated": "2022-09-07", + "version": "4.3.1" + } + ], + "failed_items": [], + "total_affected_items": 1, + "total_failed_items": 0 + }, + "error": 0, + "message": "All selected vulnerabilities were returned" +} +~~~ \ No newline at end of file From 840d2ba928ffceb4103d02d6c584d3f07d2ad6cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Wed, 17 Jan 2024 21:39:26 +0100 Subject: [PATCH 12/13] :pencil2: --- docs/content/en/integrations/parsers/file/wazuh.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/content/en/integrations/parsers/file/wazuh.md 
b/docs/content/en/integrations/parsers/file/wazuh.md index a151ffb0ed6..2e19282b62c 100644 --- a/docs/content/en/integrations/parsers/file/wazuh.md +++ b/docs/content/en/integrations/parsers/file/wazuh.md @@ -4,6 +4,7 @@ toc_hide: true --- ### File Types + DefectDojo parser accepts a .json file from [Wazuh](https://wazuh.com). The export from Wazuh can be done via 2 ways. Choose the one which you prefer. - export the Wazuh findings from API and upload them to DefectDojo. This method may be the easiest one but does export all known vulnerabilities at once. It is not possible to sort them after clients or any other categories. You will receive all vulnerabilities in one engagement. It also does not output the endpoint of a finding. From 09862a433516ced57fe9051df201fe2f0368907d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Quirin=20Zie=C3=9Fler?= <19915467+quirinziessler@users.noreply.github.com> Date: Wed, 17 Jan 2024 21:39:43 +0100 Subject: [PATCH 13/13] :pencil2: --- docs/content/en/integrations/parsers/file/wazuh.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/content/en/integrations/parsers/file/wazuh.md b/docs/content/en/integrations/parsers/file/wazuh.md index 2e19282b62c..fcfcb95b519 100644 --- a/docs/content/en/integrations/parsers/file/wazuh.md +++ b/docs/content/en/integrations/parsers/file/wazuh.md @@ -4,7 +4,6 @@ toc_hide: true --- ### File Types - DefectDojo parser accepts a .json file from [Wazuh](https://wazuh.com). The export from Wazuh can be done via 2 ways. Choose the one which you prefer. - export the Wazuh findings from API and upload them to DefectDojo. This method may be the easiest one but does export all known vulnerabilities at once. It is not possible to sort them after clients or any other categories. You will receive all vulnerabilities in one engagement. It also does not output the endpoint of a finding. @@ -13,7 +12,6 @@ DefectDojo parser accepts a .json file from [Wazuh](https://wazuh.com). 
The expo Independent of your above choice: Have in mind to adjust the max file size via "DD_SCAN_FILE_MAX_SIZE" if you see files larger than the default value of 100MB. Depending on the amount and category of integrated devices, the file size jumps rapidly. ### Acceptable JSON Format - Parser expects a .json file structured as below. ~~~