Ruff: Add and fix RUF015
kiblik committed Feb 1, 2025
1 parent e0de953 commit 895b39b
Showing 15 changed files with 63 additions and 48 deletions.
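
For context, Ruff's RUF015 rule flags code that materializes an entire iterable (for example `list(x)[0]`) just to read its first element, and recommends `next(iter(x))` instead. The sketch below is illustrative only: it shows the before/after shape of the change and the `first_elem` helper this commit adds to `dojo/utils.py`; the sample `widget` dict is hypothetical.

```python
# Illustrative sketch of the RUF015 fix applied throughout this commit.
def first_elem(x):
    # Same result as list(x)[0], without building the intermediate list.
    return next(iter(x))

widget = {"finding-list": [{"name": "severity", "value": "High"}]}  # hypothetical data

widget_type_before = list(widget.keys())[0]    # pattern flagged by RUF015
widget_type_after = first_elem(widget.keys())  # pattern used after this commit

assert widget_type_before == widget_type_after == "finding-list"
```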
58 changes: 30 additions & 28 deletions dojo/reports/widgets.py
@@ -19,7 +19,7 @@
)
from dojo.forms import CustomReportOptionsForm
from dojo.models import Endpoint, Finding
from dojo.utils import get_page_items, get_system_setting, get_words_for_field
from dojo.utils import first_elem, get_page_items, get_system_setting, get_words_for_field

"""
Widgets are content sections that can be included on reports. The report builder will allow any number of widgets
@@ -369,10 +369,12 @@ def report_widget_factory(json_data=None, request=None, user=None, finding_notes
widgets = json.loads(json_data)

for idx, widget in enumerate(widgets):
if list(widget.keys())[0] == "page-break":
selected_widgets[list(widget.keys())[0] + "-" + str(idx)] = PageBreak()
first_widget_keys = first_elem(widget.keys())

if list(widget.keys())[0] == "endpoint-list":
if first_widget_keys == "page-break":
selected_widgets[first_widget_keys + "-" + str(idx)] = PageBreak()

if first_widget_keys == "endpoint-list":
endpoints = Endpoint.objects.filter(finding__active=True,
finding__false_p=False,
finding__duplicate=False,
@@ -384,7 +386,7 @@ def report_widget_factory(json_data=None, request=None, user=None, finding_notes
endpoints = endpoints.distinct()

d = QueryDict(mutable=True)
for item in widget.get(list(widget.keys())[0]):
for item in widget.get(first_widget_keys):
if item["name"] in d:
d.appendlist(item["name"], item["value"])
else:
@@ -398,12 +400,12 @@ def report_widget_factory(json_data=None, request=None, user=None, finding_notes
endpoints = EndpointList(request=request, endpoints=endpoints, finding_notes=finding_notes,
finding_images=finding_images, host=host, user_id=user_id)

selected_widgets[list(widget.keys())[0] + "-" + str(idx)] = endpoints
selected_widgets[first_widget_keys + "-" + str(idx)] = endpoints

if list(widget.keys())[0] == "finding-list":
if first_widget_keys == "finding-list":
findings = Finding.objects.all()
d = QueryDict(mutable=True)
for item in widget.get(list(widget.keys())[0]):
for item in widget.get(first_widget_keys):
if item["name"] in d:
d.appendlist(item["name"], item["value"])
else:
@@ -412,47 +414,47 @@ def report_widget_factory(json_data=None, request=None, user=None, finding_notes
filter_class = ReportFindingFilterWithoutObjectLookups if filter_string_matching else ReportFindingFilter
findings = filter_class(d, queryset=findings)
user_id = user.id if user is not None else None
selected_widgets[list(widget.keys())[0] + "-" + str(idx)] = FindingList(request=request, findings=findings,
selected_widgets[first_widget_keys + "-" + str(idx)] = FindingList(request=request, findings=findings,
finding_notes=finding_notes,
finding_images=finding_images,
host=host, user_id=user_id)

if list(widget.keys())[0] == "custom-content":
if first_widget_keys == "custom-content":
wysiwyg_content = WYSIWYGContent(request=request)
wysiwyg_content.heading = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "heading"), None)["value"]
next((item for item in widget.get(first_widget_keys) if item["name"] == "heading"), None)["value"]
wysiwyg_content.content = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "hidden_content"), None)["value"]
next((item for item in widget.get(first_widget_keys) if item["name"] == "hidden_content"), None)["value"]
wysiwyg_content.page_break_after = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "page_break_after"),
next((item for item in widget.get(first_widget_keys) if item["name"] == "page_break_after"),
{"value": False})["value"]
selected_widgets[list(widget.keys())[0] + "-" + str(idx)] = wysiwyg_content
if list(widget.keys())[0] == "report-options":
selected_widgets[first_widget_keys + "-" + str(idx)] = wysiwyg_content
if first_widget_keys == "report-options":
options = ReportOptions(request=request)
options.include_finding_notes = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "include_finding_notes"), None)[
next((item for item in widget.get(first_widget_keys) if item["name"] == "include_finding_notes"), None)[
"value"]
options.include_finding_images = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "include_finding_images"), None)[
next((item for item in widget.get(first_widget_keys) if item["name"] == "include_finding_images"), None)[
"value"]
options.report_type = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "report_type"), None)["value"]
next((item for item in widget.get(first_widget_keys) if item["name"] == "report_type"), None)["value"]
options.report_name = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "report_name"), None)["value"]
selected_widgets[list(widget.keys())[0]] = options
if list(widget.keys())[0] == "table-of-contents":
next((item for item in widget.get(first_widget_keys) if item["name"] == "report_name"), None)["value"]
selected_widgets[first_widget_keys] = options
if first_widget_keys == "table-of-contents":
toc = TableOfContents(request=request)
toc.heading = next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "heading"), None)[
toc.heading = next((item for item in widget.get(first_widget_keys) if item["name"] == "heading"), None)[
"value"]
selected_widgets[list(widget.keys())[0]] = toc
if list(widget.keys())[0] == "cover-page":
selected_widgets[first_widget_keys] = toc
if first_widget_keys == "cover-page":
cover_page = CoverPage(request=request)
cover_page.heading = next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "heading"), None)[
cover_page.heading = next((item for item in widget.get(first_widget_keys) if item["name"] == "heading"), None)[
"value"]
cover_page.sub_heading = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "sub_heading"), None)["value"]
next((item for item in widget.get(first_widget_keys) if item["name"] == "sub_heading"), None)["value"]
cover_page.meta_info = \
next((item for item in widget.get(list(widget.keys())[0]) if item["name"] == "meta_info"), None)["value"]
selected_widgets[list(widget.keys())[0]] = cover_page
next((item for item in widget.get(first_widget_keys) if item["name"] == "meta_info"), None)["value"]
selected_widgets[first_widget_keys] = cover_page

return selected_widgets
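
Beyond silencing RUF015, the change in `report_widget_factory` hoists the repeated `list(widget.keys())[0]` lookups into a single `first_widget_keys` variable per loop iteration. A minimal sketch of that dispatch shape, using hypothetical widget data and placeholder values rather than the real widget classes:

```python
# Sketch only: each widget dict has a single key naming the widget type,
# which is now read once per iteration instead of once per branch.
widgets = [
    {"page-break": []},
    {"finding-list": [{"name": "severity", "value": "High"}]},
]

selected_widgets = {}
for idx, widget in enumerate(widgets):
    first_widget_keys = next(iter(widget.keys()))  # first_elem(widget.keys()) in the commit
    if first_widget_keys == "page-break":
        selected_widgets[first_widget_keys + "-" + str(idx)] = "PageBreak placeholder"
    if first_widget_keys == "finding-list":
        selected_widgets[first_widget_keys + "-" + str(idx)] = widget[first_widget_keys]

print(sorted(selected_widgets))  # ['finding-list-1', 'page-break-0']
```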
4 changes: 3 additions & 1 deletion dojo/tools/api_cobalt/api_client.py
@@ -1,6 +1,8 @@
import requests
from django.conf import settings

from dojo.utils import first_elem


class CobaltAPI:

@@ -90,7 +92,7 @@ def test_connection(self):
orgs = filter(
lambda org: org["resource"]["token"] == self.org_token, data,
)
org = list(orgs)[0]
org = first_elem(orgs)
org_name = org["resource"]["name"]
return f'You have access to the "{org_name}" organization'
msg = (
3 changes: 2 additions & 1 deletion dojo/tools/burp_enterprise/parser.py
@@ -4,6 +4,7 @@
from lxml import etree, html

from dojo.models import Endpoint, Finding
from dojo.utils import first_elem

logger = logging.getLogger(__name__)

@@ -117,7 +118,7 @@ def _get_content(self, container: etree.Element):
if stripped_text is not None:
value += stripped_text + "\n"
elif stripped_text.isspace():
value = list(elem.itertext())[0]
value = first_elem(elem.itertext())
elif elem.tag == "div" or elem.tag == "span":
value = elem.text_content().strip().replace("\n", "") + "\n"
else:
3 changes: 2 additions & 1 deletion dojo/tools/harbor_vulnerability/parser.py
@@ -2,6 +2,7 @@
import json

from dojo.models import Finding
from dojo.utils import first_elem


class HarborVulnerabilityParser:
@@ -32,7 +33,7 @@ def get_findings(self, filename, test):
vulnerability = data["vulnerabilities"]
# To be compatible with update in version
with contextlib.suppress(KeyError, StopIteration, TypeError):
vulnerability = data[next(iter(data.keys()))]["vulnerabilities"]
vulnerability = data[first_elem(data.keys())]["vulnerabilities"]

# Early exit if empty
if "vulnerability" not in locals() or vulnerability is None:
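
A note on the Harbor parser hunk above: the enclosing `contextlib.suppress(KeyError, StopIteration, TypeError)` already covers the empty case for `first_elem`, because `next(iter(x))` raises `StopIteration` on an empty mapping where `list(x)[0]` would raise `IndexError`. A small standalone illustration of that behavior (not the parser itself):

```python
import contextlib

data = {}  # hypothetical empty report payload

with contextlib.suppress(KeyError, StopIteration, TypeError):
    # next(iter({})) raises StopIteration, which suppress() swallows,
    # so the assignment below simply never happens.
    vulnerability = data[next(iter(data.keys()))]["vulnerabilities"]

print("vulnerability" in locals())  # False: falls through to the empty-result handling
```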
3 changes: 2 additions & 1 deletion dojo/tools/sarif/parser.py
@@ -8,6 +8,7 @@

from dojo.models import Finding
from dojo.tools.parser_test import ParserTest
from dojo.utils import first_elem

logger = logging.getLogger(__name__)

@@ -465,7 +466,7 @@ def get_items_from_result(result, rules, artifacts, run_date):
# compare it
if result.get("fingerprints"):
hashes = get_fingerprints_hashes(result["fingerprints"])
first_item = next(iter(hashes.items()))
first_item = first_elem(hashes.items())
finding.unique_id_from_tool = first_item[1]["value"]
elif result.get("partialFingerprints"):
# for this one we keep an order to have id that could be compared
7 changes: 4 additions & 3 deletions dojo/tools/sonarqube/soprasteria_html.py
@@ -1,6 +1,7 @@
import logging

from dojo.tools.sonarqube.soprasteria_helper import SonarQubeSoprasteriaHelper
from dojo.utils import first_elem

logger = logging.getLogger(__name__)

@@ -23,13 +24,13 @@ def get_items(self, tree, test, mode):
rulesDic = {}
for rule in rules_table:
rule_properties = list(rule.iter("td"))
rule_name = list(rule_properties[0].iter("a"))[0].text.strip()
rule_details = list(rule_properties[1].iter("details"))[0]
rule_name = first_elem(rule_properties[0].iter("a")).text.strip()
rule_details = first_elem(rule_properties[1].iter("details"))
rulesDic[rule_name] = rule_details

for vuln in vulnerabilities_table:
vuln_properties = list(vuln.iter("td"))
rule_key = list(vuln_properties[0].iter("a"))[0].text
rule_key = first_elem(vuln_properties[0].iter("a")).text
vuln_rule_name = rule_key and rule_key.strip()
vuln_severity = SonarQubeSoprasteriaHelper().convert_sonar_severity(
vuln_properties[1].text and vuln_properties[1].text.strip(),
3 changes: 2 additions & 1 deletion dojo/tools/trufflehog/parser.py
@@ -2,6 +2,7 @@
import json

from dojo.models import Finding
from dojo.utils import first_elem


class TruffleHogParser:
@@ -112,7 +113,7 @@ def get_findings_v3(self, data, test):
source = {}
source_data = {}
if metadata:
source = list(metadata.keys())[0]
source = first_elem(metadata.keys())
source_data = metadata.get(source)

file = source_data.get("file", "")
3 changes: 2 additions & 1 deletion dojo/tools/veracode_sca/parser.py
@@ -9,6 +9,7 @@
from django.utils import timezone

from dojo.models import Finding
from dojo.utils import first_elem


class VeracodeScaParser:
@@ -159,7 +160,7 @@ def get_findings_csv(self, file, test):
issueId = row.get("Issue ID", None)
if not issueId:
# Workaround for possible encoding issue
issueId = list(row.values())[0]
issueId = first_elem(row.values())
library = row.get("Library", None)
if row.get("Package manager") == "MAVEN" and row.get(
"Coordinate 2",
8 changes: 7 additions & 1 deletion dojo/utils.py
@@ -173,7 +173,7 @@ def match_finding_to_existing_findings(finding, product=None, engagement=None, t

deduplicationLogger.debug(
"Matching finding %i:%s to existing findings in %s %s using %s as deduplication algorithm.",
finding.id, finding.title, custom_filter_type, list(custom_filter.values())[0], deduplication_algorithm,
finding.id, finding.title, custom_filter_type, first_elem(custom_filter.values()), deduplication_algorithm,
)

if deduplication_algorithm == "hash_code":
@@ -2709,3 +2709,9 @@ def generate_file_response_from_file_path(
response["Content-Disposition"] = f'attachment; filename="{full_file_name}"'
response["Content-Length"] = file_size
return response


def first_elem(x):
# This function is a workaround for uses of `list(...)[0]`.
# RUF015 recommends `next(iter(x))`, but that is harder to read in regular code.
return next(iter(x))
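
A brief usage note on the new helper (a sketch, not part of the commit): because it goes through `iter()`, `first_elem` works the same way on dict views, generators, and `filter` objects, and on an empty iterable it raises `StopIteration` rather than the `IndexError` that `list(x)[0]` would raise.

```python
# Assumed equivalent of dojo.utils.first_elem, shown on a few iterable types.
def first_elem(x):
    return next(iter(x))

print(first_elem({"a": 1, "b": 2}.keys()))          # "a" (dicts keep insertion order)
print(first_elem(n * n for n in range(3)))          # 0, taken lazily from the generator
print(first_elem(filter(lambda n: n > 1, [0, 2])))  # 2, as in the Cobalt API client hunk

try:
    first_elem([])
except StopIteration:
    print("empty iterables raise StopIteration, not IndexError")
```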
1 change: 0 additions & 1 deletion ruff.toml
@@ -94,7 +94,6 @@ ignore = [
"SIM115",
"SIM117",
"RUF012",
"RUF015",
"D205",
"D211", # `one-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible.
"D212", # `multi-line-summary-first-line` (D212) and `multi-line-summary-second-line` (D213) are incompatible.
4 changes: 3 additions & 1 deletion tests/Import_scanner_test.py
@@ -13,6 +13,8 @@
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select

from dojo.utils import first_elem

dir_path = Path(os.path.realpath(__file__)).parent

logger = logging.getLogger(__name__)
@@ -171,7 +173,7 @@ def test_engagement_import_scan_result(self):
found_matches[index] = matches[0]

if len(found_matches) == 1:
index = list(found_matches.keys())[0]
index = first_elem(found_matches.keys())
scan_map[test] = options_text[index]
elif len(found_matches) > 1:
index = list(found_matches.values()).index(temp_test)
2 changes: 0 additions & 2 deletions unittests/tools/test_blackduck_parser.py
@@ -22,7 +22,6 @@ def test_blackduck_csv_parser_has_many_findings(self):
parser = BlackduckParser()
findings = parser.get_findings(testfile, Test())
self.assertEqual(24, len(findings))
findings = list(findings)
self.assertEqual(1, len(findings[10].unsaved_vulnerability_ids))
self.assertEqual("CVE-2007-3386", findings[10].unsaved_vulnerability_ids[0])
self.assertEqual(findings[4].component_name, "Apache Tomcat")
@@ -34,7 +33,6 @@ def test_blackduck_csv_parser_new_format_has_many_findings(self):
testfile = get_unit_tests_scans_path("blackduck") / "many_vulns_new_format.csv"
parser = BlackduckParser()
findings = parser.get_findings(testfile, Test())
findings = list(findings)
self.assertEqual(9, len(findings))
self.assertEqual(findings[0].component_name, "kryo")
self.assertEqual(findings[2].component_name, "jackson-databind")
2 changes: 1 addition & 1 deletion unittests/tools/test_hadolint_parser.py
@@ -11,7 +11,7 @@ def test_parse_file_with_one_dockerfile(self):
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(4, len(findings))
finding = list(findings)[0]
finding = findings[0]
self.assertEqual(finding.line, 9)
self.assertEqual(finding.file_path, "django-DefectDojo\\Dockerfile.django")

4 changes: 2 additions & 2 deletions unittests/tools/test_intsights_parser.py
@@ -12,7 +12,7 @@ def test_intsights_parser_with_one_critical_vuln_has_one_findings_json(

self.assertEqual(1, len(findings))

finding = list(findings)[0]
finding = findings[0]

self.assertEqual(
"5c80dbf83b4a3900078b6be6",
@@ -32,7 +32,7 @@ def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv(
findings = parser.get_findings(testfile, Test())
self.assertEqual(1, len(findings))

finding = list(findings)[0]
finding = findings[0]

self.assertEqual(
"mn7xy83finmmth4ja363rci9",
6 changes: 3 additions & 3 deletions unittests/tools/test_mend_parser.py
@@ -16,7 +16,7 @@ def test_parse_file_with_one_vuln_has_one_findings(self):
parser = MendParser()
findings = parser.get_findings(testfile, Test())
self.assertEqual(1, len(findings))
finding = list(findings)[0]
finding = findings[0]
self.assertEqual(1, len(finding.unsaved_vulnerability_ids))
self.assertEqual("CVE-2019-9658", finding.unsaved_vulnerability_ids[0])
self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:N/A:N", finding.cvssv3)
@@ -41,7 +41,7 @@ def test_parse_file_with_one_sca_vuln_finding(self):
parser = MendParser()
findings = parser.get_findings(testfile, Test())
self.assertEqual(1, len(findings))
finding = list(findings)[0]
finding = findings[0]
self.assertEqual("**Locations Found**: D:\\MendRepo\\test-product\\test-project\\test-project-subcomponent\\path\\to\\the\\Java\\commons-codec-1.6_donotuse.jar", finding.steps_to_reproduce)
self.assertEqual("WS-2019-0379 | commons-codec-1.6.jar", finding.title)

@@ -56,7 +56,7 @@ def test_parse_file_with_one_vuln_has_one_findings_platform(self):
parser = MendParser()
findings = parser.get_findings(testfile, Test())
self.assertEqual(1, len(findings))
finding = list(findings)[0]
finding = findings[0]
self.assertEqual(1, len(finding.unsaved_vulnerability_ids))
self.assertEqual("CVE-2024-51744", finding.unsaved_vulnerability_ids[0])
self.assertEqual("CVSS:3.1/AV:N/AC:H/PR:N/UI:R/S:U/C:L/I:N/A:N", finding.cvssv3)
