Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Ruff: Add and fix ISC002 #11705

Merged
merged 1 commit into from
Feb 6, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 20 additions & 10 deletions dojo/endpoint/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,8 +105,10 @@ def err_log(message, html_log, endpoint_html_log, endpoint):

if parts.protocol:
if endpoint.protocol and (endpoint.protocol != parts.protocol):
message = f"has defined protocol ({endpoint.protocol}) and it is not the same as protocol in host " \
f"({parts.protocol})"
message = (
f"has defined protocol ({endpoint.protocol}) and it is not the same as protocol in host "
f"({parts.protocol})"
)
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
Expand All @@ -126,8 +128,10 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
if parts.port:
try:
if (endpoint.port is not None) and (int(endpoint.port) != parts.port):
message = f"has defined port number ({endpoint.port}) and it is not the same as port number in " \
f"host ({parts.port})"
message = (
f"has defined port number ({endpoint.port}) and it is not the same as port number in "
f"host ({parts.port})"
)
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
Expand All @@ -138,26 +142,32 @@ def err_log(message, html_log, endpoint_html_log, endpoint):

if parts.path:
if endpoint.path and (endpoint.path != parts.path):
message = f"has defined path ({endpoint.path}) and it is not the same as path in host " \
f"({parts.path})"
message = (
f"has defined path ({endpoint.path}) and it is not the same as path in host "
f"({parts.path})"
)
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
endpoint.path = parts.path

if parts.query:
if endpoint.query and (endpoint.query != parts.query):
message = f"has defined query ({endpoint.query}) and it is not the same as query in host " \
f"({parts.query})"
message = (
f"has defined query ({endpoint.query}) and it is not the same as query in host "
f"({parts.query})"
)
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
endpoint.query = parts.query

if parts.fragment:
if endpoint.fragment and (endpoint.fragment != parts.fragment):
message = f"has defined fragment ({endpoint.fragment}) and it is not the same as fragment in host " \
f"({parts.fragment})"
message = (
f"has defined fragment ({endpoint.fragment}) and it is not the same as fragment in host "
f"({parts.fragment})"
)
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
Expand Down
10 changes: 6 additions & 4 deletions dojo/forms.py
Original file line number Diff line number Diff line change
Expand Up @@ -3063,9 +3063,10 @@ def __init__(self, *args, **kwargs):
if self.push_all:
# This will show the checkbox as checked and greyed out, this way the user is aware
# that issues will be pushed to JIRA, given their product-level settings.
self.fields["push_to_jira"].help_text = \
"Push all issues is enabled on this product. If you do not wish to push all issues" \
self.fields["push_to_jira"].help_text = (
"Push all issues is enabled on this product. If you do not wish to push all issues"
" to JIRA, please disable Push all issues on this product."
)
self.fields["push_to_jira"].widget.attrs["checked"] = "checked"
self.fields["push_to_jira"].disabled = True

Expand Down Expand Up @@ -3166,9 +3167,10 @@ def __init__(self, *args, **kwargs):
if self.push_all:
# This will show the checkbox as checked and greyed out, this way the user is aware
# that issues will be pushed to JIRA, given their product-level settings.
self.fields["push_to_jira"].help_text = \
"Push all issues is enabled on this product. If you do not wish to push all issues" \
self.fields["push_to_jira"].help_text = (
"Push all issues is enabled on this product. If you do not wish to push all issues"
" to JIRA, please disable Push all issues on this product."
)
self.fields["push_to_jira"].widget.attrs["checked"] = "checked"
self.fields["push_to_jira"].disabled = True

Expand Down
7 changes: 4 additions & 3 deletions dojo/management/commands/jira_status_reconciliation.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,10 +204,11 @@ class Command(BaseCommand):
help = "Reconcile finding status with JIRA issue status, stdout will contain semicolon separated CSV results. \
Risk Accepted findings are skipped. Findings created before 1.14.0 are skipped."

mode_help = \
"- reconcile: (default)reconcile any differences in status between Defect Dojo and JIRA, will look at the latest status change timestamp in both systems to determine which one is the correct status" \
"- push_status_to_jira: update JIRA status for all JIRA issues connected to a Defect Dojo finding (will not push summary/description, only status)" \
mode_help = (
"- reconcile: (default)reconcile any differences in status between Defect Dojo and JIRA, will look at the latest status change timestamp in both systems to determine which one is the correct status"
"- push_status_to_jira: update JIRA status for all JIRA issues connected to a Defect Dojo finding (will not push summary/description, only status)"
"- import_status_from_jira: update Defect Dojo finding status from JIRA"
)

def add_arguments(self, parser):
parser.add_argument("--mode", help=self.mode_help)
Expand Down
14 changes: 8 additions & 6 deletions dojo/management/commands/stamp_finding_last_reviewed.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,14 @@


class Command(BaseCommand):
help = "A new field last_reviewed has been added to the Finding model \n" \
"This script will update all findings with a last_reviewed date of the most current date from: \n" \
"1. Finding Date if no other evidence of activity is found \n" \
"2. Last note added date if a note is found \n" \
"3. Mitigation Date if finding is mitigated \n" \
"4. Last action_log entry date if Finding has been updated \n"
help = (
"A new field last_reviewed has been added to the Finding model \n"
"This script will update all findings with a last_reviewed date of the most current date from: \n"
"1. Finding Date if no other evidence of activity is found \n"
"2. Last note added date if a note is found \n"
"3. Mitigation Date if finding is mitigated \n"
"4. Last action_log entry date if Finding has been updated \n"
)

def handle(self, *args, **options):
findings = Finding.objects.all().order_by("id")
Expand Down
10 changes: 6 additions & 4 deletions dojo/reports/widgets.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,8 +269,9 @@ def __init__(self, *args, **kwargs):
self.form = None
self.multiple = "true"
self.widget_class = "finding-list"
self.extra_help = "You can use this form to filter findings and select only the ones to be included in the " \
"report."
self.extra_help = (
"You can use this form to filter findings and select only the ones to be included in the report."
)
self.title_words = get_words_for_field(Finding, "title")
self.component_words = get_words_for_field(Finding, "component_name")

Expand Down Expand Up @@ -339,8 +340,9 @@ def __init__(self, *args, **kwargs):
else:
self.paged_endpoints = self.endpoints
self.multiple = "true"
self.extra_help = "You can use this form to filter endpoints and select only the ones to be included in the " \
"report."
self.extra_help = (
"You can use this form to filter endpoints and select only the ones to be included in the report."
)

def get_html(self):
html = render_to_string("dojo/custom_html_report_endpoint_list.html",
Expand Down
6 changes: 4 additions & 2 deletions dojo/templatetags/display_tags.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,8 +285,10 @@ def finding_sla(finding):
sla_age) + " days or less since " + finding.get_sla_start_date().strftime("%b %d, %Y")

if find_sla is not None:
title = '<a class="has-popover" data-toggle="tooltip" data-placement="bottom" title="" href="#" data-content="' + status_text + '">' \
'<span class="label severity age-' + status + '">' + str(find_sla) + "</span></a>"
title = (
f'<a class="has-popover" data-toggle="tooltip" data-placement="bottom" title="" href="#" data-content="{status_text}">'
f'<span class="label severity age-{status}">{find_sla}</span></a>'
)

return mark_safe(title)

Expand Down
8 changes: 5 additions & 3 deletions dojo/tools/dependency_track/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,9 +172,11 @@ def _convert_dependency_track_finding_to_dojo_finding(self, dependency_track_fin
component_description = f"Version {component_version} of the {component_name} component"
else:
component_description = f"The {component_name} component"
vulnerability_description = "You are using a component with a known vulnerability. " \
f"{component_description} is affected by the vulnerability with an id of {vuln_id} as " \
f"identified by {source}."
vulnerability_description = (
"You are using a component with a known vulnerability. "
f"{component_description} is affected by the vulnerability with an id of {vuln_id} as "
f"identified by {source}."
)
# Append purl info if it is present
if "purl" in dependency_track_finding["component"] and dependency_track_finding["component"]["purl"] is not None:
component_purl = dependency_track_finding["component"]["purl"]
Expand Down
50 changes: 28 additions & 22 deletions dojo/tools/noseyparker/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,10 @@ def get_label_for_scan_types(self, scan_type):
return "Nosey Parker Scan"

def get_description_for_scan_types(self, scan_type):
return "Nosey Parker report file can be imported in JSON Lines format (option --jsonl). " \
"Supports v0.16.0 and v0.22.0 of https://github.com/praetorian-inc/noseyparker"
return (
"Nosey Parker report file can be imported in JSON Lines format (option --jsonl). "
"Supports v0.16.0 and v0.22.0 of https://github.com/praetorian-inc/noseyparker"
)

def get_findings(self, file, test):
"""
Expand Down Expand Up @@ -61,14 +63,15 @@ def version_0_16_0(self, line, test):
title = f"Secret(s) Found in Repository with Commit ID {json_path['commit_provenance']['commit_metadata']['commit_id']}"
filepath = json_path["commit_provenance"]["blob_path"]
line_num = match["location"]["source_span"]["start"]["line"]
description = f"Secret found of type: {rule_name} \n" \
f"SECRET starts with: '{secret[:3]}' \n" \
f"Committer Name: {json_path['commit_provenance']['commit_metadata']['committer_name']} \n" \
f"Committer Email: {json_path['commit_provenance']['commit_metadata']['committer_email']} \n" \
f"Commit ID: {json_path['commit_provenance']['commit_metadata']['commit_id']} \n" \
f"Location: {filepath} line #{line_num} \n" \
f"Line #{line_num} \n"

description = (
f"Secret found of type: {rule_name} \n"
f"SECRET starts with: '{secret[:3]}' \n"
f"Committer Name: {json_path['commit_provenance']['commit_metadata']['committer_name']} \n"
f"Committer Email: {json_path['commit_provenance']['commit_metadata']['committer_email']} \n"
f"Commit ID: {json_path['commit_provenance']['commit_metadata']['commit_id']} \n"
f"Location: {filepath} line #{line_num} \n"
f"Line #{line_num} \n"
)
# Internal de-duplication
key = hashlib.md5((filepath + "|" + secret + "|" + str(line_num)).encode("utf-8")).hexdigest()

Expand Down Expand Up @@ -112,22 +115,25 @@ def version_0_22_0(self, line, test):
if json_path.get("first_commit"):
title = f"Secret(s) Found in Repository with Commit ID {json_path['first_commit']['commit_metadata']['commit_id']}"
filepath = json_path["first_commit"]["blob_path"]
description = f"Secret found of type: {rule_name} \n" \
f"SECRET starts with: '{rule_text_id[:3]}' \n" \
f"Committer Name: {json_path['first_commit']['commit_metadata']['committer_name']} \n" \
f"Committer Email: {json_path['first_commit']['commit_metadata']['committer_email']} \n" \
f"Commit ID: {json_path['first_commit']['commit_metadata']['commit_id']} \n" \
f"Location: {filepath} line #{line_num} \n" \
f"Line #{line_num} \n"
description = (
f"Secret found of type: {rule_name} \n"
f"SECRET starts with: '{rule_text_id[:3]}' \n"
f"Committer Name: {json_path['first_commit']['commit_metadata']['committer_name']} \n"
f"Committer Email: {json_path['first_commit']['commit_metadata']['committer_email']} \n"
f"Commit ID: {json_path['first_commit']['commit_metadata']['commit_id']} \n"
f"Location: {filepath} line #{line_num} \n"
f"Line #{line_num} \n"
)
# scanned without git history
else:
title = "Secret(s) Found in Repository"
filepath = json_path["path"]
description = f"Secret found of type: {rule_name} \n" \
f"SECRET starts with: '{rule_text_id[:3]}' \n" \
f"Location: {filepath} line #{line_num} \n" \
f"Line #{line_num} \n"

description = (
f"Secret found of type: {rule_name} \n"
f"SECRET starts with: '{rule_text_id[:3]}' \n"
f"Location: {filepath} line #{line_num} \n"
f"Line #{line_num} \n"
)
# Internal de-duplication
key = hashlib.md5((filepath + "|" + rule_text_id + "|" + str(line_num)).encode("utf-8")).hexdigest()

Expand Down
2 changes: 1 addition & 1 deletion ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ select = [
"EM",
"EXE",
"FA",
"ISC001",
"ISC001", "ISC002",
"ICN",
"LOG",
"G001", "G002", "G01", "G1", "G2",
Expand Down