Merge pull request #7851 from DefectDojo/release/2.20.2
Release: Merge release into master from: release/2.20.2
Maffooch authored Mar 20, 2023
2 parents 0e50e14 + ffade60 commit 0a88352
Showing 22 changed files with 107 additions and 52 deletions.
3 changes: 2 additions & 1 deletion Dockerfile.django-alpine
@@ -104,7 +104,8 @@ RUN \
chmod -R 775 /app/dojo/settings && \
mkdir /var/run/${appuser} && \
chown ${appuser} /var/run/${appuser} && \
chmod g=u /var/run/${appuser} && \
chmod g=u /var/run/${appuser} && \
chmod 775 /*.sh && \
mkdir -p media/threat && chown -R ${uid} media
USER ${uid}
ENV \
3 changes: 2 additions & 1 deletion Dockerfile.django-debian
@@ -109,7 +109,8 @@ RUN \
chmod -R 775 /app/dojo/settings && \
mkdir /var/run/${appuser} && \
chown ${appuser} /var/run/${appuser} && \
chmod g=u /var/run/${appuser} && \
chmod g=u /var/run/${appuser} && \
chmod 775 /*.sh && \
mkdir -p media/threat && chown -R ${uid} media
USER ${uid}
ENV \
2 changes: 1 addition & 1 deletion components/package.json
@@ -1,6 +1,6 @@
{
"name": "defectdojo",
"version": "2.20.1",
"version": "2.20.2",
"license" : "BSD-3-Clause",
"private": true,
"dependencies": {
2 changes: 1 addition & 1 deletion dojo/__init__.py
@@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa

__version__ = '2.20.1'
__version__ = '2.20.2'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
__docs__ = 'https://documentation.defectdojo.com'
15 changes: 8 additions & 7 deletions dojo/api_v2/serializers.py
@@ -1160,12 +1160,13 @@ def get_decision(self, obj):
@extend_schema_field(serializers.CharField())
@swagger_serializer_method(serializers.CharField())
def get_path(self, obj):
risk_acceptance_id = obj.id
engagement_id = Engagement.objects.filter(risk_acceptance__id__in=[obj.id]).first().id
path = reverse('download_risk_acceptance', args=(engagement_id, risk_acceptance_id))
request = self.context.get("request")
if request:
path = request.build_absolute_uri(path)
engagement = Engagement.objects.filter(risk_acceptance__id__in=[obj.id]).first()
path = 'No proof has been supplied'
if engagement and obj.filename() is not None:
path = reverse('download_risk_acceptance', args=(engagement.id, obj.id))
request = self.context.get("request")
if request:
path = request.build_absolute_uri(path)
return path

@extend_schema_field(serializers.IntegerField())
@@ -1204,7 +1205,7 @@ class FindingEngagementSerializer(serializers.ModelSerializer):

class Meta:
model = Engagement
fields = ["id", "name", "product", "branch_tag", "build_id", "commit_hash", "version"]
fields = ["id", "name", "description", "product", "target_start", "target_end", "branch_tag", "engagement_type", "build_id", "commit_hash", "version", "created", "updated"]


class FindingEnvironmentSerializer(serializers.ModelSerializer):
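For context, a minimal standalone sketch of the new get_path fallback: the engagement lookup, reverse() call and request are stubbed with hypothetical helpers here, and only the control flow mirrors the change above.

from typing import Optional

def risk_acceptance_proof_path(engagement_id: Optional[int],
                               filename: Optional[str],
                               base_url: Optional[str],
                               risk_acceptance_id: int) -> str:
    # Mirrors the serializer: default text first, a real path only when both
    # an owning engagement and an uploaded proof file exist.
    path = 'No proof has been supplied'
    if engagement_id is not None and filename is not None:
        # stand-in for reverse('download_risk_acceptance', args=(engagement_id, risk_acceptance_id))
        path = f'/engagement/{engagement_id}/risk_acceptance/{risk_acceptance_id}/download'
        if base_url:  # stand-in for request.build_absolute_uri(path)
            path = base_url.rstrip('/') + path
    return path

print(risk_acceptance_proof_path(5, 'proof.pdf', 'https://dojo.example.com', 12))
print(risk_acceptance_proof_path(5, None, 'https://dojo.example.com', 12))  # falls back to the default text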
2 changes: 1 addition & 1 deletion dojo/api_v2/views.py
@@ -536,7 +536,7 @@ def download_proof(self, request, pk=None):
risk_acceptance = self.get_object()
# Get the file object
file_object = risk_acceptance.path
if file_object is None:
if file_object is None or risk_acceptance.filename() is None:
return Response({"error": "Proof has not provided to this risk acceptance..."}, status=status.HTTP_404_NOT_FOUND)
# Get the path of the file in media root
file_path = f'{settings.MEDIA_ROOT}/{file_object.name}'
2 changes: 1 addition & 1 deletion dojo/db_migrations/0118_remove_finding_images.py
@@ -37,7 +37,7 @@ def move_images_to_files(apps, schema_editor):
if not passed:
finding.files.add(file)
else:
logger.warn('unable to migrate image %s with caption %s', image.image.name, image.caption)
logger.warning('unable to migrate image %s with caption %s', image.image.name, image.caption)


class Migration(migrations.Migration):
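This file and several below swap logger.warn for logger.warning; warn is a long-deprecated alias in the standard library's logging module. A quick standalone illustration, pure standard library with no DefectDojo code involved:

import logging
import warnings

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("demo")

logger.warning("canonical spelling, no deprecation noise")

# Logger.warn is a deprecated alias that funnels into warning();
# getattr() keeps this runnable even on interpreters that drop the alias.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    getattr(logger, "warn", logger.warning)("deprecated spelling")

print([str(w.message) for w in caught])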
2 changes: 1 addition & 1 deletion dojo/engagement/views.py
@@ -490,7 +490,7 @@ def prefetch_for_view_tests(tests):
prefetched = prefetched.annotate(total_reimport_count=Count('test_import__id', filter=Q(test_import__type=Test_Import.REIMPORT_TYPE), distinct=True))

else:
logger.warn('unable to prefetch because query was already executed')
logger.warning('unable to prefetch because query was already executed')

return prefetched

2 changes: 1 addition & 1 deletion dojo/finding/helper.py
@@ -481,7 +481,7 @@ def reconfigure_duplicate_cluster(original, cluster_outside):
def prepare_duplicates_for_delete(test=None, engagement=None):
logger.debug('prepare duplicates for delete, test: %s, engagement: %s', test.id if test else None, engagement.id if engagement else None)
if test is None and engagement is None:
logger.warn('nothing to prepare as test and engagement are None')
logger.warning('nothing to prepare as test and engagement are None')

fix_loop_duplicates()

17 changes: 17 additions & 0 deletions dojo/finding_group/views.py
@@ -66,6 +66,23 @@ def view_finding_group(request, fgid):
edit_finding_group_form = EditFindingGroupForm(request.POST, instance=finding_group)
if edit_finding_group_form.is_valid():
finding_group.name = edit_finding_group_form.cleaned_data.get('name', '')
push_to_jira = edit_finding_group_form.cleaned_data.get('push_to_jira')
jira_issue = edit_finding_group_form.cleaned_data.get('jira_issue')

if jira_issue:
# See if the submitted issue was an issue key or the full URL
jira_instance = jira_helper.get_jira_project(finding_group).jira_instance
if jira_issue.startswith(jira_instance.url + '/browse/'):
jira_issue = jira_issue[len(jira_instance.url + '/browse/'):]

if finding_group.has_jira_issue and not jira_issue == jira_helper.get_jira_key(finding_group):
jira_helper.unlink_jira(request, finding_group)
jira_helper.finding_group_link_jira(request, finding_group, jira_issue)
elif not finding_group.has_jira_issue:
jira_helper.finding_group_link_jira(request, finding_group, jira_issue)
elif push_to_jira:
jira_helper.push_to_jira(finding_group, sync=True)

finding_group.save()
return HttpResponseRedirect(reverse('view_test', args=(finding_group.test.id,)))

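The URL-vs-key handling added above boils down to a small normalisation step; a hedged sketch with a hypothetical function name and example values, not code from the repository:

def to_jira_issue_key(submitted: str, jira_base_url: str) -> str:
    # Accept either a bare issue key ('DOJO-123') or the full /browse/ URL.
    prefix = jira_base_url.rstrip('/') + '/browse/'
    return submitted[len(prefix):] if submitted.startswith(prefix) else submitted

assert to_jira_issue_key('https://jira.example.com/browse/DOJO-123',
                         'https://jira.example.com') == 'DOJO-123'
assert to_jira_issue_key('DOJO-123', 'https://jira.example.com') == 'DOJO-123'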
17 changes: 17 additions & 0 deletions dojo/forms.py
@@ -279,6 +279,23 @@ class Meta:

class EditFindingGroupForm(forms.ModelForm):
name = forms.CharField(max_length=255, required=True, label='Finding Group Name')
jira_issue = forms.CharField(max_length=255, required=False, label='Linked JIRA Issue',
help_text='Leave empty and check push to jira to create a new JIRA issue for this finding group.')

def __init__(self, *args, **kwargs):
super(EditFindingGroupForm, self).__init__(*args, **kwargs)
import dojo.jira_link.helper as jira_helper

self.fields['push_to_jira'] = forms.BooleanField()
self.fields['push_to_jira'].required = False
self.fields['push_to_jira'].help_text = "Checking this will overwrite content of your JIRA issue, or create one."

self.fields['push_to_jira'].label = "Push to JIRA"

if hasattr(self.instance, 'has_jira_issue') and self.instance.has_jira_issue:
jira_url = jira_helper.get_jira_url(self.instance)
self.fields['jira_issue'].initial = jira_url
self.fields['push_to_jira'].widget.attrs['checked'] = 'checked'

class Meta:
model = Finding_Group
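The form adds push_to_jira inside __init__ rather than as a class attribute, so its defaults can depend on whether the instance already has a linked JIRA issue. A self-contained sketch of that pattern using a plain forms.Form with hypothetical names (settings.configure() is only there so it runs outside a Django project):

import django
from django.conf import settings

settings.configure()  # minimal settings so the form machinery works standalone
django.setup()

from django import forms

class EditGroupSketch(forms.Form):
    name = forms.CharField(max_length=255, required=True)

    def __init__(self, *args, has_jira_issue=False, **kwargs):
        super().__init__(*args, **kwargs)
        # Per-instance field, mirroring the pattern in EditFindingGroupForm above
        self.fields['push_to_jira'] = forms.BooleanField(required=False, label='Push to JIRA')
        if has_jira_issue:
            self.fields['push_to_jira'].widget.attrs['checked'] = 'checked'

form = EditGroupSketch(data={'name': 'SQLi cluster'}, has_jira_issue=True)
print(form.is_valid(), form.cleaned_data)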
42 changes: 37 additions & 5 deletions dojo/jira_link/helper.py
@@ -699,8 +699,8 @@ def add_jira_issue(obj, *args, **kwargs):
obj_can_be_pushed_to_jira, error_message, error_code = can_be_pushed_to_jira(obj)
if not obj_can_be_pushed_to_jira:
log_jira_alert(error_message, obj)
logger.warn("%s cannot be pushed to JIRA: %s.", to_str_typed(obj), error_message)
logger.warn("The JIRA issue will NOT be created.")
logger.warning("%s cannot be pushed to JIRA: %s.", to_str_typed(obj), error_message)
logger.warning("The JIRA issue will NOT be created.")
return False
logger.debug('Trying to create a new JIRA issue for %s...', to_str_typed(obj))
try:
@@ -1039,7 +1039,7 @@ def get_issuetype_fields(

except JIRAError as e:
e.text = f"Failed retrieving field metadata from Jira version: {jira._version}, project: {project_key}, issue type: {issuetype_name}. {e.text}"
logger.warn(e.text)
logger.warning(e.text)
add_error_message_to_response(e.text)

raise e
@@ -1113,7 +1113,7 @@ def close_epic(eng, push_to_jira, **kwargs):
try:
jissue = get_jira_issue(eng)
if jissue is None:
logger.warn("JIRA close epic failed: no issue found")
logger.warning("JIRA close epic failed: no issue found")
return False

req_url = jira_instance.url + '/rest/api/latest/issue/' + \
@@ -1124,7 +1124,7 @@
auth=HTTPBasicAuth(jira_instance.username, jira_instance.password),
json=json_data)
if r.status_code != 204:
logger.warn("JIRA close epic failed with error: {}".format(r.text))
logger.warning("JIRA close epic failed with error: {}".format(r.text))
return False
return True
except JIRAError as e:
@@ -1322,6 +1322,38 @@ def finding_link_jira(request, finding, new_jira_issue_key):
return True


def finding_group_link_jira(request, finding_group, new_jira_issue_key):
logger.debug('linking existing jira issue %s for finding group %i', new_jira_issue_key, finding_group.id)

existing_jira_issue = jira_get_issue(get_jira_project(finding_group), new_jira_issue_key)

jira_project = get_jira_project(finding_group)

if not existing_jira_issue:
raise ValueError('JIRA issue not found or cannot be retrieved: ' + new_jira_issue_key)

jira_issue = JIRA_Issue(
jira_id=existing_jira_issue.id,
jira_key=existing_jira_issue.key,
finding_group=finding_group,
jira_project=jira_project)

jira_issue.jira_key = new_jira_issue_key
# jira timestamps are in iso format: 'updated': '2020-07-17T09:49:51.447+0200'
# these are a pain to parse in python < 3.7, so for now just record the current time
# as the timestamp the jira link was created / updated in DD
jira_issue.jira_creation = timezone.now()
jira_issue.jira_change = timezone.now()

jira_issue.save()

finding_group.save()

jira_issue_url = get_jira_url(finding_group)

return True


def finding_unlink_jira(request, finding):
return unlink_jira(request, finding)

2 changes: 1 addition & 1 deletion dojo/management/commands/import_github_languages.py
@@ -33,7 +33,7 @@ def handle(self, *args, **options):
try:
language_type, created = Language_Type.objects.get_or_create(language=name)
except Language_Type.MultipleObjectsReturned:
logger.warn('Language_Type {} exists multiple times'.format(name))
logger.warning('Language_Type {} exists multiple times'.format(name))
continue

if created:
2 changes: 1 addition & 1 deletion dojo/pipeline.py
@@ -73,7 +73,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs):
token = soc.extra_data['access_token']
group_names = []
if 'groups' not in kwargs['response'] or kwargs['response']['groups'] == "":
logger.warn("No groups in response. Stopping to update groups of user based on azureAD")
logger.warning("No groups in response. Stopping to update groups of user based on azureAD")
return
group_IDs = kwargs['response']['groups']
try:
2 changes: 1 addition & 1 deletion dojo/risk_acceptance/helper.py
@@ -133,7 +133,7 @@ def expiration_handler(*args, **kwargs):
try:
system_settings = System_Settings.objects.get()
except System_Settings.DoesNotExist:
logger.warn("Unable to get system_settings, skipping risk acceptance expiration job")
logger.warning("Unable to get system_settings, skipping risk acceptance expiration job")

risk_acceptances = get_expired_risk_acceptances_to_handle()

2 changes: 1 addition & 1 deletion dojo/templates/base.html
@@ -1063,7 +1063,7 @@ <h3 class="no-margin-top" style="padding-bottom: 5px;">
License</a>. | <a href="https://github.com/DefectDojo/django-DefectDojo/blob/master/NOTICE"> Dependencies Notice.</a>
</p>
<p>
&copy; 2015-{% now "Y" %} DefectDojo, Inc. All rights reserved.
&copy; 2015-{% now "Y" %} DefectDojo, Inc. All rights reserved. DefectDojo is trademark of DefectDojo, Inc.
</p>
</div>
</div>
2 changes: 1 addition & 1 deletion dojo/tools/dependency_check/parser.py
@@ -180,7 +180,7 @@ def get_finding_from_vulnerability(self, dependency, related_dependency, vulnera
# default to 'Medium' and produce warnings in logs
if severity:
if severity.strip().lower() not in self.SEVERITY_MAPPING:
logger.warn(f"Warning: Unknow severity value detected '{severity}'. Bypass to 'Medium' value")
logger.warning(f"Warning: Unknow severity value detected '{severity}'. Bypass to 'Medium' value")
severity = "Medium"
else:
severity = self.SEVERITY_MAPPING[severity.strip().lower()]
2 changes: 1 addition & 1 deletion dojo/tools/dependency_track/parser.py
@@ -201,7 +201,7 @@ def _convert_dependency_track_finding_to_dojo_finding(self, dependency_track_fin
dependency_track_severity = dependency_track_finding['vulnerability']['severity']
vulnerability_severity = self._convert_dependency_track_severity_to_dojo_severity(dependency_track_severity)
if vulnerability_severity is None:
logger.warn("Detected severity of %s that could not be mapped for %s. Defaulting to Critical!", dependency_track_severity, title)
logger.warning("Detected severity of %s that could not be mapped for %s. Defaulting to Critical!", dependency_track_severity, title)
vulnerability_severity = "Critical"

# Use the analysis state from Dependency Track to determine if the finding has already been marked as a false positive upstream
26 changes: 6 additions & 20 deletions dojo/tools/veracode_sca/parser.py
@@ -65,17 +65,10 @@ def _get_findings_json(self, file, test):
cvss_score = vulnerability.get("cvss3_score")
severity = self.__cvss_to_severity(cvss_score)

description = 'This library has known vulnerabilities.\n'
description += \
"**CVE:** {0} ({1})\n" \
"CVS Score: {2} ({3})\n" \
"Project name: {4}\n" \
"Title: \n>{5}" \
description = \
"Project name: {0}\n" \
"Title: \n>{1}" \
"\n\n-----\n\n".format(
vuln_id,
date,
cvss_score,
severity,
issue.get("project_name"),
vulnerability.get('title'))

@@ -151,17 +144,10 @@ def get_findings_csv(self, file, test):
severity = self.fix_severity(row.get('Severity', None))
cvss_score = float(row.get('CVSS score', 0))
date = datetime.strptime(row.get('Issue opened: Scan date'), '%d %b %Y %H:%M%p %Z')
description = 'This library has known vulnerabilities.\n'
description += \
"**CVE:** {0} ({1})\n" \
"CVS Score: {2} ({3})\n" \
"Project name: {4}\n" \
"Title: \n>{5}" \
description = \
"Project name: {0}\n" \
"Title: \n>{1}" \
"\n\n-----\n\n".format(
vuln_id,
date,
cvss_score,
severity,
row.get('Project'),
row.get('Title'))

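The parser's finding description is trimmed to the project name and title; the CVE id, CVSS score, severity and date are still computed just above and are simply no longer duplicated in the description text. The shape of the new text, with hypothetical values:

# Hypothetical values, used only to show the layout of the trimmed description.
project_name = "acme/webapp"
title = "Deserialization of Untrusted Data in example-lib"

description = (
    "Project name: {0}\n"
    "Title: \n>{1}"
    "\n\n-----\n\n".format(project_name, title)
)
print(description)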
4 changes: 2 additions & 2 deletions dojo/utils.py
@@ -1649,15 +1649,15 @@ def get_full_url(relative_url):
if settings.SITE_URL:
return settings.SITE_URL + relative_url
else:
logger.warn('SITE URL undefined in settings, full_url cannot be created')
logger.warning('SITE URL undefined in settings, full_url cannot be created')
return "settings.SITE_URL" + relative_url


def get_site_url():
if settings.SITE_URL:
return settings.SITE_URL
else:
logger.warn('SITE URL undefined in settings, full_url cannot be created')
logger.warning('SITE URL undefined in settings, full_url cannot be created')
return "settings.SITE_URL"


4 changes: 2 additions & 2 deletions helm/defectdojo/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v2
appVersion: "2.20.1"
appVersion: "2.20.2"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
version: 1.6.59
version: 1.6.60
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
4 changes: 2 additions & 2 deletions readme-docs/CONTRIBUTING.md
@@ -30,7 +30,7 @@ We consider the open-source version of DefectDojo to be feature complete with th

## Modifying DefectDojo and Testing

Please use [these test scripts](./tests) to test your changes. These are the scripts we run in our [integration tests](DOCKER.md#run-the-tests-with-docker).
Please use [these test scripts](../tests) to test your changes. These are the scripts we run in our [integration tests](DOCKER.md#run-the-tests-with-docker).

For changes that require additional settings, you can now use local_settings.py file. See the logging section below for more information.

@@ -48,7 +48,7 @@ DefectDojo.

0. Make sure that the install is working properly.

0. All tests found in [these test scripts](./tests) should be passing.
0. All tests found in [these test scripts](../tests) should be passing.

0. All submitted code should conform to [__PEP8 standards__][pep8].

