From 7a1ea84acfbe6de08329bd9087a477ff02112319 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 5 Feb 2024 23:04:46 +0000 Subject: [PATCH 01/24] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/package.json b/components/package.json index 6cb8985b65..9a57f7b78d 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.31.0-dev", + "version": "2.32.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 03977b720f..f1c39c15ed 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa -__version__ = '2.31.0-dev' +__version__ = '2.32.0-dev' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 50342639af..53bce7bc75 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.31.0-dev" +appVersion: "2.32.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.108 +version: 1.6.109-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap From 738dca4534382049ca7587a60fb6b33715213943 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 5 Feb 2024 17:57:51 -0600 Subject: [PATCH 02/24] Update versions --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/components/package.json b/components/package.json index 6cb8985b65..6bff77e4dc 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.31.0-dev", + "version": "2.31.0", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 03977b720f..c79303a1f2 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa -__version__ = '2.31.0-dev' +__version__ = '2.31.0' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 50342639af..26edb33e5e 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: "2.31.0-dev" +appVersion: "2.31.0" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo version: 1.6.108 From 74665d7d3e83c5eedab054737e1a2578a9355d5b Mon Sep 17 00:00:00 2001 From: Colm O hEigeartaigh Date: Tue, 6 Feb 2024 23:01:07 +0000 Subject: [PATCH 03/24] Parse GitHub vulnerability version (#9462) --- dojo/tools/github_vulnerability/parser.py | 3 + .../github-vuln-version.json | 106 ++++++++++++++++++ .../tools/test_github_vulnerability_parser.py | 15 +++ 3 files changed, 124 insertions(+) create mode 100644 unittests/scans/github_vulnerability/github-vuln-version.json diff --git a/dojo/tools/github_vulnerability/parser.py b/dojo/tools/github_vulnerability/parser.py index 15bf37606c..3c134342d2 100644 --- a/dojo/tools/github_vulnerability/parser.py +++ b/dojo/tools/github_vulnerability/parser.py @@ -66,6 +66,9 @@ def get_findings(self, filename, test): if "vulnerableManifestPath" in alert: finding.file_path = alert["vulnerableManifestPath"] + if "vulnerableRequirements" in alert and alert["vulnerableRequirements"].startswith("= "): + finding.component_version = alert["vulnerableRequirements"][2:] + if "createdAt" in alert: finding.date = dateutil.parser.parse(alert["createdAt"]) diff --git a/unittests/scans/github_vulnerability/github-vuln-version.json b/unittests/scans/github_vulnerability/github-vuln-version.json new file mode 100644 index 0000000000..e80afe7e58 --- /dev/null +++ b/unittests/scans/github_vulnerability/github-vuln-version.json @@ -0,0 +1,106 @@ +{ + "data": { + "repository": { + "vulnerabilityAlerts": { + "nodes": [ + { + "id": "RVA_kwDOLJyUo88AAAABQUWapw", + "createdAt": "2024-01-26T02:42:32Z", + "vulnerableManifestPath": "sompath/pom.xml", + "securityVulnerability": { + "severity": "CRITICAL", + "updatedAt": "2022-12-09T22:02:22Z", + "package": { + "name": "org.springframework:spring-web", + "ecosystem": "MAVEN" + }, + "firstPatchedVersion": { + "identifier": "6.0.0" + }, + "vulnerableVersionRange": "< 6.0.0", + "advisory": { + "description": "Pivotal Spring Framework before 6.0.0 suffers from a potential remote code execution (RCE) issue if used for Java deserialization of untrusted data. Depending on how the library is implemented within a product, this issue may or not occur, and authentication may be required.\n\nMaintainers recommend investigating alternative components or a potential mitigating control. 
Version 4.2.6 and 3.2.17 contain [enhanced documentation](https://github.com/spring-projects/spring-framework/commit/5cbe90b2cd91b866a5a9586e460f311860e11cfa) advising users to take precautions against unsafe Java deserialization, version 5.3.0 [deprecate the impacted classes](https://github.com/spring-projects/spring-framework/issues/25379) and version 6.0.0 [removed it entirely](https://github.com/spring-projects/spring-framework/issues/27422).", + "summary": "Pivotal Spring Framework contains unsafe Java deserialization methods", + "identifiers": [ + { + "value": "GHSA-4wrc-f8pq-fpqp", + "type": "GHSA" + }, + { + "value": "CVE-2016-1000027", + "type": "CVE" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-1000027" + }, + { + "url": "https://bugzilla.redhat.com/show_bug.cgi?id=CVE-2016-1000027" + }, + { + "url": "https://security-tracker.debian.org/tracker/CVE-2016-1000027" + }, + { + "url": "https://www.tenable.com/security/research/tra-2016-20" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-1231625331" + }, + { + "url": "https://github.com/spring-projects/spring-framework/commit/5cbe90b2cd91b866a5a9586e460f311860e11cfa" + }, + { + "url": "https://support.contrastsecurity.com/hc/en-us/articles/4402400830612-Spring-web-Java-Deserialization-CVE-2016-1000027" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/21680" + }, + { + "url": "https://github.com/spring-projects/spring-framework/commit/2b051b8b321768a4cfef83077db65c6328ffd60f" + }, + { + "url": "https://jira.spring.io/browse/SPR-17143?redirect=false" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-579669626" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-582313417" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-744519525" + }, + { + "url": "https://security.netapp.com/advisory/ntap-20230420-0009/" + }, + { + "url": "https://spring.io/blog/2022/05/11/spring-framework-5-3-20-and-5-2-22-available-now" + }, + { + "url": "https://github.com/advisories/GHSA-4wrc-f8pq-fpqp" + } + ], + "cvss": { + "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + } + }, + "state": "OPEN", + "vulnerableManifestFilename": "pom.xml", + "vulnerableRequirements": "= 5.3.29", + "number": 1, + "dependencyScope": "RUNTIME", + "dismissComment": null, + "dismissReason": null, + "dismissedAt": null, + "fixedAt": null + } + ] + } + } + } +} diff --git a/unittests/tools/test_github_vulnerability_parser.py b/unittests/tools/test_github_vulnerability_parser.py index acc955e349..1453c02a39 100644 --- a/unittests/tools/test_github_vulnerability_parser.py +++ b/unittests/tools/test_github_vulnerability_parser.py @@ -251,3 +251,18 @@ def test_parse_state(self): self.assertEqual(finding.file_path, "apache/cxf/cxf-shiro/pom.xml") self.assertEqual(finding.active, False) self.assertEqual(finding.is_mitigated, True) + + def test_parser_version(self): + testfile = open("unittests/scans/github_vulnerability/github-vuln-version.json") + parser = GithubVulnerabilityParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(1, len(findings)) + for finding in findings: + finding.clean() + + with self.subTest(i=0): + finding = findings[0] + self.assertEqual(finding.title, "Pivotal Spring Framework contains 
unsafe Java deserialization methods") + self.assertEqual(finding.severity, "Critical") + self.assertEqual(finding.component_name, "org.springframework:spring-web") + self.assertEqual(finding.component_version, "5.3.29") From 983d7eef24b001c10ea2162413be2f19061ccd58 Mon Sep 17 00:00:00 2001 From: Andrei Serebriakov Date: Wed, 7 Feb 2024 02:06:47 +0300 Subject: [PATCH 04/24] Fix SARIF parser with CodeQL rules (#9440) * fix for sarif parser with codeql rules * add check for extensions property * flake8 comparsion --- dojo/tools/sarif/parser.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dojo/tools/sarif/parser.py b/dojo/tools/sarif/parser.py index 14d8184957..e7963612b4 100644 --- a/dojo/tools/sarif/parser.py +++ b/dojo/tools/sarif/parser.py @@ -77,7 +77,10 @@ def __get_last_invocation_date(self, data): def get_rules(run): rules = {} - for item in run["tool"]["driver"].get("rules", []): + rules_array = run["tool"]["driver"].get("rules", []) + if len(rules_array) == 0 and run["tool"].get("extensions") is not None: + rules_array = run["tool"]["extensions"][0].get("rules", []) + for item in rules_array: rules[item["id"]] = item return rules From 57bd0566ce08687e42496af3fd4391b2715db924 Mon Sep 17 00:00:00 2001 From: Blake Owens <76979297+blakeaowens@users.noreply.github.com> Date: Fri, 9 Feb 2024 14:34:06 -0600 Subject: [PATCH 05/24] finding sla expiration date field (part two) (#9494) * finding sla expiration date field (part two) * sla violation check updates * clean up of finding violates_sla property * flake8 fix * Update dojo/models.py Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> * Update 0201_populate_finding_sla_expiration_date.py --------- Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> --- ...01_populate_finding_sla_expiration_date.py | 133 ++++++++++++++++++ dojo/filters.py | 17 +-- dojo/models.py | 41 +++--- 3 files changed, 160 insertions(+), 31 deletions(-) create mode 100644 dojo/db_migrations/0201_populate_finding_sla_expiration_date.py diff --git a/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py new file mode 100644 index 0000000000..4b886301de --- /dev/null +++ b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py @@ -0,0 +1,133 @@ +from django.db import migrations +from django.utils import timezone +from datetime import datetime +from django.conf import settings +from dateutil.relativedelta import relativedelta +import logging + +from dojo.utils import get_work_days + +logger = logging.getLogger(__name__) + + +def calculate_sla_expiration_dates(apps, schema_editor): + System_Settings = apps.get_model('dojo', 'System_Settings') + + ss, _ = System_Settings.objects.get_or_create() + if not ss.enable_finding_sla: + return + + logger.info('Calculating SLA expiration dates for all findings') + + SLA_Configuration = apps.get_model('dojo', 'SLA_Configuration') + Finding = apps.get_model('dojo', 'Finding') + + findings = Finding.objects.filter(sla_expiration_date__isnull=True).order_by('id').only('id', 'sla_start_date', 'date', 'severity', 'test', 'mitigated') + + page_size = 1000 + total_count = Finding.objects.filter(id__gt=0).count() + logger.info('Found %d findings to be updated', total_count) + + i = 0 + batch = [] + last_id = 0 + total_pages = (total_count // page_size) + 2 + for p in range(1, total_pages): + page = findings.filter(id__gt=last_id)[:page_size] + for find in page: + i += 1 + last_id = find.id + + 
start_date = find.sla_start_date if find.sla_start_date else find.date + + sla_config = SLA_Configuration.objects.filter(id=find.test.engagement.product.sla_configuration_id).first() + sla_period = getattr(sla_config, find.severity.lower(), None) + + days = None + if settings.SLA_BUSINESS_DAYS: + if find.mitigated: + days = get_work_days(find.date, find.mitigated.date()) + else: + days = get_work_days(find.date, timezone.now().date()) + else: + if isinstance(start_date, datetime): + start_date = start_date.date() + + if find.mitigated: + days = (find.mitigated.date() - start_date).days + else: + days = (timezone.now().date() - start_date).days + + days = days if days > 0 else 0 + + days_remaining = None + if sla_period: + days_remaining = sla_period - days + + if days_remaining: + if find.mitigated: + find.sla_expiration_date = find.mitigated.date() + relativedelta(days=days_remaining) + else: + find.sla_expiration_date = timezone.now().date() + relativedelta(days=days_remaining) + + batch.append(find) + + if (i > 0 and i % page_size == 0): + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + +def reset_sla_expiration_dates(apps, schema_editor): + System_Settings = apps.get_model('dojo', 'System_Settings') + + ss, _ = System_Settings.objects.get_or_create() + if not ss.enable_finding_sla: + return + + logger.info('Resetting SLA expiration dates for all findings') + + Finding = apps.get_model('dojo', 'Finding') + + findings = Finding.objects.filter(sla_expiration_date__isnull=False).order_by('id').only('id') + + page_size = 1000 + total_count = Finding.objects.filter(id__gt=0).count() + logger.info('Found %d findings to be reset', total_count) + + i = 0 + batch = [] + last_id = 0 + total_pages = (total_count // page_size) + 2 + for p in range(1, total_pages): + page = findings.filter(id__gt=last_id)[:page_size] + for find in page: + i += 1 + last_id = find.id + + find.sla_expiration_date = None + batch.append(find) + + if (i > 0 and i % page_size == 0): + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0200_finding_sla_expiration_date_product_async_updating_and_more'), + ] + + operations = [ + migrations.RunPython(calculate_sla_expiration_dates, reset_sla_expiration_dates), + ] diff --git a/dojo/filters.py b/dojo/filters.py index 51279d76a9..723c52337f 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -11,6 +11,7 @@ from django.conf import settings import six from django.utils.translation import gettext_lazy as _ +from django.utils import timezone from django_filters import FilterSet, CharFilter, OrderingFilter, \ ModelMultipleChoiceFilter, ModelChoiceFilter, MultipleChoiceFilter, \ BooleanFilter, NumberFilter, DateFilter @@ -148,16 +149,12 @@ def any(self, qs, name): return qs def sla_satisfied(self, qs, name): - for finding in qs: - if finding.violates_sla: - qs = qs.exclude(id=finding.id) - return qs + # return findings that have an sla expiration date after today or no sla expiration date + return 
qs.filter(Q(sla_expiration_date__isnull=True) | Q(sla_expiration_date__gt=timezone.now().date())) def sla_violated(self, qs, name): - for finding in qs: - if not finding.violates_sla: - qs = qs.exclude(id=finding.id) - return qs + # return active findings that have an sla expiration date before today + return qs.filter(Q(active=True) & Q(sla_expiration_date__lt=timezone.now().date())) options = { None: (_('Any'), any), @@ -184,13 +181,13 @@ def any(self, qs, name): def sla_satisifed(self, qs, name): for product in qs: - if product.violates_sla: + if product.violates_sla(): qs = qs.exclude(id=product.id) return qs def sla_violated(self, qs, name): for product in qs: - if not product.violates_sla: + if not product.violates_sla(): qs = qs.exclude(id=product.id) return qs diff --git a/dojo/models.py b/dojo/models.py index 7bda3997c0..45d522963e 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -1102,7 +1102,7 @@ def findings_active_verified_count(self): @cached_property def endpoint_host_count(self): # active_endpoints is (should be) prefetched - endpoints = self.active_endpoints + endpoints = getattr(self, 'active_endpoints', None) hosts = [] for e in endpoints: @@ -1116,7 +1116,10 @@ def endpoint_host_count(self): @cached_property def endpoint_count(self): # active_endpoints is (should be) prefetched - return len(self.active_endpoints) + endpoints = getattr(self, 'active_endpoints', None) + if endpoints: + return len(self.active_endpoints) + return None def open_findings(self, start_date=None, end_date=None): if start_date is None or end_date is None: @@ -1192,13 +1195,11 @@ def get_absolute_url(self): from django.urls import reverse return reverse('view_product', args=[str(self.id)]) - @property def violates_sla(self): - findings = Finding.objects.filter(test__engagement__product=self, active=True) - for f in findings: - if f.violates_sla: - return True - return False + findings = Finding.objects.filter(test__engagement__product=self, + active=True, + sla_expiration_date__lt=timezone.now().date()) + return findings.count() > 0 class Product_Member(models.Model): @@ -2887,20 +2888,19 @@ def set_sla_expiration_date(self): self.sla_expiration_date = get_current_date() + relativedelta(days=days_remaining) def sla_days_remaining(self): - sla_calculation = None - sla_period = self.get_sla_period() - if sla_period: - sla_calculation = sla_period - self.sla_age - return sla_calculation - - def sla_deadline(self): - days_remaining = self.sla_days_remaining() - if days_remaining: + if self.sla_expiration_date: if self.mitigated: - return self.mitigated.date() + relativedelta(days=days_remaining) - return get_current_date() + relativedelta(days=days_remaining) + mitigated_date = self.mitigated + if isinstance(mitigated_date, datetime): + mitigated_date = self.mitigated.date() + return (self.sla_expiration_date - mitigated_date).days + else: + return (self.sla_expiration_date - get_current_date()).days return None + def sla_deadline(self): + return self.sla_expiration_date + def github(self): try: return self.github_issue @@ -3294,8 +3294,7 @@ def inherit_tags(self, potentially_existing_tags): @property def violates_sla(self): - days_remaining = self.sla_days_remaining() - return days_remaining < 0 if days_remaining else False + return (self.sla_expiration_date and self.sla_expiration_date < timezone.now().date()) class FindingAdmin(admin.ModelAdmin): From 00db247d5c02bc934f7faa39271f2536477b6d1a Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Sun, 11 Feb 2024 
20:42:39 -0600 Subject: [PATCH 06/24] Jira Server/DataCenter: Update meta methods (#9512) --- dojo/jira_link/helper.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 8a8b208d45..ecd5da084f 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -1036,28 +1036,28 @@ def get_issuetype_fields( else: try: - issuetypes = jira.createmeta_issuetypes(project_key) + issuetypes = jira.project_issue_types(project_key) except JIRAError as e: e.text = f"Jira API call 'createmeta/issuetypes' failed with status: {e.status_code} and message: {e.text}. Project misconfigured or no permissions in Jira ?" raise e issuetype_id = None - for it in issuetypes['values']: - if it['name'] == issuetype_name: - issuetype_id = it['id'] + for it in issuetypes: + if it.name == issuetype_name: + issuetype_id = it.id break if not issuetype_id: raise JIRAError("Issue type ID can not be matched. Misconfigured default issue type ?") try: - issuetype_fields = jira.createmeta_fieldtypes(project_key, issuetype_id) + issuetype_fields = jira.project_issue_fields(project_key, issuetype_id) except JIRAError as e: e.text = f"Jira API call 'createmeta/fieldtypes' failed with status: {e.status_code} and message: {e.text}. Misconfigured project or default issue type ?" raise e try: - issuetype_fields = [f['fieldId'] for f in issuetype_fields['values']] + issuetype_fields = [f.fieldId for f in issuetype_fields] except Exception: raise JIRAError("Misconfigured default issue type ?") From 164c09c4c778792013dd450f0fb73b0bab368145 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Sun, 11 Feb 2024 20:43:49 -0600 Subject: [PATCH 07/24] Jira Webhook: Catch comments from other issue updates (#9513) * Jira Webhook: Catch comments from other issue updates * Accommodate redirect responses * Update dojo/jira_link/views.py Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> * Fix syntax --------- Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> --- dojo/jira_link/views.py | 203 ++++++++++++++++++++++------------------ 1 file changed, 113 insertions(+), 90 deletions(-) diff --git a/dojo/jira_link/views.py b/dojo/jira_link/views.py index e05ea5ce21..a1a73f0b01 100644 --- a/dojo/jira_link/views.py +++ b/dojo/jira_link/views.py @@ -1,7 +1,7 @@ # Standard library imports import json import logging - +import datetime # Third party imports from django.contrib import messages from django.contrib.admin.utils import NestedObjects @@ -105,97 +105,13 @@ def webhook(request, secret=None): if findings: for finding in findings: jira_helper.process_resolution_from_jira(finding, resolution_id, resolution_name, assignee_name, jira_now, jissue) + # Check for any comment that could have come along with the resolution + if (error_response := check_for_and_create_comment(parsed)) is not None: + return error_response if parsed.get('webhookEvent') == 'comment_created': - """ - example incoming requests from JIRA Server 8.14.0 - { - "timestamp":1610269967824, - "webhookEvent":"comment_created", - "comment":{ - "self":"https://jira.host.com/rest/api/2/issue/115254/comment/466578", - "id":"466578", - "author":{ - "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", - "name":"defect.dojo", - "key":"defect.dojo", # seems to be only present on JIRA Server, not on Cloud - "avatarUrls":{ - "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", 
- "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", - "16x16":"https://www.gravatar.com/avatar9637bfb970eff6176357df615f548f1c?d=mm&s=16", - "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" - }, - "displayName":"Defect Dojo", - "active":true, - "timeZone":"Europe/Amsterdam" - }, - "body":"(Valentijn Scholten):test4", - "updateAuthor":{ - "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", - "name":"defect.dojo", - "key":"defect.dojo", - "avatarUrls":{ - "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", - "24x24""https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", - "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16", - "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" - }, - "displayName":"Defect Dojo", - "active":true, - "timeZone":"Europe/Amsterdam" - }, - "created":"2021-01-10T10:12:47.824+0100", - "updated":"2021-01-10T10:12:47.824+0100" - } - } - """ - - comment_text = parsed['comment']['body'] - commentor = '' - if 'name' in parsed['comment']['updateAuthor']: - commentor = parsed['comment']['updateAuthor']['name'] - elif 'emailAddress' in parsed['comment']['updateAuthor']: - commentor = parsed['comment']['updateAuthor']['emailAddress'] - else: - logger.debug('Could not find the author of this jira comment!') - commentor_display_name = parsed['comment']['updateAuthor']['displayName'] - # example: body['comment']['self'] = "http://www.testjira.com/jira_under_a_path/rest/api/2/issue/666/comment/456843" - jid = parsed['comment']['self'].split('/')[-3] - jissue = get_object_or_404(JIRA_Issue, jira_id=jid) - logging.info(f"Received issue comment for {jissue.jira_key}") - logger.debug('jissue: %s', vars(jissue)) - - jira_usernames = JIRA_Instance.objects.values_list('username', flat=True) - for jira_userid in jira_usernames: - # logger.debug('incoming username: %s jira config username: %s', commentor.lower(), jira_userid.lower()) - if jira_userid.lower() == commentor.lower(): - logger.debug('skipping incoming JIRA comment as the user id of the comment in JIRA (%s) matches the JIRA username in DefectDojo (%s)', commentor.lower(), jira_userid.lower()) - return HttpResponse('') - - findings = None - if jissue.finding: - findings = [jissue.finding] - create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding", args=(jissue.finding.id,)), icon='check') - - elif jissue.finding_group: - findings = [jissue.finding_group.findings.all()] - create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding_group", args=(jissue.finding_group.id,)), icon='check') - - elif jissue.engagement: - return HttpResponse('Comment for engagement ignored') - else: - raise Http404(f'No finding or engagement found for JIRA issue {jissue.jira_key}') - - for finding in findings: - # logger.debug('finding: %s', vars(jissue.finding)) - new_note = Notes() - new_note.entry = f'({commentor_display_name} ({commentor})): {comment_text}' - new_note.author, created = User.objects.get_or_create(username='JIRA') - new_note.save() - finding.notes.add(new_note) - finding.jira_issue.jira_change = timezone.now() - finding.jira_issue.save() - finding.save() + if (error_response := check_for_and_create_comment(parsed)) is not None: + return error_response if 
parsed.get('webhookEvent') not in ['comment_created', 'jira:issue_updated']: logger.info(f"Unrecognized JIRA webhook event received: {parsed.get('webhookEvent')}") @@ -203,6 +119,7 @@ def webhook(request, secret=None): except Exception as e: if isinstance(e, Http404): logger.warning('404 error processing JIRA webhook') + logger.warning(str(e)) else: logger.exception(e) @@ -218,6 +135,112 @@ def webhook(request, secret=None): return HttpResponse('') +def check_for_and_create_comment(parsed_json): + """ + example incoming requests from JIRA Server 8.14.0 + { + "timestamp":1610269967824, + "webhookEvent":"comment_created", + "comment":{ + "self":"https://jira.host.com/rest/api/2/issue/115254/comment/466578", + "id":"466578", + "author":{ + "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", + "name":"defect.dojo", + "key":"defect.dojo", # seems to be only present on JIRA Server, not on Cloud + "avatarUrls":{ + "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", + "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", + "16x16":"https://www.gravatar.com/avatar9637bfb970eff6176357df615f548f1c?d=mm&s=16", + "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" + }, + "displayName":"Defect Dojo", + "active":true, + "timeZone":"Europe/Amsterdam" + }, + "body":"(Valentijn Scholten):test4", + "updateAuthor":{ + "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", + "name":"defect.dojo", + "key":"defect.dojo", + "avatarUrls":{ + "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", + "24x24""https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", + "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16", + "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" + }, + "displayName":"Defect Dojo", + "active":true, + "timeZone":"Europe/Amsterdam" + }, + "created":"2021-01-10T10:12:47.824+0100", + "updated":"2021-01-10T10:12:47.824+0100" + } + } + """ + comment = parsed_json.get("comment", None) + if comment is None: + return + + comment_text = comment.get('body') + commenter = '' + if 'name' in comment.get('updateAuthor'): + commenter = comment.get('updateAuthor', {}).get('name') + elif 'emailAddress' in comment.get('updateAuthor'): + commenter = comment.get('updateAuthor', {}).get('emailAddress') + else: + logger.debug('Could not find the author of this jira comment!') + commenter_display_name = comment.get('updateAuthor', {}).get('displayName') + # example: body['comment']['self'] = "http://www.testjira.com/jira_under_a_path/rest/api/2/issue/666/comment/456843" + jid = comment.get('self', '').split('/')[-3] + jissue = get_object_or_404(JIRA_Issue, jira_id=jid) + logging.info(f"Received issue comment for {jissue.jira_key}") + logger.debug('jissue: %s', vars(jissue)) + + jira_usernames = JIRA_Instance.objects.values_list('username', flat=True) + for jira_user_id in jira_usernames: + # logger.debug('incoming username: %s jira config username: %s', commenter.lower(), jira_user_id.lower()) + if jira_user_id.lower() == commenter.lower(): + logger.debug('skipping incoming JIRA comment as the user id of the comment in JIRA (%s) matches the JIRA username in DefectDojo (%s)', commenter.lower(), jira_user_id.lower()) + return HttpResponse('') + + findings = None + if jissue.finding: + findings = [jissue.finding] + create_notification(event='other', title=f'JIRA incoming 
comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding", args=(jissue.finding.id,)), icon='check') + + elif jissue.finding_group: + findings = [jissue.finding_group.findings.all()] + create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding_group", args=(jissue.finding_group.id,)), icon='check') + + elif jissue.engagement: + return HttpResponse('Comment for engagement ignored') + else: + raise Http404(f'No finding or engagement found for JIRA issue {jissue.jira_key}') + + # Set the fields for the notes + author, _ = User.objects.get_or_create(username='JIRA') + entry = f'({commenter_display_name} ({commenter})): {comment_text}' + # Iterate (potentially) over each of the findings the note should be added to + for finding in findings: + # Determine if this exact note was created within the last 30 seconds to avoid duplicate notes + existing_notes = finding.notes.filter( + entry=entry, + author=author, + date__gte=(timezone.now() - datetime.timedelta(seconds=30)), + ) + # Check the query for any hits + if existing_notes.count() == 0: + new_note = Notes() + new_note.entry = entry + new_note.author = author + new_note.save() + finding.notes.add(new_note) + finding.jira_issue.jira_change = timezone.now() + finding.jira_issue.save() + finding.save() + + def get_custom_field(jira, label): url = jira._options["server"].strip('/') + '/rest/api/2/field' response = jira._session.get(url).json() From 7124335f213433b9f8cceddd9cd77499b46d71f9 Mon Sep 17 00:00:00 2001 From: tomaszn Date: Mon, 12 Feb 2024 03:45:30 +0100 Subject: [PATCH 08/24] add metrics page: "Product Tag Count" (fixes #9151) (#9152) * add metrics page: "Product Tag Count" It is fully based on "Product Type Count" metrics page. * fixup! add metrics page: "Product Tag Count" * Fix Flake8 * Update views.py --------- Co-authored-by: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> --- docs/content/en/usage/features.md | 3 + dojo/forms.py | 20 +++- dojo/locale/en/LC_MESSAGES/django.po | 4 + dojo/metrics/urls.py | 2 + dojo/metrics/views.py | 164 ++++++++++++++++++++++++++- dojo/templates/base.html | 5 + dojo/templates/dojo/pt_counts.html | 10 +- dojo/utils.py | 10 +- 8 files changed, 205 insertions(+), 13 deletions(-) diff --git a/docs/content/en/usage/features.md b/docs/content/en/usage/features.md index fdd3e19480..470c009bf7 100644 --- a/docs/content/en/usage/features.md +++ b/docs/content/en/usage/features.md @@ -557,6 +557,9 @@ Product Type Counts ![Product Type Counts](../../images/met_2.png) +Product Tag Counts +: Same as above, but for a group of products sharing a tag. + Simple Metrics : Provides tabular data for all Product Types. 
The data displayed in this view is the total number of S0, S1, S2, S3, S4, Opened This diff --git a/dojo/forms.py b/dojo/forms.py index 558c09ae69..36b9cd1a90 100755 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -2119,21 +2119,37 @@ def get_years(): return [(now.year, now.year), (now.year - 1, now.year - 1), (now.year - 2, now.year - 2)] -class ProductTypeCountsForm(forms.Form): +class ProductCountsFormBase(forms.Form): month = forms.ChoiceField(choices=list(MONTHS.items()), required=True, error_messages={ 'required': '*'}) year = forms.ChoiceField(choices=get_years, required=True, error_messages={ 'required': '*'}) + + +class ProductTypeCountsForm(ProductCountsFormBase): product_type = forms.ModelChoiceField(required=True, queryset=Product_Type.objects.none(), error_messages={ 'required': '*'}) def __init__(self, *args, **kwargs): - super(ProductTypeCountsForm, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.fields['product_type'].queryset = get_authorized_product_types(Permissions.Product_Type_View) +class ProductTagCountsForm(ProductCountsFormBase): + product_tag = forms.ModelChoiceField(required=True, + queryset=Product.tags.tag_model.objects.none().order_by('name'), + error_messages={ + 'required': '*'}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + prods = get_authorized_products(Permissions.Product_View) + tags_available_to_user = Product.tags.tag_model.objects.filter(product__in=prods) + self.fields['product_tag'].queryset = tags_available_to_user + + class APIKeyForm(forms.ModelForm): id = forms.IntegerField(required=True, widget=forms.widgets.HiddenInput()) diff --git a/dojo/locale/en/LC_MESSAGES/django.po b/dojo/locale/en/LC_MESSAGES/django.po index dbb9e75655..ab26c8cbdb 100644 --- a/dojo/locale/en/LC_MESSAGES/django.po +++ b/dojo/locale/en/LC_MESSAGES/django.po @@ -2692,6 +2692,10 @@ msgstr "" msgid "Product Type Counts" msgstr "" +#: dojo/templates/base.html +msgid "Product Tag Counts" +msgstr "" + #: dojo/templates/base.html msgid "Users" msgstr "" diff --git a/dojo/metrics/urls.py b/dojo/metrics/urls.py index 06b0726a56..7b2683cf6f 100644 --- a/dojo/metrics/urls.py +++ b/dojo/metrics/urls.py @@ -18,6 +18,8 @@ views.metrics, name='product_type_metrics'), re_path(r'^metrics/product/type/counts$', views.product_type_counts, name='product_type_counts'), + re_path(r'^metrics/product/tag/counts$', + views.product_tag_counts, name='product_tag_counts'), re_path(r'^metrics/engineer$', views.engineer_metrics, name='engineer_metrics'), re_path(r'^metrics/engineer/(?P\d+)$', views.view_engineer, diff --git a/dojo/metrics/views.py b/dojo/metrics/views.py index e00cbcb857..4d9236fb58 100644 --- a/dojo/metrics/views.py +++ b/dojo/metrics/views.py @@ -21,7 +21,7 @@ from django.utils import timezone from dojo.filters import MetricsFindingFilter, UserFilter, MetricsEndpointFilter -from dojo.forms import SimpleMetricsForm, ProductTypeCountsForm +from dojo.forms import SimpleMetricsForm, ProductTypeCountsForm, ProductTagCountsForm from dojo.models import Product_Type, Finding, Product, Engagement, Test, \ Risk_Acceptance, Dojo_User, Endpoint_Status from dojo.utils import get_page_items, add_breadcrumb, findings_this_period, opened_in_period, count_findings, \ @@ -586,13 +586,13 @@ def product_type_counts(request): end_date.month, end_date.day, tzinfo=timezone.get_current_timezone()) - oip = opened_in_period(start_date, end_date, pt) + oip = opened_in_period(start_date, end_date, test__engagement__product__prod_type=pt) # trending data - 
12 months for x in range(12, 0, -1): opened_in_period_list.append( opened_in_period(start_date + relativedelta(months=-x), end_of_month + relativedelta(months=-x), - pt)) + test__engagement__product__prod_type=pt)) opened_in_period_list.append(oip) @@ -697,6 +697,164 @@ def product_type_counts(request): ) +def product_tag_counts(request): + form = ProductTagCountsForm() + opened_in_period_list = [] + oip = None + cip = None + aip = None + all_current_in_pt = None + top_ten = None + pt = None + today = timezone.now() + first_of_month = today.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + mid_month = first_of_month.replace(day=15, hour=23, minute=59, second=59, microsecond=999999) + end_of_month = mid_month.replace(day=monthrange(today.year, today.month)[1], hour=23, minute=59, second=59, + microsecond=999999) + start_date = first_of_month + end_date = end_of_month + + if request.method == 'GET' and 'month' in request.GET and 'year' in request.GET and 'product_tag' in request.GET: + form = ProductTagCountsForm(request.GET) + if form.is_valid(): + prods = get_authorized_products(Permissions.Product_View) + + pt = form.cleaned_data['product_tag'] + month = int(form.cleaned_data['month']) + year = int(form.cleaned_data['year']) + first_of_month = first_of_month.replace(month=month, year=year) + + month_requested = datetime(year, month, 1) + + end_of_month = month_requested.replace(day=monthrange(month_requested.year, month_requested.month)[1], + hour=23, minute=59, second=59, microsecond=999999) + start_date = first_of_month + start_date = datetime(start_date.year, + start_date.month, start_date.day, + tzinfo=timezone.get_current_timezone()) + end_date = end_of_month + end_date = datetime(end_date.year, + end_date.month, end_date.day, + tzinfo=timezone.get_current_timezone()) + + oip = opened_in_period(start_date, end_date, + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods) + + # trending data - 12 months + for x in range(12, 0, -1): + opened_in_period_list.append( + opened_in_period(start_date + relativedelta(months=-x), end_of_month + relativedelta(months=-x), + test__engagement__product__tags__name=pt, test__engagement__product__in=prods)) + + opened_in_period_list.append(oip) + + closed_in_period = Finding.objects.filter(mitigated__date__range=[start_date, end_date], + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=('Critical', 'High', 'Medium', 'Low')).values( + 'numerical_severity').annotate(Count('numerical_severity')).order_by('numerical_severity') + + total_closed_in_period = Finding.objects.filter(mitigated__date__range=[start_date, end_date], + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=( + 'Critical', 'High', 'Medium', 'Low')).aggregate( + total=Sum( + Case(When(severity__in=('Critical', 'High', 'Medium', 'Low'), + then=Value(1)), + output_field=IntegerField())))['total'] + + overall_in_pt = Finding.objects.filter(date__lt=end_date, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated__isnull=True, + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=('Critical', 'High', 'Medium', 'Low')).values( + 'numerical_severity').annotate(Count('numerical_severity')).order_by('numerical_severity') + + total_overall_in_pt = Finding.objects.filter(date__lte=end_date, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated__isnull=True, + 
test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=('Critical', 'High', 'Medium', 'Low')).aggregate( + total=Sum( + Case(When(severity__in=('Critical', 'High', 'Medium', 'Low'), + then=Value(1)), + output_field=IntegerField())))['total'] + + all_current_in_pt = Finding.objects.filter(date__lte=end_date, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated__isnull=True, + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=( + 'Critical', 'High', 'Medium', 'Low')).prefetch_related( + 'test__engagement__product', + 'test__engagement__product__prod_type', + 'test__engagement__risk_acceptance', + 'reporter').order_by( + 'numerical_severity') + + top_ten = Product.objects.filter(engagement__test__finding__date__lte=end_date, + engagement__test__finding__verified=True, + engagement__test__finding__false_p=False, + engagement__test__finding__duplicate=False, + engagement__test__finding__out_of_scope=False, + engagement__test__finding__mitigated__isnull=True, + engagement__test__finding__severity__in=( + 'Critical', 'High', 'Medium', 'Low'), + tags__name=pt, engagement__product__in=prods) + top_ten = severity_count(top_ten, 'annotate', 'engagement__test__finding__severity').order_by('-critical', '-high', '-medium', '-low')[:10] + + cip = {'S0': 0, + 'S1': 0, + 'S2': 0, + 'S3': 0, + 'Total': total_closed_in_period} + + aip = {'S0': 0, + 'S1': 0, + 'S2': 0, + 'S3': 0, + 'Total': total_overall_in_pt} + + for o in closed_in_period: + cip[o['numerical_severity']] = o['numerical_severity__count'] + + for o in overall_in_pt: + aip[o['numerical_severity']] = o['numerical_severity__count'] + else: + messages.add_message(request, messages.ERROR, _("Please choose month and year and the Product Tag."), + extra_tags='alert-danger') + + add_breadcrumb(title=_("Bi-Weekly Metrics"), top_level=True, request=request) + + return render(request, + 'dojo/pt_counts.html', + {'form': form, + 'start_date': start_date, + 'end_date': end_date, + 'opened_in_period': oip, + 'trending_opened': opened_in_period_list, + 'closed_in_period': cip, + 'overall_in_pt': aip, + 'all_current_in_pt': all_current_in_pt, + 'top_ten': top_ten, + 'pt': pt} + ) + + def engineer_metrics(request): # only superusers can select other users to view if request.user.is_superuser: diff --git a/dojo/templates/base.html b/dojo/templates/base.html index 8e42e4278a..f4043d42e3 100644 --- a/dojo/templates/base.html +++ b/dojo/templates/base.html @@ -407,6 +407,11 @@ {% trans "Product Type Counts" %} +
  • + + {% trans "Product Tag Counts" %} + +
  • {% trans "Simple Metrics" %} diff --git a/dojo/templates/dojo/pt_counts.html b/dojo/templates/dojo/pt_counts.html index 0c8728b42c..5cfc6a9693 100644 --- a/dojo/templates/dojo/pt_counts.html +++ b/dojo/templates/dojo/pt_counts.html @@ -12,7 +12,7 @@ {% block content %} {{ block.super }} -
    + {{ form.as_p }}
    @@ -20,8 +20,12 @@ {% if pt %}

    {% blocktrans with start_date=start_date.date end_date=end_date.date%}Finding Information For Period of {{ start_date }} - {{ end_date }} {% endblocktrans %}

    -

    {{ pt.name }}

    [ -
    {% trans "View Details" %}] +

    {{ pt.name }}

    + {% if pt|class_name == "Product_Type" %} + [{% trans "View Details" %}] + {% elif pt|class_name == "Tagulous_Product_tags" %} + [{% trans "View Details" %}] + {% endif %}

    {% trans "Total Security Bug Count In Period" %}

    diff --git a/dojo/utils.py b/dojo/utils.py index 135d341e54..42334262d9 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -1082,7 +1082,7 @@ def get_period_counts(findings, } -def opened_in_period(start_date, end_date, pt): +def opened_in_period(start_date, end_date, **kwargs): start_date = datetime( start_date.year, start_date.month, @@ -1095,7 +1095,7 @@ def opened_in_period(start_date, end_date, pt): tzinfo=timezone.get_current_timezone()) opened_in_period = Finding.objects.filter( date__range=[start_date, end_date], - test__engagement__product__prod_type=pt, + **kwargs, verified=True, false_p=False, duplicate=False, @@ -1107,7 +1107,7 @@ def opened_in_period(start_date, end_date, pt): Count('numerical_severity')).order_by('numerical_severity') total_opened_in_period = Finding.objects.filter( date__range=[start_date, end_date], - test__engagement__product__prod_type=pt, + **kwargs, verified=True, false_p=False, duplicate=False, @@ -1139,7 +1139,7 @@ def opened_in_period(start_date, end_date, pt): 'closed': Finding.objects.filter( mitigated__date__range=[start_date, end_date], - test__engagement__product__prod_type=pt, + **kwargs, severity__in=('Critical', 'High', 'Medium', 'Low')).aggregate( total=Sum( Case( @@ -1155,7 +1155,7 @@ def opened_in_period(start_date, end_date, pt): duplicate=False, out_of_scope=False, mitigated__isnull=True, - test__engagement__product__prod_type=pt, + **kwargs, severity__in=('Critical', 'High', 'Medium', 'Low')).count() } From 19db206c8332f2a3623bc41de6fce423b438c901 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 12 Feb 2024 09:13:08 -0600 Subject: [PATCH 09/24] Release Drafter: Try validating inputs --- .github/workflows/fetch-oas.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml index 44692ddb5c..0dd32805b5 100644 --- a/.github/workflows/fetch-oas.yml +++ b/.github/workflows/fetch-oas.yml @@ -10,6 +10,9 @@ on: This will override any version calculated by the release-drafter. 
required: true +env: + release_version: ${{ github.event.inputs.version || github.event.inputs.release_number }} + jobs: oas_fetch: name: Fetch OpenAPI Specifications @@ -21,19 +24,19 @@ jobs: - name: Checkout uses: actions/checkout@v4 with: - ref: ${{ github.event.inputs.version }} + ref: release/${{ env.release_version }} - name: Load docker images run: |- - docker pull defectdojo/defectdojo-django:${{ github.event.inputs.version }}-alpine - docker pull defectdojo/defectdojo-nginx:${{ github.event.inputs.version }}-alpine + docker pull defectdojo/defectdojo-django:${{ env.release_version }}-alpine + docker pull defectdojo/defectdojo-nginx:${{ env.release_version }}-alpine docker images - name: Start Dojo run: docker-compose --profile postgres-redis --env-file ./docker/environments/postgres-redis.env up --no-deps -d postgres nginx uwsgi env: - DJANGO_VERSION: ${{ github.event.inputs.version }}-alpine - NGINX_VERSION: ${{ github.event.inputs.version }}-alpine + DJANGO_VERSION: ${{ env.release_version }}-alpine + NGINX_VERSION: ${{ env.release_version }}-alpine - name: Download OpenAPI Specifications run: |- From b1890d5369037ee977e1610faa242b4718e6e806 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 12 Feb 2024 13:28:30 -0600 Subject: [PATCH 10/24] Disallow duplicate tool types (#9530) * Disallow duplicate tool types * Fix Flake8 * Only validate on new creations * Force new name on tool type unit test --- dojo/api_v2/serializers.py | 8 ++++++++ dojo/forms.py | 17 +++++++++++++++++ unittests/test_swagger_schema.py | 3 +++ 3 files changed, 28 insertions(+) diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 45d2707a6e..2d12611508 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -1133,6 +1133,14 @@ class Meta: model = Tool_Type fields = "__all__" + def validate(self, data): + if self.context["request"].method == "POST": + name = data.get("name") + # Make sure this will not create a duplicate test type + if Tool_Type.objects.filter(name=name).count() > 0: + raise serializers.ValidationError('A Tool Type with the name already exists') + return data + class RegulationSerializer(serializers.ModelSerializer): class Meta: diff --git a/dojo/forms.py b/dojo/forms.py index 558c09ae69..27a1fb0c28 100755 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -2388,6 +2388,23 @@ class Meta: model = Tool_Type exclude = ['product'] + def __init__(self, *args, **kwargs): + instance = kwargs.get('instance', None) + self.newly_created = True + if instance is not None: + self.newly_created = instance.pk is None + super().__init__(*args, **kwargs) + + def clean(self): + form_data = self.cleaned_data + if self.newly_created: + name = form_data.get("name") + # Make sure this will not create a duplicate test type + if Tool_Type.objects.filter(name=name).count() > 0: + raise forms.ValidationError('A Tool Type with the name already exists') + + return form_data + class RegulationForm(forms.ModelForm): class Meta: diff --git a/unittests/test_swagger_schema.py b/unittests/test_swagger_schema.py index 9f1316b4d2..b126335937 100644 --- a/unittests/test_swagger_schema.py +++ b/unittests/test_swagger_schema.py @@ -785,6 +785,9 @@ def __init__(self, *args, **kwargs): self.viewset = ToolTypesViewSet self.model = Tool_Type self.serializer = ToolTypeSerializer + self.field_transformers = { + "name": lambda v: v + "_new" + } class UserTest(BaseClass.SchemaTest): From eaf9f176ff2961bf76136893a2fab6aa7ccd2125 Mon Sep 17 00:00:00 2001 From: 
Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 12 Feb 2024 13:29:55 -0600 Subject: [PATCH 11/24] Engagement Surveys: Add missing leading slash (#9531) URL redirects were behaving strangely without this leading slash. it seems it was missed when all the others were added --- dojo/templates/dojo/dashboard.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/templates/dojo/dashboard.html b/dojo/templates/dojo/dashboard.html index 8d3227f975..8e04908609 100644 --- a/dojo/templates/dojo/dashboard.html +++ b/dojo/templates/dojo/dashboard.html @@ -207,7 +207,7 @@ {% else %} {% trans "View Responses" %} - {% trans "Create Engagement" %} + {% trans "Create Engagement" %} {% endif %} From 5ae08f404cc5462ac3ae274544254d1ccec8a869 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 12 Feb 2024 19:33:02 +0000 Subject: [PATCH 12/24] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/package.json b/components/package.json index 9a57f7b78d..4c9fc573d8 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.32.0-dev", + "version": "2.31.1", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index f1c39c15ed..174901e835 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa -__version__ = '2.32.0-dev' +__version__ = '2.31.1' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 53bce7bc75..0af7d7c32b 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.32.0-dev" +appVersion: "2.31.1" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.109-dev +version: 1.6.109 icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap From 93f449d3c8f45922f9dd1890c5a5cd20fb09ea57 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 12 Feb 2024 20:12:56 +0000 Subject: [PATCH 13/24] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/package.json b/components/package.json index 4c9fc573d8..9a57f7b78d 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.31.1", + "version": "2.32.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 174901e835..f1c39c15ed 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa -__version__ = '2.31.1' +__version__ = '2.32.0-dev' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 0af7d7c32b..60c20292d0 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.31.1" +appVersion: "2.32.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.109 +version: 1.6.110-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap From 10ac52dc8f3eba2a6db4f5db84d0881d4298cb2a Mon Sep 17 00:00:00 2001 From: kiblik Date: Mon, 12 Feb 2024 21:22:16 +0000 Subject: [PATCH 14/24] Dojo_Group: Support for "RemoteUser" in model (#9405) * Use correct name references * fix db_mig * Update and rename 0201_alter_dojo_group_social_provider.py to 0202_alter_dojo_group_social_provider.py --------- Co-authored-by: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> --- .../0202_alter_dojo_group_social_provider.py | 18 ++++++++++++++++++ dojo/models.py | 4 +++- dojo/pipeline.py | 2 +- dojo/remote_user.py | 3 ++- unittests/test_remote_user.py | 4 ++-- 5 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 dojo/db_migrations/0202_alter_dojo_group_social_provider.py diff --git a/dojo/db_migrations/0202_alter_dojo_group_social_provider.py b/dojo/db_migrations/0202_alter_dojo_group_social_provider.py new file mode 100644 index 0000000000..9bbc7e2e5c --- /dev/null +++ b/dojo/db_migrations/0202_alter_dojo_group_social_provider.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.13 on 2024-01-25 00:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0201_populate_finding_sla_expiration_date'), + ] + + operations = [ + migrations.AlterField( + model_name='dojo_group', + name='social_provider', + field=models.CharField(blank=True, choices=[('AzureAD', 'AzureAD'), ('Remote', 'Remote')], help_text='Group imported from a social provider.', max_length=10, null=True, verbose_name='Social Authentication Provider'), + ), + ] diff --git a/dojo/models.py b/dojo/models.py index 45d522963e..77dead1482 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -246,14 +246,16 @@ class UserContactInfo(models.Model): class Dojo_Group(models.Model): AZURE = 'AzureAD' + REMOTE = 'Remote' SOCIAL_CHOICES = ( (AZURE, _('AzureAD')), + (REMOTE, _('Remote')), ) name = models.CharField(max_length=255, unique=True) description = models.CharField(max_length=4000, null=True, blank=True) users = models.ManyToManyField(Dojo_User, through='Dojo_Group_Member', related_name='users', blank=True) auth_group = models.ForeignKey(Group, null=True, blank=True, on_delete=models.CASCADE) - social_provider = models.CharField(max_length=10, choices=SOCIAL_CHOICES, blank=True, null=True, help_text='Group imported from a social provider.', verbose_name='Social Authentication Provider') + social_provider = models.CharField(max_length=10, choices=SOCIAL_CHOICES, blank=True, null=True, help_text=_('Group imported from a social provider.'), verbose_name=_('Social Authentication Provider')) def __str__(self): return self.name diff --git a/dojo/pipeline.py b/dojo/pipeline.py index 0ce76220e9..130a795e09 100644 --- a/dojo/pipeline.py +++ b/dojo/pipeline.py @@ -98,7 +98,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs): except Exception as 
e: logger.error(f"Could not call microsoft graph API or save groups to member: {e}") if len(group_names) > 0: - assign_user_to_groups(user, group_names, 'AzureAD') + assign_user_to_groups(user, group_names, Dojo_Group.AZURE) if settings.AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS: cleanup_old_groups_for_user(user, group_names) diff --git a/dojo/remote_user.py b/dojo/remote_user.py index 875291c7ba..7ed5f0a6a4 100644 --- a/dojo/remote_user.py +++ b/dojo/remote_user.py @@ -6,6 +6,7 @@ from netaddr import IPAddress from django.conf import settings from dojo.pipeline import assign_user_to_groups, cleanup_old_groups_for_user +from dojo.models import Dojo_Group logger = logging.getLogger(__name__) @@ -77,7 +78,7 @@ def configure_user(self, request, user, created=True): if settings.AUTH_REMOTEUSER_GROUPS_HEADER and \ settings.AUTH_REMOTEUSER_GROUPS_HEADER in request.META: - assign_user_to_groups(user, request.META[settings.AUTH_REMOTEUSER_GROUPS_HEADER].split(','), 'Remote') + assign_user_to_groups(user, request.META[settings.AUTH_REMOTEUSER_GROUPS_HEADER].split(','), Dojo_Group.REMOTE) if settings.AUTH_REMOTEUSER_GROUPS_CLEANUP and \ settings.AUTH_REMOTEUSER_GROUPS_HEADER and \ diff --git a/unittests/test_remote_user.py b/unittests/test_remote_user.py index 384e4dda75..d764358e11 100644 --- a/unittests/test_remote_user.py +++ b/unittests/test_remote_user.py @@ -16,8 +16,8 @@ def setUp(self): last_name='original_last', email='original@mail.com', ) - self.group1, _ = Dojo_Group.objects.get_or_create(name="group1", social_provider="Remote") - self.group2, _ = Dojo_Group.objects.get_or_create(name="group2", social_provider="Remote") + self.group1, _ = Dojo_Group.objects.get_or_create(name="group1", social_provider=Dojo_Group.REMOTE) + self.group2, _ = Dojo_Group.objects.get_or_create(name="group2", social_provider=Dojo_Group.REMOTE) @override_settings(AUTH_REMOTEUSER_ENABLED=False) def test_disabled(self): From 98ecd13248737b9d49a9df9e3e7253d549e0e497 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:01:41 -0600 Subject: [PATCH 15/24] Update rabbitmq:3.12.12-alpine Docker digest from 3.12.12 to 3.12.12-alpine (docker-compose.yml) (#9535) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 4394261baf..36e83aeb5b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -149,7 +149,7 @@ services: volumes: - defectdojo_postgres:/var/lib/postgresql/data rabbitmq: - image: rabbitmq:3.12.12-alpine@sha256:fcd6a66524be55c15c81011dc87cc4b6e4405130fbb950c21ad1d31e8f6322dd + image: rabbitmq:3.12.12-alpine@sha256:09216fbcc8cb9588556bcecaa274b4de94d5ec3c2e3ab9c6efdc64677750c648 profiles: - mysql-rabbitmq - postgres-rabbitmq From a5aac391178547283c883f7353fef12f5669bdc6 Mon Sep 17 00:00:00 2001 From: Felix Hernandez Date: Tue, 13 Feb 2024 14:02:19 -0600 Subject: [PATCH 16/24] remove flot-axis library (#9540) --- components/package.json | 1 - dojo/templates/dojo/dashboard-metrics.html | 1 - dojo/templates/dojo/endpoint_pdf_report.html | 1 - dojo/templates/dojo/engagement_pdf_report.html | 1 - dojo/templates/dojo/finding_pdf_report.html | 1 - dojo/templates/dojo/product_endpoint_pdf_report.html | 1 - dojo/templates/dojo/product_metrics.html | 1 - dojo/templates/dojo/product_pdf_report.html | 1 - dojo/templates/dojo/product_type_pdf_report.html | 1 - 
dojo/templates/dojo/test_pdf_report.html | 1 - dojo/templates/dojo/view_endpoint.html | 1 - 11 files changed, 11 deletions(-) diff --git a/components/package.json b/components/package.json index 9a57f7b78d..b4e7dc9db2 100644 --- a/components/package.json +++ b/components/package.json @@ -21,7 +21,6 @@ "drmonty-datatables-responsive": "^1.0.0", "easymde": "^2.18.0", "flot": "flot/flot#~0.8.3", - "flot-axis": "markrcote/flot-axislabels#*", "font-awesome": "^4.0.0", "fullcalendar": "^3.10.2", "google-code-prettify": "^1.0.0", diff --git a/dojo/templates/dojo/dashboard-metrics.html b/dojo/templates/dojo/dashboard-metrics.html index 2f62a8926a..929bea53e9 100644 --- a/dojo/templates/dojo/dashboard-metrics.html +++ b/dojo/templates/dojo/dashboard-metrics.html @@ -176,7 +176,6 @@

    {% blocktrans with start_date=start_date.date end_date=end_date.date%}{{ nam - {% if punchcard %} diff --git a/dojo/templates/dojo/endpoint_pdf_report.html b/dojo/templates/dojo/endpoint_pdf_report.html index d08e090173..b53c833742 100644 --- a/dojo/templates/dojo/endpoint_pdf_report.html +++ b/dojo/templates/dojo/endpoint_pdf_report.html @@ -279,7 +279,6 @@

    Notes
    - - - - {% if punchcard %} diff --git a/dojo/templates/dojo/product_metrics.html b/dojo/templates/dojo/product_metrics.html index 656bc4a8db..d5c0b65975 100644 --- a/dojo/templates/dojo/product_metrics.html +++ b/dojo/templates/dojo/product_metrics.html @@ -489,7 +489,6 @@

    - {% include "dojo/filter_js_snippet.html" %} {% if punchcard %} diff --git a/dojo/templates/dojo/product_pdf_report.html b/dojo/templates/dojo/product_pdf_report.html index eb80b8148a..aa413c72d1 100644 --- a/dojo/templates/dojo/product_pdf_report.html +++ b/dojo/templates/dojo/product_pdf_report.html @@ -383,7 +383,6 @@

    Notes
    - {% if punchcard %} diff --git a/dojo/templates/dojo/product_type_pdf_report.html b/dojo/templates/dojo/product_type_pdf_report.html index 9bd22d587b..f8c4175e5d 100644 --- a/dojo/templates/dojo/product_type_pdf_report.html +++ b/dojo/templates/dojo/product_type_pdf_report.html @@ -314,7 +314,6 @@
    Notes
    - - - {% block metrics %} {% endblock metrics %} From 329f9422c7ec634248ddd395c35e048b0b6de183 Mon Sep 17 00:00:00 2001 From: Sebastian Gumprich Date: Wed, 14 Feb 2024 19:00:07 +0100 Subject: [PATCH 17/24] use full url for helm-repos and alias in renovate.json (#9525) With this change, renovate will create PRs to update the helm-dependencies, just as with docker-compose. Note that only setting the repository to the full URL did not work, I also had to add the registryAlias. --- .github/renovate.json | 5 ++++- helm/defectdojo/Chart.yaml | 10 +++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index f64560dd54..7c9c6623cd 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -12,5 +12,8 @@ "commitMessageExtra": "from {{currentVersion}} to {{#if isMajor}}v{{{newMajor}}}{{else}}{{#if isSingleVersion}}v{{{toVersion}}}{{else}}{{{newValue}}}{{/if}}{{/if}}", "commitMessageSuffix": "({{packageFile}})", "labels": ["dependencies"] - }] + }], + "registryAliases": { + "bitnami": "https://charts.bitnami.com/bitnami" + } } diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 60c20292d0..ecdffcc337 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -11,22 +11,22 @@ maintainers: dependencies: - name: mysql version: ~9.1.7 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: mysql.enabled - name: postgresql version: ~11.6.5 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha version: ~9.1.5 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" alias: postgresqlha condition: postgresqlha.enabled - name: rabbitmq version: ~11.2.0 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: rabbitmq.enabled - name: redis version: ~16.12.0 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: redis.enabled From b008a8a62abd3eb51334fa88d20f2c14c2231844 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:23:33 -0600 Subject: [PATCH 18/24] Update Helm release redis from 16.12.3 to ~16.13.0 (helm/defectdojo/Chart.yaml) (#9550) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index f332717cd0..2bebae633d 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -13,6 +13,6 @@ dependencies: version: 11.2.2 - name: redis repository: https://charts.bitnami.com/bitnami - version: 16.12.3 -digest: sha256:f53ebb0cea44dfbb72ac96ae98680848acd5e17a0947a728e5646460d0da4ef9 -generated: "2023-03-06T17:08:53.379497544Z" + version: 16.13.2 +digest: sha256:f1dea5877872e8baa25492fb77c4468502bdfeb8f520f00f9598b4d33465ce82 +generated: "2024-02-14T22:26:01.747974179Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index ecdffcc337..414168cc43 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -27,6 +27,6 @@ dependencies: repository: "https://charts.bitnami.com/bitnami" condition: rabbitmq.enabled - name: redis - version: ~16.12.0 + version: ~16.13.0 repository: "https://charts.bitnami.com/bitnami" condition: redis.enabled From 
91d485df6724a7c5f544cdf7c81e81d2605ab508 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:28:45 -0600 Subject: [PATCH 19/24] Update rabbitmq:3.12.12-alpine Docker digest from 3.12.12 to 3.12.12-alpine (docker-compose.yml) (#9541) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 36e83aeb5b..89b06e264d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -149,7 +149,7 @@ services: volumes: - defectdojo_postgres:/var/lib/postgresql/data rabbitmq: - image: rabbitmq:3.12.12-alpine@sha256:09216fbcc8cb9588556bcecaa274b4de94d5ec3c2e3ab9c6efdc64677750c648 + image: rabbitmq:3.12.12-alpine@sha256:9144c0eca261e36ffd1a3f9ef21a860242a4a60e0211bbade82c80910958a5e9 profiles: - mysql-rabbitmq - postgres-rabbitmq From 26f959abd11013565c3de42264ee60c7903e5c88 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:29:07 -0600 Subject: [PATCH 20/24] Update postgres Docker tag from 16.1 to v16.2 (docker-compose.yml) (#9536) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 89b06e264d..1b43001c3c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -138,7 +138,7 @@ services: volumes: - defectdojo_data:/var/lib/mysql postgres: - image: postgres:16.1-alpine@sha256:17eb369d9330fe7fbdb2f705418c18823d66322584c77c2b43cc0e1851d01de7 + image: postgres:16.2-alpine@sha256:bbd7346fab25b7e0b25f214829d6ebfb78ef0465059492e46dee740ce8fcd844 profiles: - postgres-rabbitmq - postgres-redis From 16fc7a7c5ff1bce2762a4e231bca74627120fae3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:00:10 -0600 Subject: [PATCH 21/24] Update Helm release mysql from 9.1.8 to ~9.19.0 (helm/defectdojo/Chart.yaml) (#9545) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index 2bebae633d..7152b06eea 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -1,7 +1,7 @@ dependencies: - name: mysql repository: https://charts.bitnami.com/bitnami - version: 9.1.8 + version: 9.19.1 - name: postgresql repository: https://charts.bitnami.com/bitnami version: 11.6.26 @@ -14,5 +14,5 @@ dependencies: - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:f1dea5877872e8baa25492fb77c4468502bdfeb8f520f00f9598b4d33465ce82 -generated: "2024-02-14T22:26:01.747974179Z" +digest: sha256:055c755109a79afc56850a8c742db9968c1ab1b64ea5b1c6c79dd26192ce14d3 +generated: "2024-02-15T03:24:53.319013122Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 414168cc43..9e1c002fe5 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -10,7 +10,7 @@ maintainers: url: https://github.com/DefectDojo/django-DefectDojo dependencies: - name: mysql - version: ~9.1.7 + version: ~9.19.0 repository: "https://charts.bitnami.com/bitnami" condition: mysql.enabled - name: postgresql From a2d2fc80839dd6eb4db51a092c6002476ebe1a82 
Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:52:02 -0600 Subject: [PATCH 22/24] Update Helm release rabbitmq from 11.2.2 to ~11.16.0 (helm/defectdojo/Chart.yaml) (#9548) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index 7152b06eea..d3996a3481 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -10,9 +10,9 @@ dependencies: version: 9.1.9 - name: rabbitmq repository: https://charts.bitnami.com/bitnami - version: 11.2.2 + version: 11.16.2 - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:055c755109a79afc56850a8c742db9968c1ab1b64ea5b1c6c79dd26192ce14d3 -generated: "2024-02-15T03:24:53.319013122Z" +digest: sha256:14ecb61931de83a912605f1cca4241184b9ffcddda9450f27883d2c2eab2930e +generated: "2024-02-15T04:14:46.515518985Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 9e1c002fe5..ff2a8a14c7 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -23,7 +23,7 @@ dependencies: alias: postgresqlha condition: postgresqlha.enabled - name: rabbitmq - version: ~11.2.0 + version: ~11.16.0 repository: "https://charts.bitnami.com/bitnami" condition: rabbitmq.enabled - name: redis From 9171885921deb83478611b0ac8553337ce8a13fc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 14:23:51 -0600 Subject: [PATCH 23/24] Update Helm release postgresql from 11.6.26 to ~11.9.0 (helm/defectdojo/Chart.yaml) (#9546) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index d3996a3481..a1d949111d 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -4,7 +4,7 @@ dependencies: version: 9.19.1 - name: postgresql repository: https://charts.bitnami.com/bitnami - version: 11.6.26 + version: 11.9.13 - name: postgresql-ha repository: https://charts.bitnami.com/bitnami version: 9.1.9 @@ -14,5 +14,5 @@ dependencies: - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:14ecb61931de83a912605f1cca4241184b9ffcddda9450f27883d2c2eab2930e -generated: "2024-02-15T04:14:46.515518985Z" +digest: sha256:117e74aeca1950886c3ef4fc4eca1166b67f70cb0ba86d7cca8087d85c18297a +generated: "2024-02-15T16:02:04.692755051Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index ff2a8a14c7..5dcf9bea6e 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -14,7 +14,7 @@ dependencies: repository: "https://charts.bitnami.com/bitnami" condition: mysql.enabled - name: postgresql - version: ~11.6.5 + version: ~11.9.0 repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha From aba513fedf4ff704c33d7fb3feb35301648e84e7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 14:58:13 -0600 Subject: [PATCH 24/24] Update Helm release postgresql-ha from 9.1.9 to ~9.4.0 (helm/defectdojo/Chart.yaml) (#9547) Co-authored-by: renovate[bot] 
<29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index a1d949111d..784d84b484 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -7,12 +7,12 @@ dependencies: version: 11.9.13 - name: postgresql-ha repository: https://charts.bitnami.com/bitnami - version: 9.1.9 + version: 9.4.11 - name: rabbitmq repository: https://charts.bitnami.com/bitnami version: 11.16.2 - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:117e74aeca1950886c3ef4fc4eca1166b67f70cb0ba86d7cca8087d85c18297a -generated: "2024-02-15T16:02:04.692755051Z" +digest: sha256:50d07c49c1fb199a70fafd032712a1d5509a0352f090bfddd2e8a22b35be0961 +generated: "2024-02-15T20:24:24.560785941Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 5dcf9bea6e..1c44736daf 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -18,7 +18,7 @@ dependencies: repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha - version: ~9.1.5 + version: ~9.4.0 repository: "https://charts.bitnami.com/bitnami" alias: postgresqlha condition: postgresqlha.enabled
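Taken together, the Helm patches above loosen the Bitnami chart constraints in helm/defectdojo/Chart.yaml and re-resolve helm/defectdojo/Chart.lock against them. Below is a minimal sketch of one such dependency entry as it stands after these patches; the inline comments and the expansion of the tilde range are editorial assumptions based on standard Helm/semver behaviour, not text from the patches themselves.

    dependencies:
      - name: postgresql-ha
        version: ~9.4.0                                   # tilde range: >=9.4.0 <9.5.0; Chart.lock pins the resolved 9.4.11
        repository: "https://charts.bitnami.com/bitnami"  # full URL (patch 17) so renovate can resolve updates without a local repo alias
        alias: postgresqlha
        condition: postgresqlha.enabled                   # subchart is installed only when postgresqlha.enabled is true in values

If Chart.yaml were edited by hand instead of by renovate, running `helm dependency update helm/defectdojo` would regenerate Chart.lock with the newly resolved versions and digest, which is what the paired Chart.lock hunks above reflect.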