diff --git a/CHANGELOG.md b/CHANGELOG.md index 3d1c968d9..d05787c2a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,10 +4,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [Unreleased] - 13 September 2022 +## [v3.0.5] - 23 September 2022 + +### Fixed + +* Fixed finding guidance display when viewing a finding attached to a report +* Fixed connection errors with an AWS region causing remaining regions to be skipped in the cloud monitoring task ### Added +* Added an `attachFinding` mutation to the GraphQL API for easily attaching a copy of a finding from the library to a report * Added ability to copy/paste evidence into the upload form and view a preview (Thanks to @brandonscholet! Closes PR #228) ## [3.0.4] - 12 September 2022 @@ -113,7 +119,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Removed -* Removed old environmnt variable templates from the project because they are no longer used for setup or management +* Removed old environment variable templates from the project because they are no longer used for setup or management ## [2.3.0-rc2] - 3 June 2022 @@ -247,7 +253,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * Changed default display filter to only show active projects * Adjusted project status filter to have three options: all projects, active projects, and completed projects * Updated dashboard and calendar to show past and current events for browsing history within the calendar - * Past events marked as completed will appear dimed with a strikethrough and `: Complete` added to the end + * Past events marked as completed will appear dimmed with a strike-through and `: Complete` added to the end * Upgraded dependencies to their latest versions (where possible) * Django v3.1.13 
-> v3.2.11 * Did not upgrade `docxtpl` @@ -501,7 +507,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * Adjusted Dockerfile files to fix potential filesystem issues with the latest Alpine Linux image \(submitted by @studebacon with PR \#143\). * Added a missing field in the Report Template admin panel * "Add to Report" on the finding details page now works -* Updated delete actions for operation logs to avoid anerror that could prevent the deletion of entries when deleting an entire log +* Updated delete actions for operation logs to avoid an error that could prevent the deletion of entries when deleting an entire log * Domain age calculations are now accurate * An invalid value for domain purchase date no longer causes a server error during validation * Constrained `Twisted` library to v20.3.0 to fix a potential issue that could come up with Django Channels diff --git a/VERSION b/VERSION index 90c80167e..cee732d07 100644 --- a/VERSION +++ b/VERSION @@ -1,2 +1,2 @@ -3.0.4 -12 September 2022 \ No newline at end of file +3.0.5 +23 September 2022 \ No newline at end of file diff --git a/compose/production/hasura/Dockerfile b/compose/production/hasura/Dockerfile index 0163e7d82..f9b161b7d 100644 --- a/compose/production/hasura/Dockerfile +++ b/compose/production/hasura/Dockerfile @@ -1 +1 @@ -FROM hasura/graphql-engine:v2.7.0.cli-migrations-v3 +FROM hasura/graphql-engine:v2.12.0.cli-migrations-v3 diff --git a/config/settings/base.py b/config/settings/base.py index 48342cebe..3607cdbe5 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -11,9 +11,9 @@ # 3rd Party Libraries import environ -__version__ = "3.0.4" +__version__ = "3.0.5" VERSION = __version__ -RELEASE_DATE = "12 September 2022" +RELEASE_DATE = "23 September 2022" ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent.parent APPS_DIR = ROOT_DIR / "ghostwriter" diff --git a/ghostwriter/api/tests/test_views.py b/ghostwriter/api/tests/test_views.py 
index fd7e1ce8c..6ba8efb0e 100644 --- a/ghostwriter/api/tests/test_views.py +++ b/ghostwriter/api/tests/test_views.py @@ -17,6 +17,7 @@ DomainFactory, DomainStatusFactory, EvidenceFactory, + FindingFactory, HistoryFactory, OplogEntryFactory, ProjectAssignmentFactory, @@ -285,7 +286,7 @@ def test_with_invalid_json(self): self.assertJSONEqual(force_str(response.content), result) -# Tests related to theauthetnication webhook +# Tests related to the authentication webhook class HasuraWebhookTests(TestCase): @@ -976,6 +977,86 @@ def test_deleting_protected_template_without_access(self): self.assertEqual(response.status_code, 401) +class GraphqlAttachFindingAction(TestCase): + """Collection of tests for :view:`GraphqlAttachFinding`.""" + + @classmethod + def setUpTestData(cls): + cls.ReportFindingLink = ReportFindingLinkFactory._meta.model + + cls.user = UserFactory(password=PASSWORD) + cls.other_user = UserFactory(password=PASSWORD) + cls.mgr_user = UserFactory(password=PASSWORD, role="manager") + cls.uri = reverse("api:graphql_attach_finding") + + cls.project = ProjectFactory() + cls.report = ReportFactory(project=cls.project) + cls.finding = FindingFactory() + _ = ProjectAssignmentFactory(project=cls.project, operator=cls.user) + + def setUp(self): + self.client = Client() + + def generate_data(self, finding_id, report_id): + return {"input": {"findingId": finding_id, "reportId": report_id, }} + + def test_attaching_finding(self): + _, token = utils.generate_jwt(self.user) + response = self.client.post( + self.uri, + data=self.generate_data(self.finding.id, self.report.id), + content_type="application/json", + **{"HTTP_HASURA_ACTION_SECRET": f"{ACTION_SECRET}", "HTTP_AUTHORIZATION": f"Bearer {token}"}, + ) + self.assertEqual(response.status_code, 200) + new_finding = response.json()["id"] + self.assertTrue(self.ReportFindingLink.objects.filter(id=new_finding).exists()) + + def test_attaching_finding_with_invalid_report(self): + _, token = utils.generate_jwt(self.user) 
+ response = self.client.post( + self.uri, + data=self.generate_data(self.finding.id, 999), + content_type="application/json", + **{"HTTP_HASURA_ACTION_SECRET": f"{ACTION_SECRET}", "HTTP_AUTHORIZATION": f"Bearer {token}"}, + ) + self.assertEqual(response.status_code, 400) + data = {"message": "Report does not exist", "extensions": {"code": "ReportDoesNotExist"}} + self.assertJSONEqual(force_str(response.content), data) + + def test_attaching_finding_with_invalid_finding(self): + _, token = utils.generate_jwt(self.user) + response = self.client.post( + self.uri, + data=self.generate_data(999, self.report.id), + content_type="application/json", + **{"HTTP_HASURA_ACTION_SECRET": f"{ACTION_SECRET}", "HTTP_AUTHORIZATION": f"Bearer {token}"}, + ) + self.assertEqual(response.status_code, 400) + data = {"message": "Finding does not exist", "extensions": {"code": "FindingDoesNotExist"}} + self.assertJSONEqual(force_str(response.content), data) + + def test_attaching_finding_with_mgr_access(self): + _, token = utils.generate_jwt(self.mgr_user) + response = self.client.post( + self.uri, + data=self.generate_data(self.finding.id, self.report.id), + content_type="application/json", + **{"HTTP_HASURA_ACTION_SECRET": f"{ACTION_SECRET}", "HTTP_AUTHORIZATION": f"Bearer {token}"}, + ) + self.assertEqual(response.status_code, 200) + + def test_attaching_finding_without_access(self): + _, token = utils.generate_jwt(self.other_user) + response = self.client.post( + self.uri, + data=self.generate_data(self.finding.id, self.report.id), + content_type="application/json", + **{"HTTP_HASURA_ACTION_SECRET": f"{ACTION_SECRET}", "HTTP_AUTHORIZATION": f"Bearer {token}"}, + ) + self.assertEqual(response.status_code, 401) + + # Tests related to Hasura Event Triggers diff --git a/ghostwriter/api/urls.py b/ghostwriter/api/urls.py index cfed6d3d2..f71ef5394 100644 --- a/ghostwriter/api/urls.py +++ b/ghostwriter/api/urls.py @@ -8,6 +8,7 @@ from ghostwriter.api.views import ( ApiKeyCreate, 
ApiKeyRevoke, + GraphqlAttachFinding, GraphqlAuthenticationWebhook, GraphqlCheckoutDomain, GraphqlCheckoutServer, @@ -29,11 +30,12 @@ app_name = "api" urlpatterns = [ + # Actions path("test", csrf_exempt(GraphqlTestView.as_view()), name="graphql_test"), path("test_event", csrf_exempt(GraphqlEventTestView.as_view()), name="graphql_event_test"), path("webhook", csrf_exempt(GraphqlAuthenticationWebhook.as_view()), name="graphql_webhook"), path("login", csrf_exempt(GraphqlLoginAction.as_view()), name="graphql_login"), path("whoami", csrf_exempt(GraphqlWhoami.as_view()), name="graphql_whoami"), path("generateReport", csrf_exempt(GraphqlGenerateReport.as_view()), name="graphql_generate_report"), path("checkoutDomain", csrf_exempt(GraphqlCheckoutDomain.as_view()), name="graphql_checkout_domain"), path("checkoutServer", csrf_exempt(GraphqlCheckoutServer.as_view()), name="graphql_checkout_server"), @@ -41,6 +43,8 @@ path("deleteServerCheckout", csrf_exempt(GraphqlServerCheckoutDelete.as_view()), name="graphql_server_checkout_delete"), path("deleteEvidence", csrf_exempt(GraphqlDeleteEvidenceAction.as_view()), name="graphql_delete_evidence"), path("deleteTemplate", csrf_exempt(GraphqlDeleteReportTemplateAction.as_view()), name="graphql_delete_template"), + path("attachFinding", csrf_exempt(GraphqlAttachFinding.as_view()), name="graphql_attach_finding"), + # Events path("event/domain/update", csrf_exempt(GraphqlDomainUpdateEvent.as_view()), name="graphql_domain_update_event"), path("event/oplogentry/create", csrf_exempt(GraphqlOplogEntryCreateEvent.as_view()), name="graphql_oplogentry_create_event"), path("event/oplogentry/update", csrf_exempt(GraphqlOplogEntryUpdateEvent.as_view()), name="graphql_oplogentry_update_event"), diff --git a/ghostwriter/api/views.py b/ghostwriter/api/views.py index b57263ba7..bc8acfed8 100644 --- a/ghostwriter/api/views.py +++ b/ghostwriter/api/views.py @@ -28,9 +28,17
@@ from ghostwriter.api import utils from ghostwriter.api.forms import ApiKeyForm from ghostwriter.api.models import APIKey +from ghostwriter.modules.model_utils import to_dict from ghostwriter.modules.reportwriter import Reportwriter from ghostwriter.oplog.models import OplogEntry -from ghostwriter.reporting.models import Evidence, Report, ReportTemplate +from ghostwriter.reporting.models import ( + Evidence, + Finding, + Report, + ReportFindingLink, + ReportTemplate, +) +from ghostwriter.reporting.views import get_position from ghostwriter.rolodex.models import Project from ghostwriter.shepherd.models import ( ActivityType, @@ -162,7 +170,7 @@ def dispatch(self, request, *args, **kwargs): utils.generate_hasura_error_payload("Missing all required inputs", "InvalidRequestBody"), status=400 ) - # Hasura checks for required values, but we check here in case of a discrepency between the GraphQL schema and the view + # Hasura checks for required values, but we check here in case of a discrepancy between the GraphQL schema and the view for required_input in self.required_inputs: if required_input not in self.input: return JsonResponse( @@ -606,6 +614,40 @@ def post(self, request, *args, **kwargs): return JsonResponse(data, status=self.status) +class GraphqlAttachFinding(JwtRequiredMixin, HasuraActionView): + """ + Endpoint for attaching a :model:`reporting.Finding` to a :model:`reporting.Report` + as a new :model:`reporting.ReportFindingLink`. 
+ """ + required_inputs = ["findingId", "reportId", ] + + def post(self, request, *args, **kwargs): + finding_id = self.input["findingId"] + report_id = self.input["reportId"] + try: + report = Report.objects.get(id=report_id) + except Report.DoesNotExist: + return JsonResponse(utils.generate_hasura_error_payload("Report does not exist", "ReportDoesNotExist"), status=400) + try: + finding = Finding.objects.get(id=finding_id) + except Finding.DoesNotExist: + return JsonResponse(utils.generate_hasura_error_payload("Finding does not exist", "FindingDoesNotExist"), status=400) + + if utils.verify_project_access(self.user_obj, report.project): + finding_dict = to_dict(finding, resolve_fk=True) + report_link = ReportFindingLink( + report=report, + assigned_to=self.user_obj, + position=get_position(report.id, finding.severity), + **finding_dict, + ) + report_link.save() + data = {"id": report_link.pk,} + return JsonResponse(data, status=self.status) + + return JsonResponse(utils.generate_hasura_error_payload("Unauthorized access", "Unauthorized"), status=401) + + ########################## # Hasura Event Endpoints # ########################## diff --git a/ghostwriter/home/views.py b/ghostwriter/home/views.py index 7a0e31b89..c28a78853 100644 --- a/ghostwriter/home/views.py +++ b/ghostwriter/home/views.py @@ -53,7 +53,6 @@ def update_session(request): "result": "success", "message": "Session updated", } - logger.info("Session updated for user %s", request.session["_auth_user_id"]) return JsonResponse(data) return HttpResponseNotAllowed(["POST"]) diff --git a/ghostwriter/modules/cloud_monitors.py b/ghostwriter/modules/cloud_monitors.py index 8765ad94a..1fbd21a26 100644 --- a/ghostwriter/modules/cloud_monitors.py +++ b/ghostwriter/modules/cloud_monitors.py @@ -9,7 +9,8 @@ import boto3 import pytz import requests -from botocore.exceptions import ClientError +from botocore.config import Config +from botocore.exceptions import ClientError, ConnectTimeoutError # Using __name__ 
resolves to ghostwriter.modules.cloud_monitors logger = logging.getLogger(__name__) @@ -73,7 +74,7 @@ def test_aws(aws_key, aws_secret): ``aws_secret`` AWS secret for the key """ - message = "" + messages = [] try: aws_sts = boto3.client( "sts", @@ -81,19 +82,16 @@ def test_aws(aws_key, aws_secret): aws_secret_access_key=aws_secret, ) aws_sts.get_caller_identity() - return {"capable": True, "message": message} + return {"capable": True, "message": messages} except ClientError: logger.error( "AWS could not validate the provided credentials with STS; check your AWS policies" ) - message = "AWS could not validate the provided credentials for EC2; check your attached AWS policies" + messages.append("AWS could not validate the provided credentials for EC2; check your attached AWS policies") except Exception: - trace = traceback.format_exc() logger.exception("Testing authentication to AWS failed") - message = "Testing authentication to AWS failed: {traceback}".format( - traceback=trace - ) - return {"capable": False, "message": message} + messages.append(f"Testing authentication to AWS failed: {traceback.format_exc()}") + return {"capable": False, "message": messages} def fetch_aws_ec2(aws_key, aws_secret, ignore_tags=None, only_running=False): @@ -111,16 +109,24 @@ def fetch_aws_ec2(aws_key, aws_secret, ignore_tags=None, only_running=False): ``aws_secret`` AWS secret for the key """ - message = "" + messages = [] instances = [] if ignore_tags is None: ignore_tags = [] try: + ec2_config = Config( + retries = { + "max_attempts": 1, + "mode": "standard", + }, + connect_timeout = 30 + ) client = boto3.client( "ec2", region_name="us-west-2", aws_access_key_id=aws_key, aws_secret_access_key=aws_secret, + config=ec2_config, ) regions = [ region["RegionName"] for region in client.describe_regions()["Regions"] @@ -134,6 +140,7 @@ def fetch_aws_ec2(aws_key, aws_secret, ignore_tags=None, only_running=False): region_name=region, aws_access_key_id=aws_key, 
aws_secret_access_key=aws_secret, + config=ec2_config, ) # Get all EC2 instances that are running if only_running: @@ -143,60 +150,66 @@ def fetch_aws_ec2(aws_key, aws_secret, ignore_tags=None, only_running=False): else: running_instances = ec2.instances.all() # Loop over running instances to generate info dict - for instance in running_instances: - # Calculate how long the instance has been running in UTC - time_up = months_between( - instance.launch_time.replace(tzinfo=utc), - datetime.today().replace(tzinfo=utc), - ) - tags = [] - name = "Blank" - ignore = False - if instance.tags: - for tag in instance.tags: - # AWS assigns names to instances via a ``Name`` key - if tag["Key"] == "Name": - name = tag["Value"] - else: - tags.append("{}: {}".format(tag["Key"], tag["Value"])) - # Check for "ignore tags" - if tag["Key"] in ignore_tags or tag["Value"] in ignore_tags: - ignore = True - pub_addresses = [] - pub_addresses.append(instance.public_ip_address) - priv_addresses = [] - priv_addresses.append(instance.private_ip_address) - # Add instance info to a dictionary - instances.append( - { - "id": instance.id, - "provider": "Amazon Web Services {}".format(region), - "service": "EC2", - "name": name, - "type": instance.instance_type, - "monthly_cost": None, # AWS cost is different and not easily calculated - "cost_to_date": None, # AWS cost is different and not easily calculated - "state": instance.state["Name"], - "private_ip": priv_addresses, - "public_ip": pub_addresses, - "launch_time": instance.launch_time.replace(tzinfo=utc), - "time_up": "{} months".format(time_up), - "tags": ", ".join(tags), - "ignore": ignore, - } - ) + try: + for instance in running_instances: + # Calculate how long the instance has been running in UTC + time_up = months_between( + instance.launch_time.replace(tzinfo=utc), + datetime.today().replace(tzinfo=utc), + ) + tags = [] + name = "Blank" + ignore = False + if instance.tags: + for tag in instance.tags: + # AWS assigns names to instances 
via a ``Name`` key + if tag["Key"] == "Name": + name = tag["Value"] + else: + tags.append("{}: {}".format(tag["Key"], tag["Value"])) + # Check for "ignore tags" + if tag["Key"] in ignore_tags or tag["Value"] in ignore_tags: + ignore = True + pub_addresses = [] + if instance.public_ip_address: + pub_addresses.append(instance.public_ip_address) + priv_addresses = [] + if instance.private_ip_address: + priv_addresses.append(instance.private_ip_address) + # Add instance info to a dictionary + instances.append( + { + "id": instance.id, + "provider": "Amazon Web Services {}".format(region), + "service": "EC2", + "name": name, + "type": instance.instance_type, + "monthly_cost": None, # AWS cost is different and not easily calculated + "cost_to_date": None, # AWS cost is different and not easily calculated + "state": instance.state["Name"], + "private_ip": priv_addresses, + "public_ip": pub_addresses, + "launch_time": instance.launch_time.replace(tzinfo=utc), + "time_up": "{} months".format(time_up), + "tags": ", ".join(tags), + "ignore": ignore, + } + ) + except ConnectTimeoutError: + logger.exception("AWS timed out while trying to describe instances in %s", region) + messages.append(f"AWS timed out while trying to describe instances in {region}: {traceback.format_exc()}") except ClientError: logger.error( "AWS denied access to EC2 for the supplied keys; check your AWS policies" ) - message = "AWS denied access to EC2 for the supplied keys; check your attached AWS policies" + messages.append("AWS denied access to EC2 for the supplied keys; check your attached AWS policies") + except ConnectTimeoutError: + logger.exception("AWS timed out while connecting to EC2 region") + messages.append(f"AWS timed out while connecting to EC2 region: {traceback.format_exc()}") except Exception: - trace = traceback.format_exc() logger.exception("Encountered an unexpected error with AWS EC2") - message = "Encountered an unexpected error with AWS EC2: {traceback}".format( - traceback=trace - 
) - return {"instances": instances, "message": message} + messages.append(f"Encountered an unexpected error with AWS EC2: {traceback.format_exc()}") + return {"instances": instances, "message": messages} def fetch_aws_lightsail(aws_key, aws_secret, ignore_tags=None): @@ -290,13 +303,8 @@ def fetch_aws_lightsail(aws_key, aws_secret, ignore_tags=None): ) message = "AWS denied access to Lightsail for the supplied keys; check your attached AWS policies" except Exception: - trace = traceback.format_exc() logger.exception("Encountered an unexpected error with AWS Lightsail") - message = ( - "Encountered an unexpected error with AWS Lightsail: {traceback}".format( - traceback=trace - ) - ) + message = (f"Encountered an unexpected error with AWS Lightsail: {traceback.format_exc()}") return {"instances": instances, "message": message} @@ -360,11 +368,8 @@ def fetch_aws_s3(aws_key, aws_secret, ignore_tags=None): ) message = "AWS denied access to S3 for the supplied keys; check your attached AWS policies" except Exception: - trace = traceback.format_exc() logger.exception("Encountered an unexpected error with AWS S3") - message = "Encountered an unexpected error with AWS S3: {traceback}".format( - traceback=trace - ) + message = f"Encountered an unexpected error with AWS S3: {traceback.format_exc()}" return {"buckets": buckets, "message": message} @@ -424,13 +429,8 @@ def fetch_digital_ocean(api_key, ignore_tags=None): ) # Catch any other errors related to the web request except Exception: - trace = traceback.format_exc() logger.exception("Encountered an unexpected error with Digital Ocean") - message = ( - "Encountered an unexpected error with Digital Ocean: {traceback}".format( - traceback=trace - ) - ) + message = (f"Encountered an unexpected error with Digital Ocean: {traceback.format_exc()}") # Loop over the droplets to generate the info dict if capable and "droplets" in active_droplets: diff --git a/ghostwriter/modules/model_utils.py b/ghostwriter/modules/model_utils.py 
new file mode 100644 index 000000000..7a6c42876 --- /dev/null +++ b/ghostwriter/modules/model_utils.py @@ -0,0 +1,38 @@ +"""This contains utilities for managing and converting models.""" + +# Standard Libraries +from itertools import chain + +# Django Imports +import django +from django.db.models import ForeignKey + + +def to_dict(instance: django.db.models.Model, include_id: bool = False, resolve_fk: bool = False ) -> dict: + """ + Converts a model instance to a dictionary with only the desirable field + data. Extra fields provided by ``.__dict__``, like ``_state``, are removed. + + Ref: https://stackoverflow.com/questions/21925671/convert-django-model-object-to-dict-with-all-of-the-fields-intact + + **Parameters** + + ``instance`` + Instance of ``django.db.models.Model`` + ``include_id`` + Whether or not to include the ``id`` field in the dictionary (Default: False) + ``resolve_fk`` + Whether or not to resolve foreign key fields to an object (Default: False) + """ + opts = instance._meta + data = {} + for f in chain(opts.concrete_fields, opts.private_fields): + data[f.name] = f.value_from_object(instance) + if isinstance(f, ForeignKey) and resolve_fk: + fk_id = f.value_from_object(instance) + data[f.name] = f.related_model.objects.get(id=fk_id) + for f in opts.many_to_many: + data[f.name] = [i.id for i in f.value_from_object(instance)] + if not include_id: + del data["id"] + return data diff --git a/ghostwriter/modules/notifications_slack.py b/ghostwriter/modules/notifications_slack.py index 87d9b2979..65e541b79 100644 --- a/ghostwriter/modules/notifications_slack.py +++ b/ghostwriter/modules/notifications_slack.py @@ -29,7 +29,7 @@ def __init__(self): self.slack_channel = slack_config.slack_channel self.slack_alert_target = slack_config.slack_alert_target - def send_msg(self, message: str, channel: str = None, blocks: list = []) -> dict: + def send_msg(self, message: str, channel: str = None, blocks: list = None) -> dict: """ Send a basic Slack message using the 
Slack configuration. Returns a dictionary with errors, if any. The dictionary includes ``code`` and ``message`` keys. @@ -54,7 +54,7 @@ def send_msg(self, message: str, channel: str = None, blocks: list = []) -> dict if self.slack_alert_target: message = f"{self.slack_alert_target} {message}" - # Assemble the complte Slack POST data + # Assemble the complete Slack POST data slack_data = { "username": self.slack_username, "icon_emoji": self.slack_emoji, @@ -72,8 +72,7 @@ def send_msg(self, message: str, channel: str = None, blocks: list = []) -> dict # Responses for Incoming Webhooks are documented here: # https://api.slack.com/changelog/2016-05-17-changes-to-errors-for-incoming-webhooks if response.ok: - # Everything is fine; return - return error + logger.info("Slack message sent successfully") elif response.status_code == 400: if "user_not_found" in response.text: error["code"] = "user_not_found" @@ -168,11 +167,15 @@ def craft_cloud_msg( ``tags`` Any tags associated with the cloud asset """ - if isinstance(ip_address, list): - ip_address = ", ".join(ip_address) - elif ip_address is None: + if ip_address: + if isinstance(ip_address, list): + ip_address = ", ".join(filter(None, ip_address)) + else: ip_address = "None Assigned" + if not tags: + tags = "None Assigned" + blocks = [ { "type": "header", @@ -235,11 +238,15 @@ def craft_unknown_asset_msg( ``tags`` Any tags associated with the cloud asset """ - if isinstance(ip_address, list): - ip_address = ", ".join(ip_address) - elif ip_address is None: + if ip_address: + if isinstance(ip_address, list): + ip_address = ", ".join(filter(None, ip_address)) + else: ip_address = "None Assigned" + if not tags: + tags = "None Assigned" + blocks = [ { "type": "header", @@ -500,6 +507,6 @@ def send_slack_complete_msg(task: Task) -> None: ) if err: - logger.warning("Slack message failed with error: %s", err) + logger.warning("Attempt to send a Slack notification returned an error: %s", err) except Exception: 
logger.exception("Error sending Slack message") diff --git a/ghostwriter/modules/oplog_monitors.py b/ghostwriter/modules/oplog_monitors.py index 894666fac..e3fd2d992 100644 --- a/ghostwriter/modules/oplog_monitors.py +++ b/ghostwriter/modules/oplog_monitors.py @@ -94,7 +94,7 @@ def review_active_logs(hours: int = 24) -> dict: blocks=blocks, ) if err: - logger.warning("Slack message failed with error: %s", err) + logger.warning("Attempt to send a Slack notification returned an error: %s", err) results["errors"].append(err) else: if latest_log_entry: diff --git a/ghostwriter/reporting/templates/reporting/local_edit.html b/ghostwriter/reporting/templates/reporting/local_edit.html index 57ae19c9a..f147614bc 100644 --- a/ghostwriter/reporting/templates/reporting/local_edit.html +++ b/ghostwriter/reporting/templates/reporting/local_edit.html @@ -93,7 +93,7 @@
{% if reportfindinglink.finding_guidance %} -

{{ reportfindinglink.finding_guidance }}

+ {{ reportfindinglink.finding_guidance|bleach }} {% else %}

No guidance was provided for this finding in the library.

{% endif %} diff --git a/ghostwriter/rolodex/signals.py b/ghostwriter/rolodex/signals.py index 3643b5309..ca18b3359 100644 --- a/ghostwriter/rolodex/signals.py +++ b/ghostwriter/rolodex/signals.py @@ -50,7 +50,7 @@ def update_project(sender, instance, **kwargs): "Newly saved project was just created so skipping `post_save` Signal used for updates" ) # If Slack is configured for this project, send a confirmation message - if instance.slack_channel: + if instance.slack_channel and slack.enabled: blocks = [ { "type": "header", @@ -67,16 +67,22 @@ def update_project(sender, instance, **kwargs): }, }, ] - slack.send_msg( + err = slack.send_msg( "Slack Notifications Configured Successfully", channel=instance.slack_channel, blocks=blocks, ) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) else: # If the ``slack_channel`` changed and a channel is still set, send a notification if ( instance.initial_slack_channel != instance.slack_channel and instance.slack_channel + and slack.enabled ): blocks = [ { @@ -94,12 +100,16 @@ def update_project(sender, instance, **kwargs): }, }, ] - slack.send_msg( + err = slack.send_msg( "Notifications Updated Successfully", channel=instance.slack_channel, blocks=blocks, ) - + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) # If project dates changed, update all checkouts if ( instance.initial_start_date != instance.start_date @@ -120,7 +130,7 @@ def update_project(sender, instance, **kwargs): logger.info("Start date changed by %s days", start_date_delta) logger.info("End date changed by %s days", end_date_delta) - if instance.slack_channel: + if slack.enabled and instance.slack_channel: blocks = [ { "type": "header", @@ -145,12 +155,16 @@ def update_project(sender, instance, **kwargs): }, }, ] - slack.send_msg( + err = slack.send_msg( "Updated Project Dates", channel=instance.slack_channel, blocks=blocks, ) - + if err: + logger.warning( + 
"Attempt to send a Slack notification returned an error: %s", + err, + ) for entry in domain_checkouts: # Don't adjust checkouts that are in the past if entry.end_date > today: diff --git a/ghostwriter/rolodex/tasks.py b/ghostwriter/rolodex/tasks.py index d500b13b4..7533db4e7 100644 --- a/ghostwriter/rolodex/tasks.py +++ b/ghostwriter/rolodex/tasks.py @@ -27,7 +27,14 @@ def check_project_freshness(): message = "{} : This project should now be complete but is not marked as such in Ghostwriter. Extend the end date or mark the project and check that all reports have been marked as completed and delivered.".format( project ) - if project.slack_channel: - slack.send_msg(message, project.slack_channel) - else: - slack.send_msg(message) + err = None + if slack.enabled: + if project.slack_channel: + err = slack.send_msg(message, project.slack_channel) + else: + err = slack.send_msg(message) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) diff --git a/ghostwriter/shepherd/tasks.py b/ghostwriter/shepherd/tasks.py index 847cca443..1f75599ff 100644 --- a/ghostwriter/shepherd/tasks.py +++ b/ghostwriter/shepherd/tasks.py @@ -211,7 +211,13 @@ def release_domains(no_action=False): message = "Your domain, {}, will be released tomorrow! 
Modify the project's end date as needed.".format( domain.name ) - slack.send_msg(message, slack_channel) + if slack.enabled: + err = slack.send_msg(message, slack_channel) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) except History.DoesNotExist: logger.warning( "The domain %s has no project history, so releasing it", domain.name @@ -236,7 +242,13 @@ def release_domains(no_action=False): else: logger.info("Releasing %s back into the pool.", domain.name) message = "Your domain, {}, has been released.".format(domain.name) - slack.send_msg(message, slack_channel) + if slack.enabled: + err = slack.send_msg(message, slack_channel) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) domain.domain_status = DomainStatus.objects.get(domain_status="Available") domain.save() domain_updates[domain.id]["change"] = "released" @@ -300,7 +312,13 @@ def release_servers(no_action=False): message = "Your server, {}, will be released tomorrow! 
Modify the project's end date as needed.".format( server.ip_address ) - slack.send_msg(message, slack_channel) + if slack.enabled: + err = slack.send_msg(message, slack_channel) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) except ServerHistory.DoesNotExist: logger.warning( "The server %s has no project history, so releasing it", @@ -323,7 +341,13 @@ def release_servers(no_action=False): else: logger.info("Releasing %s back into the pool.", server.ip_address) message = "Your server, {}, has been released.".format(server.ip_address) - slack.send_msg(message, slack_channel) + if slack.enabled: + err = slack.send_msg(message, slack_channel) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) server.server_status = ServerStatus.objects.get(server_status="Available") server.save() server_updates[server.id]["change"] = "released" @@ -401,15 +425,16 @@ def check_domains(domain_id=None): lab_results[k]["categories"], lab_results[k]["burned_explanation"], ) - err = slack.send_msg( - message=f"Domain burned: {v['domain']}", - blocks=blocks, - ) - if err: - logger.warning( - "Attempt to send a Slack notification returned an error: %s", - err, + if slack.enabled: + err = slack.send_msg( + message=f"Domain burned: {v['domain']}", + blocks=blocks, ) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) # Check if the domain is checked-out and send a message to that project channel try: @@ -419,6 +444,7 @@ def check_domains(domain_id=None): if ( latest_checkout.end_date >= date.today() and latest_checkout.project.slack_channel + and slack.enabled ): err = slack.send_msg( message=f"Domain burned: {v['domain']}", @@ -443,15 +469,16 @@ def check_domains(domain_id=None): "VirusTotal Submission", lab_results[k]["warnings"]["messages"], ) - err = slack.send_msg( - message=f"Domain event warning for {v['domain']}", - blocks=blocks, 
- ) - if err: - logger.warning( - "Attempt to send a Slack notification returned an error: %s", - err, + if slack.enabled: + err = slack.send_msg( + message=f"Domain event warning for {v['domain']}", + blocks=blocks, ) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) # Update other fields for the domain object if lab_results[k]["burned"] and "burned_explanation" in lab_results[k]: if lab_results[k]["burned_explanation"]: @@ -632,10 +659,17 @@ def scan_servers(only_active=False): host, port ) latest = ServerHistory.objects.filter(server=server)[0] - if latest.project.slack_channel: - slack.send_msg(message, latest.project.slack_channel) - else: - slack.send_msg(message) + if slack.enabled: + err = None + if latest.project.slack_channel: + err = slack.send_msg(message, latest.project.slack_channel) + else: + err = slack.send_msg(message) + if err: + logger.warning( + "Attempt to send a Slack notification returned an error: %s", + err, + ) def fetch_namecheap_domains(): @@ -940,7 +974,7 @@ def json_datetime_converter(dt): """ if isinstance(dt, datetime): - return dt.__str__() + return str(dt) return None @@ -993,10 +1027,9 @@ def review_cloud_infrastructure(aws_only_running=False): cloud_config.aws_key, cloud_config.aws_secret, ignore_tags, aws_only_running ) if ec2_results["message"]: - vps_info["errors"]["ec2"] = results["message"] - else: - for instance in ec2_results["instances"]: - vps_info["instances"][instance["id"]] = instance + vps_info["errors"]["ec2"] = ec2_results["message"] + for instance in ec2_results["instances"]: + vps_info["instances"][instance["id"]] = instance # Check Lightsail logger.info("Checking Lightsail instances") @@ -1004,19 +1037,17 @@ def review_cloud_infrastructure(aws_only_running=False): cloud_config.aws_key, cloud_config.aws_secret, ignore_tags ) if lightsail_results["message"]: - vps_info["errors"]["lightsail"] = results["message"] - else: - for instance in 
lightsail_results["instances"]: - vps_info["instances"][instance["id"]] = instance + vps_info["errors"]["lightsail"] = lightsail_results["message"] + for instance in lightsail_results["instances"]: + vps_info["instances"][instance["id"]] = instance # Check S3 logger.info("Checking S3 buckets") s3_results = fetch_aws_s3(cloud_config.aws_key, cloud_config.aws_secret) if s3_results["message"]: - vps_info["errors"]["s3"] = results["message"] - else: - for bucket in s3_results["buckets"]: - vps_info["instances"][bucket["name"]] = bucket + vps_info["errors"]["s3"] = s3_results["message"] + for bucket in s3_results["buckets"]: + vps_info["instances"][bucket["name"]] = bucket else: vps_info["errors"]["aws"] = results["message"] @@ -1024,10 +1055,10 @@ def review_cloud_infrastructure(aws_only_running=False): # DO Section # ############### - logger.info("Checking EC2 instances") + logger.info("Checking Digital Ocean droplets") do_results = fetch_digital_ocean(cloud_config.do_api_key, ignore_tags) if do_results["message"]: - vps_info["errors"]["digital_ocean"] = results["message"] + vps_info["errors"]["digital_ocean"] = do_results["message"] else: if do_results["capable"]: for instance in do_results["instances"]: @@ -1043,10 +1074,12 @@ def review_cloud_infrastructure(aws_only_running=False): all_ip_addresses = [] if "public_ip" in instance: for address in instance["public_ip"]: - all_ip_addresses.append(address) + if address is not None: + all_ip_addresses.append(address) if "private_ip" in instance: for address in instance["private_ip"]: - all_ip_addresses.append(address) + if address is not None: + all_ip_addresses.append(address) # Set instance's name to its ID if no name is set if instance["name"]: instance_name = instance["name"] @@ -1079,22 +1112,19 @@ def review_cloud_infrastructure(aws_only_running=False): instance["public_ip"], instance["tags"], ) - if result.project.slack_channel: - err = slack.send_msg( - message=f"Teardown notification for {result.project}", - 
channel=result.project.slack_channel, - blocks=blocks, - ) - if err: - logger.warning( - "Attempt to send a Slack notification returned an error: %s", - err, + if slack.enabled: + err = None + if result.project.slack_channel: + err = slack.send_msg( + message=f"Teardown notification for {result.project}", + channel=result.project.slack_channel, + blocks=blocks, + ) + else: + err = slack.send_msg( + message=f"Teardown notification for {result.project}", + blocks=blocks, ) - else: - err = slack.send_msg( - message=f"Teardown notification for {result.project}", - blocks=blocks, - ) if err: logger.warning( "Attempt to send a Slack notification returned an error: %s", @@ -1136,7 +1166,7 @@ def review_cloud_infrastructure(aws_only_running=False): # Return the stale cloud asset data in JSON for the task results json_data = json.dumps(dict(vps_info), default=json_datetime_converter, indent=2) logger.info("Cloud review completed at %s", datetime.now()) - logger.info("JSON results:\n%s", json_data) + logger.debug("JSON results:\n%s", json_data) return json_data @@ -1358,10 +1388,10 @@ def test_slack_webhook(user): slack = SlackNotification() logger.info("Starting Slack Webhook test at %s", datetime.now()) try: - error = slack.send_msg("Hello from Ghostwriter :wave:") - if error: + err = slack.send_msg("Hello from Ghostwriter :wave:") + if err: level = "error" - message = error["message"] + message = err["message"] else: level = "success" message = f"Slack accepted the request and you should see a message posted in {slack.slack_channel}" diff --git a/hasura-docker/metadata/actions.graphql b/hasura-docker/metadata/actions.graphql index aa788abe5..4f693ac30 100644 --- a/hasura-docker/metadata/actions.graphql +++ b/hasura-docker/metadata/actions.graphql @@ -1,3 +1,10 @@ +type Mutation { + attachFinding( + findingId: Int! + reportId: Int! + ): attachFindingResponse +} + type Mutation { checkoutDomain( domainId: Int! @@ -88,3 +95,7 @@ type deleteResponse { result: String! 
} +type attachFindingResponse { + id: Int! +} + diff --git a/hasura-docker/metadata/actions.yaml b/hasura-docker/metadata/actions.yaml index 369efa594..1eb0e7746 100644 --- a/hasura-docker/metadata/actions.yaml +++ b/hasura-docker/metadata/actions.yaml @@ -1,4 +1,16 @@ actions: + - name: attachFinding + definition: + kind: synchronous + handler: '{{ACTIONS_URL_BASE}}/attachFinding' + forward_client_headers: true + headers: + - name: Hasura-Action-Secret + value_from_env: HASURA_ACTION_SECRET + permissions: + - role: user + - role: manager + comment: Attach a finding from the library to a report - name: checkoutDomain definition: kind: synchronous @@ -117,4 +129,5 @@ custom_types: - name: ReportResponse - name: checkoutResponse - name: deleteResponse + - name: attachFindingResponse scalars: []