Ramiroluz/issue 213 #298

Open
wants to merge 12 commits into develop
3 changes: 3 additions & 0 deletions brasilio/settings.py
@@ -1,4 +1,5 @@
from urllib.parse import urlparse
from pathlib import Path

import environ
import sentry_sdk
@@ -35,6 +36,7 @@
"corsheaders",
"django_extensions",
"rest_framework",
"rest_framework.authtoken",
"markdownx",
"django_rq",
# Project apps
@@ -224,6 +226,7 @@ def get_neo4j_config_dict(neo4j_uri):

# Covid19 import settings
COVID_IMPORT_PERMISSION_PREFIX = "can_import_covid_state_"
SAMPLE_SPREADSHEETS_DATA_DIR = Path(BASE_DIR).joinpath("covid19", "tests", "data")

# RocketChat config
ROCKETCHAT_BASE_URL = env("ROCKETCHAT_BASE_URL")
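The newly registered `rest_framework.authtoken` app ships a `Token` model and its migration, so `manage.py migrate` has to run before tokens can be issued. A minimal sketch of issuing a token for an existing user with DRF's documented model (the user lookup is illustrative only):

    from django.contrib.auth import get_user_model
    from rest_framework.authtoken.models import Token

    user = get_user_model().objects.first()  # any existing user
    token, _ = Token.objects.get_or_create(user=user)
    print(token.key)  # clients then send: Authorization: Token <key>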
1 change: 0 additions & 1 deletion brazil_data/cities.py
@@ -1,7 +1,6 @@
from collections import namedtuple
from functools import lru_cache
from itertools import groupby
from pathlib import Path

import rows
import rows.utils
10 changes: 6 additions & 4 deletions covid19/stats.py
@@ -85,7 +85,7 @@ class Covid19Stats:
"new_deaths_indeterminate_2019": (Sum, "new_deaths_indeterminate_2019"),
"new_deaths_others_2019": (Sum, "new_deaths_others_2019"),
"new_deaths_pneumonia_2019": (Sum, "new_deaths_pneumonia_2019"),
"new_deaths_respiratory_failure_2019": (Sum, "new_deaths_respiratory_failure_2019"),
"new_deaths_respiratory_failure_2019": (Sum, "new_deaths_respiratory_failure_2019",),
"new_deaths_sars_2019": (Sum, "new_deaths_sars_2019"),
"new_deaths_septicemia_2019": (Sum, "new_deaths_septicemia_2019"),
"new_deaths_total_2019": (Sum, "new_deaths_total_2019"),
@@ -337,7 +337,9 @@ def aggregate_state_data(self, select_columns, groupby_columns, state=None):
return list(qs.order_by(*groupby_columns).values(*groupby_columns).annotate(**annotate_dict))

def aggregate_epiweek(self, data, group_key="epidemiological_week"):
row_key = lambda row: row[group_key]
def row_key(row):
return row[group_key]

result = []
data.sort(key=row_key)
for epiweek, group in groupby(data, key=row_key):
@@ -352,7 +354,7 @@ def aggregate_epiweek(self, data, group_key="epidemiological_week"):

def historical_case_data_for_state_per_day(self, state):
return self.aggregate_state_data(
groupby_columns=["date"], select_columns=self.graph_daily_cases_columns, state=state
groupby_columns=["date"], select_columns=self.graph_daily_cases_columns, state=state,
)

def historical_case_data_for_state_per_epiweek(self, state):
@@ -374,7 +376,7 @@ def aggregate_registry_data(self, select_columns, groupby_columns, state=None):
def historical_registry_data_for_state_per_day(self, state=None):
# If state = None, return data for Brazil
return self.aggregate_registry_data(
groupby_columns=["date"], select_columns=self.graph_daily_registry_deaths_columns, state=state
groupby_columns=["date"], select_columns=self.graph_daily_registry_deaths_columns, state=state,
)

def excess_deaths_registry_data_for_state_per_day(self, state=None):
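For context on the `aggregate_epiweek` refactor above: `itertools.groupby` only merges consecutive rows, which is why the data is sorted with the same key before grouping. A standalone sketch with made-up rows:

    from itertools import groupby

    data = [
        {"epidemiological_week": 12, "new_cases": 3},
        {"epidemiological_week": 13, "new_cases": 7},
        {"epidemiological_week": 12, "new_cases": 5},
    ]

    def row_key(row):
        return row["epidemiological_week"]

    data.sort(key=row_key)  # required: groupby only groups adjacent rows
    for epiweek, group in groupby(data, key=row_key):
        print(epiweek, sum(row["new_cases"] for row in group))
    # 12 8
    # 13 7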
1 change: 0 additions & 1 deletion covid19/tests/test_google_data.py
@@ -9,7 +9,6 @@


class TestGoogleDataIntegration(TestCase):

@skip("This test won't work with Django's DummyCache, which is enabled for development")
def test_cache_general_spreadsheet(self):
cache.clear()
110 changes: 110 additions & 0 deletions covid19/tests/test_spreadsheet_api.py
@@ -0,0 +1,110 @@
import shutil
from unittest.mock import patch, Mock, PropertyMock
from datetime import date, timedelta
from pathlib import Path
from model_bakery import baker

from django.conf import settings
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.files.uploadedfile import SimpleUploadedFile
from rest_framework.test import APITestCase
from rest_framework.authtoken.models import Token

from covid19.exceptions import SpreadsheetValidationErrors


class ImportSpreadsheetByDateAPIViewTests(APITestCase):
def setUp(self):
valid_csv = settings.SAMPLE_SPREADSHEETS_DATA_DIR / "sample-PR.csv"
assert valid_csv.exists()

self.data = {
"date": date.today(),
"boletim_urls": "http://google.com\r\n\r http://brasil.io",
"boletim_notes": "notes",
}

self.filename = f"sample.csv"

self.file_data = self.gen_file(self.filename, valid_csv.read_bytes())
self.data["file"] = self.file_data
self.setUp_user_credentials()

def setUp_user_credentials(self):
self.user = baker.make(get_user_model())
self.user.groups.add(Group.objects.get(name__endswith="Rio de Janeiro"))
self.user.groups.add(Group.objects.get(name__endswith="Paraná"))

self.token = baker.make(Token, user=self.user)
self.headers = {"Authorization": f"Token {self.token.key}"}
self.client.credentials(HTTP_AUTHORIZATION=self.headers["Authorization"])
self.client.force_login(user=self.user)

def gen_file(self, name, content):
if isinstance(content, str):
content = str.encode(content)
return SimpleUploadedFile(name, content)

def tearDown(self):
if Path(settings.MEDIA_ROOT).exists():
shutil.rmtree(settings.MEDIA_ROOT)

@patch(
"covid19.spreadsheet_validator.validate_historical_data", Mock(return_value=["warning 1", "warning 2"]),
)
@patch("covid19.models.StateSpreadsheet.admin_url", new_callable=PropertyMock)
def test_import_data_from_a_valid_state_spreadsheet_request(self, mock_admin_url):
mock_admin_url.return_value = "https://brasil.io/covid19/dataset/PR"

expected_status = 200
expected_response = {
"warnings": ["warning 1", "warning 2"],
"detail_url": "https://brasil.io/covid19/dataset/PR",
}

reverse_name = "covid19:statespreadsheet-list"
self.url = reverse(reverse_name, args=["PR"])

response = self.client.post(self.url, data=self.data, format="json")

assert expected_status == response.status_code
assert expected_response == response.json()

def test_403_login_required(self):
expected_status = 403
reverse_name = "covid19:statespreadsheet-list"

self.url = reverse(reverse_name, args=["PR"])

self.client.force_authenticate(user=None)

response = self.client.post(self.url, data=self.data, format="json")
assert expected_status == response.status_code

@patch("covid19.spreadsheet_validator.validate_historical_data", autospec=True)
def test_400_if_spreadsheet_error_on_import_data(self, mock_merge):
exception = SpreadsheetValidationErrors()
exception.new_error("error 1")
exception.new_error("error 2")
mock_merge.side_effect = exception

expected_status = 400
expected_exception_messages = ["error 1", "error 2"]
expected_response = {"errors": {"date": ["Campo não aceita datas futuras."]}}

tomorrow = date.today() + timedelta(days=1)
tomorrow = tomorrow.isoformat()
self.data["date"] = tomorrow

reverse_name = "covid19:statespreadsheet-list"

self.url = reverse(reverse_name, args=["RJ"])

response = self.client.post(self.url, data=self.data, format="json")

assert len(exception.error_messages) == 2
assert expected_exception_messages == sorted(exception.error_messages)
assert expected_status == response.status_code
assert expected_response == response.json()
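Outside the test suite, the endpoint can be exercised with any HTTP client. A hypothetical call mirroring the happy-path test above (the host, token value, URL prefix, and CSV payload are placeholders, not taken from this PR):

    import requests

    response = requests.post(
        "http://localhost:8000/covid19/api/import-data/PR/",
        headers={"Authorization": "Token <key>"},
        json={
            "date": "2020-05-04",
            "boletim_urls": "http://example.com/boletim",
            "boletim_notes": "notes",
            "file": ["municipio,confirmados,mortes\n", "Curitiba,10,1\n"],
        },
    )
    print(response.status_code, response.json())

Note that the view expects "file" as a list of strings, which it joins into a single CSV body before validation.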
3 changes: 2 additions & 1 deletion covid19/urls.py
@@ -12,6 +12,7 @@
path("historical/weekly/", views.historical_weekly, name="historical-weekly"),
path("cities/geo/", views.cities_geojson, name="cities-geo"),
path("states/geo/", views.states_geojson, name="states-geo"),
path("import-data/<str:state>/", views.import_spreadsheet_proxy, name="spreadsheet_proxy"),
path("import-data/<str:state>/", views.import_spreadsheet_proxy, name="spreadsheet_proxy",),
path("api/import-data/<str:state>/", views.state_spreadsheet_view_list, name="statespreadsheet-list",),
path("<str:state>/", views.dashboard, name="dashboard"),
]
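The new route is referenced by name throughout the tests; it can be resolved with `reverse` (assuming the app's URLconf is included under the `covid19` namespace, as the tests indicate):

    from django.urls import reverse

    url = reverse("covid19:statespreadsheet-list", args=["PR"])
    # e.g. ".../api/import-data/PR/"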
46 changes: 41 additions & 5 deletions covid19/views.py
@@ -1,20 +1,28 @@
import datetime
import random
import json

from django.core.files.uploadedfile import SimpleUploadedFile
from django.http import Http404, HttpResponse, JsonResponse
from django.shortcuts import render
from django.db import transaction

from rest_framework import permissions
from rest_framework.decorators import api_view, permission_classes

from brazil_data.cities import get_state_info
from brazil_data.states import STATE_BY_ACRONYM, STATES
from core.middlewares import disable_non_logged_user_cache
from core.util import cached_http_get_json
from covid19.exceptions import SpreadsheetValidationErrors
from covid19.forms import StateSpreadsheetForm
from covid19.geo import city_geojson, state_geojson
from covid19.models import StateSpreadsheet
from covid19.spreadsheet import create_merged_state_spreadsheet
from covid19.stats import Covid19Stats, max_values
from covid19.util import row_to_column
from covid19.epiweek import get_epiweek
from covid19.signals import new_spreadsheet_imported_signal

stats = Covid19Stats()

@@ -61,11 +69,7 @@ def clean_weekly_data(data, skip=0, diff_days=-14):
now = datetime.datetime.now()
today = datetime.date(now.year, now.month, now.day)
_, until_epiweek = get_epiweek(today + datetime.timedelta(days=diff_days))
return [
row
for index, row in enumerate(data)
if index >= skip and row["epidemiological_week"] < until_epiweek
]
return [row for index, row in enumerate(data) if index >= skip and row["epidemiological_week"] < until_epiweek]


def historical_data(request, period):
@@ -300,3 +304,35 @@ def status(request):
data.append(table_entry)

return render(request, "covid-status.html", {"import_data": data})


@api_view(["POST"])
@permission_classes([permissions.IsAuthenticated])
def state_spreadsheet_view_list(request, *args, **kwargs):
def gen_file(name, content):
if isinstance(content, str):
content = str.encode(content)
return SimpleUploadedFile(name, content)

if request.method == "POST":
data = json.loads(request.body)

data["state"] = kwargs["state"] # sempre terá um state dado que ele está na URL

state = data["state"]
date = data["date"]
filename = f"{state}-{date}-import.csv"

file_data = {"file": gen_file(filename, "".join(data["file"]))}

form = StateSpreadsheetForm(data, file_data, user=request.user)

if form.is_valid():
# Save first so the on_commit callback captures the persisted instance;
# the original `sender=self` would raise NameError in a function-based view.
spreadsheet = form.save()
spreadsheet.refresh_from_db()

transaction.on_commit(lambda: new_spreadsheet_imported_signal.send(sender=None, spreadsheet=spreadsheet))

return JsonResponse({"warnings": spreadsheet.warnings, "detail_url": spreadsheet.admin_url})

return JsonResponse({"errors": form.errors}, status=400)