optimize the stakeholder count
sudan45 authored and AdityaKhatri committed Jan 16, 2024
1 parent 45eab06 commit 8ce7ac2
Showing 10 changed files with 122 additions and 100 deletions.
2 changes: 1 addition & 1 deletion .vscode/settings.json
@@ -1,5 +1,5 @@
{
"python.linting.mypyEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "none"
"python.formatting.provider": "true"
}
5 changes: 5 additions & 0 deletions apps/assessment_registry/admin.py
@@ -24,6 +24,11 @@ class QuestionAdmin(admin.ModelAdmin):
'modified_by',
'client_id',
)
exclude = (
'created_by',
'modified_by',
'client_id',
)

def save_model(self, request, obj, form, change):
if not obj.pk:
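
The new exclude tuple hides the audit fields from the Question admin form; they are filled in programmatically when the object is saved instead. A rough sketch of the usual pairing, assuming a save_model body along the lines of the `if not obj.pk:` check visible above (the repository's exact implementation is collapsed here):

from django.contrib import admin

from .models import Question  # assumed import; the file's real imports are collapsed above


class QuestionAdminSketch(admin.ModelAdmin):
    # Audit fields are hidden from the form and set in save_model instead.
    exclude = (
        'created_by',
        'modified_by',
        'client_id',
    )

    def save_model(self, request, obj, form, change):
        if not obj.pk:                      # first save: record who created the row
            obj.created_by = request.user
        obj.modified_by = request.user      # always record the most recent editor
        super().save_model(request, obj, form, change)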
129 changes: 72 additions & 57 deletions apps/assessment_registry/dashboard_schema.py
@@ -1,6 +1,5 @@
import graphene
from dataclasses import dataclass
from collections import defaultdict

from django.db.models import Count, Sum, Avg, Case, Value, When
from django.contrib.postgres.aggregates.general import ArrayAgg
@@ -95,12 +94,12 @@ def resolve_data_collection_technique_display(root, info):


class AssessmentDashboardGeographicalAreaType(graphene.ObjectType):
region = graphene.Int(required=True)
geo_area = graphene.Int(required=True)
region = graphene.ID(required=True)
geo_area = graphene.ID(required=True)
admin_level_id = graphene.ID(required=True)
code = graphene.ID(required=True)
count = graphene.Int(required=True)
assessment_ids = graphene.List(graphene.NonNull(graphene.ID))
assessment_ids = graphene.List(graphene.NonNull(graphene.ID), required=True)


class AssessmentCountByDateType(graphene.ObjectType):
@@ -152,7 +151,7 @@ class AssessmentPerAffectedGroupAndGeoAreaCountByDateType(AssessmentCountByDateType):
affected_group_display = EnumDescription(required=True)

def resolve_geo_area(root, info):
return info.context.dl.geo.geo_area.load(root["geo_area"])
return info.context.dl.assessment_registry.geo_area.load(root["geo_area"])

def resolve_affected_group_display(root, info):
return AssessmentRegistry.AffectedGroupType(root["affected_group"]).label
@@ -165,7 +164,7 @@ class AssessmentPerSectorAndGeoAreaCountByDateType(graphene.ObjectType):
sector_display = EnumDescription(required=True)

def resolve_geo_area(root, info):
return info.context.dl.geo.geo_area.load(root["geo_area"])
return info.context.dl.assessment_registry.geo_area.load(root["geo_area"])

def resolve_sector_display(root, info):
return AssessmentRegistry.SectorType(root["sector"]).label
@@ -229,9 +228,9 @@ def resolve_data_collection_technique_display(root, info):


class AssessmentByGeographicalAndDataCollectionTechniqueCountByDateType(graphene.ObjectType):
admin_level_id = graphene.Int(required=True)
region = graphene.Int(required=True)
geo_area = graphene.Int(required=True)
admin_level_id = graphene.ID(required=True)
region = graphene.ID(required=True)
geo_area = graphene.ID(required=True)
data_collection_technique = graphene.Field(AssessmentRegistryDataCollectionTechniqueTypeEnum, required=True)
count = graphene.Int(required=True)
data_collection_technique_display = EnumDescription(required=True)
@@ -241,9 +240,9 @@ def resolve_data_collection_technique_display(root, info):


class AssessmentByGeographicalAndSamplingApproachCountByDateType(graphene.ObjectType):
admin_level_id = graphene.Int(required=True)
region = graphene.Int(required=True)
geo_area = graphene.Int(required=True)
admin_level_id = graphene.ID(required=True)
region = graphene.ID(required=True)
geo_area = graphene.ID(required=True)
sampling_approach = graphene.Field(AssessmentRegistrySamplingApproachTypeEnum, required=True)
count = graphene.Int(required=True)
sampling_approach_display = EnumDescription(required=True)
@@ -253,9 +252,9 @@ def resolve_sampling_approach_display(root, info):


class AssessmentByGeographicalAndProximityCountByDateType(graphene.ObjectType):
admin_level_id = graphene.Int(required=True)
region = graphene.Int(required=True)
geo_area = graphene.Int(required=True)
admin_level_id = graphene.ID(required=True)
region = graphene.ID(required=True)
geo_area = graphene.ID(required=True)
proximity = graphene.Field(AssessmentRegistryProximityTypeEnum, required=True)
count = graphene.Int(required=True)
proximity_display = EnumDescription(required=True)
@@ -265,9 +264,9 @@ def resolve_proximity_display(root, info):


class AssessmentByGeographicalAndUnit_Of_AnalysisCountByDateType(graphene.ObjectType):
admin_level_id = graphene.Int(required=True)
region = graphene.Int(required=True)
geo_area = graphene.Int(required=True)
admin_level_id = graphene.ID(required=True)
region = graphene.ID(required=True)
geo_area = graphene.ID(required=True)
unit_of_analysis = graphene.Field(AssessmentRegistryUnitOfAnalysisTypeEnum, required=True)
count = graphene.Int(required=True)
unit_of_analysis_display = EnumDescription(required=True)
@@ -277,9 +276,9 @@ def resolve_unit_of_analysis_display(root, info):


class AssessmentByGeographicalAndUnit_Of_ReportingCountByDateType(graphene.ObjectType):
admin_level_id = graphene.Int(required=True)
region = graphene.Int(required=True)
geo_area = graphene.Int(required=True)
admin_level_id = graphene.ID(required=True)
region = graphene.ID(required=True)
geo_area = graphene.ID(required=True)
unit_of_reporting = graphene.Field(AssessmentRegistryUnitOfReportingTypeEnum, required=True)
count = graphene.Int(required=True)
unit_of_reporting_display = EnumDescription(required=True)
@@ -289,9 +288,9 @@ def resolve_unit_of_reporting_display(root, info):


class MedianQualityScoreByGeographicalAreaDateType(graphene.ObjectType):
admin_level_id = graphene.Int(required=True)
region = graphene.Int(required=True)
geo_area = graphene.Int(required=True)
admin_level_id = graphene.ID(required=True)
region = graphene.ID(required=True)
geo_area = graphene.ID(required=True)
final_score = graphene.Float(required=True)
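
Across these dashboard types the identifier fields switch from graphene.Int to graphene.ID; an ID serializes as a string in the GraphQL response and is the conventional scalar for primary keys, matching how geo_area is then resolved through a dataloader. A minimal hedged illustration (an invented type, not one from this schema):

import graphene


class GeoAreaCountSketch(graphene.ObjectType):
    # graphene.ID serializes as a string ("42"), the usual GraphQL scalar for
    # opaque identifiers; graphene.Int would serialize as the number 42.
    geo_area = graphene.ID(required=True)
    count = graphene.Int(required=True)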


@@ -329,7 +328,7 @@ class MedianScoreOfGeographicalAndSectorDateType(graphene.ObjectType):
sector = graphene.Field(AssessmentRegistrySectorTypeEnum, required=True)

def resolve_geo_area(root, info, **kwargs):
return info.context.dl.geo.geo_area.load(root["geo_area"])
return info.context.dl.assessment_registry.geo_area.load(root["geo_area"])


class MedianScoreOfGeoAreaAndAffectedGroupDateType(graphene.ObjectType):
Expand All @@ -339,7 +338,7 @@ class MedianScoreOfGeoAreaAndAffectedGroupDateType(graphene.ObjectType):
affected_group = graphene.Field(AssessmentRegistryAffectedGroupTypeEnum, required=True)

def resolve_geo_area(root, info):
return info.context.dl.geo.geo_area.load(root["geo_area"])
return info.context.dl.assessment_registry.geo_area.load(root["geo_area"])


class MedianScoreOfSectorAndAffectedGroup(graphene.ObjectType):
Expand Down Expand Up @@ -462,23 +461,14 @@ def resolve_assessment_count(root: AssessmentDashboardStat, info):
)

@staticmethod
@node_cache(CacheKey.AssessmentDashboard.STAKEHOLDER_COUNT)
# @node_cache(CacheKey.AssessmentDashboard.STAKEHOLDER_COUNT)
def resolve_stakeholder_count(root: AssessmentDashboardStat, info):
stakeholder_counts = defaultdict(int)
organization_type_fields = ["stakeholders__organization_type__title"]

for field in organization_type_fields:
stakeholders = root.assessment_registry_qs.values(field)
for stakeholder in stakeholders:
if organization_type_title := stakeholder.get(field):
stakeholder_counts[organization_type_title] += 1
return [
StakeholderCountType(
stakeholder=org_type_title,
count=count,
)
for org_type_title, count in stakeholder_counts.items()
]
return (
root.assessment_registry_qs.values(stakeholder=models.F('stakeholders__organization_type__title'))
.annotate(count=Count('id'))
.order_by('stakeholder')
.values('count', 'stakeholder')
)
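
This rewritten resolver is what gives the commit its name: rather than pulling every stakeholder row into Python and tallying titles in a defaultdict, the counting is pushed into one grouped query. A minimal standalone sketch of the same aggregation, assuming only the `stakeholders__organization_type__title` lookup shown above:

from django.db import models
from django.db.models import Count

from assessment_registry.models import AssessmentRegistry  # same import path as in apps/geo/dataloaders.py

stakeholder_counts = (
    AssessmentRegistry.objects
    .values(stakeholder=models.F('stakeholders__organization_type__title'))  # GROUP BY organization-type title
    .annotate(count=Count('id'))                                             # one COUNT per title
    .order_by('stakeholder')
    .values('stakeholder', 'count')                                          # plain dicts for StakeholderCountType
)
# Each row looks like {'stakeholder': 'Government', 'count': 12}; the whole
# tally is computed by the database instead of a per-row Python loop.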

@staticmethod
@node_cache(CacheKey.AssessmentDashboard.COLLECTION_TECHNIQUE_COUNT)
@@ -487,6 +477,7 @@ def resolve_collection_technique_count(root: AssessmentDashboardStat, info):
root.methodology_attribute_qs.values("data_collection_technique")
.annotate(count=Count("data_collection_technique"))
.order_by("data_collection_technique")
.values('data_collection_technique', 'count')
)

@staticmethod
@@ -512,6 +503,14 @@ def resolve_assessment_geographic_areas(root: AssessmentDashboardStat, info):
admin_level_id=models.F("locations__admin_level_id"),
code=models.F("locations__code"),
)
.values(
'locations',
'count',
'assessment_ids',
'geo_area',
'admin_level_id',
'code',
)
.order_by("locations")
)
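
The geographic-areas resolver groups assessments per linked location and collects the matching assessment IDs with ArrayAgg (imported at the top of this module). A hedged, simplified sketch of that shape; the lookup names here are stand-ins, not the resolver's exact annotations:

from django.contrib.postgres.aggregates.general import ArrayAgg
from django.db import models
from django.db.models import Count

from assessment_registry.models import AssessmentRegistry  # assumed import

areas = (
    AssessmentRegistry.objects
    .values(geo_area=models.F('locations__id'))   # one group per linked geo area (assumed lookup)
    .annotate(
        count=Count('id'),                        # assessments touching that area
        assessment_ids=ArrayAgg('id'),            # their IDs gathered into a Postgres array
    )
    .order_by('geo_area')
)
# Example row: {'geo_area': 3, 'count': 5, 'assessment_ids': [1, 2, 7, 9]}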

@@ -523,34 +522,46 @@ def resolve_assessment_by_over_time(root: AssessmentDashboardStat, info):
@staticmethod
@node_cache(CacheKey.AssessmentDashboard.ASSESSMENT_PER_FRAMEWORK_PILLAR)
def resolve_assessment_per_framework_pillar(root: AssessmentDashboardStat, info):
return root.assessment_registry_qs.values(date=TruncDay("created_at")).annotate(
count=Count("id"),
return root.assessment_registry_qs.annotate(
focus=models.Func(models.F("focuses"), function="unnest"),
)
).values('focus').order_by('focus').annotate(
count=Count('id')
).values('focus', 'count').annotate(
date=TruncDay('created_at')
).values('focus', 'count', 'date')

@staticmethod
@node_cache(CacheKey.AssessmentDashboard.ASSESSMENT_PER_AFFECTED_GROUP)
def resolve_assessment_per_affected_group(root: AssessmentDashboardStat, info):
return root.assessment_registry_qs.values(date=TruncDay("created_at")).annotate(
count=Count("id"),
affected_group=models.Func(models.F("affected_groups"), function="unnest"),
)
return root.assessment_registry_qs.annotate(
affected_group=models.Func(models.F('affected_groups'), function='unnest'),
).values('affected_group').order_by('affected_group').annotate(
count=Count('id')
).values('affected_group', 'count').annotate(
date=TruncDay('created_at')
).values('affected_group', 'count', 'date')

@staticmethod
@node_cache(CacheKey.AssessmentDashboard.ASSESSMENT_PER_HUMANITRATION_SECTOR)
def resolve_assessment_per_humanitarian_sector(root: AssessmentDashboardStat, info):
return root.assessment_registry_qs.values(date=TruncDay("created_at")).annotate(
count=Count("id"),
sector=models.Func(models.F("sectors"), function="unnest"),
)
return root.assessment_registry_qs.annotate(
sector=models.Func(models.F('sectors'), function='unnest'),
).values('sector').order_by('sector').annotate(
count=Count('id')
).values('sector', 'count').annotate(
date=TruncDay('created_at')
).values('sector', 'count', 'date')

@staticmethod
@node_cache(CacheKey.AssessmentDashboard.ASSESSMENT_PER_PROTECTION_MANAGEMENT)
def resolve_assessment_per_protection_management(root: AssessmentDashboardStat, info):
return root.assessment_registry_qs.values(date=TruncDay("created_at")).annotate(
count=Count("id"),
protection_management=models.Func(models.F("protection_info_mgmts"), function="unnest"),
)
return root.assessment_registry_qs.annotate(
protection_management=models.Func(models.F('protection_info_mgmts'), function='unnest'),
).values('protection_management').order_by('protection_management').annotate(
count=Count('id')
).values('protection_management', 'count').annotate(
date=TruncDay('created_at')
).values('protection_management', 'count', 'date')
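
The four resolvers above share one shape: models.Func with the Postgres unnest function expands the ArrayField so each element becomes its own row, and the rows are then grouped, counted, and day-bucketed. A hedged sketch that collapses the chained values/annotate calls into a single grouping by focus and day (a simplification of the diff's exact chain, assuming `focuses` is an ArrayField on AssessmentRegistry):

from django.db import models
from django.db.models import Count
from django.db.models.functions import TruncDay

from assessment_registry.models import AssessmentRegistry  # assumed import

per_focus = (
    AssessmentRegistry.objects
    .annotate(
        focus=models.Func(models.F('focuses'), function='unnest'),  # one row per array element
        date=TruncDay('created_at'),                                # bucket by creation day
    )
    .values('focus', 'date')        # GROUP BY (focus, day)
    .annotate(count=Count('id'))
    .order_by('focus', 'date')
)
# Example row: {'focus': 'FOOD_SECURITY', 'date': datetime(2024, 1, 16, 0, 0), 'count': 4}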

@staticmethod
@node_cache(CacheKey.AssessmentDashboard.ASSESSMENT_AFFECTED_GROUP_AND_SECTOR)
@@ -699,6 +710,7 @@ def resolve_assessment_by_data_collection_technique_and_geolocation(root: AssessmentDashboardStat, info):
admin_level_id=models.F("assessment_registry__locations__admin_level_id"),
)
.annotate(count=Count("assessment_registry__locations"))
.values('data_collection_technique', 'geo_area', 'region', 'admin_level_id')
.order_by("assessment_registry__locations")
)

@@ -713,6 +725,7 @@ def resolve_assessment_by_sampling_approach_and_geolocation(root: AssessmentDashboardStat, info):
admin_level_id=models.F("assessment_registry__locations__admin_level_id"),
)
.annotate(count=Count("assessment_registry__locations"))
.values('sampling_approach', 'geo_area', 'region', 'admin_level_id')
.order_by("assessment_registry__locations")
)

@@ -727,6 +740,7 @@ def resolve_assessment_by_proximity_and_geolocation(root: AssessmentDashboardStat, info):
admin_level_id=models.F("assessment_registry__locations__admin_level_id"),
)
.annotate(count=Count("assessment_registry__locations"))
.values('proximity', 'geo_area', 'region', 'admin_level_id')
.order_by("assessment_registry__locations")
)

@@ -755,6 +769,7 @@ def resolve_assessment_by_unit_of_reporting_and_geolocation(root: AssessmentDashboardStat, info):
admin_level_id=models.F("assessment_registry__locations__admin_level_id"),
)
.annotate(count=Count("assessment_registry__locations"))
.values('unit_of_reporting', 'geo_area', 'region', 'admin_level_id')
.order_by("assessment_registry__locations")
)

14 changes: 14 additions & 0 deletions apps/assessment_registry/dataloaders.py
@@ -4,6 +4,7 @@

from django.utils.functional import cached_property
from django.db import connection as django_db_connection
from geo.schema import get_geo_area_queryset_for_project_geo_area_type
from utils.graphene.dataloaders import DataLoaderWithContext, WithContextMixin

from .models import (
@@ -88,6 +89,15 @@ def batch_load_fn(self, keys):
return Promise.resolve([_map.get(key, 0) for key in keys])


class GeoAreaLoader(DataLoaderWithContext):
def batch_load_fn(self, keys):
geo_area_qs = get_geo_area_queryset_for_project_geo_area_type()
_map = {}
for geo_area in geo_area_qs:
_map[geo_area.id] = geo_area
return Promise.resolve([_map.get(key) for key in keys])


class DataLoaders(WithContextMixin):
@cached_property
def stakeholders(self):
@@ -104,3 +114,7 @@ def child_issues(self):
@cached_property
def summary_issue_level(self):
return SummaryIssueLevelLoader(context=self.context)

@cached_property
def geo_area(self):
return GeoAreaLoader(context=self.context)
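
This GeoAreaLoader replaces the one removed from apps/geo/dataloaders.py below, which is why the dashboard resolvers now go through info.context.dl.assessment_registry.geo_area. The contract is the usual dataloader one: every .load(key) issued while a request is resolved is queued, batch_load_fn receives the distinct keys as one list, and it must resolve to results in matching order. A hedged sketch of a consumer (an illustrative type, not one from the schema):

import graphene


class GeoAreaStatSketch(graphene.ObjectType):
    geo_area = graphene.ID(required=True)

    def resolve_geo_area(root, info):
        # Queues the key rather than querying immediately; every key queued during
        # this request is fetched together by GeoAreaLoader.batch_load_fn, avoiding
        # one GeoArea query per node (the classic N+1 pattern).
        return info.context.dl.assessment_registry.geo_area.load(root["geo_area"])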
15 changes: 1 addition & 14 deletions apps/geo/dataloaders.py
@@ -7,7 +7,7 @@
from utils.graphene.dataloaders import DataLoaderWithContext, WithContextMixin
from geo.schema import get_geo_area_queryset_for_project_geo_area_type

from .models import AdminLevel, GeoArea
from .models import AdminLevel
from assessment_registry.models import AssessmentRegistry


@@ -32,15 +32,6 @@ def batch_load_fn(self, keys):
return Promise.resolve([_map.get(key) for key in keys])


class GeoAreaLoader(DataLoaderWithContext):
def batch_load_fn(self, keys):
geo_area_qs = GeoArea.objects.filter(id__in=keys).defer('polygons', 'centroid', 'cached_data')
_map = defaultdict()
for geo_area in geo_area_qs:
_map[geo_area.id] = geo_area
return Promise.resolve([_map.get(key) for key in keys])


class DataLoaders(WithContextMixin):
@cached_property
def admin_levels_by_region(self):
@@ -49,7 +40,3 @@ def admin_levels_by_region(self):
@cached_property
def assessment_registry_locations(self):
return AssessmentRegistryGeoAreaLoader(context=self.context)

@cached_property
def geo_area(self):
return GeoAreaLoader(context=self.context)
2 changes: 1 addition & 1 deletion apps/geo/schema.py
@@ -20,7 +20,7 @@ def get_users_adminlevel_qs(info):
return AdminLevel.get_for(info.context.user).defer('geo_area_titles')


def get_geo_area_queryset_for_project_geo_area_type(queryset=None, defer_fields=('polygons', 'centroid')):
def get_geo_area_queryset_for_project_geo_area_type(queryset=None, defer_fields=('polygons', 'centroid', 'cached_data')):
_queryset = queryset
if _queryset is None:
_queryset = GeoArea.objects
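
With 'cached_data' added to defer_fields, the helper now keeps all three heavy columns out of the initial SELECT. A quick hedged illustration of what defer() means for callers of this queryset:

from geo.models import GeoArea  # import path assumed from the app layout above

geo_areas = GeoArea.objects.defer('polygons', 'centroid', 'cached_data')

for area in geo_areas:
    print(area.id)  # served from the columns already fetched
    # Touching area.polygons (or any other deferred field) here would trigger an
    # extra query per row, so callers are expected not to access the deferred columns.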
4 changes: 2 additions & 2 deletions apps/organization/filters.py
@@ -42,7 +42,7 @@ def queryset(self, request, queryset):

class OrganizationFilterSet(django_filters.FilterSet):
search = django_filters.CharFilter(method='search_filter')
used_in_project = IDFilter(method='filter_used_in_project')
used_in_project_by_lead = IDFilter(method='filter_used_in_project_by_lead')
used_in_project_by_assesment = IDFilter(method='filter_used_in_project_by_assesment')
ordering = MultipleInputFilter(
OrganizationOrderingEnum,
@@ -65,7 +65,7 @@ def search_filter(self, qs, _, value):
).distinct()
return qs

def filter_used_in_project(self, qs, _, value):
def filter_used_in_project_by_lead(self, qs, _, value):
if value:
user = getattr(self.request, 'user', None)
if user is None:
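
The rename makes the two organization filters symmetrical: used_in_project_by_lead and used_in_project_by_assesment both pass a project ID to a method-based filter. A hedged sketch of how such a method filter is wired in django-filter; the lookup through leads is an assumption, since the real method body is collapsed above:

import django_filters


class OrganizationFilterSetSketch(django_filters.FilterSet):
    used_in_project_by_lead = django_filters.NumberFilter(method='filter_used_in_project_by_lead')

    def filter_used_in_project_by_lead(self, qs, name, value):
        if value:
            # Keep only organizations referenced by a lead of the given project
            # (lead__project is an assumed relation, not the repository's code).
            return qs.filter(lead__project=value).distinct()
        return qs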