From 39683e748d5139d846a3680393c751bb3c90fd38 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Mon, 26 Aug 2024 17:20:09 +0545 Subject: [PATCH 1/9] feature - Make sure API runs independently --- backend/Dockerfile.API | 60 ++++++++++ backend/aiproject/settings.py | 8 +- backend/api-entrypoint.sh | 7 ++ backend/api-requirements.txt | 3 +- backend/core/tasks.py | 16 +-- backend/core/urls.py | 11 +- backend/core/views.py | 199 +++++++++++++++++++--------------- 7 files changed, 199 insertions(+), 105 deletions(-) create mode 100644 backend/Dockerfile.API create mode 100644 backend/api-entrypoint.sh diff --git a/backend/Dockerfile.API b/backend/Dockerfile.API new file mode 100644 index 00000000..15e6b609 --- /dev/null +++ b/backend/Dockerfile.API @@ -0,0 +1,60 @@ +## docker build -t fair-api -f Dockerfile.API . + +## For Development: + +## docker run --env-file .env --rm -p 8000:8000 -v $(pwd):/app/code --name fair-api-container fair-api + +FROM python:3.11-slim-bookworm as build + +RUN apt-get update && apt-get --no-install-recommends -y install \ + build-essential \ + libpq-dev \ + libgdal-dev \ + python3-dev \ + && apt-get clean && rm -rf /var/lib/apt/lists/* + +ENV CPLUS_INCLUDE_PATH=/usr/include/gdal +ENV C_INCLUDE_PATH=/usr/include/gdal + +WORKDIR /app + +RUN python3 -m venv /app/venv + +RUN /app/venv/bin/pip install --no-cache-dir --upgrade pip setuptools wheel + +COPY api-requirements.txt api-requirements.txt ./ + +RUN /app/venv/bin/pip install --no-cache-dir -r api-requirements.txt + +WORKDIR /app/code +COPY aiproject /app/code/aiproject +COPY core /app/code/core +COPY login /app/code/login +COPY manage.py /app/code/manage.py +COPY tests /app/code/tests + +FROM python:3.11-slim-bookworm + +RUN apt-get update && apt-get --no-install-recommends -y install \ + libgdal-dev \ + && apt-get clean && rm -rf /var/lib/apt/lists/* + +ENV CPLUS_INCLUDE_PATH=/usr/include/gdal +ENV C_INCLUDE_PATH=/usr/include/gdal + +WORKDIR /app + +COPY --from=build /app/venv /app/venv + +COPY --from=build /app/code /app/code +ENV PATH="/app/venv/bin:$PATH" +COPY api-entrypoint.sh ./api-entrypoint-lock.sh +RUN chmod +x ./api-entrypoint-lock.sh + +WORKDIR /app/code + +EXPOSE 8000 + +ENTRYPOINT ["/app/api-entrypoint-lock.sh"] + +CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"] diff --git a/backend/aiproject/settings.py b/backend/aiproject/settings.py index a37d0884..a21ade24 100644 --- a/backend/aiproject/settings.py +++ b/backend/aiproject/settings.py @@ -56,7 +56,9 @@ # Limiter EPOCHS_LIMIT = env("EPOCHS_LIMIT", default=30) BATCH_SIZE_LIMIT = env("BATCH_SIZE_LIMIT", default=8) -TRAINING_WORKSPACE_DOWNLOAD_LIMIT = env("TRAINING_WORKSPACE_DOWNLOAD_LIMIT", default=200) +TRAINING_WORKSPACE_DOWNLOAD_LIMIT = env( + "TRAINING_WORKSPACE_DOWNLOAD_LIMIT", default=200 +) # Application definition @@ -98,7 +100,7 @@ CORS_ORIGIN_WHITELIST = ALLOWED_ORIGINS -CORS_ORIGIN_ALLOW_ALL = env("CORS_ORIGIN_ALLOW_ALL", default= False) +CORS_ORIGIN_ALLOW_ALL = env("CORS_ORIGIN_ALLOW_ALL", default=False) REST_FRAMEWORK = { "DEFAULT_SCHEMA_CLASS": "rest_framework.schemas.coreapi.AutoSchema", @@ -211,3 +213,5 @@ TRAINING_WORKSPACE = env( "TRAINING_WORKSPACE", default=os.path.join(os.getcwd(), "training") ) + +ENABLE_PREDICTION_API = env("ENABLE_PREDICTION_API", default=False) diff --git a/backend/api-entrypoint.sh b/backend/api-entrypoint.sh new file mode 100644 index 00000000..ba7ab36b --- /dev/null +++ b/backend/api-entrypoint.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e +echo "Applying database migrations..." 
+python manage.py makemigrations login core +python manage.py migrate +echo "Starting Django server..." +exec "$@" diff --git a/backend/api-requirements.txt b/backend/api-requirements.txt index 0b047bea..ffe5281e 100644 --- a/backend/api-requirements.txt +++ b/backend/api-requirements.txt @@ -1,6 +1,7 @@ django==4.1.4 # gdal==3.6.2 psycopg2 +numpy<2.0 djangorestframework==3.14.0 djangorestframework-gis==1.0 dj-database-url==1.2.0 @@ -20,4 +21,4 @@ geojson2osm==0.0.1 osmconflator==0.0.9 orthogonalizer==0.0.4 fairpredictor==0.0.26 -tflite-runtime==2.14.0 \ No newline at end of file +# tflite-runtime==2.14.0 diff --git a/backend/core/tasks.py b/backend/core/tasks.py index b037ab01..366af18c 100644 --- a/backend/core/tasks.py +++ b/backend/core/tasks.py @@ -7,8 +7,13 @@ import traceback from shutil import rmtree - from celery import shared_task +from django.conf import settings +from django.contrib.gis.db.models.aggregates import Extent +from django.contrib.gis.geos import GEOSGeometry +from django.shortcuts import get_object_or_404 +from django.utils import timezone + from core.models import AOI, Feedback, FeedbackAOI, FeedbackLabel, Label, Training from core.serializers import ( AOISerializer, @@ -18,12 +23,6 @@ LabelFileSerializer, ) from core.utils import bbox, is_dir_empty -from django.conf import settings -from django.contrib.gis.db.models.aggregates import Extent -from django.contrib.gis.geos import GEOSGeometry -from django.shortcuts import get_object_or_404 -from django.utils import timezone -from predictor import download_imagery, get_start_end_download_coords logger = logging.getLogger(__name__) @@ -78,12 +77,13 @@ def train_model( feedback=None, freeze_layers=False, ): - #importing them here so that it won't be necessary when sending tasks ( api only) + # importing them here so that it won't be necessary when sending tasks ( api only) import hot_fair_utilities import ramp.utils import tensorflow as tf from hot_fair_utilities import preprocess, train from hot_fair_utilities.training import run_feedback + from predictor import download_imagery, get_start_end_download_coords training_instance = get_object_or_404(Training, id=training_id) training_instance.status = "RUNNING" diff --git a/backend/core/urls.py b/backend/core/urls.py index d740e4dd..42002e50 100644 --- a/backend/core/urls.py +++ b/backend/core/urls.py @@ -1,11 +1,11 @@ +from django.conf import settings from django.conf.urls import include from django.urls import path from rest_framework import routers # now import the views.py file into this code -from .views import ( +from .views import ( # APIStatus, AOIViewSet, - # APIStatus, ConflateGeojson, DatasetViewSet, FeedbackAOIViewset, @@ -16,7 +16,6 @@ GenerateGpxView, LabelViewSet, ModelViewSet, - PredictionView, RawdataApiAOIView, RawdataApiFeedbackView, TrainingViewSet, @@ -28,6 +27,9 @@ run_task_status, ) +if settings.ENABLE_PREDICTION_API: + from .views import PredictionView + # CURD Block router = routers.DefaultRouter() router.register(r"dataset", DatasetViewSet) @@ -50,7 +52,6 @@ # path("download//", download_training_data), path("training/status//", run_task_status), path("training/publish//", publish_training), - path("prediction/", PredictionView.as_view()), path("feedback/training/submit/", FeedbackView.as_view()), # path("status/", APIStatus.as_view()), path("geojson2osm/", geojson2osmconverter, name="geojson2osmconverter"), @@ -65,3 +66,5 @@ ), path("workspace//", TrainingWorkspaceView.as_view()), ] +if settings.ENABLE_PREDICTION_API: + 
urlpatterns.append(path("prediction/", PredictionView.as_view())) diff --git a/backend/core/views.py b/backend/core/views.py index d920cd40..60bde9c2 100644 --- a/backend/core/views.py +++ b/backend/core/views.py @@ -26,11 +26,8 @@ from django_filters.rest_framework import DjangoFilterBackend from drf_yasg.utils import swagger_auto_schema from geojson2osm import geojson2osm -from login.authentication import OsmAuthentication -from login.permissions import IsOsmAuthenticated from orthogonalizer import othogonalize_poly from osmconflator import conflate_geojson -from predictor import predict from rest_framework import decorators, serializers, status, viewsets from rest_framework.decorators import api_view from rest_framework.exceptions import ValidationError @@ -38,6 +35,9 @@ from rest_framework.views import APIView from rest_framework_gis.filters import InBBoxFilter, TMSTileFilter +from login.authentication import OsmAuthentication +from login.permissions import IsOsmAuthenticated + from .models import ( AOI, Dataset, @@ -63,6 +63,9 @@ from .tasks import train_model from .utils import get_dir_size, gpx_generator, process_rawdata, request_rawdata +if settings.ENABLE_PREDICTION_API: + from predictor import predict + def home(request): return redirect("schema-swagger-ui") @@ -194,7 +197,7 @@ class FeedbackLabelViewset(viewsets.ModelViewSet): bbox_filter_field = "geom" filter_backends = ( InBBoxFilter, # it will take bbox like this api/v1/label/?in_bbox=-90,29,-89,35 , - DjangoFilterBackend + DjangoFilterBackend, ) bbox_filter_include_overlapping = True filterset_fields = ["feedback_aoi", "feedback_aoi__training"] @@ -345,9 +348,9 @@ def download_training_data(request, dataset_id: int): response = HttpResponse(open(zip_temp_path, "rb")) response.headers["Content-Type"] = "application/x-zip-compressed" - response.headers[ - "Content-Disposition" - ] = f"attachment; filename=training_{dataset_id}_all_data.zip" + response.headers["Content-Disposition"] = ( + f"attachment; filename=training_{dataset_id}_all_data.zip" + ) return response else: # "error": "File Doesn't Exist or has been cleared up from system", @@ -493,105 +496,116 @@ def post(self, request, *args, **kwargs): DEFAULT_TILE_SIZE = 256 +if settings.ENABLE_PREDICTION_API: -class PredictionView(APIView): - authentication_classes = [OsmAuthentication] - permission_classes = [IsOsmAuthenticated] + class PredictionView(APIView): + authentication_classes = [OsmAuthentication] + permission_classes = [IsOsmAuthenticated] - @swagger_auto_schema( - request_body=PredictionParamSerializer, responses={status.HTTP_200_OK: "ok"} - ) - def post(self, request, *args, **kwargs): - """Predicts on bbox by published model""" - res_serializer = PredictionParamSerializer(data=request.data) - if res_serializer.is_valid(raise_exception=True): - deserialized_data = res_serializer.data - bbox = deserialized_data["bbox"] - use_josm_q = deserialized_data["use_josm_q"] - model_instance = get_object_or_404(Model, id=deserialized_data["model_id"]) - if not model_instance.published_training: - return Response("Model is not published yet", status=404) - training_instance = get_object_or_404( - Training, id=model_instance.published_training - ) + @swagger_auto_schema( + request_body=PredictionParamSerializer, responses={status.HTTP_200_OK: "ok"} + ) + def post(self, request, *args, **kwargs): + """Predicts on bbox by published model""" + res_serializer = PredictionParamSerializer(data=request.data) + if res_serializer.is_valid(raise_exception=True): + deserialized_data = 
res_serializer.data + bbox = deserialized_data["bbox"] + use_josm_q = deserialized_data["use_josm_q"] + model_instance = get_object_or_404( + Model, id=deserialized_data["model_id"] + ) + if not model_instance.published_training: + return Response("Model is not published yet", status=404) + training_instance = get_object_or_404( + Training, id=model_instance.published_training + ) - source_img_in_dataset = model_instance.dataset.source_imagery - source = ( - deserialized_data["source"] - if deserialized_data["source"] - else source_img_in_dataset - ) - zoom_level = deserialized_data["zoom_level"] - try: - start_time = time.time() - model_path = os.path.join( - settings.TRAINING_WORKSPACE, - f"dataset_{model_instance.dataset.id}", - "output", - f"training_{training_instance.id}", - "checkpoint.tflite", + source_img_in_dataset = model_instance.dataset.source_imagery + source = ( + deserialized_data["source"] + if deserialized_data["source"] + else source_img_in_dataset ) - # give high priority to tflite model format if not avilable fall back to .h5 if not use default .tf - if not os.path.exists(model_path): + zoom_level = deserialized_data["zoom_level"] + try: + start_time = time.time() model_path = os.path.join( settings.TRAINING_WORKSPACE, f"dataset_{model_instance.dataset.id}", "output", f"training_{training_instance.id}", - "checkpoint.h5", + "checkpoint.tflite", ) + # give high priority to tflite model format if not avilable fall back to .h5 if not use default .tf if not os.path.exists(model_path): model_path = os.path.join( settings.TRAINING_WORKSPACE, f"dataset_{model_instance.dataset.id}", "output", f"training_{training_instance.id}", - "checkpoint.tf", + "checkpoint.h5", ) - geojson_data = predict( - bbox=bbox, - model_path=model_path, - zoom_level=zoom_level, - tms_url=source, - tile_size=DEFAULT_TILE_SIZE, - confidence=deserialized_data["confidence"] / 100 - if "confidence" in deserialized_data - else 0.5, - tile_overlap_distance=deserialized_data["tile_overlap_distance"] - if "tile_overlap_distance" in deserialized_data - else 0.15, - ) - print( - f"It took {round(time.time()-start_time)}sec for generating predictions" - ) - for feature in geojson_data["features"]: - feature["properties"]["building"] = "yes" - feature["properties"]["source"] = "fAIr" - if use_josm_q is True: - feature["geometry"] = othogonalize_poly( - feature["geometry"], - maxAngleChange=deserialized_data["max_angle_change"] - if "max_angle_change" in deserialized_data - else 15, - skewTolerance=deserialized_data["skew_tolerance"] - if "skew_tolerance" in deserialized_data - else 15, - ) - - print( - f"Prediction API took ({round(time.time()-start_time)} sec) in total" - ) + if not os.path.exists(model_path): + model_path = os.path.join( + settings.TRAINING_WORKSPACE, + f"dataset_{model_instance.dataset.id}", + "output", + f"training_{training_instance.id}", + "checkpoint.tf", + ) + geojson_data = predict( + bbox=bbox, + model_path=model_path, + zoom_level=zoom_level, + tms_url=source, + tile_size=DEFAULT_TILE_SIZE, + confidence=( + deserialized_data["confidence"] / 100 + if "confidence" in deserialized_data + else 0.5 + ), + tile_overlap_distance=( + deserialized_data["tile_overlap_distance"] + if "tile_overlap_distance" in deserialized_data + else 0.15 + ), + ) + print( + f"It took {round(time.time()-start_time)}sec for generating predictions" + ) + for feature in geojson_data["features"]: + feature["properties"]["building"] = "yes" + feature["properties"]["source"] = "fAIr" + if use_josm_q is True: + 
feature["geometry"] = othogonalize_poly( + feature["geometry"], + maxAngleChange=( + deserialized_data["max_angle_change"] + if "max_angle_change" in deserialized_data + else 15 + ), + skewTolerance=( + deserialized_data["skew_tolerance"] + if "skew_tolerance" in deserialized_data + else 15 + ), + ) + + print( + f"Prediction API took ({round(time.time()-start_time)} sec) in total" + ) - ## TODO : can send osm xml format from here as well using geojson2osm - return Response(geojson_data, status=status.HTTP_201_CREATED) - except ValueError as e: - if str(e) == "No Features Found": - return Response("No features found", status=204) - else: - return Response(str(e), status=500) - except Exception as ex: - print(ex) - return Response("Prediction Error", status=500) + ## TODO : can send osm xml format from here as well using geojson2osm + return Response(geojson_data, status=status.HTTP_201_CREATED) + except ValueError as e: + if str(e) == "No Features Found": + return Response("No features found", status=204) + else: + return Response(str(e), status=500) + except Exception as ex: + print(ex) + return Response("Prediction Error", status=500) @api_view(["POST"]) @@ -688,9 +702,14 @@ def get(self, request, lookup_dir): if os.path.isdir(base_dir) else os.path.getsize(base_dir) ) / (1024**2) - if size > settings.TRAINING_WORKSPACE_DOWNLOAD_LIMIT: # if file is greater than 200 mb exit + if ( + size > settings.TRAINING_WORKSPACE_DOWNLOAD_LIMIT + ): # if file is greater than 200 mb exit return Response( - {f"Errr: File Size {size} MB Exceed More than {settings.TRAINING_WORKSPACE_DOWNLOAD_LIMIT} MB"}, status=403 + { + f"Errr: File Size {size} MB Exceed More than {settings.TRAINING_WORKSPACE_DOWNLOAD_LIMIT} MB" + }, + status=403, ) if os.path.isfile(base_dir): From bd692029ff432dfe2ff046a7b79cf3fe44c80f95 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Mon, 26 Aug 2024 17:45:50 +0545 Subject: [PATCH 2/9] add referer to fAIr requests --- backend/core/utils.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/backend/core/utils.py b/backend/core/utils.py index e6ed7083..6fda5626 100644 --- a/backend/core/utils.py +++ b/backend/core/utils.py @@ -63,7 +63,11 @@ def __init__(self, BASE_API_URL): self.BASE_API_URL = BASE_API_URL def request_snapshot(self, geometry): - headers = {"accept": "application/json", "Content-Type": "application/json"} + headers = { + "accept": "application/json", + "Content-Type": "application/json", + "Referer": "fAIr", + } # Lets start with buildings for now payload = { "geometry": json.loads(geometry), @@ -124,9 +128,7 @@ def process_rawdata(file_download_url, aoi_id, feedback=False): """This will create temp directory , Downloads file from URL provided, Unzips it Finds a geojson file , Process it and finally removes processed Geojson file and downloaded zip file from Directory""" - headers = { - 'Referer': 'https://fair-dev.hotosm.org/' # TODO : Use request uri - } + headers = {"Referer": "https://fair-dev.hotosm.org/"} # TODO : Use request uri r = requests.get(file_download_url, headers=headers) # Check whether the export path exists or not path = "temp/" @@ -250,7 +252,7 @@ def process_geojson(geojson_file_path, aoi_id, feedback=False): ) # leave one cpu free always if feedback: FeedbackLabel.objects.filter(feedback_aoi__id=aoi_id).delete() - else : + else: Label.objects.filter(aoi__id=aoi_id).delete() # max_workers = os.cpu_count() # get total cpu count available on the From 8ead61d3d6b076ecd40d28b4e0bcb8a02cc0c884 Mon Sep 17 00:00:00 2001 
From: kshitijrajsharma
Date: Tue, 3 Sep 2024 14:54:02 +0545
Subject: [PATCH 3/9] feat(approvedpredictions): feature to upload approved
 predictions as well as CRUD ops

---
 backend/Dockerfile.API        |  4 +++-
 backend/aiproject/settings.py |  5 +++--
 backend/api-requirements.txt  |  6 ++++--
 backend/core/models.py        | 12 +++++++++++
 backend/core/serializers.py   | 10 ++++++++-
 backend/core/tasks.py         |  2 ++
 backend/core/urls.py          |  2 ++
 backend/core/views.py         | 39 +++++++++++++++++++++++++++++++++++
 8 files changed, 74 insertions(+), 6 deletions(-)

diff --git a/backend/Dockerfile.API b/backend/Dockerfile.API
index 15e6b609..f62f16d8 100644
--- a/backend/Dockerfile.API
+++ b/backend/Dockerfile.API
@@ -4,7 +4,9 @@
 
 ## docker run --env-file .env --rm -p 8000:8000 -v $(pwd):/app/code --name fair-api-container fair-api
 
-FROM python:3.11-slim-bookworm as build
+## To reach a port on the host system from inside the Docker environment, use host.docker.internal or the host IP
+
+FROM python:3.11-slim-bookworm AS build
 
 RUN apt-get update && apt-get --no-install-recommends -y install \
     build-essential \
diff --git a/backend/aiproject/settings.py b/backend/aiproject/settings.py
index a21ade24..86027c1d 100644
--- a/backend/aiproject/settings.py
+++ b/backend/aiproject/settings.py
@@ -206,8 +206,9 @@
     }
 }
 # get ramp home and set it to environ
-RAMP_HOME = env("RAMP_HOME")
-os.environ["RAMP_HOME"] = RAMP_HOME
+RAMP_HOME = env("RAMP_HOME", default=None)
+if RAMP_HOME:
+    os.environ["RAMP_HOME"] = RAMP_HOME
 
 # training workspace
 TRAINING_WORKSPACE = env(
diff --git a/backend/api-requirements.txt b/backend/api-requirements.txt
index ffe5281e..b7073a23 100644
--- a/backend/api-requirements.txt
+++ b/backend/api-requirements.txt
@@ -1,7 +1,6 @@
 django==4.1.4
 # gdal==3.6.2
-psycopg2
-numpy<2.0
+psycopg2==2.9.9
 djangorestframework==3.14.0
 djangorestframework-gis==1.0
 dj-database-url==1.2.0
@@ -21,4 +20,7 @@
 geojson2osm==0.0.1
 osmconflator==0.0.9
 orthogonalizer==0.0.4
 fairpredictor==0.0.26
+rasterio==1.3.8
+numpy==1.26.4
+
 # tflite-runtime==2.14.0
diff --git a/backend/core/models.py b/backend/core/models.py
index 4b054ea6..ba5cf22c 100644
--- a/backend/core/models.py
+++ b/backend/core/models.py
@@ -2,6 +2,7 @@
 from django.contrib.postgres.fields import ArrayField
 from django.core.validators import MaxValueValidator, MinValueValidator
 from django.db import models
+
 from login.models import OsmUser
 
 # Create your models here.
@@ -133,3 +134,14 @@ class FeedbackLabel(models.Model): geom = geomodels.PolygonField(srid=4326) created_at = models.DateTimeField(auto_now_add=True) + + +class ApprovedPredictions(models.Model): + training = models.ForeignKey(Training, to_field="id", on_delete=models.DO_NOTHING) + config = models.JSONField( + null=True, blank=True + ) ### Config meant to be kept for vectorization config / zoom config , to know what user is using for the most of the time + geom = geomodels.GeometryField( + srid=4326 + ) ## Making this geometry field to support point/line prediction later on + approved_at = models.DateTimeField(auto_now_add=True) diff --git a/backend/core/serializers.py b/backend/core/serializers.py index 24939aa9..7465bcf2 100644 --- a/backend/core/serializers.py +++ b/backend/core/serializers.py @@ -1,10 +1,11 @@ from django.conf import settings -from login.models import OsmUser from rest_framework import serializers from rest_framework_gis.serializers import ( GeoFeatureModelSerializer, # this will be used if we used to serialize as geojson ) +from login.models import OsmUser + from .models import * # from .tasks import train_model @@ -113,6 +114,13 @@ class Meta: # read_only_fields = ("created_at", "osm_id") +class ApprovedPredictionsSerializer(GeoFeatureModelSerializer): + class Meta: + model = ApprovedPredictions + geo_field = "geom" + fields = "__all__" + + class FeedbackLabelSerializer(GeoFeatureModelSerializer): class Meta: model = FeedbackLabel diff --git a/backend/core/tasks.py b/backend/core/tasks.py index 366af18c..0708d0c2 100644 --- a/backend/core/tasks.py +++ b/backend/core/tasks.py @@ -89,6 +89,8 @@ def train_model( training_instance.status = "RUNNING" training_instance.started_at = timezone.now() training_instance.save() + if settings.RAMP_HOME is None: + raise ValueError("Ramp Home is not configured") try: ## -----------IMAGE DOWNLOADER--------- diff --git a/backend/core/urls.py b/backend/core/urls.py index 42002e50..8212eade 100644 --- a/backend/core/urls.py +++ b/backend/core/urls.py @@ -6,6 +6,7 @@ # now import the views.py file into this code from .views import ( # APIStatus, AOIViewSet, + ApprovedPredictionsViewSet, ConflateGeojson, DatasetViewSet, FeedbackAOIViewset, @@ -35,6 +36,7 @@ router.register(r"dataset", DatasetViewSet) router.register(r"aoi", AOIViewSet) router.register(r"label", LabelViewSet) +router.register(r"approved-prediction", ApprovedPredictionsViewSet) router.register(r"training", TrainingViewSet) router.register(r"model", ModelViewSet) router.register(r"feedback", FeedbackViewset) diff --git a/backend/core/views.py b/backend/core/views.py index 60bde9c2..a183cc4d 100644 --- a/backend/core/views.py +++ b/backend/core/views.py @@ -40,6 +40,7 @@ from .models import ( AOI, + ApprovedPredictions, Dataset, Feedback, FeedbackAOI, @@ -50,6 +51,7 @@ ) from .serializers import ( AOISerializer, + ApprovedPredictionsSerializer, DatasetSerializer, FeedbackAOISerializer, FeedbackFileSerializer, @@ -263,6 +265,43 @@ def create(self, request, *args, **kwargs): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) +class ApprovedPredictionsViewSet(viewsets.ModelViewSet): + authentication_classes = [OsmAuthentication] + permission_classes = [IsOsmAuthenticated] + permission_allowed_methods = ["GET"] + queryset = ApprovedPredictions.objects.all() + serializer_class = ApprovedPredictionsSerializer + bbox_filter_field = "geom" + filter_backends = ( + InBBoxFilter, + # TMSTileFilter, + DjangoFilterBackend, + ) + bbox_filter_include_overlapping = True + 
filterset_fields = ["training"] + + def create(self, request, *args, **kwargs): + training_id = request.data.get("training") + geom = request.data.get("geom") + + existing_approved_feature = ApprovedPredictions.objects.filter( + training=training_id, geom=geom + ).first() + + if existing_approved_feature: + serializer = ApprovedPredictionsSerializer( + existing_approved_feature, data=request.data + ) + else: + + serializer = ApprovedPredictionsSerializer(data=request.data) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + class RawdataApiFeedbackView(APIView): authentication_classes = [OsmAuthentication] permission_classes = [IsOsmAuthenticated] From c7bcfc9a73c2d22d9d20b0c5d10b804e71e7a5d1 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Tue, 3 Sep 2024 15:25:15 +0545 Subject: [PATCH 4/9] Upgraded requirements to move tf runtime to requirements --- backend/api-requirements.txt | 2 -- backend/requirements.txt | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/backend/api-requirements.txt b/backend/api-requirements.txt index 0dc2e759..04fd7143 100644 --- a/backend/api-requirements.txt +++ b/backend/api-requirements.txt @@ -24,6 +24,4 @@ fairpredictor==0.0.26 rasterio==1.3.8 numpy==1.26.4 -# tflite-runtime==2.14.0 - diff --git a/backend/requirements.txt b/backend/requirements.txt index 97733fef..2244ac9e 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -1,2 +1,3 @@ -r api-requirements.txt hot-fair-utilities==1.2.3 +tflite-runtime==2.14.0 \ No newline at end of file From b5ef5a783e76c316df4e2a898fb202d8d7bc99a5 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Tue, 3 Sep 2024 15:57:51 +0545 Subject: [PATCH 5/9] ci(nupy): don't hard restrict numpy version --- backend/api-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/api-requirements.txt b/backend/api-requirements.txt index 04fd7143..380aa13b 100644 --- a/backend/api-requirements.txt +++ b/backend/api-requirements.txt @@ -22,6 +22,6 @@ orthogonalizer==0.0.4 fairpredictor==0.0.26 rasterio==1.3.8 -numpy==1.26.4 +numpy<2.0.0 From 59c66579cdfe254dd3180f6c288b5b48c07ee5d0 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Tue, 3 Sep 2024 16:39:39 +0545 Subject: [PATCH 6/9] fix(trainingviews): fixes syntax bug while running prediction --- backend/core/views.py | 48 ------------------------------------------- 1 file changed, 48 deletions(-) diff --git a/backend/core/views.py b/backend/core/views.py index 4e15fedd..5ee2217d 100644 --- a/backend/core/views.py +++ b/backend/core/views.py @@ -609,51 +609,6 @@ def post(self, request, *args, **kwargs): f"training_{training_instance.id}", "checkpoint.tf", ) - geojson_data = predict( - bbox=bbox, - model_path=model_path, - zoom_level=zoom_level, - tms_url=source, - tile_size=DEFAULT_TILE_SIZE, - confidence=( - deserialized_data["confidence"] / 100 - if "confidence" in deserialized_data - else 0.5 - ), - tile_overlap_distance=( - deserialized_data["tile_overlap_distance"] - if "tile_overlap_distance" in deserialized_data - else 0.15 - ), - ) - print( - f"It took {round(time.time()-start_time)}sec for generating predictions" - ) - for feature in geojson_data["features"]: - feature["properties"]["building"] = "yes" - feature["properties"]["source"] = "fAIr" - if use_josm_q is True: - feature["geometry"] = othogonalize_poly( - feature["geometry"], - maxAngleChange=( - 
deserialized_data["max_angle_change"] - if "max_angle_change" in deserialized_data - else 15 - ), - skewTolerance=( - deserialized_data["skew_tolerance"] - if "skew_tolerance" in deserialized_data - else 15 - ), - ) - if not os.path.exists(model_path): - model_path = os.path.join( - settings.TRAINING_WORKSPACE, - f"dataset_{model_instance.dataset.id}", - "output", - f"training_{training_instance.id}", - "checkpoint.tf", - ) geojson_data = predict( bbox=bbox, model_path=model_path, @@ -703,9 +658,6 @@ def post(self, request, *args, **kwargs): return Response("No features found", status=204) else: return Response(str(e), status=500) - except Exception as ex: - print(ex) - return Response("Prediction Error", status=500) @api_view(["POST"]) From 7f90d87a701634ae8a95addbd14a0ea05516ea33 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Tue, 3 Sep 2024 17:09:06 +0545 Subject: [PATCH 7/9] ci(testinglogin): added login key from secrets in ci --- .github/workflows/backend_build.yml | 11 +---------- backend/login/views.py | 3 ++- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 7e879cd0..5a890425 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -35,14 +35,6 @@ jobs: with: python-version: 3.8 - - name: Get my current working dir - run: pwd - - - name: Test env vars for python - env: - TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }} - run: python -c "import os; print(os.environ['TESTING_TOKEN'])" - - name: Clone Ramp run: git clone https://github.com/kshitijrajsharma/ramp-code-fAIr.git ramp-code @@ -92,7 +84,6 @@ jobs: - name: Create env run: | cd backend/ - mv sample_env .env export DATABASE_URL=postgis://admin:password@localhost:5432/ai export RAMP_HOME="/home/runner/work/fAIr/fAIr" export TRAINING_WORKSPACE="/home/runner/work/fAIr/fAIr/backend/training" @@ -134,7 +125,7 @@ jobs: TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }} OSM_CLIENT_ID: ${{ secrets.OSM_CLIENT_ID }} OSM_CLIENT_SECRET: ${{ secrets.OSM_CLIENT_SECRET }} - OSM_SECRET_KEY: "" + OSM_SECRET_KEY: ${{ secrets.OSM_SECRET_KEY }} run : | cd backend/ diff --git a/backend/login/views.py b/backend/login/views.py index 321af752..47d52198 100644 --- a/backend/login/views.py +++ b/backend/login/views.py @@ -48,7 +48,8 @@ def get(self, request, format=None): # pragma: no cover json: access_token """ # Generating token through osm_auth library method - token = osm_auth.callback(request.build_absolute_uri()) + uri=request.build_absolute_uri() + token = osm_auth.callback(uri) return JsonResponse(json.loads(token)) From 882d9d9bb47e9df5afc665e505d9a47f80636968 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Tue, 3 Sep 2024 17:21:02 +0545 Subject: [PATCH 8/9] ci(tests): env variable setup while running migrations --- .github/workflows/backend_build.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 5a890425..e34ddc94 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -111,6 +111,9 @@ jobs: - name: Run migrations env: TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }} + OSM_CLIENT_ID: ${{ secrets.OSM_CLIENT_ID }} + OSM_CLIENT_SECRET: ${{ secrets.OSM_CLIENT_SECRET }} + OSM_SECRET_KEY: ${{ secrets.OSM_SECRET_KEY }} run: | cd backend/ python manage.py makemigrations From eb10a1b5cc75d82ba9ce85d062cc532bdcf02dad Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Tue, 3 Sep 2024 19:10:02 +0545 
Subject: [PATCH 9/9] ci(test-db): don't destroy test db after test is done for now --- backend/aiproject/settings.py | 3 +++ backend/tests/test_runners.py | 7 +++++++ 2 files changed, 10 insertions(+) create mode 100644 backend/tests/test_runners.py diff --git a/backend/aiproject/settings.py b/backend/aiproject/settings.py index c82e4c4c..5b10632e 100644 --- a/backend/aiproject/settings.py +++ b/backend/aiproject/settings.py @@ -217,3 +217,6 @@ ) ENABLE_PREDICTION_API = env("ENABLE_PREDICTION_API", default=False) + + +TEST_RUNNER = 'tests.test_runners.NoDestroyTestRunner' diff --git a/backend/tests/test_runners.py b/backend/tests/test_runners.py new file mode 100644 index 00000000..342194fb --- /dev/null +++ b/backend/tests/test_runners.py @@ -0,0 +1,7 @@ +from django.test.runner import DiscoverRunner +from django.db import connections + +class NoDestroyTestRunner(DiscoverRunner): + def teardown_databases(self, old_config, **kwargs): + ## TODO : Do proper teardown + pass
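
Taken together, the series makes the API image (Dockerfile.API, built from
api-requirements.txt) startable without the training stack: tflite-runtime is
commented out of api-requirements.txt in PATCH 1 and moved to requirements.txt
in PATCH 4, and every import that pulls in the prediction machinery is either
gated behind the new ENABLE_PREDICTION_API setting (settings.py, core/urls.py,
core/views.py) or deferred into the task body (core/tasks.py). The standalone
Python sketch below condenses that guard pattern; the flag parsing is a
simplified stand-in for django-environ's env(), and load_predictor() is an
illustrative name, not a helper that exists in the patches.

    import os

    # Stand-in for settings.ENABLE_PREDICTION_API (PATCH 1): off by default,
    # so a deployment that never sets the variable never touches the ML stack.
    ENABLE_PREDICTION_API = os.getenv(
        "ENABLE_PREDICTION_API", "false"
    ).lower() in ("1", "true", "yes")


    def load_predictor():
        """Import the prediction entrypoint only when the flag is enabled.

        The API-only image omits tflite-runtime, so an unconditional
        `from predictor import predict` at module level could fail there.
        """
        if not ENABLE_PREDICTION_API:
            return None
        from predictor import predict  # heavy dependency; full image only

        return predict


    if __name__ == "__main__":
        predict_fn = load_predictor()
        print("prediction API enabled:", predict_fn is not None)

Because core/urls.py appends the /prediction/ route only under the same flag
that guards the view import, an API-only deployment answers that path with a
plain 404 instead of failing at import time.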