From a113ae46f144d9efcc9ab7d16bf0e01c78dc039f Mon Sep 17 00:00:00 2001
From: kshtiijrajsharma <skshitizraj@gmail.com>
Date: Thu, 18 Apr 2024 15:25:03 +0545
Subject: [PATCH 01/35] Separate API and worker

---
 backend/core/urls.py     |  4 ++--
 backend/core/views.py    | 32 +++++++++++++++++---------------
 backend/requirements.txt | 13 ++++++++-----
 3 files changed, 27 insertions(+), 22 deletions(-)

diff --git a/backend/core/urls.py b/backend/core/urls.py
index c67abd63..d740e4dd 100644
--- a/backend/core/urls.py
+++ b/backend/core/urls.py
@@ -5,7 +5,7 @@
 # now import the views.py file into this code
 from .views import (
     AOIViewSet,
-    APIStatus,
+    # APIStatus,
     ConflateGeojson,
     DatasetViewSet,
     FeedbackAOIViewset,
@@ -52,7 +52,7 @@
     path("training/publish/<int:training_id>/", publish_training),
     path("prediction/", PredictionView.as_view()),
     path("feedback/training/submit/", FeedbackView.as_view()),
-    path("status/", APIStatus.as_view()),
+    # path("status/", APIStatus.as_view()),
     path("geojson2osm/", geojson2osmconverter, name="geojson2osmconverter"),
     path("conflate/", ConflateGeojson, name="Conflate Geojson"),
     path("aoi/gpx/<int:aoi_id>/", GenerateGpxView.as_view()),
diff --git a/backend/core/views.py b/backend/core/views.py
index 03f4ffe4..9526cfbe 100644
--- a/backend/core/views.py
+++ b/backend/core/views.py
@@ -12,7 +12,7 @@
 from datetime import datetime
 from tempfile import NamedTemporaryFile
 
-import tensorflow as tf
+# import tensorflow as tf
 from celery import current_app
 from celery.result import AsyncResult
 from django.conf import settings
@@ -60,7 +60,8 @@
     ModelSerializer,
     PredictionParamSerializer,
 )
-from .tasks import train_model
+# from .tasks import train_model
+from celery import Celery
 from .utils import get_dir_size, gpx_generator, process_rawdata, request_rawdata
 
 
@@ -127,9 +128,9 @@ def create(self, validated_data):
         validated_data["created_by"] = user
         # create the model instance
         instance = Training.objects.create(**validated_data)
-
+        celery = Celery()
         # run your function here
-        task = train_model.delay(
+        task = celery.train_model.delay(
             dataset_id=instance.model.dataset.id,
             training_id=instance.id,
             epochs=instance.epochs,
@@ -469,8 +470,9 @@ def post(self, request, *args, **kwargs):
                 batch_size=batch_size,
                 source_imagery=training_instance.source_imagery,
             )
+            celery = Celery()
 
-            task = train_model.delay(
+            task = celery.train_model.delay(
                 dataset_id=instance.model.dataset.id,
                 training_id=instance.id,
                 epochs=instance.epochs,
@@ -612,16 +614,16 @@ def publish_training(request, training_id: int):
     return Response("Training Published", status=status.HTTP_201_CREATED)
 
 
-class APIStatus(APIView):
-    def get(self, request):
-        res = {
-            "tensorflow_version": tf.__version__,
-            "No of GPU Available": len(
-                tf.config.experimental.list_physical_devices("GPU")
-            ),
-            "API Status": "Healthy",  # static for now should be dynamic TODO
-        }
-        return Response(res, status=status.HTTP_200_OK)
+# class APIStatus(APIView):
+#     def get(self, request):
+#         res = {
+#             "tensorflow_version": tf.__version__,
+#             "No of GPU Available": len(
+#                 tf.config.experimental.list_physical_devices("GPU")
+#             ),
+#             "API Status": "Healthy",  # static for now should be dynamic TODO
+#         }
+#         return Response(res, status=status.HTTP_200_OK)
 
 
 class GenerateGpxView(APIView):
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 3ac6c0cd..0cb3549c 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -1,12 +1,15 @@
 django==4.1.4
-# gdal
+
+gdal==3.6.2
 psycopg2
+
+
 djangorestframework==3.14.0
 djangorestframework-gis==1.0
 dj-database-url==1.2.0
 django-leaflet==0.28.3
 drf-yasg==1.21.4
-Pillow
+# Pillow
 django-environ==0.9.0 # used for environment
 django-filter==22.1
 django-cors-headers==3.13.0 # used for enabling cors when frontend is hosted on different server / origin
@@ -19,7 +22,7 @@ validators==0.20.0
 gpxpy==1.5.0
 hot-fair-utilities==1.2.3
 geojson2osm==0.0.1
-osmconflator
-orthogonalizer
+osmconflator==0.0.9
+orthogonalizer==0.0.4
 fairpredictor==0.0.26
-tflite-runtime==2.14.0
\ No newline at end of file
+tflite-runtime==2.14.0
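
Note (not part of the patch series): the `celery.train_model.delay(...)` call above relies on the task being reachable from a freshly created `Celery()` app, and PATCH 08 below reverts to importing the task directly. A minimal, hypothetical sketch of one alternative way to keep the API process free of worker imports is to send the task by its registered name, assuming the worker registers it as `core.tasks.train_model`:

```python
# Illustrative sketch only, not part of the patches.
# Dispatch the training task by name so the API never imports core.tasks
# (and therefore never imports TensorFlow). Assumes the task is registered
# in the worker as "core.tasks.train_model".
from celery import current_app


def queue_training(instance):
    return current_app.send_task(
        "core.tasks.train_model",
        kwargs={
            "dataset_id": instance.model.dataset.id,
            "training_id": instance.id,
            "epochs": instance.epochs,
        },
    )
```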

From 8dd7532ab79312a6a9027f29e32365fe1dee9950 Mon Sep 17 00:00:00 2001
From: Kshitij Raj Sharma <36752999+kshitijrajsharma@users.noreply.github.com>
Date: Thu, 23 May 2024 16:16:27 +0545
Subject: [PATCH 02/35] Update frontend_build_push.yml

---
 .github/workflows/frontend_build_push.yml | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/.github/workflows/frontend_build_push.yml b/.github/workflows/frontend_build_push.yml
index 79b01c75..e74f6cdc 100644
--- a/.github/workflows/frontend_build_push.yml
+++ b/.github/workflows/frontend_build_push.yml
@@ -6,11 +6,6 @@ on:
     paths:
       - 'frontend/**'
       - '.github/workflows/frontend_build_push.yml'
-  pull_request:
-    branches: [ master ]
-    paths:
-      - 'frontend/**'
-      - '.github/workflows/frontend_build_push.yml'
 
 permissions:
   id-token: write

From 68622aebfe7cf52d4c65de118665269c023461b9 Mon Sep 17 00:00:00 2001
From: Omran NAJJAR <omran.najjar@gmail.com>
Date: Mon, 27 May 2024 13:01:42 +0200
Subject: [PATCH 03/35] fix #246 with dynamic API base

---
 .../components/Layout/Feedback/FeedbackAOI.js   |  4 +++-
 .../Layout/TrainingDS/DatasetEditor/AOI.js      | 17 ++++++++++-------
 2 files changed, 13 insertions(+), 8 deletions(-)

diff --git a/frontend/src/components/Layout/Feedback/FeedbackAOI.js b/frontend/src/components/Layout/Feedback/FeedbackAOI.js
index 4469037f..20839512 100644
--- a/frontend/src/components/Layout/Feedback/FeedbackAOI.js
+++ b/frontend/src/components/Layout/Feedback/FeedbackAOI.js
@@ -282,7 +282,9 @@ const FeedbackAOI = (props) => {
                               props.sourceImagery
                                 ? "custom:" + props.sourceImagery
                                 : "Bing"
-                            }&disable_features=boundaries&gpx=https://fair-dev.hotosm.org/api/v1/feedback-aoi/gpx/${
+                            }&disable_features=boundaries&gpx=${
+                              process.env.REACT_APP_API_BASE
+                            }/feedback-aoi/gpx/${
                               layer.id
                             }&map=10.70/18.9226/81.6991`;
                             console.log(url);
diff --git a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
index 5f8be66d..53f28be8 100644
--- a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
+++ b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
@@ -183,7 +183,9 @@ const AOI = (props) => {
                               props.oamImagery
                                 ? "custom:" + props.oamImagery.url
                                 : "Bing"
-                            }&datasets=fbRoads,msBuildings&disable_features=boundaries&map=16.00/17.9253/120.4841&gpx=&gpx=https://fair-dev.hotosm.org/api/v1/aoi/gpx/${
+                            }&datasets=fbRoads,msBuildings&disable_features=boundaries&map=16.00/17.9253/120.4841&gpx=&gpx=${
+                              process.env.REACT_APP_API_BASE
+                            }/aoi/gpx/${
                               layer.aoiId
                             }`,
                             "_blank",
@@ -275,9 +277,9 @@ const AOI = (props) => {
                               props.oamImagery
                                 ? "custom:" + props.oamImagery.url
                                 : "Bing"
-                            }&disable_features=boundaries&gpx=https://fair-dev.hotosm.org/api/v1/aoi/gpx/${
-                              layer.aoiId
-                            }&map=10.70/18.9226/81.6991`,
+                            }&disable_features=boundaries&gpx=${
+                              process.env.REACT_APP_API_BASE
+                            }/aoi/gpx/${layer.aoiId}&map=10.70/18.9226/81.6991`,
                             "_blank",
                             "noreferrer"
                           );
@@ -286,7 +288,8 @@ const AOI = (props) => {
                         {/* <MapTwoTone   /> */}
                         <img
                           alt="OSM logo"
-                          className="osm-logo-small"                         src="https://upload.wikimedia.org/wikipedia/commons/thumb/b/b0/Openstreetmap_logo.svg/256px-Openstreetmap_logo.svg.png"
+                          className="osm-logo-small"
+                          src="https://upload.wikimedia.org/wikipedia/commons/thumb/b/b0/Openstreetmap_logo.svg/256px-Openstreetmap_logo.svg.png"
                         />
                       </IconButton>
                     </Tooltip>
@@ -365,8 +368,8 @@ const AOI = (props) => {
         </Demo>
         {props.mapLayers && props.mapLayers.length === 0 && (
           <Typography variant="body1" component="h2">
-            No TAs yet, start creating one by clicking Draw a rectangle, 3rd down at the top 
-            left of the image/map
+            No TAs yet, start creating one by clicking Draw a rectangle, 3rd
+            down at the top left of the image/map
           </Typography>
         )}
       </Grid>

From 71d4cdbcbd7b03e2a2f99f7054e4fa4a59138642 Mon Sep 17 00:00:00 2001
From: Kshitij Raj Sharma <36752999+kshitijrajsharma@users.noreply.github.com>
Date: Mon, 27 May 2024 17:41:30 +0545
Subject: [PATCH 04/35] Update frontend_build_push.yml

---
 .github/workflows/frontend_build_push.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/frontend_build_push.yml b/.github/workflows/frontend_build_push.yml
index e74f6cdc..d54b03e5 100644
--- a/.github/workflows/frontend_build_push.yml
+++ b/.github/workflows/frontend_build_push.yml
@@ -21,7 +21,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [ 16 ]
+        node-version: [ 16.14.2 ]
 
     steps:
     - uses: actions/checkout@v4

From 79d14e5a9fb0f9f7181a89af0d1125abaf63c8fe Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 11:59:22 +0545
Subject: [PATCH 05/35] Add readme for API requirements

---
 backend/README.md            | 24 +++++++-----------------
 backend/api-requirements.txt | 23 +++++++++++++++++++++++
 backend/requirements.txt     | 28 +---------------------------
 backend/sample_env           |  1 +
 4 files changed, 32 insertions(+), 44 deletions(-)
 create mode 100644 backend/api-requirements.txt

diff --git a/backend/README.md b/backend/README.md
index 2e88a113..392dcc44 100644
--- a/backend/README.md
+++ b/backend/README.md
@@ -14,33 +14,27 @@ This project was bootstrapped with  [Geodjango Template](https://github.com/itsk
     source ./env/bin/activate
 
 ##### Setup Basemodels (Ramp Supported Currently)
+- Install git lfs
+```bash
+sudo apt-get install git-lfs
+```
+
 - Clone Ramp Basemodel 
 ```
 git clone https://github.com/radiantearth/model_ramp_baseline.git
 ```
-OR Download from google drive 
-```
-pip install gdown
-gdown --fuzzy https://drive.google.com/file/d/1wvJhkiOrSlHmmvJ0avkAdu9sslFf5_I0/view?usp=sharing
-```
 
 - Clone Ramp - Code 
 Note: This clone location will be your RAMP_HOME 
 ```
 git clone https://github.com/kshitijrajsharma/ramp-code-fAIr.git ramp-code
 ```
+
 - Copy Basemodel checkpoint to ramp-code
 ```
 cp -r model_ramp_baseline/data/input/checkpoint.tf ramp-code/ramp/checkpoint.tf
 ```
 
-Our Basemodel is available for public download [here](https://drive.google.com/file/d/1wvJhkiOrSlHmmvJ0avkAdu9sslFf5_I0/view?usp=sharing)
-
-You can unzip and  move the downloaded basemodel 
-```
-unzip checkpoint.tf.zip -d ramp-code/ramp  
-```
-
 
 - Remove basemodel repo we don't need it anymore 
 ```
@@ -136,11 +130,7 @@ pip install -r requirements.txt
     You will need more env variables (Such as Ramp home, Training Home) that can be found on ```.sample_env```  
 
 #### Now change your username, password and db name in settings.py accordingly to your database
-    python manage.py makemigrations login
-    python manage.py migrate login
-    python manage.py makemigrations core
-    python manage.py migrate core 
-    python manage.py makemigrations 
+    python manage.py makemigrations login core
     python manage.py migrate
     python manage.py runserver
 ### Now server will be available in your 8000 port on web, you can check out your localhost:8000/admin for admin panel 
diff --git a/backend/api-requirements.txt b/backend/api-requirements.txt
new file mode 100644
index 00000000..47dda210
--- /dev/null
+++ b/backend/api-requirements.txt
@@ -0,0 +1,23 @@
+django==4.1.4
+# gdal==3.6.2
+# psycopg2==2.9.9
+djangorestframework==3.14.0
+djangorestframework-gis==1.0
+dj-database-url==1.2.0
+django-leaflet==0.28.3
+drf-yasg==1.21.4
+django-environ==0.9.0 # used for environment
+django-filter==22.1
+django-cors-headers==3.13.0 # used for enabling cors when frontend is hosted on different server / origin
+osm-login-python==0.0.2
+celery==5.2.7
+redis==4.4.0
+django_celery_results==2.4.0
+flower==1.2.0
+validators==0.20.0
+gpxpy==1.5.0
+geojson2osm==0.0.1
+osmconflator==0.0.9
+orthogonalizer==0.0.4
+fairpredictor==0.0.26
+tflite-runtime==2.14.0
\ No newline at end of file
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 0cb3549c..97733fef 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -1,28 +1,2 @@
-django==4.1.4
-
-gdal==3.6.2
-psycopg2
-
-
-djangorestframework==3.14.0
-djangorestframework-gis==1.0
-dj-database-url==1.2.0
-django-leaflet==0.28.3
-drf-yasg==1.21.4
-# Pillow
-django-environ==0.9.0 # used for environment
-django-filter==22.1
-django-cors-headers==3.13.0 # used for enabling cors when frontend is hosted on different server / origin
-osm-login-python==0.0.2
-celery==5.2.7
-redis==4.4.0
-django_celery_results==2.4.0
-flower==1.2.0
-validators==0.20.0
-gpxpy==1.5.0
+-r api-requirements.txt
 hot-fair-utilities==1.2.3
-geojson2osm==0.0.1
-osmconflator==0.0.9
-orthogonalizer==0.0.4
-fairpredictor==0.0.26
-tflite-runtime==2.14.0
diff --git a/backend/sample_env b/backend/sample_env
index 21e8bae9..a47d9ae5 100644
--- a/backend/sample_env
+++ b/backend/sample_env
@@ -1,3 +1,4 @@
+DEBUG=True
 SECRET_KEY=yl2w)c0boi_ma-1v5)935^2#&m*r!1s9z9^*9e5co^08_ixzo6
 DATABASE_URL=postgis://admin:password@localhost:5432/ai
 EXPORT_TOOL_API_URL=MY_RAW_DATA_URL
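
With this split, an API-only environment can be installed with `pip install -r api-requirements.txt`, while a full training/worker environment uses `pip install -r requirements.txt`, which now pulls in the API requirements via `-r api-requirements.txt` plus `hot-fair-utilities`.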

From afc23a766bc706349896ba8ca440c0238217fa7d Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 12:15:56 +0545
Subject: [PATCH 06/35] Enable psycopg2

---
 backend/api-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/api-requirements.txt b/backend/api-requirements.txt
index 47dda210..0b047bea 100644
--- a/backend/api-requirements.txt
+++ b/backend/api-requirements.txt
@@ -1,6 +1,6 @@
 django==4.1.4
 # gdal==3.6.2
-# psycopg2==2.9.9
+psycopg2
 djangorestframework==3.14.0
 djangorestframework-gis==1.0
 dj-database-url==1.2.0

From 7e5bb7e468158829e26f05149b444008823b6c17 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 14:06:47 +0545
Subject: [PATCH 07/35] Add chips length to models and training

---
 backend/core/models.py |  1 +
 backend/core/tasks.py  | 58 ++++++++++++++++++++++++++++++------------
 2 files changed, 43 insertions(+), 16 deletions(-)

diff --git a/backend/core/models.py b/backend/core/models.py
index 5ad0e284..4b054ea6 100644
--- a/backend/core/models.py
+++ b/backend/core/models.py
@@ -84,6 +84,7 @@ class Training(models.Model):
     finished_at = models.DateTimeField(null=True, blank=True)
     accuracy = models.FloatField(null=True, blank=True)
     epochs = models.PositiveIntegerField()
+    chips_length = models.PositiveIntegerField(default=0)
     batch_size = models.PositiveIntegerField()
     freeze_layers = models.BooleanField(default=False)
 
diff --git a/backend/core/tasks.py b/backend/core/tasks.py
index a3ec613b..8fbca094 100644
--- a/backend/core/tasks.py
+++ b/backend/core/tasks.py
@@ -3,23 +3,14 @@
 import os
 import shutil
 import sys
+import tarfile
 import traceback
 from shutil import rmtree
-import tarfile
 
 import hot_fair_utilities
 import ramp.utils
 import tensorflow as tf
 from celery import shared_task
-from django.conf import settings
-from django.contrib.gis.db.models.aggregates import Extent
-from django.contrib.gis.geos import GEOSGeometry
-from django.shortcuts import get_object_or_404
-from django.utils import timezone
-from hot_fair_utilities import preprocess, train
-from hot_fair_utilities.training import run_feedback
-from predictor import download_imagery, get_start_end_download_coords
-
 from core.models import AOI, Feedback, FeedbackAOI, FeedbackLabel, Label, Training
 from core.serializers import (
     AOISerializer,
@@ -29,6 +20,14 @@
     LabelFileSerializer,
 )
 from core.utils import bbox, is_dir_empty
+from django.conf import settings
+from django.contrib.gis.db.models.aggregates import Extent
+from django.contrib.gis.geos import GEOSGeometry
+from django.shortcuts import get_object_or_404
+from django.utils import timezone
+from hot_fair_utilities import preprocess, train
+from hot_fair_utilities.training import run_feedback
+from predictor import download_imagery, get_start_end_download_coords
 
 logger = logging.getLogger(__name__)
 
@@ -37,6 +36,7 @@
 
 DEFAULT_TILE_SIZE = 256
 
+
 def xz_folder(folder_path, output_filename, remove_original=False):
     """
     Compresses a folder and its contents into a .tar.xz file and optionally removes the original folder.
@@ -47,8 +47,8 @@ def xz_folder(folder_path, output_filename, remove_original=False):
     - remove_original: If True, the original folder is removed after compression.
     """
 
-    if not output_filename.endswith('.tar.xz'):
-        output_filename += '.tar.xz'
+    if not output_filename.endswith(".tar.xz"):
+        output_filename += ".tar.xz"
 
     with tarfile.open(output_filename, "w:xz") as tar:
         tar.add(folder_path, arcname=os.path.basename(folder_path))
@@ -57,6 +57,20 @@ def xz_folder(folder_path, output_filename, remove_original=False):
         shutil.rmtree(folder_path)
 
 
+def get_file_count(path):
+    try:
+        return len(
+            [
+                entry
+                for entry in os.listdir(path)
+                if os.path.isfile(os.path.join(path, entry))
+            ]
+        )
+    except Exception as e:
+        print(f"An error occurred: {e}")
+        return 0
+
+
 @shared_task
 def train_model(
     dataset_id,
@@ -189,7 +203,9 @@ def train_model(
                 rasterize_options=["binary"],
                 georeference_images=True,
             )
-
+            training_instance.chips_length = get_file_count(
+                os.path.join(preprocess_output, "chips")
+            )
             # train
 
             train_output = f"{base_path}/train"
@@ -272,9 +288,19 @@ def train_model(
                 f.write(json.dumps(aoi_serializer.data))
 
             # copy aois and labels to preprocess output before compressing it to tar
-            shutil.copyfile(os.path.join(output_path, "aois.geojson"), os.path.join(preprocess_output,'aois.geojson'))
-            shutil.copyfile(os.path.join(output_path, "labels.geojson"), os.path.join(preprocess_output,'labels.geojson'))
-            xz_folder(preprocess_output, os.path.join(output_path, "preprocessed.tar.xz"), remove_original=True)
+            shutil.copyfile(
+                os.path.join(output_path, "aois.geojson"),
+                os.path.join(preprocess_output, "aois.geojson"),
+            )
+            shutil.copyfile(
+                os.path.join(output_path, "labels.geojson"),
+                os.path.join(preprocess_output, "labels.geojson"),
+            )
+            xz_folder(
+                preprocess_output,
+                os.path.join(output_path, "preprocessed.tar.xz"),
+                remove_original=True,
+            )
 
             # now remove the ramp-data all our outputs are copied to our training workspace
             shutil.rmtree(base_path)
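
A minimal, hypothetical usage sketch of the two helpers added above (paths are illustrative only, not taken from the patch):

```python
# Hypothetical example: count the generated chips, then archive the folder.
chips_dir = "/tmp/ramp-data/preprocess/chips"

n_chips = get_file_count(chips_dir)  # number of regular files; 0 on error
xz_folder(chips_dir, "/tmp/preprocessed", remove_original=False)
# writes /tmp/preprocessed.tar.xz (the ".tar.xz" suffix is appended if missing)
```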

From d10c6470087848b36ac68341569de8ff9cf0b6d9 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 09:36:51 +0000
Subject: [PATCH 08/35] Import the task function directly instead of calling
 it through a bare Celery() app

---
 backend/core/views.py | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)

diff --git a/backend/core/views.py b/backend/core/views.py
index a2163ac9..d920cd40 100644
--- a/backend/core/views.py
+++ b/backend/core/views.py
@@ -60,8 +60,7 @@
     ModelSerializer,
     PredictionParamSerializer,
 )
-# from .tasks import train_model
-from celery import Celery
+from .tasks import train_model
 from .utils import get_dir_size, gpx_generator, process_rawdata, request_rawdata
 
 
@@ -129,10 +128,8 @@ def create(self, validated_data):
         # create the model instance
         instance = Training.objects.create(**validated_data)
 
-        celery = Celery()
-
         # run your function here
-        task = celery.train_model.delay(
+        task = train_model.delay(
             dataset_id=instance.model.dataset.id,
             training_id=instance.id,
             epochs=instance.epochs,
@@ -474,9 +471,7 @@ def post(self, request, *args, **kwargs):
                 batch_size=batch_size,
                 source_imagery=training_instance.source_imagery,
             )
-            celery = Celery()
-
-            task = celery.train_model.delay(
+            task = train_model.delay(
                 dataset_id=instance.model.dataset.id,
                 training_id=instance.id,
                 epochs=instance.epochs,

From e784173073361550fab0d67e0f5dcff502a01076 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 09:41:55 +0000
Subject: [PATCH 09/35] tasks - save chips length when the preprocessing step
 is finished

---
 backend/core/tasks.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/backend/core/tasks.py b/backend/core/tasks.py
index 8fbca094..bfbf7c3b 100644
--- a/backend/core/tasks.py
+++ b/backend/core/tasks.py
@@ -206,6 +206,8 @@ def train_model(
             training_instance.chips_length = get_file_count(
                 os.path.join(preprocess_output, "chips")
             )
+            training_instance.save()
+
             # train
 
             train_output = f"{base_path}/train"

From b642e9ad1fdba3c9329057cd06a0794244adfe05 Mon Sep 17 00:00:00 2001
From: Omran NAJJAR <omran.najjar@gmail.com>
Date: Thu, 30 May 2024 12:10:57 +0200
Subject: [PATCH 10/35] Show dataset size from the chips count field in the
 trainings table

---
 .../Layout/AIModels/AIModelEditor/Trainings.js       | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/frontend/src/components/Layout/AIModels/AIModelEditor/Trainings.js b/frontend/src/components/Layout/AIModels/AIModelEditor/Trainings.js
index 5b3a4cf7..e53d032d 100644
--- a/frontend/src/components/Layout/AIModels/AIModelEditor/Trainings.js
+++ b/frontend/src/components/Layout/AIModels/AIModelEditor/Trainings.js
@@ -50,7 +50,7 @@ const TrainingsList = (props) => {
 
       if (res.error) setError(res.error.response.statusText);
       else {
-        // console.log("gettraining", res.data);
+        //console.log("gettraining", res.data);
 
         return res.data;
       }
@@ -123,17 +123,11 @@ const TrainingsList = (props) => {
       },
     },
     {
-      field: "c",
+      field: "chips_length",
       headerName: "DS size",
       flex: 1,
       renderCell: (params) => {
-        if (params.row.status === "FINISHED")
-          return (
-            <TrainingSize
-              datasetId={props.datasetId}
-              trainingId={params.row.id}
-            ></TrainingSize>
-          );
+        return <>{`${params.value === 0 ? "" : params.value}`}</>;
       },
     },
     {

From 483c18b1851673f6964d585c249193ed70f9332e Mon Sep 17 00:00:00 2001
From: Omran NAJJAR <omran.najjar@gmail.com>
Date: Thu, 30 May 2024 12:23:34 +0200
Subject: [PATCH 11/35] Add changeset hashtags from env variables when opening JOSM

---
 frontend/src/components/Layout/Start/Prediction/Prediction.js | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/frontend/src/components/Layout/Start/Prediction/Prediction.js b/frontend/src/components/Layout/Start/Prediction/Prediction.js
index 519c594c..0716def0 100644
--- a/frontend/src/components/Layout/Start/Prediction/Prediction.js
+++ b/frontend/src/components/Layout/Start/Prediction/Prediction.js
@@ -383,6 +383,10 @@ const Prediction = () => {
         loadurl.searchParams.set("top", bounds._northEast.lat);
         loadurl.searchParams.set("left", bounds._southWest.lng);
         loadurl.searchParams.set("right", bounds._northEast.lng);
+        loadurl.searchParams.set(
+          "changeset_hashtags",
+          process.env.REACT_APP_HASHTAG_PREFIX
+        );
         const loadResponse = await fetch(loadurl);
 
         if (!josmResponse.ok) {
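
For example, with the hashtag prefix set to `#fAIr;#hotosm-fAIr` (see the next patch), `URLSearchParams` percent-encodes the value, so the JOSM remote-control request ends with `&changeset_hashtags=%23fAIr%3B%23hotosm-fAIr`.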

From 05fa247a576f907dd17bc91fadf44da2446a4919 Mon Sep 17 00:00:00 2001
From: Omran NAJJAR <omran.najjar@gmail.com>
Date: Thu, 30 May 2024 12:26:11 +0200
Subject: [PATCH 12/35] Add new env var to the frontend sample env

---
 frontend/.env_sample | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/frontend/.env_sample b/frontend/.env_sample
index 79ee0133..a7b64f82 100644
--- a/frontend/.env_sample
+++ b/frontend/.env_sample
@@ -2,4 +2,5 @@ REACT_APP_CONNECT_ID=
 REACT_APP_TM_API=https://tasking-manager-tm4-production-api.hotosm.org/api/v2/projects/PROJECT_ID/tasks/
 REACT_APP_ENV=Dev
 REACT_APP_API_BASE=http://127.0.0.1:8000/api/v1
-REACT_APP_PREDICTOR_API_BASE=http://127.0.0.1:8001
\ No newline at end of file
+REACT_APP_PREDICTOR_API_BASE=http://127.0.0.1:8001
+REACT_APP_HASHTAG_PREFIX="#fAIr;#hotosm-fAIr"
\ No newline at end of file

From 4b2121d033a49c16e6b509dc0dfc6ab376cde7c7 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 10:32:57 +0000
Subject: [PATCH 13/35] Fix Docker build for backend

---
 backend/Dockerfile     | 2 ++
 backend/Dockerfile_CPU | 1 +
 2 files changed, 3 insertions(+)

diff --git a/backend/Dockerfile b/backend/Dockerfile
index 11d652bf..99137694 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -20,6 +20,8 @@ RUN pip install -r /tmp/docker-requirements.txt
 
 RUN pip install --upgrade setuptools
 COPY requirements.txt /tmp/requirements.txt
+COPY api-requirements.txt /tmp/api-requirements.txt
+
 RUN pip install -r /tmp/requirements.txt
 
 
diff --git a/backend/Dockerfile_CPU b/backend/Dockerfile_CPU
index 159cae07..b6921403 100644
--- a/backend/Dockerfile_CPU
+++ b/backend/Dockerfile_CPU
@@ -21,6 +21,7 @@ RUN pip install -r /tmp/docker-requirements.txt
 RUN pip install --upgrade setuptools
 
 COPY requirements.txt /tmp/requirements.txt
+COPY api-requirements.txt /tmp/api-requirements.txt
 RUN pip install -r /tmp/requirements.txt
 
 COPY docker/ramp/solaris /tmp/solaris
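
Copying `api-requirements.txt` into the image is what fixes the build: since PATCH 05, `requirements.txt` starts with `-r api-requirements.txt`, so `pip install -r /tmp/requirements.txt` can only succeed if the referenced file is present alongside it in `/tmp/`.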

From ec9dfab500b5733f637dfffe78449143b15dd4ff Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 10:42:35 +0000
Subject: [PATCH 14/35] Only push frontend once a new release is made to
 production; also publish release images

---
 .github/workflows/backend_build.yml        | 13 +++--
 .github/workflows/docker_publish_image.yml |  8 +--
 .github/workflows/frontend_build_push.yml  | 59 +++++++++++-----------
 3 files changed, 40 insertions(+), 40 deletions(-)

diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml
index 7d53f39b..7983b9a6 100644
--- a/.github/workflows/backend_build.yml
+++ b/.github/workflows/backend_build.yml
@@ -3,16 +3,15 @@ on:
   push:
     branches:
       - master
-      - fix/build
     paths:
-      - 'backend/**'
-      - '.github/workflows/backend_build.yml'
+      - "backend/**"
+      - ".github/workflows/backend_build.yml"
   pull_request:
     branches:
       - master
     paths:
-      - 'backend/**'
-      - '.github/workflows/backend_build.yml'
+      - "backend/**"
+      - ".github/workflows/backend_build.yml"
 
 jobs:
   Build_on_ubuntu:
@@ -111,7 +110,7 @@ jobs:
           pip install numpy
           pip install GDAL==$(gdal-config --version) --global-option=build_ext --global-option="-I/usr/include/gdal"
 
-      - name : Check Opencv version
+      - name: Check Opencv version
         run: |
           pip freeze | grep opencv
           pip install opencv-python-headless==4.7.0.68
@@ -121,7 +120,7 @@ jobs:
           TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }}
         run: |
           cd backend/
-          
+
 
           export TESTING_TOKEN=$TESTING_TOKEN
           python manage.py makemigrations
diff --git a/.github/workflows/docker_publish_image.yml b/.github/workflows/docker_publish_image.yml
index 2bdb097a..3bf592da 100644
--- a/.github/workflows/docker_publish_image.yml
+++ b/.github/workflows/docker_publish_image.yml
@@ -5,9 +5,11 @@ on:
     branches:
       - master
     paths-ignore:
-      - '.github/workflows/backend_build.yml'
-      - '.github/workflows/frontend_build.yml'
-      - '.github/workflows/frontend_build_push.yml'
+      - ".github/workflows/backend_build.yml"
+      - ".github/workflows/frontend_build.yml"
+      - ".github/workflows/frontend_build_push.yml"
+  release:
+    types: [released]
 
 env:
   REGISTRY: ghcr.io
diff --git a/.github/workflows/frontend_build_push.yml b/.github/workflows/frontend_build_push.yml
index d54b03e5..4bd108c5 100644
--- a/.github/workflows/frontend_build_push.yml
+++ b/.github/workflows/frontend_build_push.yml
@@ -1,11 +1,11 @@
 name: Frontend Build and upload to S3
 
 on:
-  push:
-    branches: [ master ]
+  release:
+    types: [released]
     paths:
-      - 'frontend/**'
-      - '.github/workflows/frontend_build_push.yml'
+      - "frontend/**"
+      - ".github/workflows/frontend_build_push.yml"
 
 permissions:
   id-token: write
@@ -13,7 +13,6 @@ permissions:
 
 jobs:
   build_and_upload:
-
     runs-on: ubuntu-latest
     environment: Production
     env:
@@ -21,30 +20,30 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [ 16.14.2 ]
+        node-version: [16.14.2]
 
     steps:
-    - uses: actions/checkout@v4
-    - uses: actions/setup-node@v4
-      with:
-        node-version: ${{ matrix.node-version }}
-
-    - name: Install dependencies
-      run: cd frontend/ && npm install --legacy-peer-deps
-
-    - name: Build frontend
-      run: cd frontend/ && npm run build
-      env:
-        REACT_APP_API_BASE: ${{ vars.REACT_APP_API_BASE }}
-        REACT_APP_PREDICTOR_API_BASE: ${{ vars.REACT_APP_PREDICTOR_API_BASE }}
-        REACT_APP_ENV: Dev
-
-    - name: Authenticate to AWS
-      uses: aws-actions/configure-aws-credentials@v4
-      with:
-        aws-region: us-east-1
-        role-to-assume: ${{ secrets.AWS_OIDC_ROLE }}
-        role-session-name: fAIrGithub
-
-    - name: Upload to S3
-      run: cd frontend/build && aws s3 sync . s3://${{ vars.FRONTEND_BUCKET }}/
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
+        with:
+          node-version: ${{ matrix.node-version }}
+
+      - name: Install dependencies
+        run: cd frontend/ && npm install --legacy-peer-deps
+
+      - name: Build frontend
+        run: cd frontend/ && npm run build
+        env:
+          REACT_APP_API_BASE: ${{ vars.REACT_APP_API_BASE }}
+          REACT_APP_PREDICTOR_API_BASE: ${{ vars.REACT_APP_PREDICTOR_API_BASE }}
+          REACT_APP_ENV: Dev
+
+      - name: Authenticate to AWS
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-region: us-east-1
+          role-to-assume: ${{ secrets.AWS_OIDC_ROLE }}
+          role-session-name: fAIrGithub
+
+      - name: Upload to S3
+        run: cd frontend/build && aws s3 sync . s3://${{ vars.FRONTEND_BUCKET }}/

From e013b29308f8d96f8e4f49fd0beed4b75657422f Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 16:38:32 +0545
Subject: [PATCH 15/35] feat(commitizen): adds commitizen for release strategy

BREAKING CHANGE: first public release
---
 .cz.toml | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 .cz.toml

diff --git a/.cz.toml b/.cz.toml
new file mode 100644
index 00000000..63d04477
--- /dev/null
+++ b/.cz.toml
@@ -0,0 +1,6 @@
+[tool.commitizen]
+name = "cz_conventional_commits"
+tag_format = "\"v$version\""
+version_scheme = "semver2"
+version = "0.1.0"
+update_changelog_on_bump = true

From 412bd02f76c1e30e36e2d1687958cebfeff51ec5 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 16:52:31 +0545
Subject: [PATCH 16/35] =?UTF-8?q?bump:=20version=200.1.0=20=E2=86=92=201.0?=
 =?UTF-8?q?.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .cz.toml     |  2 +-
 CHANGELOG.md | 17 +++++++++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)
 create mode 100644 CHANGELOG.md

diff --git a/.cz.toml b/.cz.toml
index 63d04477..c8877c72 100644
--- a/.cz.toml
+++ b/.cz.toml
@@ -2,5 +2,5 @@
 name = "cz_conventional_commits"
 tag_format = "\"v$version\""
 version_scheme = "semver2"
-version = "0.1.0"
+version = "1.0.0"
 update_changelog_on_bump = true
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..5ee95684
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,17 @@
+## "v1.0.0" (2024-05-30)
+
+### BREAKING CHANGE
+
+- first public release
+
+### Feat
+
+- **commitizen**: adds commitizen for release strategy
+
+### Fix
+
+- image fix
+
+## v0.1.0 (2024-05-30)
+
+## v0.0.1 (2022-12-22)

From 44e5b0f79bbbfaf8ee335ecbbe64dcf79c9e556c Mon Sep 17 00:00:00 2001
From: Kshitij Raj Sharma <36752999+kshitijrajsharma@users.noreply.github.com>
Date: Thu, 30 May 2024 17:03:08 +0545
Subject: [PATCH 17/35] Create Release.md

---
 docs/Release.md | 10 ++++++++++
 1 file changed, 10 insertions(+)
 create mode 100644 docs/Release.md

diff --git a/docs/Release.md b/docs/Release.md
new file mode 100644
index 00000000..bd080f42
--- /dev/null
+++ b/docs/Release.md
@@ -0,0 +1,10 @@
+We use [commitizen](https://pypi.org/project/commitizen/) to manage our release versions
+
+- Install commitizen
+  ```bash
+  pip install commitizen
+  ```
+- Make sure you follow the commitizen convention by using `cz commit` for your commits
+- Run `cz bump` to bump the version and update the changelog
+- Push your changes: `git push`
+- Push your new tags: `git push --tags`

From 0a821118dd6d979ffbaf534cd8351466675e5c4b Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 20:30:41 +0545
Subject: [PATCH 18/35] Move import dependencies

---
 backend/core/tasks.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/backend/core/tasks.py b/backend/core/tasks.py
index bfbf7c3b..f8c4f87b 100644
--- a/backend/core/tasks.py
+++ b/backend/core/tasks.py
@@ -7,9 +7,7 @@
 import traceback
 from shutil import rmtree
 
-import hot_fair_utilities
-import ramp.utils
-import tensorflow as tf
+
 from celery import shared_task
 from core.models import AOI, Feedback, FeedbackAOI, FeedbackLabel, Label, Training
 from core.serializers import (
@@ -25,8 +23,6 @@
 from django.contrib.gis.geos import GEOSGeometry
 from django.shortcuts import get_object_or_404
 from django.utils import timezone
-from hot_fair_utilities import preprocess, train
-from hot_fair_utilities.training import run_feedback
 from predictor import download_imagery, get_start_end_download_coords
 
 logger = logging.getLogger(__name__)
@@ -82,6 +78,13 @@ def train_model(
     feedback=None,
     freeze_layers=False,
 ):
+    # importing these here so that they are not required when only sending tasks
+    import hot_fair_utilities
+    import ramp.utils
+    import tensorflow as tf
+    from hot_fair_utilities import preprocess, train
+    from hot_fair_utilities.training import run_feedback
+
     training_instance = get_object_or_404(Training, id=training_id)
     training_instance.status = "RUNNING"
     training_instance.started_at = timezone.now()

From 9e8c553c32cd252a0edac2abc8dd70382f608f50 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 21:29:32 +0545
Subject: [PATCH 19/35] fix(tasks): only import lib when function is executed

Fixes the bug where the API alone could not run using only api-requirements.txt
---
 backend/core/tasks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/core/tasks.py b/backend/core/tasks.py
index f8c4f87b..b037ab01 100644
--- a/backend/core/tasks.py
+++ b/backend/core/tasks.py
@@ -78,7 +78,7 @@ def train_model(
     feedback=None,
     freeze_layers=False,
 ):
-    # importing these here so that they are not required when only sending tasks
+    # importing these here so that they are not required when only sending tasks (API-only installs)
     import hot_fair_utilities
     import ramp.utils
     import tensorflow as tf

From 7e3a7509f6062442c295ca1d4dd7d9116b1b2b21 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Thu, 30 May 2024 21:32:52 +0545
Subject: [PATCH 20/35] =?UTF-8?q?bump:=20version=201.0.0=20=E2=86=92=201.0?=
 =?UTF-8?q?.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .cz.toml             |  2 +-
 backend/CHANGELOG.md | 23 +++++++++++++++++++++++
 2 files changed, 24 insertions(+), 1 deletion(-)
 create mode 100644 backend/CHANGELOG.md

diff --git a/.cz.toml b/.cz.toml
index c8877c72..d24d55a3 100644
--- a/.cz.toml
+++ b/.cz.toml
@@ -2,5 +2,5 @@
 name = "cz_conventional_commits"
 tag_format = "\"v$version\""
 version_scheme = "semver2"
-version = "1.0.0"
+version = "1.0.1"
 update_changelog_on_bump = true
diff --git a/backend/CHANGELOG.md b/backend/CHANGELOG.md
new file mode 100644
index 00000000..65f61e2a
--- /dev/null
+++ b/backend/CHANGELOG.md
@@ -0,0 +1,23 @@
+## "v1.0.1" (2024-05-30)
+
+### Fix
+
+- **tasks**: only import lib when function is executed
+
+## v1.0.0 (2024-05-30)
+
+### BREAKING CHANGE
+
+- first public release
+
+### Feat
+
+- **commitizen**: adds commitizen for release strategy
+
+### Fix
+
+- image fix
+
+## v0.1.0 (2024-05-30)
+
+## v0.0.1 (2022-12-22)

From f6d6696d1f70ccf89b5b88262c56d0273e0a9872 Mon Sep 17 00:00:00 2001
From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com>
Date: Sat, 22 Jun 2024 09:43:48 +0100
Subject: [PATCH 21/35] feat(pre-commit-&-pdm): adds pre-commit hooks and pdm
 for dependency management

---
 backend/.gitignore              | 162 ++++++++++++++++++++++++++++++++
 backend/.pre-commit-config.yaml |  23 +++++
 backend/pyproject.toml          |  56 +++++++++++
 3 files changed, 241 insertions(+)
 create mode 100644 backend/.gitignore
 create mode 100644 backend/.pre-commit-config.yaml
 create mode 100644 backend/pyproject.toml

diff --git a/backend/.gitignore b/backend/.gitignore
new file mode 100644
index 00000000..3a8816c9
--- /dev/null
+++ b/backend/.gitignore
@@ -0,0 +1,162 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+#   For a library or package, you might want to ignore these files since the code is
+#   intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+#   This is especially recommended for binary packages to ensure reproducibility, and is more
+#   commonly ignored for libraries.
+#   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+#   in version control.
+#   https://pdm-project.org/#use-with-ide
+.pdm.toml
+.pdm-python
+.pdm-build/
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+#  and can be added to the global gitignore or merged into this file.  For a more nuclear
+#  option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
diff --git a/backend/.pre-commit-config.yaml b/backend/.pre-commit-config.yaml
new file mode 100644
index 00000000..b8b8d8ba
--- /dev/null
+++ b/backend/.pre-commit-config.yaml
@@ -0,0 +1,23 @@
+repos:
+
+  # Versioning: Commit messages & changelog
+  - repo: https://github.com/commitizen-tools/commitizen
+    rev: v3.27.0
+    hooks:
+      - id: commitizen
+        stages: [commit-msg]
+
+  # Lint / autoformat: Python code
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: "v0.4.8"
+    hooks:
+      - id: ruff
+      - id: ruff-format
+
+  # Trailing whitespace
+  -   repo: https://github.com/pre-commit/pre-commit-hooks
+      rev: v2.3.0
+      hooks:
+      -   id: check-yaml
+      -   id: end-of-file-fixer
+      -   id: trailing-whitespace
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
new file mode 100644
index 00000000..3c813601
--- /dev/null
+++ b/backend/pyproject.toml
@@ -0,0 +1,56 @@
+[project]
+name = "fAIr"
+version = "1.0.1"
+description = "AI Assisted Mapping"
+authors = [
+    {name = "HOTOSM", email = "sysadmin@hotosm.org"},
+]
+dependencies = [
+    "django==4.1.4",
+    "psycopg2",
+    "djangorestframework==3.14.0",
+    "djangorestframework-gis==1.0",
+    "dj-database-url==1.2.0",
+    "django-leaflet==0.28.3",
+    "drf-yasg==1.21.4",
+    "django-environ==0.9.0",
+    "django-filter==22.1",
+    "django-cors-headers==3.13.0",
+    "osm-login-python==0.0.2",
+    "celery==5.2.7",
+    "redis==4.4.0",
+    "django-celery-results==2.4.0",
+    "flower==1.2.0",
+    "validators==0.20.0",
+    "gpxpy==1.5.0",
+    "geojson2osm==0.0.1",
+    "osmconflator==0.0.9",
+    "orthogonalizer==0.0.4",
+    "fairpredictor==0.0.26",
+    "tflite-runtime==2.14.0",
+    "hot-fair-utilities==1.2.3",
+]
+requires-python = ">=3.10"
+readme = "README.md"
+license = {text = "AGPL-3.0 license"}
+
+[build-system]
+requires = ["pdm-backend"]
+build-backend = "pdm.backend"
+
+
+[tool.pdm]
+distribution = true
+
+[tool.pdm.dev-dependencies]
+dev = [
+    "commitizen>=3.27.0",
+    "ruff>=0.4.9",
+]
+
+[tool.commitizen]
+name = "cz_conventional_commits"
+tag_format = "\"v$version\""
+version_scheme = "semver2"
+version = "1.0.1"
+update_changelog_on_bump = true

From 95b4d3df7b85beb8dac6c6a1bf88eae752c2c276 Mon Sep 17 00:00:00 2001
From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com>
Date: Sat, 22 Jun 2024 10:02:56 +0100
Subject: [PATCH 22/35] docs(pdm): adds doc about how to install pdm

Updates the README with instructions on how to install pdm
---
 backend/README.md | 82 +++++++++++++++++++++++++----------------------
 1 file changed, 44 insertions(+), 38 deletions(-)

diff --git a/backend/README.md b/backend/README.md
index 392dcc44..f86dbff7 100644
--- a/backend/README.md
+++ b/backend/README.md
@@ -3,7 +3,7 @@
 ## Backend is created with [Django](https://www.djangoproject.com/)
 This project was bootstrapped with  [Geodjango Template](https://github.com/itskshitiz321/geodjangotemplate.git)
 #### For Quickly Getting Started
-**Note:** Depending upon your OS and Env installation will vary, This project tightly depends on [Tensorflow](https://www.tensorflow.org/install/pip) with GPU support so accordingly build your development environment 
+**Note:** Depending upon your OS and environment, installation will vary. This project tightly depends on [Tensorflow](https://www.tensorflow.org/install/pip) with GPU support, so build your development environment accordingly
 ### Install Python3, pip and virtualenv first
 ##### Skip this, step if you already have one
     sudo apt-get install python3
@@ -19,13 +19,13 @@ This project was bootstrapped with  [Geodjango Template](https://github.com/itsk
 sudo apt-get install git-lfs
 ```
 
-- Clone Ramp Basemodel 
+- Clone Ramp Basemodel
 ```
 git clone https://github.com/radiantearth/model_ramp_baseline.git
 ```
 
-- Clone Ramp - Code 
-Note: This clone location will be your RAMP_HOME 
+- Clone Ramp - Code
+Note: This clone location will be your RAMP_HOME
 ```
 git clone https://github.com/kshitijrajsharma/ramp-code-fAIr.git ramp-code
 ```
@@ -36,19 +36,19 @@ cp -r model_ramp_baseline/data/input/checkpoint.tf ramp-code/ramp/checkpoint.tf
 ```
 
 
-- Remove basemodel repo we don't need it anymore 
+- Remove basemodel repo we don't need it anymore
 ```
 rm -rf model_ramp_baseline
 ```
-- Install numpy 
-Numpy needs to be installed before gdal 
+- Install numpy
+Numpy needs to be installed before gdal
 ```
 pip install numpy==1.23.5
 ```
 
-- Install gdal and rasetrio 
-Based on your env : You can either use conda / setup manually on your os 
-for eg on ubuntu : 
+- Install gdal and rasterio
+Based on your env: you can either use conda or set it up manually on your OS,
+e.g. on Ubuntu:
 ```
 sudo add-apt-repository ppa:ubuntugis/ppa && sudo apt-get update
 sudo apt-get install gdal-bin
@@ -58,12 +58,12 @@ export C_INCLUDE_PATH=/usr/include/gdal
 pip install --global-option=build_ext --global-option="-I/usr/include/gdal" GDAL==`gdal-config --version`
 ```
 
-- Install Ramp - Dependecies 
+- Install Ramp - Dependencies
 ```
 cd ramp-code && cd colab && make install
 ```
 
-- For Conda users : You may need to install rtree, gdal , rasterio & imagecodecs separately 
+- For Conda users: you may need to install rtree, gdal, rasterio & imagecodecs separately
 
 ```
 conda install -c conda-forge rtree
@@ -82,14 +82,14 @@ conda install -c conda-forge imagecodecs
 pip install --upgrade setuptools
 ```
 
-- Install fAIr Utilities 
+- Install fAIr Utilities
 ```
 pip install hot-fair-utilities==1.0.41
 ```
 
-**Remember In order to run fAIr , You need to configure your PC with tensorflow - GPU Support** 
+**Remember: in order to run fAIr, you need to configure your PC with TensorFlow GPU support**
 
-You can check your GPU by : 
+You can check your GPU by :
 
 ```
 import tensorflow as tf
@@ -98,82 +98,88 @@ print("Num GPUs Available: ", len(tf.config.experimental.list_physical_devices('
 
 
 - Install psycopg2
-Again based on your os/env you can do manual installation 
-for eg : on ubuntu : 
+Again, based on your OS/env you can do a manual installation,
+e.g. on Ubuntu:
 ```
 sudo apt-get install python3-psycopg2
 ```
 
-- Install redis server on your pc 
+- Install redis server on your pc
 
 ```
 sudo apt install redis
 ```
 
-- Finally installl pip dependencies 
+- Install pdm for dependency management
 
 ```
-pip install -r requirements.txt
+pip install pdm
+```
+
+- Finally install project dependencies
+
+```
+pdm install
 ```
 
 ### Make sure you have postgresql installed with postgis extension enabled
 
 
-#### Configure .env: 
+#### Configure .env:
     Create .env in the root backend project , and add the credentials as provided on .env_sample , Export your secret key and database url to your env
 
-    Export your database url 
+    Export your database url
     ```
     export DATABASE_URL=postgis://postgres:postgres@localhost:5432/ai
     ```
-    
-    You will need more env variables (Such as Ramp home, Training Home) that can be found on ```.sample_env```  
+
+    You will need more env variables (Such as Ramp home, Training Home) that can be found on ```.sample_env```
 
 #### Now change your username, password and db name in settings.py accordingly to your database
     python manage.py makemigrations login core
     python manage.py migrate
     python manage.py runserver
-### Now server will be available in your 8000 port on web, you can check out your localhost:8000/admin for admin panel 
+### Now server will be available in your 8000 port on web, you can check out your localhost:8000/admin for admin panel
 To login on admin panel, create your superuser and login with your credentials restarting the server
 
     python manage.py createsuperuser
 
-## Authentication 
+## Authentication
 fAIr uses oauth2.0 Authentication using [osm-login-python](https://github.com/kshitijrajsharma/osm-login-python)
 1. Get your login Url
     Hit ```/api/v1/auth/login/ ```
-    - URL will give you login URL which you can use to provide your osm credentials and authorize fAIr 
+    - URL will give you login URL which you can use to provide your osm credentials and authorize fAIr
     - After successful login  you will get access-token that you can use across all osm login required endpoints in fAIr
-2. Check authentication by getting back your data 
+2. Check authentication by getting back your data
     Hit ```/api/v1/auth/me/```
-    - URL requires access-token as header and in return you will see your osm username, id and image url 
+    - URL requires access-token as header and in return you will see your osm username, id and image url
 
 
-## Start celery workers 
+## Start celery workers
 
--  Start celery workers 
+-  Start celery workers
 
 ```
 celery -A aiproject worker --loglevel=debug -n my_worker
 ```
 
-- Monitor using flower 
-if  you are using redis as result backend, api supports both options django / redis 
+- Monitor using flower
+if  you are using redis as result backend, api supports both options django / redis
 You can start flower to start monitoring your tasks
 ```
-celery -A aiproject  --broker=redis://127.0.0.1:6379/0 flower 
+celery -A aiproject  --broker=redis://127.0.0.1:6379/0 flower
 ```
 
-## Run Tests 
+## Run Tests
 
 ```
 python manage.py test
 ```
 
 
-# Build fAIr with Docker for Development 
-- Install all the required drivers for your graphics to access it from containers, and check your graphics and drivers with ```nvidia-smi``` . Up to now only nvidia is Supported 
-- Follow docker_sample_env to create ```.env``` file in your dir 
+# Build fAIr with Docker for Development
+- Install all the required drivers for your graphics card to access it from containers, and check your graphics card and drivers with ```nvidia-smi```. Up to now only nvidia is supported
+- Follow docker_sample_env to create ```.env``` file in your dir
 - Build the Image
 
 ```

From 779e9cf54e856d102e4e0e6a1550770efc46f571 Mon Sep 17 00:00:00 2001
From: natrimmer <nicholas.trimmer@mcgovern.org>
Date: Wed, 26 Jun 2024 16:25:06 -0700
Subject: [PATCH 23/35] Closes #213

---
 frontend/src/components/Layout/Home/Home.js | 20 +++++---------------
 1 file changed, 5 insertions(+), 15 deletions(-)

diff --git a/frontend/src/components/Layout/Home/Home.js b/frontend/src/components/Layout/Home/Home.js
index 4211cf6d..8dd5072c 100644
--- a/frontend/src/components/Layout/Home/Home.js
+++ b/frontend/src/components/Layout/Home/Home.js
@@ -26,23 +26,13 @@ const GetStarted = () => {
         variant="body1"
         style={{ color: "#3D3D3D", fontSize: "18px", marginBottom: "50px" }}
       >
-        fAIr is an open AI-assisted mapping service developed by the
-        Humanitarian OpenStreetMap Team (HOT) that aims to improve the
-        efficiency and accuracy of mapping efforts for humanitarian purposes.
-        The service uses AI models, specifically computer vision techniques, to
-        detect objects such as buildings, roads, waterways, and trees from
-        satellite and UAV imagery. The name fAIr is derived from the following
-        terms:
+        fAIr performs mapping in the same way as human mappers using HOT's Tasking Manager. It looks at UAV imagery and produces map data that can be added to OpenStreetMap (OSM). Tests show a 100% speedup compared to manual mapping. It uses Artificial Intelligence (AI) to accomplish this.
         <br />
         <br />
-        <ul style={{ listStyleType: "none", paddingLeft: 0 }}>
-          <li>f: for freedom and free and open-source software</li>
-          <li>AI: for Artificial Intelligence</li>
-          <li>
-            r: for resilience and our responsibility for our communities and the
-            role we play within humanitarian mapping
-          </li>
-        </ul>
+        fAIr is developed by the Humanitarian OpenStreetMap Team (HOT) and all the software is free and open source.
+        <br />
+        <br />
+        Before fAIr is used, it needs to be fine-tuned by training on high quality map data for a small representative part of the geographical region where it is to be used.
       </Typography>
       <div
         style={{

From 8033d1ea88c289e8b6464aa59b4a454b4fbffb25 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 17 Jul 2024 16:13:35 +0545
Subject: [PATCH 24/35] feat(aoi-upload): let user upload aoi from geojson file

---
 frontend/package.json                         |   1 +
 .../Layout/TrainingDS/DatasetEditor/AOI.js    | 188 +++++++++++-------
 .../TrainingDS/DatasetEditor/DatasetEditor.js |   1 +
 3 files changed, 119 insertions(+), 71 deletions(-)

diff --git a/frontend/package.json b/frontend/package.json
index 4604544b..8e5c18ea 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -18,6 +18,7 @@
     "@mui/material": "^5.6.1",
     "@mui/styles": "^5.12.0",
     "@mui/x-data-grid": "^5.17.12",
+    "@terraformer/wkt": "^2.2.1",
     "@testing-library/jest-dom": "^5.16.4",
     "@testing-library/react": "^12.1.4",
     "@testing-library/user-event": "^13.5.0",
diff --git a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
index 53f28be8..667dcbc2 100644
--- a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
+++ b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
@@ -11,24 +11,25 @@ import {
   ListItemText,
   Pagination,
   Snackbar,
-  SvgIcon,
+  Tooltip,
   Typography,
+  Button,
 } from "@mui/material";
-import Tooltip from "@mui/material/Tooltip";
 import { styled } from "@mui/material/styles";
 import DeleteIcon from "@mui/icons-material/Delete";
-import MapIcon from "@mui/icons-material/Map";
+import AddIcon from "@mui/icons-material/Add";
 import FolderIcon from "@mui/icons-material/Folder";
 import { MapTwoTone, ZoomInMap } from "@mui/icons-material";
 import usePagination from "./Pagination";
 import { makeStyles, withStyles } from "@material-ui/core/styles";
 import ScreenshotMonitorIcon from "@mui/icons-material/ScreenshotMonitor";
-
 import PlaylistRemoveIcon from "@mui/icons-material/PlaylistRemove";
 import { useMutation } from "react-query";
 import axios from "../../../../axios";
 import AOIDetails from "./AOIDetails";
 import AuthContext from "../../../../Context/AuthContext";
+import * as Terraformer from "@terraformer/wkt";
+
 const Demo = styled("div")(({ theme }) => ({
   backgroundColor: theme.palette.background.paper,
 }));
@@ -40,11 +41,36 @@ const ListItemWithWiderSecondaryAction = withStyles({
 })(ListItem);
 
 const PER_PAGE = 5;
+const DEFAULT_FILTER = {
+  items: [],
+  linkOperator: "and",
+  quickFilterValues: [],
+  quickFilterLogicOperator: "and",
+};
+
+const postAoi = async (polygon, dataset, accessToken) => {
+  console.log("Posting AOI");
+  console.log(dataset);
+  const headers = {
+    "Content-Type": "application/json",
+    "access-token": accessToken,
+  };
+  const data = {
+    geom: `SRID=4326;${polygon}`,
+    dataset,
+  };
+  const response = await axios.post("/aoi/", data, { headers });
+  console.log(response.data);
+  return response.data;
+};
+
 const AOI = (props) => {
   const [dense, setDense] = useState(true);
   const count = Math.ceil(props.mapLayers.length / PER_PAGE);
   let [page, setPage] = useState(1);
   const [openSnack, setOpenSnack] = useState(false);
+  const [fileError, setFileError] = useState(null);
+  const [geoJsonFile, setGeoJsonFile] = useState(null);
   let _DATA = usePagination(
     props.mapLayers.filter((e) => e.type === "aoi"),
     PER_PAGE
@@ -53,7 +79,7 @@ const AOI = (props) => {
     setPage(p);
     _DATA.jump(p);
   };
-  // console.log("_DATA", _DATA);
+
   useEffect(() => {
     return () => {};
   }, [props]);
@@ -70,16 +96,12 @@ const AOI = (props) => {
       });
 
       if (res.error) {
-        // setMapError(res.error.response.statusText);
         console.log(res.error.response.statusText);
       } else {
-        // success full fetch
-
         return res.data;
       }
     } catch (e) {
       console.log("isError", e);
-    } finally {
     }
   };
   const { mutate: mutateFetch, data: fetchResult } =
@@ -106,11 +128,74 @@ const AOI = (props) => {
       }
     } catch (e) {
       console.log("isError", e);
-    } finally {
     }
   };
   const { mutate: mutateDeleteAOI } = useMutation(DeleteAOI);
 
+  const handleFileUpload = async (event) => {
+    const file = event.target.files[0];
+    if (file) {
+      const fileName = file.name.toLowerCase();
+      if (!fileName.endsWith(".geojson")) {
+        setFileError("Invalid file format. Please upload a .geojson file.");
+        return;
+      }
+      const reader = new FileReader();
+      reader.onload = async (e) => {
+        try {
+          const geoJson = JSON.parse(e.target.result);
+          let geometry;
+
+          if (geoJson.type === "FeatureCollection") {
+            // if (geoJson.features.length > 1) {
+            //   setFileError(
+            //     "Feature collection contains multiple features. Only uploaded first one"
+            //   );
+            // }
+            // TODO : for featurecollection loop through the features and add AOI one by one
+            const feature = geoJson.features[0];
+            if (
+              feature.geometry.type !== "Polygon" &&
+              feature.geometry.type !== "MultiPolygon"
+            ) {
+              setFileError("GeoJSON must contain a Polygon or MultiPolygon.");
+              return;
+            }
+            geometry = feature.geometry;
+          } else if (geoJson.type === "Feature") {
+            if (
+              geoJson.geometry.type !== "Polygon" &&
+              geoJson.geometry.type !== "MultiPolygon"
+            ) {
+              setFileError(
+                "Feature geometry type must be Polygon or MultiPolygon."
+              );
+              return;
+            }
+            geometry = geoJson.geometry;
+          } else if (
+            geoJson.type === "Polygon" ||
+            geoJson.type === "MultiPolygon"
+          ) {
+            geometry = geoJson;
+          } else {
+            setFileError("Invalid GeoJSON format.");
+            return;
+          }
+
+          const wkt = Terraformer.geojsonToWKT(geometry);
+          await postAoi(wkt, props.datasetId, accessToken);
+          setFileError(null);
+          setGeoJsonFile(null);
+        } catch (error) {
+          console.error(error);
+          setFileError("Error processing GeoJSON file.");
+        }
+      };
+      reader.readAsText(file);
+    }
+  };
+
   return (
     <>
       <Grid item md={12} className="card" marginBottom={1}>
@@ -119,6 +204,28 @@ const AOI = (props) => {
             Training Areas{` (${props.mapLayers.length})`}
           </Typography>
         </Tooltip>
+        <input
+          accept=".geojson"
+          style={{ display: "none" }}
+          id="geojson-upload"
+          type="file"
+          onChange={handleFileUpload}
+        />
+        <label htmlFor="geojson-upload">
+          <Button
+            variant="contained"
+            color="primary"
+            component="span"
+            startIcon={<AddIcon />}
+          >
+            Upload
+          </Button>
+        </label>
+        {fileError && (
+          <Alert severity="error" onClose={() => setFileError(null)}>
+            {fileError}
+          </Alert>
+        )}
         <Demo>
           {props.mapLayers && props.mapLayers.length > PER_PAGE && (
             <Pagination
@@ -159,7 +266,6 @@ const AOI = (props) => {
                             ""
                           )}
                         </span>
-                        {/* add here a container to get the AOI status from DB */}
                         {layer.aoiId && (
                           <AOIDetails aoiId={layer.aoiId}></AOIDetails>
                         )}
@@ -167,40 +273,6 @@ const AOI = (props) => {
                     }
                   />
                   <ListItemSecondaryAction>
-                    {/* <IconButton aria-label="comments">
-                   <DeleteIcon />
-                </IconButton> */}
-                    {/* <Tooltip title="Create map data in RapID Editor">
-                      <IconButton
-                        aria-label="comments"
-                        sx={{ width: 24, height: 24 }}
-                        className="margin1 transparent"
-                        onClick={(e) => {
-                          // mutateFetch(layer.aoiId);
-                          // console.log("Open in Editor")
-                          window.open(
-                            `https://rapideditor.org/rapid#background=${
-                              props.oamImagery
-                                ? "custom:" + props.oamImagery.url
-                                : "Bing"
-                            }&datasets=fbRoads,msBuildings&disable_features=boundaries&map=16.00/17.9253/120.4841&gpx=&gpx=${
-                              process.env.REACT_APP_API_BASE
-                            }/aoi/gpx/${
-                              layer.aoiId
-                            }`,
-                            "_blank",
-                            "noreferrer"
-                          );
-                        }}
-                      >
-                       
-                        <img
-                          alt="RapiD logo"
-                          className="editor-logo-small"
-                          src="/rapid-logo.png"
-                        />
-                      </IconButton>
-                    </Tooltip> */}
                     <Tooltip title="Create map data in JOSM Editor">
                       <IconButton
                         aria-label="comments"
@@ -208,9 +280,6 @@ const AOI = (props) => {
                         className="margin1 transparent"
                         onClick={async (e) => {
                           try {
-                            // mutateFetch(layer.aoiId);
-                            console.log("layer", layer);
-
                             const Imgurl = new URL(
                               "http://127.0.0.1:8111/imagery"
                             );
@@ -224,10 +293,6 @@ const AOI = (props) => {
                               props.oamImagery.url
                             );
                             const imgResponse = await fetch(Imgurl);
-                            // bounds._southWest.lng,
-                            // bounds._southWest.lat,
-                            // bounds._northEast.lng,
-                            // bounds._northEast.lat,
                             const loadurl = new URL(
                               "http://127.0.0.1:8111/load_and_zoom"
                             );
@@ -270,8 +335,6 @@ const AOI = (props) => {
                         sx={{ width: 24, height: 24 }}
                         className="margin1 transparent"
                         onClick={(e) => {
-                          // mutateFetch(layer.aoiId);
-                          // console.log("Open in Editor")
                           window.open(
                             `https://www.openstreetmap.org/edit/#background=${
                               props.oamImagery
@@ -285,7 +348,6 @@ const AOI = (props) => {
                           );
                         }}
                       >
-                        {/* <MapTwoTone   /> */}
                         <img
                           alt="OSM logo"
                           className="osm-logo-small"
@@ -300,22 +362,12 @@ const AOI = (props) => {
                         className="margin1"
                         onClick={(e) => {
                           mutateFetch(layer.aoiId);
-                          console.log("Call raw data API to fetch OSM data");
                         }}
                       >
                         <MapTwoTone fontSize="small" />
                       </IconButton>
                     </Tooltip>
 
-                    {/* <IconButton aria-label="comments"
-                className="margin1"
-                disabled
-                onClick={(e)=> {
-
-                  console.log("Remove labels")
-                }}>
-                   <PlaylistRemoveIcon />
-                </IconButton> */}
                     <Tooltip title="Zoom to TA">
                       <IconButton
                         sx={{ width: 24, height: 24 }}
@@ -339,7 +391,6 @@ const AOI = (props) => {
                               return accumulator + curValue.lng;
                             },
                             0) / layer.latlngs.length;
-                          // [lat, lng] are the centroid of the polygon
                           props.selectAOIHandler([lat, lng], 17);
                         }}
                       >
@@ -352,9 +403,6 @@ const AOI = (props) => {
                         sx={{ width: 24, height: 24 }}
                         className="margin-left-13"
                         onClick={(e) => {
-                          // console.log(
-                          //   `layer.aoiId ${layer.aoiId} and layer.id ${layer.id}`
-                          // );
                           mutateDeleteAOI(layer.aoiId, layer.id);
                         }}
                       >
@@ -377,7 +425,6 @@ const AOI = (props) => {
         open={openSnack}
         autoHideDuration={5000}
         onClose={() => {
-          console.log("openSnack", openSnack);
           setOpenSnack(false);
         }}
         message={
@@ -395,7 +442,6 @@ const AOI = (props) => {
             </span>
           </Alert>
         }
-        // action={action}
         color="red"
         anchorOrigin={{ vertical: "bottom", horizontal: "right" }}
       />
diff --git a/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js b/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js
index 10bfb8f4..10560775 100644
--- a/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js
+++ b/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js
@@ -117,6 +117,7 @@ function DatasetEditor() {
               mapLayers={mapLayers.filter((i) => i.type === "aoi")}
               selectAOIHandler={selectAOIHandler}
               deleteAOIButton={deleteAOIButton}
+              datasetId={dataset.id}
             ></AOI>
             <TMProject addtoMap={AddtoMapHandler}></TMProject>
           </Grid>
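
For reference, a minimal sketch of the flow that the new handleFileUpload/postAoi code implements. It assumes the same "/aoi/" endpoint, "access-token" header, and EWKT "geom" payload used by postAoi above, plus a generic axios instance and access token supplied by the caller.

// Sketch only: convert an uploaded GeoJSON geometry to WKT with @terraformer/wkt
// and post it as an AOI, mirroring handleFileUpload + postAoi above.
// Assumptions: the axios instance and accessToken are provided by the caller.
import axios from "axios";
import { geojsonToWKT } from "@terraformer/wkt";

const uploadAoiFromGeojson = async (geoJson, datasetId, accessToken) => {
  // Accept a bare geometry, a Feature, or the first feature of a FeatureCollection.
  const geometry =
    geoJson.type === "FeatureCollection"
      ? geoJson.features[0].geometry
      : geoJson.type === "Feature"
      ? geoJson.geometry
      : geoJson;

  if (geometry.type !== "Polygon" && geometry.type !== "MultiPolygon") {
    throw new Error("GeoJSON must contain a Polygon or MultiPolygon.");
  }

  // Convert to WKT and post it the same way postAoi does (EWKT with SRID 4326).
  const wkt = geojsonToWKT(geometry);
  const response = await axios.post(
    "/aoi/",
    { geom: `SRID=4326;${wkt}`, dataset: datasetId },
    { headers: { "Content-Type": "application/json", "access-token": accessToken } }
  );
  return response.data;
};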

From 8adf03bb0ebe016d492ba105dd8878b232b3101c Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 17 Jul 2024 16:36:31 +0545
Subject: [PATCH 25/35] refactor(aoi): clean unused code

---
 .../src/components/Layout/TrainingDS/DatasetEditor/AOI.js   | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
index 667dcbc2..2dc6860b 100644
--- a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
+++ b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js
@@ -41,12 +41,6 @@ const ListItemWithWiderSecondaryAction = withStyles({
 })(ListItem);
 
 const PER_PAGE = 5;
-const DEFAULT_FILTER = {
-  items: [],
-  linkOperator: "and",
-  quickFilterValues: [],
-  quickFilterLogicOperator: "and",
-};
 
 const postAoi = async (polygon, dataset, accessToken) => {
   console.log("Posting AOI");

From c7f8d2e45debbc99c3e325440e8ee05bdd9328b9 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 17 Jul 2024 16:42:41 +0545
Subject: [PATCH 26/35] Check for frontend build, remove old versions of Node

---
 .github/workflows/frontend_build.yml | 50 +++++++++++++---------------
 1 file changed, 24 insertions(+), 26 deletions(-)

diff --git a/.github/workflows/frontend_build.yml b/.github/workflows/frontend_build.yml
index 23b091c1..ab456ba2 100644
--- a/.github/workflows/frontend_build.yml
+++ b/.github/workflows/frontend_build.yml
@@ -2,43 +2,41 @@ name: Frontend Build
 
 on:
   push:
-    branches: [ master ]
+    branches: [master]
     paths:
-      - 'frontend/**'
-      - '.github/workflows/frontend_build.yml'
+      - "frontend/**"
+      - ".github/workflows/frontend_build.yml"
   pull_request:
-    branches: [ master ]
+    branches: [master]
     paths:
-      - 'frontend/**'
-      - '.github/workflows/frontend_build.yml'
+      - "frontend/**"
+      - ".github/workflows/frontend_build.yml"
 
 jobs:
   Build_On_Ubuntu:
-
     runs-on: ubuntu-latest
     env:
       CI: false
 
     strategy:
       matrix:
-        node-version: [ 16.14.2, 16, 18, 20 ]
+        node-version: [18, 20]
 
     steps:
-    - name: Checkout repository
-      uses: actions/checkout@v4
-
-    - name: Set up Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v4
-      with:
-        node-version: ${{ matrix.node-version }}
-
-    - name: Install dependencies
-      run: |
-        cd frontend/
-        npm install --legacy-peer-deps
-
-    - name: Build
-      run: |
-        cd frontend/
-        npm run build
-
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v4
+        with:
+          node-version: ${{ matrix.node-version }}
+
+      - name: Install dependencies
+        run: |
+          cd frontend/
+          npm install --legacy-peer-deps
+
+      - name: Build
+        run: |
+          cd frontend/
+          npm run build

From 9d65e427f840be31ddb1fb941513f5cee6e3afe7 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 17 Jul 2024 16:45:52 +0545
Subject: [PATCH 27/35] Remove node restriction

---
 frontend/package.json | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/frontend/package.json b/frontend/package.json
index 8e5c18ea..6f992877 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -2,9 +2,6 @@
   "name": "fair",
   "version": "0.1.0",
   "private": true,
-  "engines": {
-    "node": "16.14.2"
-  },
   "dependencies": {
     "@emotion/react": "^11.9.0",
     "@emotion/styled": "^11.8.1",
@@ -64,4 +61,4 @@
       "last 1 safari version"
     ]
   }
-}
+}
\ No newline at end of file

From ec9752121c1088667d409dba3e693be1ff69de82 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 17 Jul 2024 16:54:40 +0545
Subject: [PATCH 28/35] Add dev dependency ajv

---
 frontend/package.json | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/frontend/package.json b/frontend/package.json
index 6f992877..bcb8a0b7 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -60,5 +60,8 @@
       "last 1 firefox version",
       "last 1 safari version"
     ]
+  },
+  "devDependencies": {
+    "ajv": "^7.2.4"
   }
-}
\ No newline at end of file
+}

From aff00d192f601bba40b97f5fd314eae623e9c1c5 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 17 Jul 2024 16:58:55 +0545
Subject: [PATCH 29/35] Restore check for Node version 16

---
 .github/workflows/frontend_build.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/frontend_build.yml b/.github/workflows/frontend_build.yml
index ab456ba2..c9b1ec9a 100644
--- a/.github/workflows/frontend_build.yml
+++ b/.github/workflows/frontend_build.yml
@@ -20,7 +20,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [18, 20]
+        node-version: [16, 18, 20]
 
     steps:
       - name: Checkout repository

From 6ac1607de2e1dc59bd0120c668f0fbe63f720e42 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 24 Jul 2024 22:08:59 +0545
Subject: [PATCH 30/35] Upgrade TreeView

---
 frontend/package.json                                       | 1 +
 .../components/Layout/AIModels/AIModelEditor/FilesTree.js   | 6 +++---
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/frontend/package.json b/frontend/package.json
index bcb8a0b7..b0ab2404 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -15,6 +15,7 @@
     "@mui/material": "^5.6.1",
     "@mui/styles": "^5.12.0",
     "@mui/x-data-grid": "^5.17.12",
+    "@mui/x-tree-view": "^7.11.0",
     "@terraformer/wkt": "^2.2.1",
     "@testing-library/jest-dom": "^5.16.4",
     "@testing-library/react": "^12.1.4",
diff --git a/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js b/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js
index 48729c50..9ff8f7cf 100644
--- a/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js
+++ b/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js
@@ -1,5 +1,5 @@
 import React, { useEffect, useState } from "react";
-import TreeView from "@mui/lab/TreeView";
+import { SimpleTreeView } from "@mui/x-tree-view";
 import TreeItem from "@mui/lab/TreeItem";
 import ExpandMoreIcon from "@mui/icons-material/ExpandMore";
 import ExpandLessIcon from "@mui/icons-material/ExpandLess";
@@ -73,7 +73,7 @@ const FilesTree = (props) => {
           <LinearProgress color="hot" />
         </Stack>
       )}
-      <TreeView
+      <SimpleTreeView
         aria-label="file system navigator"
         defaultCollapseIcon={<ExpandMoreIcon />}
         defaultExpandIcon={<ExpandLessIcon />}
@@ -126,7 +126,7 @@ const FilesTree = (props) => {
         {/* <TreeItem key={key} nodeId="1" label="Applications">
               <TreeItem nodeId="2" label="Calendar" />
             </TreeItem> */}
-      </TreeView>
+      </SimpleTreeView>
     </>
   );
 };
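
For reference, a minimal sketch of a tree built purely on @mui/x-tree-view (assuming its v7 exports SimpleTreeView and TreeItem, and the v7 itemId prop); note the FilesTree component above still pulls TreeItem from @mui/lab.

// Sketch only: a static tree rendered with @mui/x-tree-view alone.
// Assumption: @mui/x-tree-view v7 API (itemId instead of nodeId).
import * as React from "react";
import { SimpleTreeView, TreeItem } from "@mui/x-tree-view";

const FilesTreeSketch = () => (
  <SimpleTreeView aria-label="file system navigator">
    <TreeItem itemId="applications" label="Applications">
      <TreeItem itemId="calendar" label="Calendar" />
    </TreeItem>
  </SimpleTreeView>
);

export default FilesTreeSketch;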

From 26cd894fbc03a8c8d242cebeb10e64291b4b0423 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 31 Jul 2024 07:35:45 +0545
Subject: [PATCH 31/35] Add testing to backend build CI

---
 .github/workflows/backend_build.yml | 20 +++++++++++++++-----
 1 file changed, 15 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml
index 7983b9a6..db9c5137 100644
--- a/.github/workflows/backend_build.yml
+++ b/.github/workflows/backend_build.yml
@@ -1,8 +1,9 @@
-name: Backend Build
+name: Backend Build and Tests
 on:
   push:
     branches:
       - master
+      - fix/ci-tests
     paths:
       - "backend/**"
       - ".github/workflows/backend_build.yml"
@@ -115,17 +116,26 @@ jobs:
           pip freeze | grep opencv
           pip install opencv-python-headless==4.7.0.68
 
-      - name: Run tests
+      - name: Run migrations
         env:
           TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }}
         run: |
           cd backend/
-
-
-          export TESTING_TOKEN=$TESTING_TOKEN
           python manage.py makemigrations
           python manage.py makemigrations core
           python manage.py makemigrations login
           python manage.py migrate
           python manage.py migrate login
           python manage.py migrate core
+
+      - name: Run tests
+        env:
+          TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }}
+          OSM_CLIENT_ID: ${{ secrets.OSM_CLIENT_ID }}
+          OSM_CLIENT_SECRET: ${{ secrets.OSM_CLIENT_SECRET }}
+          OSM_SECRET_KEY: ""
+
+        run: |
+          cd backend/
+          coverage run manage.py test tests
+          coverage report

From 1c81ec72f5e707234ad45c7f8099eb1955fc8941 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 31 Jul 2024 07:47:11 +0545
Subject: [PATCH 32/35] [ci] Fix installation requirements for tests

---
 .github/workflows/backend_build.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml
index db9c5137..1a5967d8 100644
--- a/.github/workflows/backend_build.yml
+++ b/.github/workflows/backend_build.yml
@@ -87,6 +87,8 @@ jobs:
         run: |
           cd backend/
           pip install -r requirements.txt
+          pip install coverage
+          pip install factory-boy
 
       - name: Creating env
         run: |

From c7408470ac134c7737b38e350ca8ed4b34b7ba61 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 31 Jul 2024 08:28:05 +0545
Subject: [PATCH 33/35] [refactor] Remove failing test cases to check if only
 creation works

---
 backend/tests/test_endpoints.py | 59 +++------------------------------
 1 file changed, 5 insertions(+), 54 deletions(-)

diff --git a/backend/tests/test_endpoints.py b/backend/tests/test_endpoints.py
index a0447590..4610b38d 100644
--- a/backend/tests/test_endpoints.py
+++ b/backend/tests/test_endpoints.py
@@ -89,58 +89,9 @@ def test_create_dataset(self):
 
         # download labels from osm for 1
 
-        res = self.client.post(
-            f"{API_BASE}/label/osm/fetch/1/", "", headers=headersList
-        )
-        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
-
-        # download labels from osm for 2
-
-        res = self.client.post(
-            f"{API_BASE}/label/osm/fetch/2/", "", headers=headersList
-        )
-        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
-
-        # build the dataset
-
-        build_dt_payload = {"dataset_id": 1, "zoom_level": ["19"]}
-        res = self.client.post(
-            f"{API_BASE}/dataset/image/build/",
-            json.dumps(build_dt_payload),
-            headers=json_type_header,
-        )
-        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
-
-        # build dataset on multiple zoom levels
+        ## Fetch AOI
 
-        build_dt_payload = {"dataset_id": 1, "zoom_level": ["19", "20"]}
-        res = self.client.post(
-            f"{API_BASE}/dataset/image/build/",
-            json.dumps(build_dt_payload),
-            headers=json_type_header,
-        )
-        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
-
-        # create model
-
-        model_payload = {"name": "My test model", "dataset": 1}
-        res = self.client.post(
-            f"{API_BASE}/model/", json.dumps(model_payload), headers=json_type_header
-        )
-        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
-
-        # create training inside model
-        training_payload = {
-            "description": "My very first training",
-            "epochs": 1,
-            "batch_size": 1,
-            "model": 1,
-        }
-        res = self.client.post(
-            f"{API_BASE}/training/",
-            json.dumps(training_payload),
-            headers=json_type_header,
-        )
-        print(res.json())
-        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
-        # test
+        aoi_res = self.client.get(f"{API_BASE}/aoi/?dataset=1")
+        self.assertEqual(aoi_res.status_code, 200)
+        aoi_res_json = aoi_res.json()
+        self.assertEqual(len(aoi_res_json["features"]), 2)

From 0c9128b9bd7f85eaa16a5dc7500b3c94125a7e08 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 31 Jul 2024 19:54:07 +0545
Subject: [PATCH 34/35] refactor(backend_build): remove the run from the test
 branch

---
 backend/.DS_Store       | Bin 0 -> 8196 bytes
 backend/core/.DS_Store  | Bin 0 -> 6148 bytes
 backend/login/.DS_Store | Bin 0 -> 6148 bytes
 3 files changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 backend/.DS_Store
 create mode 100644 backend/core/.DS_Store
 create mode 100644 backend/login/.DS_Store

diff --git a/backend/.DS_Store b/backend/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..7b316faa4cfd5335544b8f2d5b8f67383bc22082
GIT binary patch
literal 8196
zcmeHMJ#W-N5S_g<<PeIKG7^m&BnlD{@dr$FfrbK+iYDT6cRD`q&ekQ+Vo^Z_4K)ov
zf*y$ift24s1szmWNW9tIvu4+JlvE%xEA5WsH?wd2_QSUh5t;2Lqb;I!BI=>CUAu^;
zp)k(1RyzL9Rak>~qMRPln1*yXoz%SbhAN;6r~;~hDxeDd3ku+!&864!-gl_Bs(>nR
zCKcfQAw^}Id0RSbw+=M+2mqVlwk^2EIl#uG-e%sG&e~A)=}r&EhZ=vxFfJT;%I>h4
zx23Z#oQw-6<C%>=p%}}KxYW9n$(+?z6;K803h>$e0uAUP6||7&_pRMwJ}X>6|9P|5
zEbsUCr};imUVhK6U;cQb&2w}@&*Fh(K0<fcA%@6oLF4H}V4X{MB-WBk6+6otGpO)@
zw_7src^ZLtNRRPNV<EX$HecM0@fQA|kCq$)ADPF>%^#dc;`2C=Igq*bNn#QI*G)GT
zRcypv!zHiZHM@B3IX#uE=GK*8VD(7IS-$k+U!hX*7?phHRf<m0>lAuVBp=ZZZ}c7s
z`DpnJ6)O2+RPqtMX1;Td4|9JWa^Btx(TI0B&qaKsZi7d_@7_wQSaUfY&>>BDcH?RT
zBR#<11DQ|qZSOtuImSE6@2ww)V<Q5sP7opUPretdUxG}>G^0XhOTXXTlUS77Qkg2Y
zMY#%>yk?D;JjRWAS+{US(CW_+jkuTdMZ`z!4IYx;|F-FYd13d=InSMN#&$lxJ3<yU
zpD33P8*Y6z_yr@=>L>H+d|_F%r~;=_pzFNX`TW0wxvk#;P9>i@s{;Rp0w(G2_P6mU
z{%l=)DE<kSqTWa4!hWT*HUtg3@O^t6hpql$i0jm2nYX30Kv4d|hX8&5TjjMm%L@Df
Dxkhhv

literal 0
HcmV?d00001

diff --git a/backend/core/.DS_Store b/backend/core/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..40dc42a976c649d0fc49a7b3f0edf9a1b2893a27
GIT binary patch
literal 6148
zcmeHKu}%U(5S<ZX2w2dVSgxPIADreatoZ;GFhIg_L@~zhdOHhZVr6S#;y=JYSlU_m
z2NuRRyBoR7aS$70WG0z?ce^ulZ(p*zw?xE;m)!<Yoro%M!EzPN5|eT9oNf7@9U$8p
z9Xh2B_34aSxoGWB1yq5*rhu&78dhtI#&nIfyI8-iVXx8b#L?JMo<9^vMsXOnhjAMb
zl^gSU_5L<LSQXv<LG-wvV!;MwsY(%ej^I7)G!cA6lPufq=c2eY*O$lL`_j`)xGt?3
z#S5nEVlzC+proxi0LODMozQ?{@Vv<KJf0QBrFl;8rnmba-#l|Q(>&M3W_i8}Pqocd
zfZen!wauKq&RP{v1y(2^`-6cC1|AcKcI!Z6uK>U@x~<_^?h=w?JO&;UhZupeO9i@A
z_E!vc>F75;F7TK*bm_$Y^1=Sg>~AP+zmEFGh7$)4wN(XFfvf^0_ga_p|ET!>pH0#?
zRX`Q^R|;@BY=(!Jl0RE>lasU7z)#>}5|=o%DQNt0Y#-z(-h*4in2`ry;4yKC9+>_J
MSQ)fY1%6e5FIP5$)&Kwi

literal 0
HcmV?d00001

diff --git a/backend/login/.DS_Store b/backend/login/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..ae16fafc9c71591713f722c7a42d731b4740447e
GIT binary patch
literal 6148
zcmeHKOHRW;4E2;Q0$tD*f(25p5Qzf}RX9Np5NLv0q=ZC#7VPG3uwucIJHR0j&*M?s
zC`}O?R3TgPyotx2IB%qJOhjh#SR_PaB8s4lgMADegx6U+G6)u4G=7hqt|+GkT~pTa
zR>Lkbz-KqYX=T*V6V7gRepxwBrg>d8lJ-f_?^)GpIxFiLJfdfllt-U0=i9uCUwAi*
z^(-8=r30$yhURpKY#%VI>6Y%>*`B`j`K{;rdUp8!_2F~fxMx+bGX2T7ou}nIRg?i`
zKpEIo22itEqLHAE%78MU3@jPo`yqxh+JH{bUmX~H3jiF#>;-e~B{(JyXahPytU!!}
z0wvUh7Q;w5>~Zk30iB?PlL^g-32!!`LoxpC*gy8}WUQc$%78M^W?)Yq$6Ws}`uqQO
zkUl8`%D}&3zzoxAI>9G}+S>UzuC+1r63W7UPH-85i7&<Q<x+eM^#Xg$6QB*~1Yv>b
OkASB^2W8+-8Tbac>RN&T

literal 0
HcmV?d00001


From f3f58869824f2afa2a5a7775aef587d8a20691c0 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma <skshitizraj@gmail.com>
Date: Wed, 31 Jul 2024 19:56:32 +0545
Subject: [PATCH 35/35] (CI): remove the test branch from build triggers

---
 .github/workflows/backend_build.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml
index 1a5967d8..e0749daa 100644
--- a/.github/workflows/backend_build.yml
+++ b/.github/workflows/backend_build.yml
@@ -3,7 +3,6 @@ on:
   push:
     branches:
       - master
-      - fix/ci-tests
     paths:
       - "backend/**"
       - ".github/workflows/backend_build.yml"