From 779e9cf54e856d102e4e0e6a1550770efc46f571 Mon Sep 17 00:00:00 2001 From: natrimmer Date: Wed, 26 Jun 2024 16:25:06 -0700 Subject: [PATCH 01/24] Closes #213 --- frontend/src/components/Layout/Home/Home.js | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/frontend/src/components/Layout/Home/Home.js b/frontend/src/components/Layout/Home/Home.js index 4211cf6d..8dd5072c 100644 --- a/frontend/src/components/Layout/Home/Home.js +++ b/frontend/src/components/Layout/Home/Home.js @@ -26,23 +26,13 @@ const GetStarted = () => { variant="body1" style={{ color: "#3D3D3D", fontSize: "18px", marginBottom: "50px" }} > - fAIr is an open AI-assisted mapping service developed by the - Humanitarian OpenStreetMap Team (HOT) that aims to improve the - efficiency and accuracy of mapping efforts for humanitarian purposes. - The service uses AI models, specifically computer vision techniques, to - detect objects such as buildings, roads, waterways, and trees from - satellite and UAV imagery. The name fAIr is derived from the following - terms: + fAIr performs mapping in the same way as human mappers using HOT's Tasking Manager. It looks at UAV imagery and produces map data that can be added to OpenStreetMap (OSM). Tests show a 100% speedup compared to manual mapping. It uses Artificial Intelligence (AI) to accomplish this.

+ <br/>
+ fAIr is developed by the Humanitarian OpenStreetMap Team (HOT) and all the software is free and open source.
+ <br/>
+ Before fAIr is used, it needs to be fine-tuned by training on high quality map data for a small representative part of the geographical region where it is to be used.
Date: Mon, 15 Jul 2024 08:54:31 +0100 Subject: [PATCH 02/24] test(exclude-coverage): excludes callback function from coverage --- backend/login/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/login/views.py b/backend/login/views.py index bb75c9d2..321af752 100644 --- a/backend/login/views.py +++ b/backend/login/views.py @@ -38,7 +38,7 @@ def get(self, request, format=None): class callback(APIView): - def get(self, request, format=None): + def get(self, request, format=None): # pragma: no cover """Callback method redirected from osm callback method Args: From 8721fd8fb3fd5b51ec9aba4263f9589b0ef2a29e Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Mon, 15 Jul 2024 08:59:44 +0100 Subject: [PATCH 03/24] test(model-factories): adds model factories to isolate test data --- backend/tests/factories.py | 137 +++++++++++++++++++++++++++++++++++++ 1 file changed, 137 insertions(+) create mode 100644 backend/tests/factories.py diff --git a/backend/tests/factories.py b/backend/tests/factories.py new file mode 100644 index 00000000..829c1b4e --- /dev/null +++ b/backend/tests/factories.py @@ -0,0 +1,137 @@ +import factory +from login.models import OsmUser +from django.contrib.gis.geos import Polygon +from core.models import ( + Dataset, + AOI, + Label, + Model, + Training, + Feedback, + FeedbackAOI, + FeedbackLabel, +) + + +class OsmUserFactory(factory.django.DjangoModelFactory): + class Meta: + model = OsmUser + + osm_id = 123456 + + +class DatasetFactory(factory.django.DjangoModelFactory): + class Meta: + model = Dataset + + name = "My test dataset" + source_imagery = "https://tiles.openaerialmap.org/5ac4fc6f26964b0010033112/0/5ac4fc6f26964b0010033113/{z}/{x}/{y}" + created_by = factory.SubFactory(OsmUserFactory) + + +class AoiFactory(factory.django.DjangoModelFactory): + class Meta: + model = AOI + + geom = Polygon( + ( + (32.588507094820351, 0.348666499011499), + (32.588517512656978, 0.348184682976698), + (32.588869114643053, 0.348171660921362), + (32.588840465592334, 0.348679521066151), + (32.588507094820351, 0.348666499011499), + ) + ) + dataset = factory.SubFactory(DatasetFactory) + + +class LabelFactory(factory.django.DjangoModelFactory): + class Meta: + model = Label + + aoi = factory.SubFactory(AoiFactory) + geom = Polygon( + ( + (32.588507094820351, 0.348666499011499), + (32.588517512656978, 0.348184682976698), + (32.588869114643053, 0.348171660921362), + (32.588840465592334, 0.348679521066151), + (32.588507094820351, 0.348666499011499), + ) + ) + + +class ModelFactory(factory.django.DjangoModelFactory): + class Meta: + model = Model + + dataset = factory.SubFactory(DatasetFactory) + name = "My test model" + created_by = factory.SubFactory(OsmUserFactory) + + +class TrainingFactory(factory.django.DjangoModelFactory): + class Meta: + model = Training + + model = factory.SubFactory(ModelFactory) + description = "My very first training" + created_by = factory.SubFactory(OsmUserFactory) + epochs = 1 + zoom_level = [20, 21] + batch_size = 1 + + +class FeedbackFactory(factory.django.DjangoModelFactory): + class Meta: + model = Feedback + + geom = Polygon( + ( + (32.588507094820351, 0.348666499011499), + (32.588517512656978, 0.348184682976698), + (32.588869114643053, 0.348171660921362), + (32.588840465592334, 0.348679521066151), + (32.588507094820351, 0.348666499011499), + ) + ) + training = factory.SubFactory(TrainingFactory) + zoom_level = 19 + feedback_type = "TP" + user = factory.SubFactory(OsmUserFactory) + 
source_imagery = "https://tiles.openaerialmap.org/5ac4fc6f26964b0010033112/0/5ac4fc6f26964b0010033113/{z}/{x}/{y}" + + +class FeedbackAoiFactory(factory.django.DjangoModelFactory): + class Meta: + model = FeedbackAOI + + training = factory.SubFactory(TrainingFactory) + geom = Polygon( + ( + (32.588507094820351, 0.348666499011499), + (32.588517512656978, 0.348184682976698), + (32.588869114643053, 0.348171660921362), + (32.588840465592334, 0.348679521066151), + (32.588507094820351, 0.348666499011499), + ) + ) + label_status = -1 + source_imagery = "https://tiles.openaerialmap.org/5ac4fc6f26964b0010033112/0/5ac4fc6f26964b0010033113/{z}/{x}/{y}" + user = factory.SubFactory(OsmUserFactory) + + +class FeedbackLabelFactory(factory.django.DjangoModelFactory): + class Meta: + model = FeedbackLabel + + feedback_aoi = factory.SubFactory(FeedbackAoiFactory) + geom = Polygon( + ( + (32.588507094820351, 0.348666499011499), + (32.588517512656978, 0.348184682976698), + (32.588869114643053, 0.348171660921362), + (32.588840465592334, 0.348679521066151), + (32.588507094820351, 0.348666499011499), + ) + ) From 8c366fad2845610f28d37623001ca3215ec92cbc Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Mon, 15 Jul 2024 09:07:27 +0100 Subject: [PATCH 04/24] test(endpoints-&-view): adds tests for the endpoints also adds test for view home - redirect --- backend/tests/test_endpoints.py | 268 ++++++++++++++++++++++++++------ backend/tests/test_views.py | 17 ++ 2 files changed, 234 insertions(+), 51 deletions(-) create mode 100644 backend/tests/test_views.py diff --git a/backend/tests/test_endpoints.py b/backend/tests/test_endpoints.py index a0447590..7b962117 100644 --- a/backend/tests/test_endpoints.py +++ b/backend/tests/test_endpoints.py @@ -2,9 +2,17 @@ import os import validators -from django.conf import settings from rest_framework import status from rest_framework.test import APILiveServerTestCase, RequestsClient +from .factories import ( + OsmUserFactory, + TrainingFactory, + DatasetFactory, + AoiFactory, + LabelFactory, + ModelFactory, + FeedbackAoiFactory, +) API_BASE = "http://testserver/api/v1" @@ -19,6 +27,12 @@ class TaskApiTest(APILiveServerTestCase): def setUp(self): # Create a request factory instance self.client = RequestsClient() + self.user = OsmUserFactory(osm_id=123) + self.dataset = DatasetFactory(created_by=self.user) + self.aoi = AoiFactory(dataset=self.dataset) + self.model = ModelFactory(dataset=self.dataset, created_by=self.user) + self.json_type_header = headersList.copy() + self.json_type_header["content-type"] = "application/json" def test_auth_me(self): res = self.client.get(f"{API_BASE}/auth/me/", headers=headersList) @@ -32,9 +46,11 @@ def test_auth_login(self): self.assertEqual(validators.url(res_body["login_url"]), True) def test_create_dataset(self): + # create dataset + payload = { - "name": "My test dataset", - "source_imagery": "https://tiles.openaerialmap.org/5ac4fc6f26964b0010033112/0/5ac4fc6f26964b0010033113/{z}/{x}/{y}", + "name": self.dataset.name, + "source_imagery": self.dataset.source_imagery, } # test without authentication should be forbidden res = self.client.post(f"{API_BASE}/dataset/", payload) @@ -43,22 +59,11 @@ def test_create_dataset(self): res = self.client.post(f"{API_BASE}/dataset/", payload, headers=headersList) self.assertEqual(res.status_code, status.HTTP_201_CREATED) - # now dataset is created , create first aoi inside it - payload_second = { - "geom": { - "type": "Polygon", - "coordinates": [ - [ - 
[32.588507094820351, 0.348666499011499], - [32.588517512656978, 0.348184682976698], - [32.588869114643053, 0.348171660921362], - [32.588840465592334, 0.348679521066151], - [32.588507094820351, 0.348666499011499], - ] - ], - }, - "dataset": 1, - } + def test_create_training(self): + # now dataset is created, create first aoi inside it + + payload_second = {"geom": self.aoi.geom.json, "dataset": self.dataset.id} + json_type_header = headersList json_type_header["content-type"] = "application/json" res = self.client.post( @@ -66,75 +71,115 @@ def test_create_dataset(self): ) self.assertEqual(res.status_code, status.HTTP_201_CREATED) - # create second aoi too , to test multiple aois + # create second aoi too, to test multiple aois + payload_third = { - "geom": { - "type": "Polygon", - "coordinates": [ - [ - [32.588046105549715, 0.349843692679227], - [32.588225813231475, 0.349484284008701], - [32.588624295482369, 0.349734307433132], - [32.588371662944233, 0.350088507273009], - [32.588046105549715, 0.349843692679227], - ] - ], - }, - "dataset": 1, + "geom": self.aoi.geom.json, + "dataset": self.dataset.id, } res = self.client.post( f"{API_BASE}/aoi/", json.dumps(payload_third), headers=json_type_header ) self.assertEqual(res.status_code, status.HTTP_201_CREATED) + # create model + + model_payload = {"name": self.model.name, "dataset": self.dataset.id} + res = self.client.post( + f"{API_BASE}/model/", json.dumps(model_payload), headers=json_type_header + ) + self.assertEqual(res.status_code, status.HTTP_201_CREATED) + + # create training without label + + training_payload = { + "description": "My very first training", + "epochs": 1, + "zoom_level": [20, 21], + "batch_size": 1, + "model": self.model.id, + } + res = self.client.post( + f"{API_BASE}/training/", + json.dumps(training_payload), + headers=json_type_header, + ) + print(res.json()) + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) + # download labels from osm for 1 res = self.client.post( - f"{API_BASE}/label/osm/fetch/1/", "", headers=headersList + f"{API_BASE}/label/osm/fetch/{self.aoi.id}/", "", headers=headersList ) self.assertEqual(res.status_code, status.HTTP_201_CREATED) # download labels from osm for 2 res = self.client.post( - f"{API_BASE}/label/osm/fetch/2/", "", headers=headersList + f"{API_BASE}/label/osm/fetch/{self.aoi.id}/", "", headers=headersList ) self.assertEqual(res.status_code, status.HTTP_201_CREATED) - # build the dataset + # create training with epochs greater than the limit - build_dt_payload = {"dataset_id": 1, "zoom_level": ["19"]} + training_payload = { + "description": "My very first training", + "epochs": 31, + "zoom_level": [20, 21], + "batch_size": 1, + "model": self.model.id, + } res = self.client.post( - f"{API_BASE}/dataset/image/build/", - json.dumps(build_dt_payload), + f"{API_BASE}/training/", + json.dumps(training_payload), headers=json_type_header, ) - self.assertEqual(res.status_code, status.HTTP_201_CREATED) + print(res.json()) + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) - # build dataset on multiple zoom levels + # create training with batch size greater than the limit - build_dt_payload = {"dataset_id": 1, "zoom_level": ["19", "20"]} + training_payload = { + "description": "My very first training", + "epochs": 1, + "zoom_level": [20, 21], + "batch_size": 9, + "model": self.model.id, + } res = self.client.post( - f"{API_BASE}/dataset/image/build/", - json.dumps(build_dt_payload), + f"{API_BASE}/training/", + json.dumps(training_payload), 
headers=json_type_header, ) - self.assertEqual(res.status_code, status.HTTP_201_CREATED) + print(res.json()) + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) - # create model + # create training inside model - model_payload = {"name": "My test model", "dataset": 1} + training_payload = { + "description": "My very first training", + "epochs": 1, + "zoom_level": [20, 21], + "batch_size": 1, + "model": self.model.id, + } res = self.client.post( - f"{API_BASE}/model/", json.dumps(model_payload), headers=json_type_header + f"{API_BASE}/training/", + json.dumps(training_payload), + headers=json_type_header, ) + print(res.json()) self.assertEqual(res.status_code, status.HTTP_201_CREATED) - # create training inside model + # create another training for the same model + training_payload = { "description": "My very first training", "epochs": 1, + "zoom_level": [20, 21], "batch_size": 1, - "model": 1, + "model": self.model.id, } res = self.client.post( f"{API_BASE}/training/", @@ -142,5 +187,126 @@ def test_create_dataset(self): headers=json_type_header, ) print(res.json()) + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) + + self.training = TrainingFactory(model=self.model, created_by=self.user) + + def test_create_label(self): + self.label = LabelFactory(aoi=self.aoi) + self.training = TrainingFactory(model=self.model, created_by=self.user) + + # create label + + label_payload = { + "geom": self.label.geom.json, + "aoi": self.aoi.id, + } + + res = self.client.post( + f"{API_BASE}/label/", + json.dumps(label_payload), + headers=self.json_type_header, + ) + self.assertEqual(res.status_code, status.HTTP_200_OK) # 201- for create + + # create another label with the same geom and aoi + + label_payload2 = { + "geom": self.label.geom.json, + "aoi": self.aoi.id, + } + + res = self.client.post( + f"{API_BASE}/label/", + json.dumps(label_payload2), + headers=self.json_type_header, + ) + self.assertEqual(res.status_code, status.HTTP_200_OK) # 200- for update + + # create another label with error + + label_payload3 = { + "geom": self.label.geom.json, + "aoi": 40, # non-existent aoi + } + res = self.client.post( + f"{API_BASE}/label/", + json.dumps(label_payload3), + headers=self.json_type_header, + ) + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) + + def test_fetch_feedbackAoi_osm_label(self): + # create feedback aoi + training = TrainingFactory(model=self.model, created_by=self.user) + feedbackAoi = FeedbackAoiFactory(training=training, user=self.user) + + # download available osm data as labels for the feedback aoi + + res = self.client.post( + f"{API_BASE}/label/feedback/osm/fetch/{feedbackAoi.id}/", + "", + headers=headersList, + ) + self.assertEqual(res.status_code, status.HTTP_201_CREATED) + + def test_get_runStatus(self): + training = TrainingFactory(model=self.model, created_by=self.user) + + # get running training status + + res = self.client.get( + f"{API_BASE}/training/status/{training.id}/", headers=headersList + ) + self.assertEqual(res.status_code, status.HTTP_200_OK) + + def test_submit_training_feedback(self): + training = TrainingFactory(model=self.model, created_by=self.user) + + # apply feedback to training published checkpoints + + training_feedback_payload = { + "training_id": training.id, + "epochs": 20, + "batch_size": 8, + "zoom_level": [19, 20], + } + res = self.client.post( + f"{API_BASE}/feedback/training/submit/", + json.dumps(training_feedback_payload), + headers=self.json_type_header, + ) + # submit unfished/unpublished training 
feedback should not pass + self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) + + def test_publish_training(self): + training = TrainingFactory(model=self.model, created_by=self.user) + + # publish an unfinished training should not pass + + res = self.client.post( + f"{API_BASE}/training/publish/{training.id}/", headers=headersList + ) + self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND) + + def test_get_GpxView(self): + training = TrainingFactory(model=self.model, created_by=self.user) + feedbackAoi = FeedbackAoiFactory(training=training, user=self.user) + + # generate aoi GPX view - aoi_id + + res = self.client.get(f"{API_BASE}/aoi/gpx/{self.aoi.id}/", headers=headersList) + self.assertEqual(res.status_code, status.HTTP_200_OK) + + # generate feedback aoi GPX view - feedback aoi_id + + res = self.client.get( + f"{API_BASE}/feedback-aoi/gpx/{feedbackAoi.id}/", headers=headersList + ) + self.assertEqual(res.status_code, status.HTTP_200_OK) + + def test_get_workspace(self): + # get training workspace + + res = self.client.get(f"{API_BASE}/workspace/", headers=headersList) self.assertEqual(res.status_code, status.HTTP_201_CREATED) - # test diff --git a/backend/tests/test_views.py b/backend/tests/test_views.py new file mode 100644 index 00000000..991ae1bd --- /dev/null +++ b/backend/tests/test_views.py @@ -0,0 +1,17 @@ +from django.test import TestCase +from django.urls import reverse +from rest_framework import status +from rest_framework.test import APIClient + +BASE_URL = "http://testserver/api" + + +class CoreViewsTest(TestCase): + def setUp(self): + self.client = APIClient() + self.home_url = f"{BASE_URL}/" + + def test_home_redirect(self): + res = self.client.get(self.home_url) + self.assertEqual(res.status_code, status.HTTP_302_FOUND) + self.assertRedirects(res, reverse("schema-swagger-ui")) From 47f59fec4a3119773f8753c5bc0b7e44bc602ec9 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 16 Jul 2024 10:38:09 +0100 Subject: [PATCH 05/24] test(exclude-coverage): omit everything in /usr from coverage --- backend/pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 3c813601..e1eb0f5b 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -46,6 +46,7 @@ distribution = true dev = [ "commitizen>=3.27.0", "ruff>=0.4.9", + "coverage>=7.6.0", ] [tool.commitizen] @@ -54,3 +55,8 @@ tag_format = "\"v$version\"" version_scheme = "semver2" version = "1.0.1" update_changelog_on_bump = true + +[tool.coverage.run] +omit = [ + "/usr/*" +] From 0aee788eaa91c03973d4e8d6051898232fdc7833 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 16 Jul 2024 11:50:56 +0100 Subject: [PATCH 06/24] test(test-backend-build): adds run project tests in Github action workflow to run automatic tests on PR --- .github/workflows/backend_build.yml | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 7983b9a6..aff2922b 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -55,10 +55,15 @@ jobs: - name: Unzip and Move Basemodel run: unzip checkpoint.tf.zip -d ramp-code/ramp + - name: Install numpy + run: | + pip install numpy + - name: Install gdal run: | sudo apt-get update && sudo apt-get -y install gdal-bin libgdal-dev python3-gdal && sudo apt-get -y 
autoremove && sudo apt-get clean pip install GDAL==$(gdal-config --version) + - name: Install ramp dependecies run: | cd ramp-code && cd colab && make install @@ -85,7 +90,8 @@ jobs: - name: Install Dependencies run: | cd backend/ - pip install -r requirements.txt + pip install pdm + pdm install - name: Creating env run: | @@ -104,12 +110,6 @@ jobs: cd backend/ celery -A aiproject --broker=redis://localhost:6379/ flower & - - name: Fix gdal array - run: | - pip uninstall -y gdal - pip install numpy - pip install GDAL==$(gdal-config --version) --global-option=build_ext --global-option="-I/usr/include/gdal" - - name: Check Opencv version run: | pip freeze | grep opencv @@ -121,11 +121,5 @@ jobs: run: | cd backend/ - - export TESTING_TOKEN=$TESTING_TOKEN - python manage.py makemigrations - python manage.py makemigrations core - python manage.py makemigrations login - python manage.py migrate - python manage.py migrate login - python manage.py migrate core + coverage run manage.py test tests + coverage report From f86dc4d7249244ab56feafe520e6fb5c6caf44d1 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 16 Jul 2024 12:06:37 +0100 Subject: [PATCH 07/24] test(fix-backend-build): fixes install dependecies failing --- .github/workflows/backend_build.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index aff2922b..06abd1b0 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -55,15 +55,10 @@ jobs: - name: Unzip and Move Basemodel run: unzip checkpoint.tf.zip -d ramp-code/ramp - - name: Install numpy - run: | - pip install numpy - - name: Install gdal run: | sudo apt-get update && sudo apt-get -y install gdal-bin libgdal-dev python3-gdal && sudo apt-get -y autoremove && sudo apt-get clean pip install GDAL==$(gdal-config --version) - - name: Install ramp dependecies run: | cd ramp-code && cd colab && make install @@ -110,6 +105,12 @@ jobs: cd backend/ celery -A aiproject --broker=redis://localhost:6379/ flower & + - name: Fix gdal array + run: | + pip uninstall -y gdal + pip install numpy + pip install GDAL==$(gdal-config --version) --global-option=build_ext --global-option="-I/usr/include/gdal" + - name: Check Opencv version run: | pip freeze | grep opencv @@ -121,5 +122,7 @@ jobs: run: | cd backend/ + + export TESTING_TOKEN=$TESTING_TOKEN coverage run manage.py test tests coverage report From 9c0e5ff2d78437857bf59240ba387ecc547e2f51 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 16 Jul 2024 12:20:51 +0100 Subject: [PATCH 08/24] test(backend-build): restores install project dependencies --- .github/workflows/backend_build.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 06abd1b0..b0e23575 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -85,8 +85,7 @@ jobs: - name: Install Dependencies run: | cd backend/ - pip install pdm - pdm install + pip install -r requirements.txt - name: Creating env run: | From 656e26858d1fdac4d9de8bac66f9bb5104d4aa9c Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 16 Jul 2024 13:19:29 +0100 Subject: [PATCH 09/24] test(backend-build): adds install coverage --- .github/workflows/backend_build.yml | 1 + 1 file 
changed, 1 insertion(+) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index b0e23575..f0fe307b 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -121,6 +121,7 @@ jobs: run: | cd backend/ + pip install coverage export TESTING_TOKEN=$TESTING_TOKEN coverage run manage.py test tests From 8033d1ea88c289e8b6464aa59b4a454b4fbffb25 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Wed, 17 Jul 2024 16:13:35 +0545 Subject: [PATCH 10/24] feat(aoi-upload): let user upload aoi from geojson file --- frontend/package.json | 1 + .../Layout/TrainingDS/DatasetEditor/AOI.js | 188 +++++++++++------- .../TrainingDS/DatasetEditor/DatasetEditor.js | 1 + 3 files changed, 119 insertions(+), 71 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index 4604544b..8e5c18ea 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -18,6 +18,7 @@ "@mui/material": "^5.6.1", "@mui/styles": "^5.12.0", "@mui/x-data-grid": "^5.17.12", + "@terraformer/wkt": "^2.2.1", "@testing-library/jest-dom": "^5.16.4", "@testing-library/react": "^12.1.4", "@testing-library/user-event": "^13.5.0", diff --git a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js index 53f28be8..667dcbc2 100644 --- a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js +++ b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js @@ -11,24 +11,25 @@ import { ListItemText, Pagination, Snackbar, - SvgIcon, + Tooltip, Typography, + Button, } from "@mui/material"; -import Tooltip from "@mui/material/Tooltip"; import { styled } from "@mui/material/styles"; import DeleteIcon from "@mui/icons-material/Delete"; -import MapIcon from "@mui/icons-material/Map"; +import AddIcon from "@mui/icons-material/Add"; import FolderIcon from "@mui/icons-material/Folder"; import { MapTwoTone, ZoomInMap } from "@mui/icons-material"; import usePagination from "./Pagination"; import { makeStyles, withStyles } from "@material-ui/core/styles"; import ScreenshotMonitorIcon from "@mui/icons-material/ScreenshotMonitor"; - import PlaylistRemoveIcon from "@mui/icons-material/PlaylistRemove"; import { useMutation } from "react-query"; import axios from "../../../../axios"; import AOIDetails from "./AOIDetails"; import AuthContext from "../../../../Context/AuthContext"; +import * as Terraformer from "@terraformer/wkt"; + const Demo = styled("div")(({ theme }) => ({ backgroundColor: theme.palette.background.paper, })); @@ -40,11 +41,36 @@ const ListItemWithWiderSecondaryAction = withStyles({ })(ListItem); const PER_PAGE = 5; +const DEFAULT_FILTER = { + items: [], + linkOperator: "and", + quickFilterValues: [], + quickFilterLogicOperator: "and", +}; + +const postAoi = async (polygon, dataset, accessToken) => { + console.log("Posting AOI"); + console.log(dataset); + const headers = { + "Content-Type": "application/json", + "access-token": accessToken, + }; + const data = { + geom: `SRID=4326;${polygon}`, + dataset, + }; + const response = await axios.post("/aoi/", data, { headers }); + console.log(response.data); + return response.data; +}; + const AOI = (props) => { const [dense, setDense] = useState(true); const count = Math.ceil(props.mapLayers.length / PER_PAGE); let [page, setPage] = useState(1); const [openSnack, setOpenSnack] = useState(false); + const [fileError, setFileError] = useState(null); + const [geoJsonFile, setGeoJsonFile] = useState(null); let _DATA = usePagination( 
props.mapLayers.filter((e) => e.type === "aoi"), PER_PAGE @@ -53,7 +79,7 @@ const AOI = (props) => { setPage(p); _DATA.jump(p); }; - // console.log("_DATA", _DATA); + useEffect(() => { return () => {}; }, [props]); @@ -70,16 +96,12 @@ const AOI = (props) => { }); if (res.error) { - // setMapError(res.error.response.statusText); console.log(res.error.response.statusText); } else { - // success full fetch - return res.data; } } catch (e) { console.log("isError", e); - } finally { } }; const { mutate: mutateFetch, data: fetchResult } = @@ -106,11 +128,74 @@ const AOI = (props) => { } } catch (e) { console.log("isError", e); - } finally { } }; const { mutate: mutateDeleteAOI } = useMutation(DeleteAOI); + const handleFileUpload = async (event) => { + const file = event.target.files[0]; + if (file) { + const fileName = file.name.toLowerCase(); + if (!fileName.endsWith(".geojson")) { + setFileError("Invalid file format. Please upload a .geojson file."); + return; + } + const reader = new FileReader(); + reader.onload = async (e) => { + try { + const geoJson = JSON.parse(e.target.result); + let geometry; + + if (geoJson.type === "FeatureCollection") { + // if (geoJson.features.length > 1) { + // setFileError( + // "Feature collection contains multiple features. Only uploaded first one" + // ); + // } + // TODO : for featurecollection loop through the features and add AOI one by one + const feature = geoJson.features[0]; + if ( + feature.geometry.type !== "Polygon" && + feature.geometry.type !== "MultiPolygon" + ) { + setFileError("GeoJSON must contain a Polygon or MultiPolygon."); + return; + } + geometry = feature.geometry; + } else if (geoJson.type === "Feature") { + if ( + geoJson.geometry.type !== "Polygon" && + geoJson.geometry.type !== "MultiPolygon" + ) { + setFileError( + "Feature geometry type must be Polygon or MultiPolygon." + ); + return; + } + geometry = geoJson.geometry; + } else if ( + geoJson.type === "Polygon" || + geoJson.type === "MultiPolygon" + ) { + geometry = geoJson; + } else { + setFileError("Invalid GeoJSON format."); + return; + } + + const wkt = Terraformer.geojsonToWKT(geometry); + await postAoi(wkt, props.datasetId, accessToken); + setFileError(null); + setGeoJsonFile(null); + } catch (error) { + console.error(error); + setFileError("Error processing GeoJSON file."); + } + }; + reader.readAsText(file); + } + }; + return ( <> @@ -119,6 +204,28 @@ const AOI = (props) => { Training Areas{` (${props.mapLayers.length})`} + + + {fileError && ( + setFileError(null)}> + {fileError} + + )} {props.mapLayers && props.mapLayers.length > PER_PAGE && ( { "" )} - {/* add here a container to get the AOI status from DB */} {layer.aoiId && ( )} @@ -167,40 +273,6 @@ const AOI = (props) => { } /> - {/* - - */} - {/* - { - // mutateFetch(layer.aoiId); - // console.log("Open in Editor") - window.open( - `https://rapideditor.org/rapid#background=${ - props.oamImagery - ? 
"custom:" + props.oamImagery.url - : "Bing" - }&datasets=fbRoads,msBuildings&disable_features=boundaries&map=16.00/17.9253/120.4841&gpx=&gpx=${ - process.env.REACT_APP_API_BASE - }/aoi/gpx/${ - layer.aoiId - }`, - "_blank", - "noreferrer" - ); - }} - > - - RapiD logo - - */} { className="margin1 transparent" onClick={async (e) => { try { - // mutateFetch(layer.aoiId); - console.log("layer", layer); - const Imgurl = new URL( "http://127.0.0.1:8111/imagery" ); @@ -224,10 +293,6 @@ const AOI = (props) => { props.oamImagery.url ); const imgResponse = await fetch(Imgurl); - // bounds._southWest.lng, - // bounds._southWest.lat, - // bounds._northEast.lng, - // bounds._northEast.lat, const loadurl = new URL( "http://127.0.0.1:8111/load_and_zoom" ); @@ -270,8 +335,6 @@ const AOI = (props) => { sx={{ width: 24, height: 24 }} className="margin1 transparent" onClick={(e) => { - // mutateFetch(layer.aoiId); - // console.log("Open in Editor") window.open( `https://www.openstreetmap.org/edit/#background=${ props.oamImagery @@ -285,7 +348,6 @@ const AOI = (props) => { ); }} > - {/* */} OSM logo { className="margin1" onClick={(e) => { mutateFetch(layer.aoiId); - console.log("Call raw data API to fetch OSM data"); }} > - {/* { - - console.log("Remove labels") - }}> - - */} { return accumulator + curValue.lng; }, 0) / layer.latlngs.length; - // [lat, lng] are the centroid of the polygon props.selectAOIHandler([lat, lng], 17); }} > @@ -352,9 +403,6 @@ const AOI = (props) => { sx={{ width: 24, height: 24 }} className="margin-left-13" onClick={(e) => { - // console.log( - // `layer.aoiId ${layer.aoiId} and layer.id ${layer.id}` - // ); mutateDeleteAOI(layer.aoiId, layer.id); }} > @@ -377,7 +425,6 @@ const AOI = (props) => { open={openSnack} autoHideDuration={5000} onClose={() => { - console.log("openSnack", openSnack); setOpenSnack(false); }} message={ @@ -395,7 +442,6 @@ const AOI = (props) => { } - // action={action} color="red" anchorOrigin={{ vertical: "bottom", horizontal: "right" }} /> diff --git a/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js b/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js index 10bfb8f4..10560775 100644 --- a/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js +++ b/frontend/src/components/Layout/TrainingDS/DatasetEditor/DatasetEditor.js @@ -117,6 +117,7 @@ function DatasetEditor() { mapLayers={mapLayers.filter((i) => i.type === "aoi")} selectAOIHandler={selectAOIHandler} deleteAOIButton={deleteAOIButton} + datasetId={dataset.id} > From 8adf03bb0ebe016d492ba105dd8878b232b3101c Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Wed, 17 Jul 2024 16:36:31 +0545 Subject: [PATCH 11/24] refactor(aoi): clean unused code --- .../src/components/Layout/TrainingDS/DatasetEditor/AOI.js | 6 ------ 1 file changed, 6 deletions(-) diff --git a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js index 667dcbc2..2dc6860b 100644 --- a/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js +++ b/frontend/src/components/Layout/TrainingDS/DatasetEditor/AOI.js @@ -41,12 +41,6 @@ const ListItemWithWiderSecondaryAction = withStyles({ })(ListItem); const PER_PAGE = 5; -const DEFAULT_FILTER = { - items: [], - linkOperator: "and", - quickFilterValues: [], - quickFilterLogicOperator: "and", -}; const postAoi = async (polygon, dataset, accessToken) => { console.log("Posting AOI"); From c7f8d2e45debbc99c3e325440e8ee05bdd9328b9 Mon Sep 17 00:00:00 2001 From: 
kshitijrajsharma Date: Wed, 17 Jul 2024 16:42:41 +0545 Subject: [PATCH 12/24] Check for frontend build , Remove old version of node --- .github/workflows/frontend_build.yml | 50 +++++++++++++--------------- 1 file changed, 24 insertions(+), 26 deletions(-) diff --git a/.github/workflows/frontend_build.yml b/.github/workflows/frontend_build.yml index 23b091c1..ab456ba2 100644 --- a/.github/workflows/frontend_build.yml +++ b/.github/workflows/frontend_build.yml @@ -2,43 +2,41 @@ name: Frontend Build on: push: - branches: [ master ] + branches: [master] paths: - - 'frontend/**' - - '.github/workflows/frontend_build.yml' + - "frontend/**" + - ".github/workflows/frontend_build.yml" pull_request: - branches: [ master ] + branches: [master] paths: - - 'frontend/**' - - '.github/workflows/frontend_build.yml' + - "frontend/**" + - ".github/workflows/frontend_build.yml" jobs: Build_On_Ubuntu: - runs-on: ubuntu-latest env: CI: false strategy: matrix: - node-version: [ 16.14.2, 16, 18, 20 ] + node-version: [18, 20] steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - - - name: Install dependencies - run: | - cd frontend/ - npm install --legacy-peer-deps - - - name: Build - run: | - cd frontend/ - npm run build - + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + + - name: Install dependencies + run: | + cd frontend/ + npm install --legacy-peer-deps + + - name: Build + run: | + cd frontend/ + npm run build From 9d65e427f840be31ddb1fb941513f5cee6e3afe7 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Wed, 17 Jul 2024 16:45:52 +0545 Subject: [PATCH 13/24] Remove node restriction --- frontend/package.json | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index 8e5c18ea..6f992877 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -2,9 +2,6 @@ "name": "fair", "version": "0.1.0", "private": true, - "engines": { - "node": "16.14.2" - }, "dependencies": { "@emotion/react": "^11.9.0", "@emotion/styled": "^11.8.1", @@ -64,4 +61,4 @@ "last 1 safari version" ] } -} +} \ No newline at end of file From ec9752121c1088667d409dba3e693be1ff69de82 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Wed, 17 Jul 2024 16:54:40 +0545 Subject: [PATCH 14/24] Add dev dependecies ajv --- frontend/package.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/frontend/package.json b/frontend/package.json index 6f992877..bcb8a0b7 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -60,5 +60,8 @@ "last 1 firefox version", "last 1 safari version" ] + }, + "devDependencies": { + "ajv": "^7.2.4" } -} \ No newline at end of file +} From aff00d192f601bba40b97f5fd314eae623e9c1c5 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Wed, 17 Jul 2024 16:58:55 +0545 Subject: [PATCH 15/24] Restore check for 16 version of node ! 
--- .github/workflows/frontend_build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/frontend_build.yml b/.github/workflows/frontend_build.yml index ab456ba2..c9b1ec9a 100644 --- a/.github/workflows/frontend_build.yml +++ b/.github/workflows/frontend_build.yml @@ -20,7 +20,7 @@ jobs: strategy: matrix: - node-version: [18, 20] + node-version: [16, 18, 20] steps: - name: Checkout repository From 7b8ac72aba11e876a7d05f29af6723483b3b49cb Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Wed, 17 Jul 2024 14:56:48 +0100 Subject: [PATCH 16/24] test(github-action): restores makemigrations --- .github/workflows/backend_build.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index f0fe307b..992e1eac 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -121,8 +121,15 @@ jobs: run: | cd backend/ - pip install coverage export TESTING_TOKEN=$TESTING_TOKEN + python manage.py makemigrations + python manage.py makemigrations core + python manage.py makemigrations login + python manage.py migrate + python manage.py migrate login + python manage.py migrate core + + pip install coverage coverage run manage.py test tests coverage report From 7fd9f858fd8709b20ce14d532002207e77d972f2 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Wed, 17 Jul 2024 15:14:49 +0100 Subject: [PATCH 17/24] test(github-action): adds install factory boy --- .github/workflows/backend_build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 992e1eac..c27c7675 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -131,5 +131,6 @@ jobs: python manage.py migrate core pip install coverage + pip install factory-boy coverage run manage.py test tests coverage report From c66dcbc04eaedfd3cb7ab5a5b6eb660b6053fb66 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 23 Jul 2024 13:31:33 +0100 Subject: [PATCH 18/24] test(gothub-action): sets TESTING_TOKEN --- .github/workflows/backend_build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index c27c7675..ecbc3337 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -91,6 +91,7 @@ jobs: run: | cd backend/ mv sample_env .env + sed -i 's/TESTING_TOKEN = .*/TESTING_TOKEN=${{ secrets.TESTING_TOKEN }}/' .env export DATABASE_URL=postgis://admin:password@localhost:5432/ai export RAMP_HOME="/home/runner/work/fAIr/fAIr" export TRAINING_WORKSPACE="/home/runner/work/fAIr/fAIr/backend/training" From 6ac1607de2e1dc59bd0120c668f0fbe63f720e42 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Wed, 24 Jul 2024 22:08:59 +0545 Subject: [PATCH 19/24] Upgrade treeview --- frontend/package.json | 1 + .../components/Layout/AIModels/AIModelEditor/FilesTree.js | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index bcb8a0b7..b0ab2404 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -15,6 +15,7 @@ "@mui/material": "^5.6.1", "@mui/styles": "^5.12.0", "@mui/x-data-grid": "^5.17.12", + "@mui/x-tree-view": "^7.11.0", "@terraformer/wkt": "^2.2.1", 
"@testing-library/jest-dom": "^5.16.4", "@testing-library/react": "^12.1.4", diff --git a/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js b/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js index 48729c50..9ff8f7cf 100644 --- a/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js +++ b/frontend/src/components/Layout/AIModels/AIModelEditor/FilesTree.js @@ -1,5 +1,5 @@ import React, { useEffect, useState } from "react"; -import TreeView from "@mui/lab/TreeView"; +import { SimpleTreeView } from "@mui/x-tree-view"; import TreeItem from "@mui/lab/TreeItem"; import ExpandMoreIcon from "@mui/icons-material/ExpandMore"; import ExpandLessIcon from "@mui/icons-material/ExpandLess"; @@ -73,7 +73,7 @@ const FilesTree = (props) => { )} - } defaultExpandIcon={} @@ -126,7 +126,7 @@ const FilesTree = (props) => { {/* */} - + ); }; From 8110271e3524ef4fcf183f926f2a666ee056271e Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Fri, 26 Jul 2024 14:31:23 +0100 Subject: [PATCH 20/24] test(github-action): add OSM client id, secret, secret key --- .github/workflows/backend_build.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index ecbc3337..900ba2b0 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -119,6 +119,9 @@ jobs: - name: Run tests env: TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }} + OSM_CLIENT_ID: ${{ secrets.OSM_CLIENT_ID}} + OSM_CLIENT_SECRET: ${{ secrets.OSM_CLIENT_SECRET}} + OSM_SECRET_KEY: ${{ secrets.OSM_SECRET_KEY}} run: | cd backend/ From 4c3a4abfd03f8ae985179706c5ac4db3e8a9a79a Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Fri, 26 Jul 2024 19:13:35 +0100 Subject: [PATCH 21/24] test(github-action): updates env setting --- .github/workflows/backend_build.yml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 900ba2b0..22a47605 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -87,11 +87,8 @@ jobs: cd backend/ pip install -r requirements.txt - - name: Creating env + - name: Set environment variables run: | - cd backend/ - mv sample_env .env - sed -i 's/TESTING_TOKEN = .*/TESTING_TOKEN=${{ secrets.TESTING_TOKEN }}/' .env export DATABASE_URL=postgis://admin:password@localhost:5432/ai export RAMP_HOME="/home/runner/work/fAIr/fAIr" export TRAINING_WORKSPACE="/home/runner/work/fAIr/fAIr/backend/training" @@ -125,8 +122,13 @@ jobs: run: | cd backend/ - export TESTING_TOKEN=$TESTING_TOKEN + export OSM_CLIENT_ID=$OSM_CLIENT_ID + export OSM_CLIENT_SECRET=$OSM_CLIENT_SECRET + export OSM_SECRET_KEY=$OSM_SECRET_KEY + + echo "TESTING_TOKEN starts with: ${TESTING_TOKEN:0:6}..." 
+ python manage.py makemigrations python manage.py makemigrations core python manage.py makemigrations login From 567fbef1688c4b27f84e26a23068d1d20fca319f Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 30 Jul 2024 10:29:37 +0100 Subject: [PATCH 22/24] test(github-action): updates env variables --- .github/workflows/backend_build.yml | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 22a47605..48b5ae53 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -38,11 +38,6 @@ jobs: - name: Get my current working dir run: pwd - - name: Test env vars for python - env: - TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }} - run: python -c "import os; print(os.environ['TESTING_TOKEN'])" - - name: Clone Ramp run: git clone https://github.com/kshitijrajsharma/ramp-code-fAIr.git ramp-code @@ -87,11 +82,11 @@ jobs: cd backend/ pip install -r requirements.txt - - name: Set environment variables - run: | - export DATABASE_URL=postgis://admin:password@localhost:5432/ai - export RAMP_HOME="/home/runner/work/fAIr/fAIr" - export TRAINING_WORKSPACE="/home/runner/work/fAIr/fAIr/backend/training" + - name: Creating env + env: + DATABASE_URL: postgis://admin:password@localhost:5432/ai + RAMP_HOME: "/home/runner/work/fAIr/fAIr" + TRAINING_WORKSPACE: "/home/runner/work/fAIr/fAIr/backend/training" - name: Run celery worker run: | @@ -127,8 +122,6 @@ jobs: export OSM_CLIENT_SECRET=$OSM_CLIENT_SECRET export OSM_SECRET_KEY=$OSM_SECRET_KEY - echo "TESTING_TOKEN starts with: ${TESTING_TOKEN:0:6}..." - python manage.py makemigrations python manage.py makemigrations core python manage.py makemigrations login From 595639bd846a7f94740284baee7b19f19baac814 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 30 Jul 2024 10:38:42 +0100 Subject: [PATCH 23/24] test(github-action): updates env setting --- .github/workflows/backend_build.yml | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index 48b5ae53..a27c7fd7 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -38,6 +38,11 @@ jobs: - name: Get my current working dir run: pwd + - name: Test env vars for python + env: + TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }} + run: python -c "import os; print(os.environ['TESTING_TOKEN'])" + - name: Clone Ramp run: git clone https://github.com/kshitijrajsharma/ramp-code-fAIr.git ramp-code @@ -83,10 +88,16 @@ jobs: pip install -r requirements.txt - name: Creating env - env: - DATABASE_URL: postgis://admin:password@localhost:5432/ai - RAMP_HOME: "/home/runner/work/fAIr/fAIr" - TRAINING_WORKSPACE: "/home/runner/work/fAIr/fAIr/backend/training" + run: | + cd backend/ + mv sample_env .env + sed -i 's/TESTING_TOKEN = .*/TESTING_TOKEN=${{ secrets.TESTING_TOKEN }}/' .env + sed -i 's/OSM_CLIENT_ID = .*/OSM_CLIENT_ID=${{ secrets.OSM_CLIENT_ID }}/' .env + sed -i 's/OSM_CLIENT_SECRET = .*/OSM_CLIENT_SECRET=${{ secrets.OSM_CLIENT_SECRET }}/' .env + sed -i 's/OSM_SECRET_KEY = .*/OSM_SECRET_KEY=${{ secrets.OSM_SECRET_KEY }}/' .env + export DATABASE_URL=postgis://admin:password@localhost:5432/ai + export RAMP_HOME="/home/runner/work/fAIr/fAIr" + export TRAINING_WORKSPACE="/home/runner/work/fAIr/fAIr/backend/training" - name: Run celery worker run: 
| cd backend/ celery -A aiproject worker --loglevel=debug & From a1ab09c6dd7e84c975ec3dd63e1c664d20f7c607 Mon Sep 17 00:00:00 2001 From: Oluwanifemi Daramola <76186151+nifedara@users.noreply.github.com> Date: Tue, 30 Jul 2024 11:02:38 +0100 Subject: [PATCH 24/24] test(github-action): updates env variables --- .github/workflows/backend_build.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/backend_build.yml b/.github/workflows/backend_build.yml index a27c7fd7..02f2bb9b 100644 --- a/.github/workflows/backend_build.yml +++ b/.github/workflows/backend_build.yml @@ -87,17 +87,17 @@ jobs: cd backend/ pip install -r requirements.txt - - name: Creating env + - name: Setting env variables + env: + DATABASE_URL: postgis://admin:password@localhost:5432/ai + RAMP_HOME: "/home/runner/work/fAIr/fAIr" + TRAINING_WORKSPACE: "/home/runner/work/fAIr/fAIr/backend/training" run: | cd backend/ - mv sample_env .env - sed -i 's/TESTING_TOKEN = .*/TESTING_TOKEN=${{ secrets.TESTING_TOKEN }}/' .env - sed -i 's/OSM_CLIENT_ID = .*/OSM_CLIENT_ID=${{ secrets.OSM_CLIENT_ID }}/' .env - sed -i 's/OSM_CLIENT_SECRET = .*/OSM_CLIENT_SECRET=${{ secrets.OSM_CLIENT_SECRET }}/' .env - sed -i 's/OSM_SECRET_KEY = .*/OSM_SECRET_KEY=${{ secrets.OSM_SECRET_KEY }}/' .env - export DATABASE_URL=postgis://admin:password@localhost:5432/ai - export RAMP_HOME="/home/runner/work/fAIr/fAIr" - export TRAINING_WORKSPACE="/home/runner/work/fAIr/fAIr/backend/training" + + export DATABASE_URL=$DATABASE_URL + export RAMP_HOME=$RAMP_HOME + export TRAINING_WORKSPACE=$TRAINING_WORKSPACE - name: Run celery worker run: |