Feature: Upload Accepted Predictions #269

Merged · 10 commits · Sep 3, 2024
14 changes: 4 additions & 10 deletions .github/workflows/backend_build.yml
@@ -35,14 +35,6 @@ jobs:
with:
python-version: 3.8

- name: Get my current working dir
run: pwd

- name: Test env vars for python
env:
TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }}
run: python -c "import os; print(os.environ['TESTING_TOKEN'])"

- name: Clone Ramp
run: git clone https://github.com/kshitijrajsharma/ramp-code-fAIr.git ramp-code

@@ -92,7 +84,6 @@ jobs:
- name: Create env
run: |
cd backend/
mv sample_env .env
export DATABASE_URL=postgis://admin:password@localhost:5432/ai
export RAMP_HOME="/home/runner/work/fAIr/fAIr"
export TRAINING_WORKSPACE="/home/runner/work/fAIr/fAIr/backend/training"
@@ -120,6 +111,9 @@
- name: Run migrations
env:
TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }}
OSM_CLIENT_ID: ${{ secrets.OSM_CLIENT_ID }}
OSM_CLIENT_SECRET: ${{ secrets.OSM_CLIENT_SECRET }}
OSM_SECRET_KEY: ${{ secrets.OSM_SECRET_KEY }}
run: |
cd backend/
python manage.py makemigrations
@@ -134,7 +128,7 @@
TESTING_TOKEN: ${{ secrets.TESTING_TOKEN }}
OSM_CLIENT_ID: ${{ secrets.OSM_CLIENT_ID }}
OSM_CLIENT_SECRET: ${{ secrets.OSM_CLIENT_SECRET }}
OSM_SECRET_KEY: ""
OSM_SECRET_KEY: ${{ secrets.OSM_SECRET_KEY }}

run: |
cd backend/
62 changes: 62 additions & 0 deletions backend/Dockerfile.API
@@ -0,0 +1,62 @@
## docker build -t fair-api -f Dockerfile.API .

## For Development:

## docker run --env-file .env --rm -p 8000:8000 -v $(pwd):/app/code --name fair-api-container fair-api

## To reach a port on your host system from inside the container, use host.docker.internal or the host IP

FROM python:3.11-slim-bookworm AS build

RUN apt-get update && apt-get --no-install-recommends -y install \
build-essential \
libpq-dev \
libgdal-dev \
python3-dev \
&& apt-get clean && rm -rf /var/lib/apt/lists/*

ENV CPLUS_INCLUDE_PATH=/usr/include/gdal
ENV C_INCLUDE_PATH=/usr/include/gdal

WORKDIR /app

RUN python3 -m venv /app/venv

RUN /app/venv/bin/pip install --no-cache-dir --upgrade pip setuptools wheel

COPY api-requirements.txt ./

RUN /app/venv/bin/pip install --no-cache-dir -r api-requirements.txt

WORKDIR /app/code
COPY aiproject /app/code/aiproject
COPY core /app/code/core
COPY login /app/code/login
COPY manage.py /app/code/manage.py
COPY tests /app/code/tests

FROM python:3.11-slim-bookworm

RUN apt-get update && apt-get --no-install-recommends -y install \
libgdal-dev \
&& apt-get clean && rm -rf /var/lib/apt/lists/*

ENV CPLUS_INCLUDE_PATH=/usr/include/gdal
ENV C_INCLUDE_PATH=/usr/include/gdal

WORKDIR /app

COPY --from=build /app/venv /app/venv

COPY --from=build /app/code /app/code
ENV PATH="/app/venv/bin:$PATH"
COPY api-entrypoint.sh ./api-entrypoint-lock.sh
RUN chmod +x ./api-entrypoint-lock.sh

WORKDIR /app/code

EXPOSE 8000

ENTRYPOINT ["/app/api-entrypoint-lock.sh"]

CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
16 changes: 12 additions & 4 deletions backend/aiproject/settings.py
@@ -56,7 +56,9 @@
# Limiter
EPOCHS_LIMIT = env("EPOCHS_LIMIT", default=30)
BATCH_SIZE_LIMIT = env("BATCH_SIZE_LIMIT", default=8)
TRAINING_WORKSPACE_DOWNLOAD_LIMIT = env("TRAINING_WORKSPACE_DOWNLOAD_LIMIT", default=200)
TRAINING_WORKSPACE_DOWNLOAD_LIMIT = env(
"TRAINING_WORKSPACE_DOWNLOAD_LIMIT", default=200
)


# Application definition
@@ -98,7 +100,7 @@

CORS_ORIGIN_WHITELIST = ALLOWED_ORIGINS

CORS_ORIGIN_ALLOW_ALL = env("CORS_ORIGIN_ALLOW_ALL", default= False)
CORS_ORIGIN_ALLOW_ALL = env("CORS_ORIGIN_ALLOW_ALL", default=False)

REST_FRAMEWORK = {
"DEFAULT_SCHEMA_CLASS": "rest_framework.schemas.coreapi.AutoSchema",
@@ -205,10 +207,16 @@
}
}
# get ramp home and set it to environ
RAMP_HOME = env("RAMP_HOME")
os.environ["RAMP_HOME"] = RAMP_HOME
RAMP_HOME = env("RAMP_HOME",default=None)
if RAMP_HOME:
os.environ["RAMP_HOME"] = RAMP_HOME

# training workspace
TRAINING_WORKSPACE = env(
"TRAINING_WORKSPACE", default=os.path.join(os.getcwd(), "training")
)

ENABLE_PREDICTION_API = env("ENABLE_PREDICTION_API", default=False)


TEST_RUNNER = 'tests.test_runners.NoDestroyTestRunner'
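One note on the flags introduced here: with django-environ, `env("X", default=False)` returns the bool default only when the variable is unset; when the variable is set, `env()` returns a raw string, which is truthy even for "False". A minimal sketch of the safer `env.bool()` cast (assuming `env = environ.Env()` as in the rest of settings.py):

```python
import environ

env = environ.Env()

# Unset variable -> the default is returned unchanged (a real bool).
flag = env("ENABLE_PREDICTION_API", default=False)

# Set to the string "False" -> env() returns "False", which is truthy.
# env.bool() parses true/false/1/0 strings correctly instead:
flag = env.bool("ENABLE_PREDICTION_API", default=False)
```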
7 changes: 7 additions & 0 deletions backend/api-entrypoint.sh
@@ -0,0 +1,7 @@
#!/bin/bash
set -e
echo "Applying database migrations..."
python manage.py makemigrations login core
python manage.py migrate
echo "Starting Django server..."
exec "$@"
8 changes: 6 additions & 2 deletions backend/api-requirements.txt
@@ -1,6 +1,6 @@
django==4.1.4
# gdal==3.6.2
psycopg2
psycopg2==2.9.9
djangorestframework==3.14.0
djangorestframework-gis==1.0
dj-database-url==1.2.0
@@ -20,4 +20,8 @@ geojson2osm==0.0.1
osmconflator==0.0.11
orthogonalizer==0.0.4
fairpredictor==0.0.26
tflite-runtime==2.14.0

rasterio==1.3.8
numpy<2.0.0


12 changes: 12 additions & 0 deletions backend/core/models.py
@@ -2,6 +2,7 @@
from django.contrib.postgres.fields import ArrayField
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models

from login.models import OsmUser

# Create your models here.
@@ -133,3 +134,14 @@ class FeedbackLabel(models.Model):

geom = geomodels.PolygonField(srid=4326)
created_at = models.DateTimeField(auto_now_add=True)


class ApprovedPredictions(models.Model):
training = models.ForeignKey(Training, to_field="id", on_delete=models.DO_NOTHING)
config = models.JSONField(
null=True, blank=True
) ### Config stores the vectorization / zoom settings, to learn which options users rely on most of the time
geom = geomodels.GeometryField(
srid=4326
) ## A generic geometry field so point/line predictions can be supported later on
approved_at = models.DateTimeField(auto_now_add=True)
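A hypothetical usage sketch for the new model (the training id, config keys, and polygon are illustrative only):

```python
from django.contrib.gis.geos import GEOSGeometry

from core.models import ApprovedPredictions, Training

training = Training.objects.get(id=1)  # assumes this training exists
approved = ApprovedPredictions.objects.create(
    training=training,
    config={"zoom": 19, "tolerance": 0.5},  # free-form JSON, nullable
    geom=GEOSGeometry("POLYGON ((0 0, 0 1, 1 1, 0 0))", srid=4326),
)
print(approved.approved_at)  # populated automatically via auto_now_add
```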
10 changes: 9 additions & 1 deletion backend/core/serializers.py
@@ -1,10 +1,11 @@
from django.conf import settings
from login.models import OsmUser
from rest_framework import serializers
from rest_framework_gis.serializers import (
GeoFeatureModelSerializer, # used when serializing as GeoJSON
)

from login.models import OsmUser

from .models import *

# from .tasks import train_model
@@ -113,6 +114,13 @@ class Meta:
# read_only_fields = ("created_at", "osm_id")


class ApprovedPredictionsSerializer(GeoFeatureModelSerializer):
class Meta:
model = ApprovedPredictions
geo_field = "geom"
fields = "__all__"


class FeedbackLabelSerializer(GeoFeatureModelSerializer):
class Meta:
model = FeedbackLabel
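Since `ApprovedPredictionsSerializer` subclasses `GeoFeatureModelSerializer`, each record serializes as a GeoJSON Feature with `geom` as the geometry and the remaining fields as properties. Roughly (a sketch):

```python
from core.models import ApprovedPredictions
from core.serializers import ApprovedPredictionsSerializer

record = ApprovedPredictions.objects.latest("approved_at")
data = ApprovedPredictionsSerializer(record).data
# data looks roughly like:
# {"id": 1, "type": "Feature",
#  "geometry": {"type": "Polygon", "coordinates": [...]},
#  "properties": {"training": 1, "config": {...}, "approved_at": "..."}}
```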
18 changes: 10 additions & 8 deletions backend/core/tasks.py
@@ -7,8 +7,13 @@
import traceback
from shutil import rmtree


from celery import shared_task
from django.conf import settings
from django.contrib.gis.db.models.aggregates import Extent
from django.contrib.gis.geos import GEOSGeometry
from django.shortcuts import get_object_or_404
from django.utils import timezone

from core.models import AOI, Feedback, FeedbackAOI, FeedbackLabel, Label, Training
from core.serializers import (
AOISerializer,
@@ -18,12 +23,6 @@
LabelFileSerializer,
)
from core.utils import bbox, is_dir_empty
from django.conf import settings
from django.contrib.gis.db.models.aggregates import Extent
from django.contrib.gis.geos import GEOSGeometry
from django.shortcuts import get_object_or_404
from django.utils import timezone
from predictor import download_imagery, get_start_end_download_coords

logger = logging.getLogger(__name__)

@@ -81,17 +80,20 @@ def train_model(
input_contact_spacing=8,
input_boundary_width=3,
):
#importing them here so that it won't be necessary when sending tasks ( api only)
# imported here so they are not required when only sending tasks (API-only)
import hot_fair_utilities
import ramp.utils
import tensorflow as tf
from hot_fair_utilities import preprocess, train
from hot_fair_utilities.training import run_feedback
from predictor import download_imagery, get_start_end_download_coords

training_instance = get_object_or_404(Training, id=training_id)
training_instance.status = "RUNNING"
training_instance.started_at = timezone.now()
training_instance.save()
if settings.RAMP_HOME is None:
raise ValueError("Ramp Home is not configured")

try:
## -----------IMAGE DOWNLOADER---------
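Moving the `predictor` import (and keeping tensorflow/ramp) inside `train_model` is the deferred-import pattern: the API process can enqueue the task without the heavy ML stack installed, since the imports resolve only on the worker. A minimal sketch of the idea:

```python
from celery import shared_task


@shared_task
def heavy_task(n):
    # Resolved only when the worker executes the task, never on the
    # lightweight API process that merely enqueues it.
    import tensorflow as tf

    return int(tf.reduce_sum(tf.range(n)).numpy())


# The API side only needs Celery installed: heavy_task.delay(10)
```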
13 changes: 9 additions & 4 deletions backend/core/urls.py
@@ -1,11 +1,12 @@
from django.conf import settings
from django.conf.urls import include
from django.urls import path
from rest_framework import routers

# import the views into this module
from .views import (
from .views import ( # APIStatus,
AOIViewSet,
# APIStatus,
ApprovedPredictionsViewSet,
ConflateGeojson,
DatasetViewSet,
FeedbackAOIViewset,
@@ -16,7 +17,6 @@
GenerateGpxView,
LabelViewSet,
ModelViewSet,
PredictionView,
RawdataApiAOIView,
RawdataApiFeedbackView,
TrainingViewSet,
@@ -28,11 +28,15 @@
run_task_status,
)

if settings.ENABLE_PREDICTION_API:
from .views import PredictionView

# CRUD Block
router = routers.DefaultRouter()
router.register(r"dataset", DatasetViewSet)
router.register(r"aoi", AOIViewSet)
router.register(r"label", LabelViewSet)
router.register(r"approved-prediction", ApprovedPredictionsViewSet)
router.register(r"training", TrainingViewSet)
router.register(r"model", ModelViewSet)
router.register(r"feedback", FeedbackViewset)
@@ -50,7 +54,6 @@
# path("download/<int:dataset_id>/", download_training_data),
path("training/status/<str:run_id>/", run_task_status),
path("training/publish/<int:training_id>/", publish_training),
path("prediction/", PredictionView.as_view()),
path("feedback/training/submit/", FeedbackView.as_view()),
# path("status/", APIStatus.as_view()),
path("geojson2osm/", geojson2osmconverter, name="geojson2osmconverter"),
@@ -65,3 +68,5 @@
),
path("workspace/<path:lookup_dir>/", TrainingWorkspaceView.as_view()),
]
if settings.ENABLE_PREDICTION_API:
urlpatterns.append(path("prediction/", PredictionView.as_view()))
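With the router registration above, the new viewset is reachable at `approved-prediction/` under whatever prefix includes `core.urls`. A hypothetical client call (host, `/api/v1/` prefix, and auth header name are assumptions):

```python
import requests

payload = {
    "type": "Feature",
    "geometry": {
        "type": "Polygon",
        "coordinates": [[[0, 0], [0, 1], [1, 1], [0, 0]]],
    },
    "properties": {"training": 1, "config": {"zoom": 19}},
}
resp = requests.post(
    "http://localhost:8000/api/v1/approved-prediction/",  # prefix assumed
    json=payload,
    headers={"access-token": "<osm-token>"},  # header name assumed
)
print(resp.status_code)
```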
12 changes: 7 additions & 5 deletions backend/core/utils.py
@@ -63,7 +63,11 @@ def __init__(self, BASE_API_URL):
self.BASE_API_URL = BASE_API_URL

def request_snapshot(self, geometry):
headers = {"accept": "application/json", "Content-Type": "application/json"}
headers = {
"accept": "application/json",
"Content-Type": "application/json",
"Referer": "fAIr",
}
# Lets start with buildings for now
payload = {
"geometry": json.loads(geometry),
@@ -124,9 +128,7 @@ def process_rawdata(file_download_url, aoi_id, feedback=False):
"""This will create temp directory , Downloads file from URL provided,
Unzips it Finds a geojson file , Process it and finally removes
processed Geojson file and downloaded zip file from Directory"""
headers = {
'Referer': 'https://fair-dev.hotosm.org/' # TODO : Use request uri
}
headers = {"Referer": "https://fair-dev.hotosm.org/"} # TODO : Use request uri
r = requests.get(file_download_url, headers=headers)
# Check whether the export path exists or not
path = "temp/"
@@ -250,7 +252,7 @@ def process_geojson(geojson_file_path, aoi_id, feedback=False):
) # leave one cpu free always
if feedback:
FeedbackLabel.objects.filter(feedback_aoi__id=aoi_id).delete()
else :
else:
Label.objects.filter(aoi__id=aoi_id).delete()
# max_workers = os.cpu_count() # get total cpu count available on the

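Both hunks above add a `Referer` header so the raw-data API can identify fAIr's traffic. A sketch of roughly what `request_snapshot` now sends (the base URL and endpoint path are placeholders; in the code they come from `BASE_API_URL`):

```python
import json

import requests

BASE_API_URL = "https://api-prod.raw-data.hotosm.org/v1"  # placeholder
headers = {
    "accept": "application/json",
    "Content-Type": "application/json",
    "Referer": "fAIr",
}
payload = {"geometry": json.loads('{"type": "Point", "coordinates": [0, 0]}')}
resp = requests.post(f"{BASE_API_URL}/snapshot/", json=payload, headers=headers)
print(resp.json())
```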