From 3f84952829eccfe2d643ad7ea134f50f54a036c6 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 18 Sep 2024 09:56:20 +0000 Subject: [PATCH 1/3] first push, github actions --- .github/workflows/run-unit-tests.yml | 41 ++ .gitignore | 16 + data/vector/config.json | 20 + docker-compose.yml | 105 ++++ docker/backend/Dockerfile | 33 + docker/backend/fastapi/main.py | 762 ++++++++++++++++++++++++ docker/backend/fastapi/requirements.txt | 11 + docker/backend/fastapi/unittests.py | 230 +++++++ docker/frontend/Dockerfile | 8 + docker/frontend/index.html | 721 ++++++++++++++++++++++ docker/tileserver/Dockerfile | 44 ++ docs/architecture.md | 0 12 files changed, 1991 insertions(+) create mode 100644 .github/workflows/run-unit-tests.yml create mode 100644 data/vector/config.json create mode 100644 docker-compose.yml create mode 100644 docker/backend/Dockerfile create mode 100644 docker/backend/fastapi/main.py create mode 100644 docker/backend/fastapi/requirements.txt create mode 100644 docker/backend/fastapi/unittests.py create mode 100644 docker/frontend/Dockerfile create mode 100644 docker/frontend/index.html create mode 100644 docker/tileserver/Dockerfile create mode 100644 docs/architecture.md diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml new file mode 100644 index 0000000..ce4ef25 --- /dev/null +++ b/.github/workflows/run-unit-tests.yml @@ -0,0 +1,41 @@ +# .github/workflows/run-unit-tests.yml + +name: Run Unit Tests + +# Trigger the workflow on push or pull request to the main branch +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + # Step 1: Checkout the repository + - name: Checkout Repository + uses: actions/checkout@v3 + + # Step 2: Set up Python environment + - name: Set Up Python 3.8 + uses: actions/setup-python@v4 + with: + python-version: '3.8' + + # Step 3: Install dependencies + - name: Install Dependencies + run: | + # Navigate to the backend directory + cd docker/backend/fastapi/ + # Upgrade pip + python -m pip install --upgrade pip + # Install required packages + pip install -r requirements.txt + + # Step 4: Run Unit Tests + - name: Run Unit Tests + run: | + cd docker/backend/fastapi/ + pytest unittests.py diff --git a/.gitignore b/.gitignore index 82f9275..2a0b30a 100644 --- a/.gitignore +++ b/.gitignore @@ -160,3 +160,19 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
#.idea/ + +*.parquet +*.gpkg +*.mbtiles +*.tif + +# OS +*.DS_Store +Thumbs.db + +# IDE +.vscode/ + +# Virtual environments +*.venv +.env \ No newline at end of file diff --git a/data/vector/config.json b/data/vector/config.json new file mode 100644 index 0000000..4036398 --- /dev/null +++ b/data/vector/config.json @@ -0,0 +1,20 @@ +{ + "options": { + "paths": { + "root": "/usr/lib/node_modules/tileserver-gl/node_modules/tileserver-gl-styles", + "fonts": "fonts", + "styles": "styles", + "mbtiles": "/data", + "pmtiles": "/data" + } + }, + "styles": {}, + "data": { + "buildings": { + "mbtiles": "buildings.mbtiles" + }, + "parcel": { + "mbtiles": "parcel_intersecting_grid.mbtiles" + } + } +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..f8517f3 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,105 @@ +version: '3.8' + +services: + + dozzle: + image: amir20/dozzle:latest + deploy: + mode: global + resources: + limits: + cpus: "0.25" + memory: 1G + restart_policy: + condition: on-failure + max_attempts: 2 + environment: + - DOZZLE_MODE=swarm + - DOZZLE_ADDR=:9200 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + ports: + - 9200:9200 + networks: + - appconnector + + + frontend: + image: credium-fe:latest + deploy: + replicas: 1 + resources: + limits: + cpus: "0.25" + memory: 1G + restart_policy: + condition: on-failure + max_attempts: 2 + networks: + - appconnector + ports: + - "8081:80" + + backend: + image: credium-be:latest + deploy: + replicas: 1 + resources: + limits: + cpus: "1" + memory: 2G + restart_policy: + condition: on-failure + max_attempts: 2 + volumes: + - ${PWD}/data:/var/task/fastapi/data + - ${PWD}/db:/var/task/fastapi/db + networks: + - appconnector + ports: + - "8080:8080" + + raster-titiler: + image: ghcr.io/developmentseed/titiler:latest + environment: + - PORT=8000 + - WORKERS_PER_CORE=1 + - TITILER_API_CORS_ALLOW_METHODS='GET,POST' + deploy: + replicas: 1 + resources: + limits: + cpus: "1" + memory: 1G + restart_policy: + condition: on-failure + max_attempts: 2 + volumes: + - ${PWD}/data/raster:/data + ports: + - "8000:8000" + networks: + - appconnector + + vector-tilserver: + image: vector-tileserver:latest + deploy: + replicas: 1 + resources: + limits: + cpus: "1" + memory: 1G + restart_policy: + condition: on-failure + max_attempts: 2 + volumes: + - ${PWD}/data/vector:/data + ports: + - "9100:9100" + networks: + - appconnector + +networks: + appconnector: + driver: overlay + attachable: true diff --git a/docker/backend/Dockerfile b/docker/backend/Dockerfile new file mode 100644 index 0000000..9a00aa9 --- /dev/null +++ b/docker/backend/Dockerfile @@ -0,0 +1,33 @@ +FROM ghcr.io/osgeo/gdal:ubuntu-small-latest + +ENV TASK_ROOT=/var/task +ENV PATH="$TASK_ROOT/bin:$PATH" + +RUN apt-get update && \ + apt-get install -y build-essential gcc-aarch64-linux-gnu ca-certificates curl gnupg vim libasound2-dev libatk1.0-0 libc6 libcairo2 libcups2 \ + libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 \ + libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 \ + libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libnss3 \ + lsb-release xdg-utils wget ca-certificates libclang-dev libgc1 libobjc4 libpq5 libxcb-xkb1 \ + libxkbcommon-x11-0 libgbm-dev python3-pip python3-venv && \ + python3 -m venv $TASK_ROOT && \ + $TASK_ROOT/bin/pip install --upgrade pip + 
+COPY ./fastapi/requirements.txt ${TASK_ROOT}/fastapi/requirements.txt + +RUN $TASK_ROOT/bin/pip install --no-cache-dir -r ${TASK_ROOT}/fastapi/requirements.txt --break-system-packages + +WORKDIR ${TASK_ROOT} + +RUN mkdir ${TASK_ROOT}/fastapi/data ${TASK_ROOT}/fastapi/db + +RUN groupadd -r appgroup && useradd -r -g appgroup -d ${TASK_ROOT} -s /bin/bash appuser && \ + chown -R appuser:appgroup ${TASK_ROOT} + +USER appuser +EXPOSE 8080 + +COPY ./fastapi/main.py ${TASK_ROOT}/fastapi/main.py +COPY ./fastapi/unittests.py ${TASK_ROOT}/fastapi/unittests.py + +CMD ["bash", "-c", "PYTHONPATH=${TASK_ROOT}/fastapi uvicorn main:app --host 0.0.0.0 --port 8080 --log-level debug --timeout-keep-alive 300"] diff --git a/docker/backend/fastapi/main.py b/docker/backend/fastapi/main.py new file mode 100644 index 0000000..e3fd8bb --- /dev/null +++ b/docker/backend/fastapi/main.py @@ -0,0 +1,762 @@ +from typing import List, Dict, Optional +from fastapi import FastAPI, HTTPException, Depends +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel, Field +import rasterio +from rasterio.mask import mask +import os +import numpy as np +from shapely.geometry import shape, Polygon +import geopandas as gpd +import pygeohash as pgh +import math +from multiprocessing import Pool, cpu_count +import logging +from fastapi.openapi.docs import get_swagger_ui_html + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# --------------------------- +# Pydantic Models +# --------------------------- + +class GeoClipRequest(BaseModel): + geojson: dict = Field(..., example={ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 9.176925637402405, + 48.773048426764745 + ], + [ + 9.177896941776226, + 48.77325189677234 + ], + [ + 9.177966320660289, + 48.77301184779935 + ], + [ + 9.177282938654315, + 48.772611763627964 + ], + [ + 9.176842382742109, + 48.772915827889165 + ], + [ + 9.176925637402405, + 48.773048426764745 + ] + ] + ] + }, + "properties": {} + } + ] + }) + tif_url: str = Field(..., example="cog_merged_slope.tif") + +class GeoInsights(BaseModel): + geojson: dict = Field(..., example={ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 9.176925637402405, + 48.773048426764745 + ], + [ + 9.177896941776226, + 48.77325189677234 + ], + [ + 9.177966320660289, + 48.77301184779935 + ], + [ + 9.177282938654315, + 48.772611763627964 + ], + [ + 9.176842382742109, + 48.772915827889165 + ], + [ + 9.176925637402405, + 48.773048426764745 + ] + ] + ] + }, + "properties": {} + } + ] + }) + +class RasterStatsResponse(BaseModel): + min: Optional[float] + max: Optional[float] + +class HealthResponse(BaseModel): + status: str + +class BuildingReport(BaseModel): + building_id: str + zonal_variation: dict + zonal_variation_text: dict + neighborhood_understanding: dict + neighborhood_understanding_text: dict + +class StatsResponse(BaseModel): + building_reports: List[BuildingReport] + + +# --------------------------- +# Services +# --------------------------- + +from typing import List, Dict, Optional +from fastapi import FastAPI, HTTPException, Depends +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel, Field +import rasterio +from rasterio.mask import mask +import os +import numpy as np +from shapely.geometry import shape, Polygon +import geopandas as gpd +import 
pygeohash as pgh +import math +from multiprocessing import Pool, cpu_count +import logging + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# --------------------------- +# Services +# --------------------------- + +class RasterService: + def __init__(self, raster_paths: Dict[str, str]): + self.raster_paths = raster_paths + logger.info("RasterService initialized with raster paths.") + + def get_raster_stats(self, raster_key: str, zone_geom: Polygon) -> Optional[float]: + logger.info(f"Starting get_raster_stats for raster_key: {raster_key}") + raster_path = self.raster_paths.get(raster_key) + if not raster_path: + logger.error(f"No raster path found for key: {raster_key}") + return None + + try: + logger.info(f"Opening raster file: {raster_path}") + with rasterio.open(raster_path) as src: + logger.info(f"Masking raster with provided geometry.") + out_image, _ = mask(src, [zone_geom], crop=True, all_touched=True) + data = out_image + + if src.nodata is not None: + logger.info(f"Removing nodata values from raster data.") + data = data[data != src.nodata] + if data.size == 0: + logger.warning(f"No data found in raster {raster_path} for zone {zone_geom}.") + return np.nan + mean_val = float(data.mean()) + logger.info(f"Computed mean value for raster {raster_key}: {mean_val}") + return mean_val + except Exception as e: + logger.error(f"Error processing raster {raster_path} for zone {zone_geom}: {e}") + return np.nan + + def clip_raster_stats(self, geojson: dict, tif_path: str) -> Dict[str, Optional[float]]: + logger.info(f"Starting clip_raster_stats for TIFF path: {tif_path}") + try: + logger.info(f"Opening raster file: {tif_path}") + with rasterio.open(tif_path) as src: + geometries = [shape(feature['geometry']) for feature in geojson['features']] + logger.info(f"Masking raster with provided GeoJSON geometries.") + clipped_image, _ = mask(src, geometries, crop=True, all_touched=True) + + if clipped_image.size == 0: + logger.warning("Clipped image has no data.") + return {"min": None, "max": None} + + # Remove nodata values + if src.nodata is not None: + logger.info("Removing nodata values from clipped raster data.") + clipped_image = clipped_image[clipped_image != src.nodata] + + if clipped_image.size == 0: + logger.warning("Clipped image has no valid data after masking.") + return {"min": None, "max": None} + + min_val = float(np.min(clipped_image)) + max_val = float(np.max(clipped_image)) + + logger.info(f"Raster stats - min: {min_val}, max: {max_val}") + + return {"min": min_val, "max": max_val} + except Exception as e: + logger.error(f"Error processing raster {tif_path}: {e}") + raise + +class GeohashService: + def get_geohash_bbox(self, geohash: str) -> Polygon: + logger.info(f"Starting get_geohash_bbox for geohash: {geohash}") + try: + lat_min, lon_min, lat_max, lon_max = pgh.decode_exactly(geohash)[:4] + bbox = Polygon([ + (lon_min, lat_min), + (lon_max, lat_min), + (lon_max, lat_max), + (lon_min, lat_max), + (lon_min, lat_min) + ]) + logger.info(f"Generated bounding box for geohash {geohash}.") + return bbox + except Exception as e: + logger.error(f"Error decoding geohash {geohash}: {e}") + return Polygon() + + def geohash_grid_covering_polygon(self, polygon: Polygon, resolution: int) -> List[str]: + logger.info(f"Starting geohash_grid_covering_polygon with resolution: {resolution}") + try: + minx, miny, maxx, maxy = polygon.bounds + lat_steps = 100 # Adjust based on desired granularity + lon_steps = 100 + + latitudes = 
np.linspace(miny, maxy, lat_steps) + longitudes = np.linspace(minx, maxx, lon_steps) + + geohashes = set() + logger.info("Generating geohash grid covering the polygon.") + for lat in latitudes: + for lon in longitudes: + geohash = pgh.encode(lat, lon, precision=resolution) + geohashes.add(geohash) + + logger.info(f"Generated {len(geohashes)} geohashes covering the polygon.") + return list(geohashes) + except Exception as e: + logger.error(f"Error generating geohash grid: {e}") + return [] + + def filter_intersecting_geohashes(self, polygon: Polygon, geohashes: List[str]) -> List[str]: + logger.info("Starting filter_intersecting_geohashes.") + intersecting_geohashes = [] + for geohash in geohashes: + logger.info(f"Checking intersection for geohash: {geohash}") + geohash_polygon = self.get_geohash_bbox(geohash) + if polygon.intersects(geohash_polygon): + logger.info(f"Geohash {geohash} intersects with the polygon.") + intersecting_geohashes.append(geohash) + else: + logger.info(f"Geohash {geohash} does not intersect with the polygon.") + logger.info(f"Total intersecting geohashes: {len(intersecting_geohashes)}") + return intersecting_geohashes + +class InterpretationService: + def interpret_slope(self, slope_value: float) -> str: + logger.info(f"Interpreting slope value: {slope_value}") + if slope_value < 10: + return "gentle" + elif 10 <= slope_value < 30: + return "moderate" + else: + return "steep" + + def interpret_aspect(self, aspect_value: float) -> str: + logger.info(f"Interpreting aspect value: {aspect_value}") + if 0 <= aspect_value < 45 or 315 <= aspect_value <= 360: + return "north" + elif 45 <= aspect_value < 135: + return "east" + elif 135 <= aspect_value < 225: + return "south" + elif 225 <= aspect_value < 315: + return "west" + else: + return "unknown" + + def interpret_solar_potential(self, solar_value: float, solar_min: float, solar_max: float) -> str: + logger.info(f"Interpreting solar potential value: {solar_value} with min: {solar_min}, max: {solar_max}") + if solar_value is None or np.isnan(solar_value): + return "unknown" + if solar_value < solar_min + (solar_max - solar_min) * 0.33: + return "lower end" + elif solar_min + (solar_max - solar_min) * 0.33 <= solar_value < solar_min + (solar_max - solar_min) * 0.66: + return "middle range" + else: + return "higher end" + + def determine_aspect_relation(self, direction: str, aspect_value: float) -> str: + logger.info(f"Determining aspect relation for direction: {direction}, aspect_value: {aspect_value}") + towards_aspect = { + 'north': 180, + 'south': 0, # or 360 + 'east': 270, + 'west': 90 + } + + threshold = 45 # degrees + expected = towards_aspect.get(direction, None) + + if expected is None: + logger.warning(f"Unknown direction: {direction}") + return "unknown relation" + + lower = (expected - threshold) % 360 + upper = (expected + threshold) % 360 + + if lower < upper: + if lower <= aspect_value < upper: + return 'towards' + else: + return 'away' + else: + if aspect_value >= lower or aspect_value < upper: + return 'towards' + else: + return 'away' + +class ReportService: + def __init__(self, interpretation_service: InterpretationService): + self.interpretation_service = interpretation_service + logger.info("ReportService initialized with InterpretationService.") + + def generate_textual_report(self, zonal_variation: dict, raster_stats: dict) -> dict: + logger.info("Generating textual report for zonal variation.") + descriptions = {} + solar_min, solar_max = raster_stats.get('solar', (0, 1)) # Avoid division by zero + + 
for zone, values in zonal_variation.items(): + slope_value = np.round(values.get('slope', np.nan), 2) + aspect_value = np.round(values.get('aspect', np.nan), 2) + solar_value = np.round(values.get('solar', np.nan), 2) + + logger.info(f"Processing zone: {zone} with slope: {slope_value}, aspect: {aspect_value}, solar: {solar_value}") + + slope_description = self.interpretation_service.interpret_slope(slope_value) + aspect_description = self.interpretation_service.interpret_aspect(aspect_value) + + if solar_value is not None and not np.isnan(solar_value): + solar_description = self.interpretation_service.interpret_solar_potential(solar_value, solar_min, solar_max) + solar_text = f"The solar potential is in the {solar_description}. Value is {solar_value}." + else: + solar_text = "The solar potential data is unavailable." + + descriptions[zone] = { + 'slope': f"The slope is {slope_description}. Value is {slope_value}.", + 'aspect': f"The aspect is facing {aspect_description}. Value is {aspect_value}.", + 'solar': solar_text + } + logger.info(f"Generated textual description for zone {zone}.") + + logger.info("Completed generating textual report for zonal variation.") + return descriptions + + def generate_neighborhood_report(self, neighborhood_stats: dict, raster_stats: dict) -> dict: + logger.info("Generating neighborhood report.") + descriptions = {} + solar_min, solar_max = raster_stats.get('solar', (0, 1)) # Avoid division by zero + + for direction, stats in neighborhood_stats.items(): + slope_value = stats.get('slope', None) + aspect_value = stats.get('aspect', None) + + logger.info(f"Processing neighborhood direction: {direction} with slope: {slope_value}, aspect: {aspect_value}") + + if slope_value is not None and not np.isnan(slope_value): + slope_description = self.interpretation_service.interpret_slope(slope_value) + else: + slope_description = "unknown slope" + + if aspect_value is not None and not np.isnan(aspect_value): + aspect_direction = self.interpretation_service.interpret_aspect(aspect_value) + relation = self.interpretation_service.determine_aspect_relation(direction, aspect_value) + if relation == 'towards': + relation_text = "facing towards the building." + elif relation == 'away': + relation_text = "facing away from the building." + else: + relation_text = "facing an unknown direction relative to the building." + else: + aspect_direction = "unknown aspect" + relation_text = "unknown relation to the building." 
+ + descriptions[direction] = { + 'slope': f"The terrain to the {direction} has a {slope_description} slope.", + 'aspect': f"It is facing {aspect_direction} and is {relation_text}" + } + logger.info(f"Generated neighborhood description for direction {direction}.") + + logger.info("Completed generating neighborhood report.") + return descriptions + +class BuildingService: + def __init__(self, raster_service: RasterService, geohash_service: GeohashService, report_service: ReportService, db_path: str): + self.raster_service = raster_service + self.geohash_service = geohash_service + self.report_service = report_service + self.db_path = db_path + logger.info("BuildingService initialized with RasterService, GeohashService, and ReportService.") + + def get_raster_stats_for_zone(self, raster_key: str, zone_geom: Polygon) -> Optional[float]: + logger.info(f"Retrieving raster stats for key: {raster_key}") + stats = self.raster_service.get_raster_stats(raster_key, zone_geom) + if stats is not None: + logger.info(f"Retrieved raster stats for {raster_key}: {stats}") + else: + logger.warning(f"Raster stats for {raster_key} could not be retrieved.") + return stats + + def calculate_zonal_variation(self, building_geom: Polygon) -> dict: + logger.info("Calculating zonal variation for building geometry.") + minx, miny, maxx, maxy = building_geom.bounds + width = maxx - minx + height = maxy - miny + + zone_percentage = 0.4 # Adjust this value to change the size of the zones + + zones = { + 'north': building_geom.intersection(Polygon([ + (minx, maxy - height * zone_percentage), + (maxx, maxy - height * zone_percentage), + (maxx, maxy), + (minx, maxy) + ])), + 'south': building_geom.intersection(Polygon([ + (minx, miny), + (maxx, miny), + (maxx, miny + height * zone_percentage), + (minx, miny + height * zone_percentage) + ])), + 'east': building_geom.intersection(Polygon([ + (maxx - width * zone_percentage, miny), + (maxx, miny), + (maxx, maxy), + (maxx - width * zone_percentage, maxy) + ])), + 'west': building_geom.intersection(Polygon([ + (minx, miny), + (minx + width * zone_percentage, miny), + (minx + width * zone_percentage, maxy), + (minx, maxy) + ])) + } + + logger.info("Generated zonal geometries for north, south, east, and west.") + + zonal_stats = {} + for zone_name, zone_geom in zones.items(): + if not zone_geom.is_empty: + logger.info(f"Calculating raster stats for zone: {zone_name}") + zonal_stats[zone_name] = { + 'slope': self.get_raster_stats_for_zone('slope', zone_geom), + 'aspect': self.get_raster_stats_for_zone('aspect', zone_geom), + 'solar': self.get_raster_stats_for_zone('solar', zone_geom) + } + else: + logger.info(f"No geometry found for zone: {zone_name}. 
Setting stats to None.") + zonal_stats[zone_name] = { + 'slope': None, + 'aspect': None, + 'solar': None + } + + logger.info("Completed calculating zonal variation.") + return zonal_stats + + def calculate_neighborhood_analysis(self, building_geom: Polygon) -> dict: + logger.info("Starting neighborhood analysis for building geometry.") + buffer_distance = 0.0001 # Adjust this value as needed + buffered_polygon = building_geom.buffer(buffer_distance).simplify(0.5) + buffer_ring = buffered_polygon.difference(building_geom) + + minx, miny, maxx, maxy = buffer_ring.bounds + width = maxx - minx + height = maxy - miny + + direction_percentage = 0.4 + + directions = { + 'north': buffer_ring.intersection(Polygon([ + (minx, maxy - height * direction_percentage), + (maxx, maxy - height * direction_percentage), + (maxx, maxy), + (minx, maxy) + ])), + 'south': buffer_ring.intersection(Polygon([ + (minx, miny), + (maxx, miny), + (maxx, miny + height * direction_percentage), + (minx, miny + height * direction_percentage) + ])), + 'east': buffer_ring.intersection(Polygon([ + (maxx - width * direction_percentage, miny), + (maxx, miny), + (maxx, maxy), + (maxx - width * direction_percentage, maxy) + ])), + 'west': buffer_ring.intersection(Polygon([ + (minx, miny), + (minx + width * direction_percentage, miny), + (minx + width * direction_percentage, maxy), + (minx, maxy) + ])) + } + + logger.info("Generated neighborhood geometries for north, south, east, and west.") + + neighborhood_stats = {} + for direction, direction_geom in directions.items(): + if not direction_geom.is_empty: + logger.info(f"Calculating raster stats for neighborhood direction: {direction}") + neighborhood_stats[direction] = { + 'slope': self.get_raster_stats_for_zone('slope', direction_geom), + 'aspect': self.get_raster_stats_for_zone('aspect', direction_geom) + } + else: + logger.info(f"No geometry found for neighborhood direction: {direction}. Setting stats to None.") + neighborhood_stats[direction] = { + 'slope': None, + 'aspect': None + } + + logger.info("Completed neighborhood analysis.") + return neighborhood_stats + + def generate_textual_report(self, zonal_variation: dict, raster_stats: dict) -> dict: + logger.info("Generating textual report for building.") + return self.report_service.generate_textual_report(zonal_variation, raster_stats) + + def generate_neighborhood_report(self, neighborhood_stats: dict, raster_stats: dict) -> dict: + logger.info("Generating textual neighborhood report for building.") + return self.report_service.generate_neighborhood_report(neighborhood_stats, raster_stats) + + def process_building(self, building: gpd.GeoSeries, input_geom: Polygon, raster_stats: dict) -> Optional[dict]: + building_id = building.get('gmlid', 'unknown') + logger.info(f"Processing building with ID: {building_id}") + + building_geom = building['geometry'] + + if not building_geom.intersects(input_geom): + logger.info(f"Building ID {building_id} does not intersect with input geometry. Skipping.") + return None + + logger.info(f"Building ID {building_id} intersects with input geometry. Calculating zonal variation.") + zonal_variation = self.calculate_zonal_variation(building_geom) + zonal_text = self.generate_textual_report(zonal_variation, raster_stats) + + logger.info(f"Building ID {building_id}: Completed zonal variation report. 
Starting neighborhood analysis.") + neighborhood_understanding = self.calculate_neighborhood_analysis(building_geom) + neighborhood_text = self.generate_neighborhood_report(neighborhood_understanding, raster_stats) + + logger.info(f"Building ID {building_id}: Completed neighborhood analysis.") + + report = { + 'building_id': building_id, + 'zonal_variation': zonal_variation, + 'zonal_variation_text': zonal_text, + 'neighborhood_understanding': neighborhood_understanding, + 'neighborhood_understanding_text': neighborhood_text + } + + logger.info(f"Building ID {building_id}: Report generation complete.") + return report + + def process_geohash(self, geohash: str, input_geom: Polygon, raster_stats: dict) -> List[dict]: + logger.info(f"Processing geohash: {geohash}") + building_path = os.path.join(self.db_path, f"{geohash}/buildings.parquet") + + if not os.path.exists(building_path): + logger.warning(f"Building path {building_path} does not exist. Skipping geohash {geohash}.") + return [] + + try: + logger.info(f"Reading buildings from {building_path}") + building_df = gpd.read_parquet(building_path).sjoin( + gpd.GeoDataFrame(geometry=[input_geom], crs='EPSG:4326'), + how='inner', + predicate='intersects' + ) + building_df = building_df.drop_duplicates(subset='geometry') + logger.info(f"Found {building_df.shape[0]} buildings intersecting with input geometry in geohash {geohash}.") + + if building_df.shape[0] > 10: + logger.info(f"Sampling 10 buildings from geohash {geohash} for processing.") + building_df = building_df.sample(10) + + except Exception as e: + logger.error(f"Error reading/parsing buildings for geohash {geohash}: {e}") + return [] + + if building_df.empty: + logger.info(f"No intersecting buildings found in geohash {geohash}.") + return [] + + args = [(building, input_geom, raster_stats) for _, building in building_df.iterrows()] + logger.info(f"Starting multiprocessing pool with {cpu_count()} workers for geohash {geohash}.") + with Pool(cpu_count()) as pool: + building_reports = pool.starmap(self.process_building, args) + + logger.info(f"Completed processing buildings for geohash {geohash}.") + return [report for report in building_reports if report] + + def generate_building_reports(self, geojson: dict, raster_stats: dict, db_path: Optional[str] = None) -> List[dict]: + logger.info("Generating building reports from GeoJSON input.") + try: + input_gdf = gpd.GeoDataFrame.from_features(geojson["features"]) + input_gdf.set_crs('EPSG:4326', inplace=True) + input_geom = input_gdf.geometry.iloc[0] + logger.info("Parsed GeoJSON input successfully.") + except Exception as e: + logger.error(f"Error parsing GeoJSON input: {e}") + return [] + + geohashes = self.geohash_service.geohash_grid_covering_polygon(input_geom, resolution=6) + logger.info(f"Found {len(geohashes)} geohashes covering the input polygon.") + + building_reports = [] + for geohash in geohashes: + logger.info(f"Processing buildings in geohash: {geohash}") + reports = self.process_geohash(geohash, input_geom, raster_stats) + building_reports.extend(reports) + logger.info(f"Accumulated {len(building_reports)} building reports so far.") + + logger.info(f"Generated reports for {len(building_reports)} buildings in total.") + return building_reports + +class ReportCleaner: + @staticmethod + def remove_nan_values(data): + """ Recursively replace NaN and Inf values with None in nested dictionaries and lists. 
""" + if isinstance(data, dict): + return {k: ReportCleaner.remove_nan_values(v) for k, v in data.items()} + elif isinstance(data, list): + return [ReportCleaner.remove_nan_values(i) for i in data] + elif isinstance(data, float) and (math.isnan(data) or math.isinf(data)): + logger.info("Replacing NaN or Inf value with None.") + return None # Replace NaN or infinite values with None + return data + + +# --------------------------- +# Application Initialization +# --------------------------- + +class GeoApp: + def __init__(self): + self.app = FastAPI( + title="GeoTerrain API", + description="API for interacting with Terrain Analysis portal", + version="1.0.0", + contact={ + "name": "Jaskaran", + } + ) + self.configure_middleware() + self.configure_services() + self.configure_routes() + + def configure_middleware(self): + self.app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_methods=["GET", "POST", "OPTIONS"], + allow_headers=["*"], + allow_credentials=True, + ) + + def configure_services(self): + terrain_rasters = { + 'slope': '/var/task/fastapi/data/raster/cog_merged_slope.tif', + 'aspect': '/var/task/fastapi/data/raster/cog_merged_aspect.tif', + 'solar': '/var/task/fastapi/data/raster/cog_global_solar_potential.tif' + } + self.raster_service = RasterService(terrain_rasters) + self.geohash_service = GeohashService() + self.interpretation_service = InterpretationService() + self.report_service = ReportService(self.interpretation_service) + self.building_service = BuildingService( + raster_service=self.raster_service, + geohash_service=self.geohash_service, + report_service=self.report_service, + db_path='/var/task/fastapi/db/' + ) + self.report_cleaner = ReportCleaner() + + def configure_routes(self): + app = self.app + building_service = self.building_service + raster_service = self.raster_service + + @app.post( + "/rasterstats", + response_model=RasterStatsResponse, + summary="Clip Raster and Get Statistics For Terrain Raster", + description="Clips a raster file based on the provided GeoJSON geometry and returns the minimum and maximum values within the clipped area.", + tags=["Raster Operations"] + ) + def clip_and_stats(request_data: GeoClipRequest): + base_path = '/var/task/fastapi/data/raster/' + geojson = request_data.geojson + tif_url = os.path.join(base_path, request_data.tif_url) + + if not os.path.exists(tif_url): + logger.error(f"Raster file {tif_url} does not exist.") + raise HTTPException(status_code=404, detail="Raster file not found.") + + try: + stats = raster_service.clip_raster_stats(geojson, tif_url) + return stats + except Exception as e: + logger.error(f"Error in /rasterstats: {e}") + raise HTTPException(status_code=500, detail="Error processing raster data.") + + @app.get( + "/health", + response_model=HealthResponse, + summary="Health Check", + description="Returns the health status of the App.", + tags=["Health Check"] + ) + def health(): + return {'status': 'Healthy'} + + @app.post( + "/stats", + response_model=StatsResponse, + summary="Generate Building Insights", + description="Processes building data within a GeoJSON polygon and returns detailed reports.", + tags=["Building Insights"] + ) + def bbox_insights(request_data: GeoInsights): + raster_stats = { + "slope": [101.018, 657.570], + "aspect": [0, 360], + "solar": [0, 975] + } + building_reports = building_service.generate_building_reports(request_data.geojson, raster_stats) + cleaned_reports = ReportCleaner.remove_nan_values(building_reports) + return {'building_reports': cleaned_reports} 
+ +# Instantiate the application +geo_app = GeoApp() +app = geo_app.app diff --git a/docker/backend/fastapi/requirements.txt b/docker/backend/fastapi/requirements.txt new file mode 100644 index 0000000..d029b06 --- /dev/null +++ b/docker/backend/fastapi/requirements.txt @@ -0,0 +1,11 @@ +geopandas +pandas +numpy +pyarrow +fastapi[standard] +pydantic +shapely +fiona +pygeohash +geojson +rasterio \ No newline at end of file diff --git a/docker/backend/fastapi/unittests.py b/docker/backend/fastapi/unittests.py new file mode 100644 index 0000000..afa0880 --- /dev/null +++ b/docker/backend/fastapi/unittests.py @@ -0,0 +1,230 @@ +import unittest +from fastapi.testclient import TestClient +from unittest.mock import patch, MagicMock +import json +import os +import numpy as np +from main import GeoApp +import logging + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +class TestGeoTerrainAPI(unittest.TestCase): + def setUp(self): + self.app = GeoApp().app + self.client = TestClient(self.app) + + def test_health_check(self): + response = self.client.get("/health") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"status": "Healthy"}) + + @patch('main.rasterio.open') + def test_clip_and_stats_success(self, mock_rasterio_open): + logger.info("Testing /rasterstats endpoint with valid input.") + mock_src = MagicMock() + mock_src.nodata = None + mock_src.__enter__.return_value = mock_src + mock_rasterio_open.return_value = mock_src + + # Mock the mask function to return a numpy array + with patch('main.mask', return_value=(np.array([[1, 2], [3, 4]]), None)): + response = self.client.post( + "/rasterstats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + }, + "tif_url": "cog_merged_slope.tif" + } + ) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"min": 1.0, "max": 4.0}) + + @patch('main.os.path.exists') + def test_clip_and_stats_file_not_found(self, mock_exists): + logger.info("Testing /rasterstats endpoint with nonexistent raster file.") + mock_exists.return_value = False + + response = self.client.post( + "/rasterstats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + }, + "tif_url": "nonexistent_file.tif" + } + ) + + self.assertEqual(response.status_code, 404) + self.assertEqual(response.json(), {"detail": "Raster file not found."}) + + @patch('main.BuildingService.generate_building_reports') + def test_bbox_insights_success(self, mock_generate_reports): + logger.info("Testing /stats endpoint with successful building report generation.") + mock_generate_reports.return_value = [ + { + "building_id": "test_id", + "zonal_variation": {"north": {"slope": 10.5}}, + "zonal_variation_text": {"north": {"slope": "The slope is moderate. 
Value is 10.5."}}, + "neighborhood_understanding": {"north": {"slope": 11.0}}, + "neighborhood_understanding_text": {"north": {"slope": "The terrain to the north has a moderate slope."}} + } + ] + + response = self.client.post( + "/stats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + } + } + ) + + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.json()["building_reports"]), 1) + self.assertEqual(response.json()["building_reports"][0]["building_id"], "test_id") + + @patch('main.BuildingService.generate_building_reports') + def test_bbox_insights_no_buildings(self, mock_generate_reports): + logger.info("Testing /stats endpoint with no buildings found.") + mock_generate_reports.return_value = [] + + response = self.client.post( + "/stats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + } + } + ) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"building_reports": []}) + + @patch('main.mask') + @patch('main.rasterio.open') + def test_clip_and_stats_with_nodata_values(self, mock_rasterio_open, mock_mask): + logger.info("Testing /rasterstats endpoint with nodata values in raster data.") + mock_src = MagicMock() + mock_src.nodata = -9999 + mock_src.__enter__.return_value = mock_src + mock_rasterio_open.return_value = mock_src + + # Mock the mask function to return data with nodata values + mock_mask.return_value = (np.array([[-9999, 2], [3, 4]]), None) + + response = self.client.post( + "/rasterstats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + }, + "tif_url": "cog_merged_slope.tif" + } + ) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"min": 2.0, "max": 4.0}) + + + + def test_clip_and_stats_with_malformed_geojson(self): + logger.info("Testing /rasterstats endpoint with malformed GeoJSON.") + response = self.client.post( + "/rasterstats", + json={ + "geojson": "This is not a valid GeoJSON", + "tif_url": "cog_merged_slope.tif" + } + ) + + self.assertEqual(response.status_code, 422) # Unprocessable Entity + + def test_bbox_insights_invalid_input(self): + logger.info("Testing /stats endpoint with invalid input data.") + response = self.client.post( + "/stats", + json={ + "invalid_field": { + "type": "FeatureCollection", + "features": [] + } + } + ) + + self.assertEqual(response.status_code, 422) # Unprocessable Entity + + @patch('main.BuildingService.generate_building_reports') + def test_bbox_insights_large_dataset(self, mock_generate_reports): + logger.info("Testing /stats endpoint with a large dataset of building reports.") + mock_generate_reports.return_value = [ + { + "building_id": f"test_id_{i}", + "zonal_variation": {"north": {"slope": 10.5}}, + "zonal_variation_text": {"north": {"slope": f"The slope is moderate. 
Value is 10.5."}}, + "neighborhood_understanding": {"north": {"slope": 11.0}}, + "neighborhood_understanding_text": {"north": {"slope": f"The terrain to the north has a moderate slope."}} + } for i in range(100) # Simulating 100 building reports + ] + + response = self.client.post( + "/stats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + } + } + ) + + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.json()["building_reports"]), 100) + self.assertEqual(response.json()["building_reports"][0]["building_id"], "test_id_0") + self.assertEqual(response.json()["building_reports"][-1]["building_id"], "test_id_99") + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/docker/frontend/Dockerfile b/docker/frontend/Dockerfile new file mode 100644 index 0000000..9c7cc29 --- /dev/null +++ b/docker/frontend/Dockerfile @@ -0,0 +1,8 @@ +FROM nginx:alpine + +WORKDIR /usr/share/nginx/html + +COPY ./index.html /usr/share/nginx/html +EXPOSE 80 + +CMD ["nginx", "-g", "daemon off;"] diff --git a/docker/frontend/index.html b/docker/frontend/index.html new file mode 100644 index 0000000..f9adf63 --- /dev/null +++ b/docker/frontend/index.html @@ -0,0 +1,721 @@ + + + + + + Terrain Analysis + + + + + + + +
+<!-- [721 lines of HTML markup not recovered; the page's visible text follows.] -->
+<!-- Title: Terrain Analysis -->
+<!-- Location shortcuts: Karlsruhe, Stuttgart, Tiengen -->
+<!-- Map panels: Legend; Footprint Analysis -->
+<!--
+  Insights Extracted:
+  - Zonal-level Variation: The building footprint is divided into North, West,
+    East, and South zones. For each zone, mean solar, aspect, and slope values
+    are calculated from the raster data sources. Building IDs are retrieved
+    from Parquet partitions and used to highlight vectors on the map. Due to
+    scaling constraints, only 10 footprints within the provided polygon are
+    processed, selected at random.
+  - Neighborhood Variation: A buffer is drawn around the building geometry,
+    and the building footprint itself is removed using the difference
+    operation. The same mean slope and aspect statistics are provided for the
+    buffered neighborhood region.
+
+  Difference Between Zonal-level Variation and Neighborhood Variation:
+  - Zonal-level Variation: Focuses on internal divisions of the building
+    footprint to analyze terrain variations directly around the building.
+  - Neighborhood Variation: Examines the surrounding area to understand the
+    broader terrain context.
+-->
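The neighborhood computation described above reduces to a small shapely sketch (illustrative only; the 0.0001-degree buffer and 40% directional bands mirror the defaults in main.py):

    from shapely.geometry import Polygon

    # Toy footprint in EPSG:4326 (lon, lat).
    building = Polygon([
        (9.1769, 48.7730), (9.1779, 48.7733), (9.1780, 48.7730),
    ])

    # Buffer outward, then subtract the footprint to keep only the surrounding ring.
    ring = building.buffer(0.0001).difference(building)

    # Clip the ring's bounding box into directional bands (north shown; south,
    # east, and west use the same 40% fraction).
    minx, miny, maxx, maxy = ring.bounds
    band = 0.4 * (maxy - miny)
    north = ring.intersection(Polygon([
        (minx, maxy - band), (maxx, maxy - band), (maxx, maxy), (minx, maxy),
    ]))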
+ + + + + + + + + + + + + + diff --git a/docker/tileserver/Dockerfile b/docker/tileserver/Dockerfile new file mode 100644 index 0000000..f7638f6 --- /dev/null +++ b/docker/tileserver/Dockerfile @@ -0,0 +1,44 @@ +# Start from the minimal Ubuntu 22.04 image +FROM ubuntu:22.04 + +# Set environment variables to avoid interactive prompts +ENV DEBIAN_FRONTEND=noninteractive + +# Install curl, Node.js, npm, and essential dependencies for tileserver-gl +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + ca-certificates \ + build-essential \ + pkg-config \ + xvfb \ + libglfw3-dev \ + libuv1-dev \ + libjpeg-turbo8 \ + libicu70 \ + libcairo2-dev \ + libpango1.0-dev \ + libjpeg-dev \ + libgif-dev \ + librsvg2-dev \ + gir1.2-rsvg-2.0 \ + librsvg2-2 \ + librsvg2-common \ + libcurl4-openssl-dev \ + libpixman-1-dev \ + libpixman-1-0 \ + git \ + && curl -sL https://deb.nodesource.com/setup_18.x | bash - \ + && apt-get install -y nodejs \ + && npm install -g tileserver-gl \ + && apt-get remove --purge -y git curl build-essential \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Create a working directory +WORKDIR /data + +# Expose the default port for tileserver-gl +EXPOSE 9100 + +# Command to start tileserver-gl with the configuration file and keep the container running +CMD ["tileserver-gl", "--config", "/data/config.json", "--port", "9100", "--verbose"] diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 0000000..e69de29 From 04b865316595cb435e6440a06e46744f7b933b5d Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 18 Sep 2024 09:59:21 +0000 Subject: [PATCH 2/3] missing dependency --- docker/backend/fastapi/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/backend/fastapi/requirements.txt b/docker/backend/fastapi/requirements.txt index d029b06..c4a8615 100644 --- a/docker/backend/fastapi/requirements.txt +++ b/docker/backend/fastapi/requirements.txt @@ -8,4 +8,5 @@ shapely fiona pygeohash geojson -rasterio \ No newline at end of file +rasterio +pytest \ No newline at end of file From c00ef9371789e7782d203621db17d619179d6147 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 18 Sep 2024 10:19:09 +0000 Subject: [PATCH 3/3] unit test changes --- .github/workflows/run-unit-tests.yml | 9 +- docker/backend/fastapi/requirements.txt | 3 +- docker/backend/fastapi/test_ci_unittests.py | 189 ++++++++++++++++++ .../{unittests.py => test_unittests.py} | 0 4 files changed, 193 insertions(+), 8 deletions(-) create mode 100644 docker/backend/fastapi/test_ci_unittests.py rename docker/backend/fastapi/{unittests.py => test_unittests.py} (100%) diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml index ce4ef25..b58487a 100644 --- a/.github/workflows/run-unit-tests.yml +++ b/.github/workflows/run-unit-tests.yml @@ -19,23 +19,20 @@ jobs: uses: actions/checkout@v3 # Step 2: Set up Python environment - - name: Set Up Python 3.8 + - name: Set Up Python 3.12 uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.12' # Step 3: Install dependencies - name: Install Dependencies run: | - # Navigate to the backend directory cd docker/backend/fastapi/ - # Upgrade pip python -m pip install --upgrade pip - # Install required packages pip install -r requirements.txt # Step 4: Run Unit Tests - name: Run Unit Tests run: | cd docker/backend/fastapi/ - pytest unittests.py + python test_ci_unittests.py diff --git a/docker/backend/fastapi/requirements.txt 
b/docker/backend/fastapi/requirements.txt index c4a8615..d029b06 100644 --- a/docker/backend/fastapi/requirements.txt +++ b/docker/backend/fastapi/requirements.txt @@ -8,5 +8,4 @@ shapely fiona pygeohash geojson -rasterio -pytest \ No newline at end of file +rasterio \ No newline at end of file diff --git a/docker/backend/fastapi/test_ci_unittests.py b/docker/backend/fastapi/test_ci_unittests.py new file mode 100644 index 0000000..f08424f --- /dev/null +++ b/docker/backend/fastapi/test_ci_unittests.py @@ -0,0 +1,189 @@ +# test_ci_unittests.py + +import unittest +from unittest.mock import patch, MagicMock +import numpy as np +from fastapi.testclient import TestClient +from main import GeoApp +import logging + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +class TestCIUnitGeoTerrainAPI(unittest.TestCase): + def setUp(self): + self.app = GeoApp().app + self.client = TestClient(self.app) + + def test_health_check(self): + response = self.client.get("/health") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"status": "Healthy"}) + + @patch('main.os.path.exists') + def test_clip_and_stats_file_not_found(self, mock_exists): + logger.info("Testing /rasterstats endpoint with nonexistent raster file.") + + # Mock os.path.exists to return False + mock_exists.return_value = False + + # Make the POST request + response = self.client.post( + "/rasterstats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + }, + "tif_url": "nonexistent_file.tif" + } + ) + + # Assert response + self.assertEqual(response.status_code, 404) + self.assertEqual(response.json(), {"detail": "Raster file not found."}) + + @patch('main.BuildingService.generate_building_reports') + def test_bbox_insights_success(self, mock_generate_reports): + logger.info("Testing /stats endpoint with successful building report generation.") + + # Mock BuildingService.generate_building_reports to return a sample report + mock_generate_reports.return_value = [ + { + "building_id": "test_id", + "zonal_variation": {"north": {"slope": 10.5}}, + "zonal_variation_text": {"north": {"slope": "The slope is moderate. 
Value is 10.5."}}, + "neighborhood_understanding": {"north": {"slope": 11.0}}, + "neighborhood_understanding_text": {"north": {"slope": "The terrain to the north has a moderate slope."}} + } + ] + + # Make the POST request + response = self.client.post( + "/stats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + } + } + ) + + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.json()["building_reports"]), 1) + self.assertEqual(response.json()["building_reports"][0]["building_id"], "test_id") + + @patch('main.BuildingService.generate_building_reports') + def test_bbox_insights_no_buildings(self, mock_generate_reports): + logger.info("Testing /stats endpoint with no buildings found.") + + # Mock BuildingService.generate_building_reports to return an empty list + mock_generate_reports.return_value = [] + + # Make the POST request + response = self.client.post( + "/stats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + } + } + ) + + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"building_reports": []}) + + def test_clip_and_stats_with_malformed_geojson(self): + logger.info("Testing /rasterstats endpoint with malformed GeoJSON.") + + # Make the POST request with invalid GeoJSON + response = self.client.post( + "/rasterstats", + json={ + "geojson": "This is not a valid GeoJSON", + "tif_url": "cog_merged_slope.tif" + } + ) + + # Assert response + self.assertEqual(response.status_code, 422) # Unprocessable Entity + + def test_bbox_insights_invalid_input(self): + logger.info("Testing /stats endpoint with invalid input data.") + + # Make the POST request with invalid input + response = self.client.post( + "/stats", + json={ + "invalid_field": { + "type": "FeatureCollection", + "features": [] + } + } + ) + + # Assert response + self.assertEqual(response.status_code, 422) # Unprocessable Entity + + @patch('main.BuildingService.generate_building_reports') + def test_bbox_insights_large_dataset(self, mock_generate_reports): + logger.info("Testing /stats endpoint with a large dataset of building reports.") + + # Mock BuildingService.generate_building_reports to return a large number of reports + mock_generate_reports.return_value = [ + { + "building_id": f"test_id_{i}", + "zonal_variation": {"north": {"slope": 10.5}}, + "zonal_variation_text": {"north": {"slope": f"The slope is moderate. 
Value is 10.5."}}, + "neighborhood_understanding": {"north": {"slope": 11.0}}, + "neighborhood_understanding_text": {"north": {"slope": f"The terrain to the north has a moderate slope."}} + } for i in range(100) # Simulating 100 building reports + ] + + # Make the POST request + response = self.client.post( + "/stats", + json={ + "geojson": { + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]] + } + }] + } + } + ) + + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.json()["building_reports"]), 100) + self.assertEqual(response.json()["building_reports"][0]["building_id"], "test_id_0") + self.assertEqual(response.json()["building_reports"][-1]["building_id"], "test_id_99") + +if __name__ == '__main__': + unittest.main() diff --git a/docker/backend/fastapi/unittests.py b/docker/backend/fastapi/test_unittests.py similarity index 100% rename from docker/backend/fastapi/unittests.py rename to docker/backend/fastapi/test_unittests.py
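
To reproduce the CI job locally, run the same steps as the workflow from the repository root (assumes Python 3.12 on PATH):

    cd docker/backend/fastapi/
    python -m pip install -r requirements.txt
    python test_ci_unittests.py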