from os import environ

from fastapi import Depends, FastAPI, HTTPException
from fastapi.responses import RedirectResponse
from pydantic import BaseModel
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session

from database.db import db_session
from logger import log
from models.Organisation import Organisation
from schemas.Organization import OrganizationCreate

# from crawler.crawler import crawl
# import uuid

app = FastAPI()


# Dependency
def get_db():
    """Yield a request-scoped DB session; always closed when the request ends."""
    db = db_session()
    try:
        yield db
    finally:
        db.close()


@app.get("/api/v1/version")
def version():
    """Return the deployed git SHA, or "unknown" when GIT_SHA is unset."""
    return {"version": environ.get("GIT_SHA", "unknown")}


def get_db_version(session):
    """Return the current alembic migration revision stored in the database.

    Raises whatever the driver raises (e.g. SQLAlchemyError) when the
    database is unreachable or the alembic_version table is absent.
    """
    query = "SELECT version_num FROM alembic_version"
    full_name = session.execute(query).fetchone()[0]
    return full_name


@app.get("/api/v1/healthcheck")
def healthcheck(session: Session = Depends(get_db)):
    """Report database connectivity and, when reachable, its schema revision."""
    try:
        full_name = get_db_version(session)
        db_status = {"able_to_connect": True, "db_version": full_name}
    except SQLAlchemyError as err:
        log.error(err)
        db_status = {"able_to_connect": False}

    return {"database": db_status}


# TODO Require auth and redirect to home
# TODO Push errors to cloudwatch metric and response when debug enabled
@app.post("/api/v1/organisation", response_class=RedirectResponse)
def create_organisation(
    organisation: OrganizationCreate, session: Session = Depends(get_db)
):
    """Persist a new Organisation and redirect the client to /dashboard.

    On any failure the transaction is rolled back and a 500 is raised with
    the error detail.
    """
    try:
        new_organisation = Organisation(name=organisation.name)
        session.add(new_organisation)
        session.commit()
        return RedirectResponse("/dashboard")
    except Exception as e:
        # Fix: roll back the failed transaction so the session handed out by
        # get_db is not left in an aborted state (the original skipped this,
        # which can poison subsequent requests sharing the session registry).
        session.rollback()
        log.error(e)
        raise HTTPException(status_code=500, detail=str(e))


class CrawlUrl(BaseModel):
    url: str


# @app.post("/crawl")
# def crawl_endpoint(crawl_url: CrawlUrl):
#     log.info(f"Crawling {crawl_url}")
#     crawl(uuid.uuid4(), crawl_url.url)
+ + + + + ++ Name + | ++ Configure + | +
---|---|
+
+ {{ organisation.name }}
+
+ |
+ + Edit + | +