Merge branch 'main' into 182-internal-server-error-bug-when-400+-requests
qcdyx committed Jan 11, 2024
2 parents d465ab0 + 96ddfd6 commit 68000db
Showing 54 changed files with 1,132 additions and 1,136 deletions.
8 changes: 0 additions & 8 deletions .env

This file was deleted.

22 changes: 1 addition & 21 deletions .github/workflows/api-deployer.yml
@@ -12,21 +12,6 @@ on:
OAUTH2_CLIENT_SECRET:
description: Oauth client secret part of the Identity Aware Proxy configuration
required: true
POSTGRES_DB:
description: Postgres database name
required: true
POSTGRES_HOST:
description: Postgres host
required: true
POSTGRES_PASSWORD:
description: Postgres password
required: true
POSTGRES_PORT:
description: Postgres port
required: true
POSTGRES_USER:
description: Postgres user
required: true
inputs:
ENVIRONMENT:
description: API environment. Possible values prod, staging and dev
@@ -178,16 +163,11 @@ jobs:
echo "OAUTH2_CLIENT_ID=${{ secrets.OAUTH2_CLIENT_ID }}" >> $GITHUB_ENV
echo "OAUTH2_CLIENT_SECRET=${{ secrets.OAUTH2_CLIENT_SECRET }}" >> $GITHUB_ENV
echo "GLOBAL_RATE_LIMIT_REQ_PER_MINUTE=${{ inputs.GLOBAL_RATE_LIMIT_REQ_PER_MINUTE }}" >> $GITHUB_ENV
echo "POSTGRES_DB=${{ secrets.POSTGRES_DB }}" >> $GITHUB_ENV
echo "POSTGRES_HOST=${{ secrets.POSTGRES_HOST }}" >> $GITHUB_ENV
echo "POSTGRES_PASSWORD=${{ secrets.POSTGRES_PASSWORD }}" >> $GITHUB_ENV
echo "POSTGRES_PORT=${{ secrets.POSTGRES_PORT }}" >> $GITHUB_ENV
echo "POSTGRES_USER=${{ secrets.POSTGRES_USER }}" >> $GITHUB_ENV
- name: Populate Variables
run: |
scripts/replace-variables.sh -in_file infra/backend.conf.rename_me -out_file infra/backend.conf -variables BUCKET_NAME,OBJECT_PREFIX
scripts/replace-variables.sh -in_file infra/vars.tfvars.rename_me -out_file infra/vars.tfvars -variables PROJECT_ID,REGION,ENVIRONMENT,DEPLOYER_SERVICE_ACCOUNT,FEED_API_IMAGE_VERSION,OAUTH2_CLIENT_ID,OAUTH2_CLIENT_SECRET,GLOBAL_RATE_LIMIT_REQ_PER_MINUTE,,POSTGRES_DB,POSTGRES_HOST,POSTGRES_PASSWORD,POSTGRES_PORT,POSTGRES_USER
scripts/replace-variables.sh -in_file infra/vars.tfvars.rename_me -out_file infra/vars.tfvars -variables PROJECT_ID,REGION,ENVIRONMENT,DEPLOYER_SERVICE_ACCOUNT,FEED_API_IMAGE_VERSION,OAUTH2_CLIENT_ID,OAUTH2_CLIENT_SECRET,GLOBAL_RATE_LIMIT_REQ_PER_MINUTE
- uses: hashicorp/setup-terraform@v2
with:
5 changes: 0 additions & 5 deletions .github/workflows/api-dev.yml
@@ -21,8 +21,3 @@ jobs:
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
OAUTH2_CLIENT_ID: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
POSTGRES_DB: ${{ vars.DEV_POSTGRE_SQL_DB_NAME }}
POSTGRES_PASSWORD: ${{ secrets.DEV_POSTGRE_USER_PASSWORD }}
POSTGRES_USER: ${{ secrets.DEV_POSTGRE_USER_NAME }}
POSTGRES_HOST: ${{ secrets.DEV_DB_PUBLIC_IP }}
POSTGRES_PORT: ${{ secrets.DEV_DB_PUBLIC_PORT }}
5 changes: 0 additions & 5 deletions .github/workflows/api-prod.yml
@@ -22,8 +22,3 @@ jobs:
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
OAUTH2_CLIENT_ID: ${{ secrets.PROD_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
OAUTH2_CLIENT_SECRET: ${{ secrets.PROD_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
POSTGRES_DB: ${{ vars.PROD_POSTGRE_SQL_DB_NAME }}
POSTGRES_PASSWORD: ${{ secrets.PROD_POSTGRE_USER_PASSWORD }}
POSTGRES_USER: ${{ secrets.PROD_POSTGRE_USER_NAME }}
POSTGRES_HOST: ${{ secrets.PROD_DB_PUBLIC_IP }}
POSTGRES_PORT: ${{ secrets.PROD_DB_PUBLIC_PORT }}
7 changes: 1 addition & 6 deletions .github/workflows/api-qa.yml
@@ -22,9 +22,4 @@ jobs:
secrets:
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
OAUTH2_CLIENT_ID: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
POSTGRES_DB: ${{ vars.QA_POSTGRE_SQL_DB_NAME }}
POSTGRES_PASSWORD: ${{ secrets.QA_POSTGRE_USER_PASSWORD }}
POSTGRES_USER: ${{ secrets.QA_POSTGRE_USER_NAME }}
POSTGRES_HOST: ${{ secrets.QA_DB_PUBLIC_IP }}
POSTGRES_PORT: ${{ secrets.QA_DB_PUBLIC_PORT }}
OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
9 changes: 0 additions & 9 deletions .github/workflows/build-test.yml
@@ -27,15 +27,6 @@ jobs:
with:
python-version: ${{ env.python_version }}

- name: Authenticate to Google Cloud
id: gcloud_auth
uses: google-github-actions/auth@v1
with:
credentials_json: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}

- name: GCloud Setup
uses: google-github-actions/setup-gcloud@v1

- name: Docker Compose DB/Liquibase for db-gen.sh
run: docker-compose --env-file ./config/.env.local up -d liquibase
working-directory: ${{ github.workspace }}
47 changes: 43 additions & 4 deletions .github/workflows/db-deployer.yml
@@ -6,6 +6,9 @@ on:
GCP_MOBILITY_FEEDS_SA_KEY:
description: Service account key
required: true
DEV_GCP_MOBILITY_FEEDS_SA_KEY:
description: Service account key for dev
required: false
POSTGRE_USER_NAME:
description: PostgreSQL User Name
required: true
@@ -59,9 +62,11 @@ on:

jobs:
terraform:
name: 'Terraform'
name: 'Deploy Database'
permissions: write-all
runs-on: ubuntu-latest
outputs:
db_instance_host: ${{ steps.get_outputs.outputs.db_instance_host }}
steps:
- name: Checkout code
uses: actions/checkout@v3
@@ -134,6 +139,40 @@ jobs:
run: |
DB_INSTANCE_HOST_RAW=$(terraform output -json instance_address)
DB_INSTANCE_HOST=$(echo $DB_INSTANCE_HOST_RAW | jq -r .)
echo "DB_INSTANCE_HOST=$DB_INSTANCE_HOST" >> $GITHUB_ENV
echo "$DB_INSTANCE_HOST"
working-directory: infra/postgresql
echo "db_instance_host=$DB_INSTANCE_HOST" >> $GITHUB_OUTPUT
working-directory: infra/postgresql

update-secret-in-dev:
name: Copy DB Secret
needs: terraform
if: ${{ inputs.ENVIRONMENT == 'dev' }}
runs-on: ubuntu-latest
env:
POSTGRE_USER_NAME: ${{ secrets.POSTGRE_USER_NAME }}
POSTGRE_USER_PASSWORD: ${{ secrets.POSTGRE_USER_PASSWORD }}
POSTGRE_SQL_DB_NAME: ${{ inputs.POSTGRE_SQL_DB_NAME }}
DB_INSTANCE_HOST: ${{ needs.terraform.outputs.db_instance_host }}
steps:
- name: Authenticate to Google Cloud DEV
uses: google-github-actions/auth@v1
with:
credentials_json: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}

- name: Google Cloud Setup DEV
uses: google-github-actions/setup-gcloud@v1

- name: Create or Update Secret in DEV
run: |
SECRET_NAME="DEV_FEEDS_DATABASE_URL"
SECRET_VALUE="postgresql://${{ env.POSTGRE_USER_NAME }}:${{ env.POSTGRE_USER_PASSWORD }}@${{ env.DB_INSTANCE_HOST }}/${{ env.POSTGRE_SQL_DB_NAME }}"
echo $SECRET_VALUE
if gcloud secrets describe $SECRET_NAME --project=mobility-feeds-dev; then
echo "Secret $SECRET_NAME already exists, updating..."
echo -n "$SECRET_VALUE" | gcloud secrets versions add $SECRET_NAME --data-file=- --project=mobility-feeds-dev
else
echo "Secret $SECRET_NAME does not exist, creating..."
echo -n "$SECRET_VALUE" | gcloud secrets create $SECRET_NAME --data-file=- --replication-policy="automatic" --project=mobility-feeds-dev
fi
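
For reference, the connection string this step assembles can be sketched in Python. Note that the shell interpolation above does not URL-escape the credentials (and the preceding `echo $SECRET_VALUE` prints them into the build log), so a password containing `@` or `/` would yield an unparseable URL. A minimal sketch with hypothetical values:

```python
from urllib.parse import quote

# Hypothetical values; the real ones come from GitHub secrets.
user, password = "feeds_user", "p@ss/word"
host, db = "10.1.2.3", "MobilityDatabase"

# quote(..., safe="") escapes '@', '/' and ':' so the URL stays parseable,
# which the plain shell interpolation in the step above does not guarantee.
url = f"postgresql://{quote(user, safe='')}:{quote(password, safe='')}@{host}/{db}"
print(url)  # postgresql://feeds_user:p%40ss%2Fword@10.1.2.3/MobilityDatabase
```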

9 changes: 5 additions & 4 deletions .github/workflows/db-dev.yml
@@ -8,11 +8,11 @@ jobs:
uses: ./.github/workflows/db-deployer.yml
with:
ENVIRONMENT: ${{ vars.DEV_MOBILITY_FEEDS_ENVIRONMENT }}
BUCKET_NAME: ${{ vars.DEV_MOBILITY_FEEDS_TF_STATE_BUCKET }}
BUCKET_NAME: ${{ vars.QA_MOBILITY_FEEDS_TF_STATE_BUCKET }}
OBJECT_PREFIX: ${{ vars.DEV_MOBILITY_DB_TF_STATE_OBJECT_PREFIX }}
PROJECT_ID: ${{ vars.DEV_MOBILITY_FEEDS_PROJECT_ID }}
PROJECT_ID: ${{ vars.QA_MOBILITY_FEEDS_PROJECT_ID }}
REGION: ${{ vars.DEV_MOBILITY_FEEDS_REGION }}
DEPLOYER_SERVICE_ACCOUNT: ${{ vars.DEV_MOBILITY_FEEDS_DEPLOYER_SERVICE_ACCOUNT }}
DEPLOYER_SERVICE_ACCOUNT: ${{ vars.QA_MOBILITY_FEEDS_DEPLOYER_SERVICE_ACCOUNT }}
POSTGRE_SQL_DB_NAME: ${{ vars.DEV_POSTGRE_SQL_DB_NAME }}
POSTGRE_INSTANCE_TIER: ${{ vars.DEV_POSTGRE_INSTANCE_TIER }}
MAX_CONNECTIONS: ${{ vars.DEV_DB_MAX_CONNECTIONS }}
@@ -21,4 +21,5 @@ jobs:
POSTGRE_USER_PASSWORD: ${{ secrets.DEV_POSTGRE_USER_PASSWORD }}
POSTGRE_USER_NAME: ${{ secrets.DEV_POSTGRE_USER_NAME }}
POSTGRE_SQL_INSTANCE_NAME: ${{ secrets.DB_INSTANCE_NAME }}
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
DEV_GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
6 changes: 3 additions & 3 deletions .github/workflows/db-update-dev.yml
@@ -12,17 +12,17 @@ on:
RUN_POPULATE_SCRIPT:
description: 'Run populate script (true/false)'
required: false
default: true
default: 'true'
jobs:
update:
uses: ./.github/workflows/db-update.yml
with:
PROJECT_ID: ${{ vars.DEV_MOBILITY_FEEDS_PROJECT_ID }}
PROJECT_ID: ${{ vars.QA_MOBILITY_FEEDS_PROJECT_ID }}
DB_NAME: ${{ vars.DEV_POSTGRE_SQL_DB_NAME }}
REGION: ${{ vars.DEV_MOBILITY_FEEDS_REGION }}
RUN_POPULATE_SCRIPT: ${{ github.event.inputs.RUN_POPULATE_SCRIPT || 'true' }}
secrets:
DB_USER_PASSWORD: ${{ secrets.DEV_POSTGRE_USER_PASSWORD }}
DB_USER_NAME: ${{ secrets.DEV_POSTGRE_USER_NAME }}
DB_INSTANCE_NAME: ${{ secrets.DB_INSTANCE_NAME }}
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
2 changes: 1 addition & 1 deletion .github/workflows/db-update.yml
@@ -85,12 +85,12 @@ jobs:

- name: Update .env file
run: |
echo "POSTGRES_USER=${{ secrets.DB_USER_NAME }}" > config/.env.local
echo "PGUSER=${{ secrets.DB_USER_NAME }}" >> config/.env.local
echo "POSTGRES_PASSWORD=${{ secrets.DB_USER_PASSWORD }}" >> config/.env.local
echo "POSTGRES_DB=${{ inputs.DB_NAME }}" >> config/.env.local
echo "POSTGRES_PORT=5432" >> config/.env.local
echo "POSTGRES_HOST=${{ env.DB_IP }}" >> config/.env.local
echo "FEEDS_DATABASE_URL=postgresql://${{ secrets.DB_USER_NAME }}:${{ secrets.DB_USER_PASSWORD }}@${{ env.DB_IP }}:5432/${{ inputs.DB_NAME }}" >> config/.env.local
echo "ENV=dev" >> config/.env.local
cat config/.env.local
3 changes: 3 additions & 0 deletions .gitignore
@@ -65,3 +65,6 @@ __pycache__
/functions-python/*/.env.local

**/node_modules

# Environment file
.env
27 changes: 2 additions & 25 deletions api/src/database/database.py
@@ -4,8 +4,6 @@
import uuid
from typing import Type, Callable
from dotenv import load_dotenv

from google.cloud.sql.connector import Connector
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session, load_only, Query

@@ -41,28 +39,10 @@ def __new__(cls, *args, **kwargs):

def __init__(self):
load_dotenv()
username = os.getenv("POSTGRES_USER")
password = os.getenv("POSTGRES_PASSWORD")
schema = os.getenv("POSTGRES_DB")
port = os.getenv("POSTGRES_PORT")
host = os.getenv("POSTGRES_HOST")
self.logger = Logger(Database.__module__).get_logger()
self.engine = None
self.connection_attempts = 0
self.SQLALCHEMY_DATABASE_URL = f"postgresql://{username}:{password}@{host}:{port}/{schema}"

# set up GCP SQL Connector
connector = Connector()
instance_name = os.getenv("INSTANCE_NAME")
self.get_connection = None
if instance_name is not None:
self.get_connection = lambda: connector.connect(
instance_name,
"pg8000",
user=username,
password=password,
db=schema,
)
self.SQLALCHEMY_DATABASE_URL = os.getenv("FEEDS_DATABASE_URL")
self.start_session()

def is_connected(self):
@@ -82,10 +62,7 @@ def start_session(self):
if self.engine is None:
self.connection_attempts += 1
self.logger.debug(f"Database connection attempt #{self.connection_attempts}.")
if self.get_connection is not None:
self.engine = create_engine("postgresql+pg8000://", creator=self.get_connection)
else:
self.engine = create_engine(self.SQLALCHEMY_DATABASE_URL, echo=True)
self.engine = create_engine(self.SQLALCHEMY_DATABASE_URL, echo=True)
self.logger.debug("Database connected.")
if global_session is not None and global_session.is_active:
self.logger.info("Database session reused.")
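
The net effect of this change: the five POSTGRES_* variables and the optional Cloud SQL connector branch collapse into a single FEEDS_DATABASE_URL connection string. A minimal sketch of the resulting flow, assuming a standard SQLAlchemy setup and a hypothetical URL:

```python
import os

from dotenv import load_dotenv
from sqlalchemy import create_engine, text

load_dotenv()  # picks up FEEDS_DATABASE_URL from a .env file, if present

# e.g. postgresql://feeds_user:secret@10.1.2.3:5432/MobilityDatabase
engine = create_engine(os.getenv("FEEDS_DATABASE_URL"), echo=True)

with engine.connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())  # smoke-test the URL
```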
2 changes: 1 addition & 1 deletion api/src/feeds/impl/datasets_api_impl.py
@@ -76,7 +76,7 @@ def apply_bounding_filtering(
)
)
elif bounding_filter_method == "completely_enclosed":
return query.filter(Gtfsdataset.bounding_box.ST_Covers(bounding_box))
return query.filter(bounding_box.ST_Covers(Gtfsdataset.bounding_box))
elif bounding_filter_method == "disjoint":
return query.filter(Gtfsdataset.bounding_box.ST_Disjoint(bounding_box))
else:
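
The one-line fix above swaps the receiver of ST_Covers: ST_Covers(a, b) is true when no point of b lies outside a, so for the "completely_enclosed" filter it is the query's bounding box that must cover the dataset's box, not the reverse. The corrected semantics can be checked against shapely's equivalent predicate (hypothetical coordinates):

```python
from shapely.geometry import box  # pip install shapely

filter_box = box(-75.0, 44.0, -72.0, 46.0)   # hypothetical query area
dataset_box = box(-74.5, 44.5, -73.5, 45.5)  # hypothetical dataset extent

print(filter_box.covers(dataset_box))  # True  -> dataset completely enclosed
print(dataset_box.covers(filter_box))  # False -> the old, inverted check
```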
44 changes: 28 additions & 16 deletions api/src/feeds/impl/feeds_api_impl.py
@@ -1,3 +1,4 @@
import json
from typing import List, Type, Set, Union

from fastapi import HTTPException
@@ -26,6 +27,7 @@
from feeds.impl.datasets_api_impl import DatasetsApiImpl
from feeds_gen.apis.feeds_api_base import BaseFeedsApi
from feeds_gen.models.basic_feed import BasicFeed
from feeds_gen.models.bounding_box import BoundingBox
from feeds_gen.models.external_id import ExternalId
from feeds_gen.models.gtfs_dataset import GtfsDataset
from feeds_gen.models.gtfs_feed import GtfsFeed
@@ -159,6 +161,7 @@ def _get_order_by_key(order_by: list[str] = None):
FeedsApiImpl._create_feeds_query(Gtfsfeed)
.join(Gtfsdataset, Gtfsfeed.id == Gtfsdataset.feed_id, isouter=True)
.add_entity(Gtfsdataset)
.add_column(Gtfsdataset.bounding_box.ST_AsGeoJSON())
.join(t_locationfeed, t_locationfeed.c.feed_id == Gtfsfeed.id, isouter=True)
.join(Location, t_locationfeed.c.location_id == Location.id, isouter=True)
.add_entity(Location)
@@ -177,7 +180,9 @@ def _get_order_by_key(order_by: list[str] = None):
)
gtfs_feeds = []
for feed_group in feed_groups:
feed_objects, redirect_ids, external_ids, redirect_comments, latest_datasets, locations = zip(*feed_group)
feed_objects, redirect_ids, external_ids, redirect_comments, datasets, bounding_boxes, locations = zip(
*feed_group
)

# We use a set to eliminate duplicate in the Redirects.
# But we can't use the Redirect object directly since they are not hashable and making them
@@ -202,11 +207,24 @@ def _get_order_by_key(order_by: list[str] = None):
for location in locations
if location is not None
]
if latest_dataset := next(filter(lambda x: x is not None and x.latest, latest_datasets), None):
# better check if there are more than one latest dataset
gtfs_feed.latest_dataset = LatestDataset(
id=latest_dataset.stable_id, hosted_url=latest_dataset.hosted_url
)
latest_dataset, bounding_box = next(
filter(
lambda dataset: dataset[0] is not None and dataset[1] is not None and dataset[0].latest,
zip(datasets, bounding_boxes),
),
(None, None),
)
if latest_dataset:
api_dataset = LatestDataset(id=latest_dataset.stable_id, hosted_url=latest_dataset.hosted_url)
if bounding_box:
coordinates = json.loads(bounding_box)["coordinates"][0]
api_dataset.bounding_box = BoundingBox(
minimum_latitude=coordinates[0][1],
maximum_latitude=coordinates[2][1],
minimum_longitude=coordinates[0][0],
maximum_longitude=coordinates[2][0],
)
gtfs_feed.latest_dataset = api_dataset

gtfs_feeds.append(gtfs_feed)
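
The new branch above parses the ST_AsGeoJSON column added to the query earlier in this diff. Assuming the stored bounding boxes are axis-aligned envelopes (PostGIS orders an envelope's ring (min, min) -> (min, max) -> (max, max) -> (max, min)), corners [0] and [2] of the outer ring carry the minimums and maximums respectively. A self-contained sketch with a hypothetical geometry:

```python
import json

# Hypothetical ST_AsGeoJSON output for an axis-aligned envelope.
geojson = (
    '{"type": "Polygon", "coordinates": [[[-74.0, 44.0], [-74.0, 46.0],'
    ' [-72.0, 46.0], [-72.0, 44.0], [-74.0, 44.0]]]}'
)

ring = json.loads(geojson)["coordinates"][0]
min_lon, min_lat = ring[0]  # first corner holds both minimums
max_lon, max_lat = ring[2]  # opposite corner holds both maximums
print(min_lat, max_lat, min_lon, max_lon)  # 44.0 46.0 -74.0 -72.0
```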

@@ -305,19 +323,13 @@ def get_gtfs_feed_datasets(
downloaded_date_gte: str,
downloaded_date_lte: str,
sort: str,
bounding_latitudes: str,
bounding_longitudes: str,
bounding_filter_method: str,
) -> List[GtfsDataset]:
"""Get a list of datasets related to a feed."""
# getting the bounding box as JSON to make it easier to process
query = GtfsDatasetFilter(
download_date__lte=downloaded_date_lte,
download_date__gte=downloaded_date_gte,
).filter(DatasetsApiImpl.create_dataset_query().filter(Feed.stable_id == id))
query = DatasetsApiImpl.apply_bounding_filtering(
query, bounding_latitudes, bounding_longitudes, bounding_filter_method
)

if latest:
query = query.filter(Gtfsdataset.latest)
@@ -334,8 +346,8 @@ def get_gtfs_feeds(
subdivision_name: str,
municipality: str,
sort: str,
bounding_latitudes: str,
bounding_longitudes: str,
dataset_latitudes: str,
dataset_longitudes: str,
bounding_filter_method: str,
order_by: list[str],
) -> List[GtfsFeed]:
Expand All @@ -354,8 +366,8 @@ def get_gtfs_feeds(
feed_filter,
limit=limit,
offset=offset,
bounding_latitudes=bounding_latitudes,
bounding_longitudes=bounding_longitudes,
bounding_latitudes=dataset_latitudes,
bounding_longitudes=dataset_longitudes,
bounding_filter_method=bounding_filter_method,
order_by=order_by,
)
(Diffs for the remaining changed files did not load in this view.)