diff --git a/.env b/.env
deleted file mode 100644
index a54b3721c..000000000
--- a/.env
+++ /dev/null
@@ -1,8 +0,0 @@
-POSTGRES_USER=postgres
-POSTGRES_PASSWORD=postgres
-POSTGRES_DB=MobilityDatabase
-POSTGRES_PORT=5432
-PGUSER=postgres
-POSTGRES_HOST=localhost
-ENV=local
-SCHEMA_SPY_DOC=schemaspy-dev
\ No newline at end of file
diff --git a/.github/workflows/api-deployer.yml b/.github/workflows/api-deployer.yml
index 89c4586da..4e76241a8 100644
--- a/.github/workflows/api-deployer.yml
+++ b/.github/workflows/api-deployer.yml
@@ -12,21 +12,6 @@ on:
OAUTH2_CLIENT_SECRET:
      description: OAuth client secret, part of the Identity Aware Proxy configuration
required: true
- POSTGRES_DB:
- description: Postgres database name
- required: true
- POSTGRES_HOST:
- description: Postgres host
- required: true
- POSTGRES_PASSWORD:
- description: Postgres password
- required: true
- POSTGRES_PORT:
- description: Postgres port
- required: true
- POSTGRES_USER:
- description: Postgres user
- required: true
inputs:
ENVIRONMENT:
      description: "API environment. Possible values: prod, staging and dev"
@@ -178,16 +163,11 @@ jobs:
echo "OAUTH2_CLIENT_ID=${{ secrets.OAUTH2_CLIENT_ID }}" >> $GITHUB_ENV
echo "OAUTH2_CLIENT_SECRET=${{ secrets.OAUTH2_CLIENT_SECRET }}" >> $GITHUB_ENV
echo "GLOBAL_RATE_LIMIT_REQ_PER_MINUTE=${{ inputs.GLOBAL_RATE_LIMIT_REQ_PER_MINUTE }}" >> $GITHUB_ENV
- echo "POSTGRES_DB=${{ secrets.POSTGRES_DB }}" >> $GITHUB_ENV
- echo "POSTGRES_HOST=${{ secrets.POSTGRES_HOST }}" >> $GITHUB_ENV
- echo "POSTGRES_PASSWORD=${{ secrets.POSTGRES_PASSWORD }}" >> $GITHUB_ENV
- echo "POSTGRES_PORT=${{ secrets.POSTGRES_PORT }}" >> $GITHUB_ENV
- echo "POSTGRES_USER=${{ secrets.POSTGRES_USER }}" >> $GITHUB_ENV
- name: Populate Variables
run: |
scripts/replace-variables.sh -in_file infra/backend.conf.rename_me -out_file infra/backend.conf -variables BUCKET_NAME,OBJECT_PREFIX
- scripts/replace-variables.sh -in_file infra/vars.tfvars.rename_me -out_file infra/vars.tfvars -variables PROJECT_ID,REGION,ENVIRONMENT,DEPLOYER_SERVICE_ACCOUNT,FEED_API_IMAGE_VERSION,OAUTH2_CLIENT_ID,OAUTH2_CLIENT_SECRET,GLOBAL_RATE_LIMIT_REQ_PER_MINUTE,,POSTGRES_DB,POSTGRES_HOST,POSTGRES_PASSWORD,POSTGRES_PORT,POSTGRES_USER
+ scripts/replace-variables.sh -in_file infra/vars.tfvars.rename_me -out_file infra/vars.tfvars -variables PROJECT_ID,REGION,ENVIRONMENT,DEPLOYER_SERVICE_ACCOUNT,FEED_API_IMAGE_VERSION,OAUTH2_CLIENT_ID,OAUTH2_CLIENT_SECRET,GLOBAL_RATE_LIMIT_REQ_PER_MINUTE
- uses: hashicorp/setup-terraform@v2
with:
diff --git a/.github/workflows/api-dev.yml b/.github/workflows/api-dev.yml
index 82f3d2093..d9acb8fb7 100644
--- a/.github/workflows/api-dev.yml
+++ b/.github/workflows/api-dev.yml
@@ -21,8 +21,3 @@ jobs:
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
OAUTH2_CLIENT_ID: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
- POSTGRES_DB: ${{ vars.DEV_POSTGRE_SQL_DB_NAME }}
- POSTGRES_PASSWORD: ${{ secrets.DEV_POSTGRE_USER_PASSWORD }}
- POSTGRES_USER: ${{ secrets.DEV_POSTGRE_USER_NAME }}
- POSTGRES_HOST: ${{ secrets.DEV_DB_PUBLIC_IP }}
- POSTGRES_PORT: ${{ secrets.DEV_DB_PUBLIC_PORT }}
diff --git a/.github/workflows/api-prod.yml b/.github/workflows/api-prod.yml
index dc2779ddf..3cc1df3c3 100644
--- a/.github/workflows/api-prod.yml
+++ b/.github/workflows/api-prod.yml
@@ -22,8 +22,3 @@ jobs:
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
OAUTH2_CLIENT_ID: ${{ secrets.PROD_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
OAUTH2_CLIENT_SECRET: ${{ secrets.PROD_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
- POSTGRES_DB: ${{ vars.PROD_POSTGRE_SQL_DB_NAME }}
- POSTGRES_PASSWORD: ${{ secrets.PROD_POSTGRE_USER_PASSWORD }}
- POSTGRES_USER: ${{ secrets.PROD_POSTGRE_USER_NAME }}
- POSTGRES_HOST: ${{ secrets.PROD_DB_PUBLIC_IP }}
- POSTGRES_PORT: ${{ secrets.PROD_DB_PUBLIC_PORT }}
diff --git a/.github/workflows/api-qa.yml b/.github/workflows/api-qa.yml
index 56093e019..5cbd623dd 100644
--- a/.github/workflows/api-qa.yml
+++ b/.github/workflows/api-qa.yml
@@ -22,9 +22,4 @@ jobs:
secrets:
GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
OAUTH2_CLIENT_ID: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
- OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
- POSTGRES_DB: ${{ vars.QA_POSTGRE_SQL_DB_NAME }}
- POSTGRES_PASSWORD: ${{ secrets.QA_POSTGRE_USER_PASSWORD }}
- POSTGRES_USER: ${{ secrets.QA_POSTGRE_USER_NAME }}
- POSTGRES_HOST: ${{ secrets.QA_DB_PUBLIC_IP }}
- POSTGRES_PORT: ${{ secrets.QA_DB_PUBLIC_PORT }}
+ OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
index cb9590b5b..eb3a0d064 100644
--- a/.github/workflows/build-test.yml
+++ b/.github/workflows/build-test.yml
@@ -27,15 +27,6 @@ jobs:
with:
python-version: ${{ env.python_version }}
- - name: Authenticate to Google Cloud
- id: gcloud_auth
- uses: google-github-actions/auth@v1
- with:
- credentials_json: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
-
- - name: GCloud Setup
- uses: google-github-actions/setup-gcloud@v1
-
- name: Docker Compose DB/Liquibase for db-gen.sh
run: docker-compose --env-file ./config/.env.local up -d liquibase
working-directory: ${{ github.workspace }}
diff --git a/.github/workflows/db-deployer.yml b/.github/workflows/db-deployer.yml
index aef8dce24..a0d9a043a 100644
--- a/.github/workflows/db-deployer.yml
+++ b/.github/workflows/db-deployer.yml
@@ -6,6 +6,9 @@ on:
GCP_MOBILITY_FEEDS_SA_KEY:
description: Service account key
required: true
+ DEV_GCP_MOBILITY_FEEDS_SA_KEY:
+ description: Service account key for dev
+ required: false
POSTGRE_USER_NAME:
description: PostgreSQL User Name
required: true
@@ -59,9 +62,11 @@ on:
jobs:
terraform:
- name: 'Terraform'
+ name: 'Deploy Database'
permissions: write-all
runs-on: ubuntu-latest
+ outputs:
+ db_instance_host: ${{ steps.get_outputs.outputs.db_instance_host }}
steps:
- name: Checkout code
uses: actions/checkout@v3
@@ -134,6 +139,40 @@ jobs:
run: |
DB_INSTANCE_HOST_RAW=$(terraform output -json instance_address)
DB_INSTANCE_HOST=$(echo $DB_INSTANCE_HOST_RAW | jq -r .)
- echo "DB_INSTANCE_HOST=$DB_INSTANCE_HOST" >> $GITHUB_ENV
- echo "$DB_INSTANCE_HOST"
- working-directory: infra/postgresql
\ No newline at end of file
+ echo "db_instance_host=$DB_INSTANCE_HOST" >> $GITHUB_OUTPUT
+ working-directory: infra/postgresql
+
+ update-secret-in-dev:
+ name: Copy DB Secret
+ needs: terraform
+ if: ${{ inputs.ENVIRONMENT == 'dev' }}
+ runs-on: ubuntu-latest
+ env:
+ POSTGRE_USER_NAME: ${{ secrets.POSTGRE_USER_NAME }}
+ POSTGRE_USER_PASSWORD: ${{ secrets.POSTGRE_USER_PASSWORD }}
+ POSTGRE_SQL_DB_NAME: ${{ inputs.POSTGRE_SQL_DB_NAME }}
+ DB_INSTANCE_HOST: ${{ needs.terraform.outputs.db_instance_host }}
+ steps:
+ - name: Authenticate to Google Cloud DEV
+ uses: google-github-actions/auth@v1
+ with:
+ credentials_json: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
+
+ - name: Google Cloud Setup DEV
+ uses: google-github-actions/setup-gcloud@v1
+
+ - name: Create or Update Secret in DEV
+ run: |
+ SECRET_NAME="DEV_FEEDS_DATABASE_URL"
+ SECRET_VALUE="postgresql://${{ env.POSTGRE_USER_NAME }}:${{ env.POSTGRE_USER_PASSWORD }}@${{ env.DB_INSTANCE_HOST }}/${{ env.POSTGRE_SQL_DB_NAME }}"
+ echo $SECRET_VALUE
+
+ if gcloud secrets describe $SECRET_NAME --project=mobility-feeds-dev; then
+ echo "Secret $SECRET_NAME already exists, updating..."
+ echo -n "$SECRET_VALUE" | gcloud secrets versions add $SECRET_NAME --data-file=- --project=mobility-feeds-dev
+ else
+ echo "Secret $SECRET_NAME does not exist, creating..."
+ echo -n "$SECRET_VALUE" | gcloud secrets create $SECRET_NAME --data-file=- --replication-policy="automatic" --project=mobility-feeds-dev
+ fi
+
+
diff --git a/.github/workflows/db-dev.yml b/.github/workflows/db-dev.yml
index 197e3f71c..88d59c56f 100644
--- a/.github/workflows/db-dev.yml
+++ b/.github/workflows/db-dev.yml
@@ -8,11 +8,11 @@ jobs:
uses: ./.github/workflows/db-deployer.yml
with:
ENVIRONMENT: ${{ vars.DEV_MOBILITY_FEEDS_ENVIRONMENT }}
- BUCKET_NAME: ${{ vars.DEV_MOBILITY_FEEDS_TF_STATE_BUCKET }}
+ BUCKET_NAME: ${{ vars.QA_MOBILITY_FEEDS_TF_STATE_BUCKET }}
OBJECT_PREFIX: ${{ vars.DEV_MOBILITY_DB_TF_STATE_OBJECT_PREFIX }}
- PROJECT_ID: ${{ vars.DEV_MOBILITY_FEEDS_PROJECT_ID }}
+ PROJECT_ID: ${{ vars.QA_MOBILITY_FEEDS_PROJECT_ID }}
REGION: ${{ vars.DEV_MOBILITY_FEEDS_REGION }}
- DEPLOYER_SERVICE_ACCOUNT: ${{ vars.DEV_MOBILITY_FEEDS_DEPLOYER_SERVICE_ACCOUNT }}
+ DEPLOYER_SERVICE_ACCOUNT: ${{ vars.QA_MOBILITY_FEEDS_DEPLOYER_SERVICE_ACCOUNT }}
POSTGRE_SQL_DB_NAME: ${{ vars.DEV_POSTGRE_SQL_DB_NAME }}
POSTGRE_INSTANCE_TIER: ${{ vars.DEV_POSTGRE_INSTANCE_TIER }}
MAX_CONNECTIONS: ${{ vars.DEV_DB_MAX_CONNECTIONS }}
@@ -21,4 +21,5 @@ jobs:
POSTGRE_USER_PASSWORD: ${{ secrets.DEV_POSTGRE_USER_PASSWORD }}
POSTGRE_USER_NAME: ${{ secrets.DEV_POSTGRE_USER_NAME }}
POSTGRE_SQL_INSTANCE_NAME: ${{ secrets.DB_INSTANCE_NAME }}
- GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
+ GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
+ DEV_GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
diff --git a/.github/workflows/db-update-dev.yml b/.github/workflows/db-update-dev.yml
index 27a140151..32b08bdc3 100644
--- a/.github/workflows/db-update-dev.yml
+++ b/.github/workflows/db-update-dev.yml
@@ -12,12 +12,12 @@ on:
RUN_POPULATE_SCRIPT:
description: 'Run populate script (true/false)'
required: false
- default: true
+ default: 'true'
jobs:
update:
uses: ./.github/workflows/db-update.yml
with:
- PROJECT_ID: ${{ vars.DEV_MOBILITY_FEEDS_PROJECT_ID }}
+ PROJECT_ID: ${{ vars.QA_MOBILITY_FEEDS_PROJECT_ID }}
DB_NAME: ${{ vars.DEV_POSTGRE_SQL_DB_NAME }}
REGION: ${{ vars.DEV_MOBILITY_FEEDS_REGION }}
RUN_POPULATE_SCRIPT: ${{ github.event.inputs.RUN_POPULATE_SCRIPT || 'true' }}
@@ -25,4 +25,4 @@ jobs:
DB_USER_PASSWORD: ${{ secrets.DEV_POSTGRE_USER_PASSWORD }}
DB_USER_NAME: ${{ secrets.DEV_POSTGRE_USER_NAME }}
DB_INSTANCE_NAME: ${{ secrets.DB_INSTANCE_NAME }}
- GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
+ GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
diff --git a/.github/workflows/db-update.yml b/.github/workflows/db-update.yml
index c13e90821..2d824a60e 100644
--- a/.github/workflows/db-update.yml
+++ b/.github/workflows/db-update.yml
@@ -85,12 +85,12 @@ jobs:
- name: Update .env file
run: |
- echo "POSTGRES_USER=${{ secrets.DB_USER_NAME }}" > config/.env.local
echo "PGUSER=${{ secrets.DB_USER_NAME }}" >> config/.env.local
echo "POSTGRES_PASSWORD=${{ secrets.DB_USER_PASSWORD }}" >> config/.env.local
echo "POSTGRES_DB=${{ inputs.DB_NAME }}" >> config/.env.local
echo "POSTGRES_PORT=5432" >> config/.env.local
echo "POSTGRES_HOST=${{ env.DB_IP }}" >> config/.env.local
+ echo "FEEDS_DATABASE_URL=postgresql://${{ secrets.DB_USER_NAME }}:${{ secrets.DB_USER_PASSWORD }}@${{ env.DB_IP }}:5432/${{ inputs.DB_NAME }}" >> config/.env.local
echo "ENV=dev" >> config/.env.local
cat config/.env.local
diff --git a/.gitignore b/.gitignore
index c40309fef..5658aaee3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -65,3 +65,6 @@ __pycache__
/functions-python/*/.env.local
**/node_modules
+
+# Environment file
+.env
\ No newline at end of file
diff --git a/api/src/database/database.py b/api/src/database/database.py
index 0c8eacfe8..64f40869f 100644
--- a/api/src/database/database.py
+++ b/api/src/database/database.py
@@ -4,8 +4,6 @@
import uuid
from typing import Type, Callable
from dotenv import load_dotenv
-
-from google.cloud.sql.connector import Connector
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session, load_only, Query
@@ -41,28 +39,10 @@ def __new__(cls, *args, **kwargs):
def __init__(self):
load_dotenv()
- username = os.getenv("POSTGRES_USER")
- password = os.getenv("POSTGRES_PASSWORD")
- schema = os.getenv("POSTGRES_DB")
- port = os.getenv("POSTGRES_PORT")
- host = os.getenv("POSTGRES_HOST")
self.logger = Logger(Database.__module__).get_logger()
self.engine = None
self.connection_attempts = 0
- self.SQLALCHEMY_DATABASE_URL = f"postgresql://{username}:{password}@{host}:{port}/{schema}"
-
- # set up GCP SQL Connector
- connector = Connector()
- instance_name = os.getenv("INSTANCE_NAME")
- self.get_connection = None
- if instance_name is not None:
- self.get_connection = lambda: connector.connect(
- instance_name,
- "pg8000",
- user=username,
- password=password,
- db=schema,
- )
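+        # Full connection URL, e.g. postgresql://user:password@host:port/dbname, supplied via the FEEDS_DATABASE_URL env var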
+ self.SQLALCHEMY_DATABASE_URL = os.getenv("FEEDS_DATABASE_URL")
self.start_session()
def is_connected(self):
@@ -82,10 +62,7 @@ def start_session(self):
if self.engine is None:
self.connection_attempts += 1
self.logger.debug(f"Database connection attempt #{self.connection_attempts}.")
- if self.get_connection is not None:
- self.engine = create_engine("postgresql+pg8000://", creator=self.get_connection)
- else:
- self.engine = create_engine(self.SQLALCHEMY_DATABASE_URL, echo=True)
+ self.engine = create_engine(self.SQLALCHEMY_DATABASE_URL, echo=True)
self.logger.debug("Database connected.")
if global_session is not None and global_session.is_active:
self.logger.info("Database session reused.")
diff --git a/api/src/feeds/impl/datasets_api_impl.py b/api/src/feeds/impl/datasets_api_impl.py
index 4220abbb6..97757409f 100644
--- a/api/src/feeds/impl/datasets_api_impl.py
+++ b/api/src/feeds/impl/datasets_api_impl.py
@@ -76,7 +76,7 @@ def apply_bounding_filtering(
)
)
elif bounding_filter_method == "completely_enclosed":
- return query.filter(Gtfsdataset.bounding_box.ST_Covers(bounding_box))
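+            # "Completely enclosed" means the filter box covers the dataset box; the previous argument order tested the inverse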
+ return query.filter(bounding_box.ST_Covers(Gtfsdataset.bounding_box))
elif bounding_filter_method == "disjoint":
return query.filter(Gtfsdataset.bounding_box.ST_Disjoint(bounding_box))
else:
diff --git a/api/src/feeds/impl/feeds_api_impl.py b/api/src/feeds/impl/feeds_api_impl.py
index 1baefbf53..5cf90a568 100644
--- a/api/src/feeds/impl/feeds_api_impl.py
+++ b/api/src/feeds/impl/feeds_api_impl.py
@@ -1,3 +1,4 @@
+import json
from typing import List, Type, Set, Union
from fastapi import HTTPException
@@ -26,6 +27,7 @@
from feeds.impl.datasets_api_impl import DatasetsApiImpl
from feeds_gen.apis.feeds_api_base import BaseFeedsApi
from feeds_gen.models.basic_feed import BasicFeed
+from feeds_gen.models.bounding_box import BoundingBox
from feeds_gen.models.external_id import ExternalId
from feeds_gen.models.gtfs_dataset import GtfsDataset
from feeds_gen.models.gtfs_feed import GtfsFeed
@@ -159,6 +161,7 @@ def _get_order_by_key(order_by: list[str] = None):
FeedsApiImpl._create_feeds_query(Gtfsfeed)
.join(Gtfsdataset, Gtfsfeed.id == Gtfsdataset.feed_id, isouter=True)
.add_entity(Gtfsdataset)
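+            # Serialize the bounding box to GeoJSON so its coordinates can be parsed in plain Python below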
+ .add_column(Gtfsdataset.bounding_box.ST_AsGeoJSON())
.join(t_locationfeed, t_locationfeed.c.feed_id == Gtfsfeed.id, isouter=True)
.join(Location, t_locationfeed.c.location_id == Location.id, isouter=True)
.add_entity(Location)
@@ -177,7 +180,9 @@ def _get_order_by_key(order_by: list[str] = None):
)
gtfs_feeds = []
for feed_group in feed_groups:
- feed_objects, redirect_ids, external_ids, redirect_comments, latest_datasets, locations = zip(*feed_group)
+ feed_objects, redirect_ids, external_ids, redirect_comments, datasets, bounding_boxes, locations = zip(
+ *feed_group
+ )
# We use a set to eliminate duplicate in the Redirects.
# But we can't use the Redirect object directly since they are not hashable and making them
@@ -202,11 +207,24 @@ def _get_order_by_key(order_by: list[str] = None):
for location in locations
if location is not None
]
- if latest_dataset := next(filter(lambda x: x is not None and x.latest, latest_datasets), None):
- # better check if there are more than one latest dataset
- gtfs_feed.latest_dataset = LatestDataset(
- id=latest_dataset.stable_id, hosted_url=latest_dataset.hosted_url
- )
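+                # Take the first dataset flagged as latest that also has a bounding box, defaulting to (None, None)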
+ latest_dataset, bounding_box = next(
+ filter(
+ lambda dataset: dataset[0] is not None and dataset[1] is not None and dataset[0].latest,
+ zip(datasets, bounding_boxes),
+ ),
+ (None, None),
+ )
+ if latest_dataset:
+ api_dataset = LatestDataset(id=latest_dataset.stable_id, hosted_url=latest_dataset.hosted_url)
+ if bounding_box:
+ coordinates = json.loads(bounding_box)["coordinates"][0]
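+                        # The envelope ring starts at (min_lon, min_lat), so index 0 holds the minimums and index 2 the opposite (maximum) corner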
+ api_dataset.bounding_box = BoundingBox(
+ minimum_latitude=coordinates[0][1],
+ maximum_latitude=coordinates[2][1],
+ minimum_longitude=coordinates[0][0],
+ maximum_longitude=coordinates[2][0],
+ )
+ gtfs_feed.latest_dataset = api_dataset
gtfs_feeds.append(gtfs_feed)
@@ -305,9 +323,6 @@ def get_gtfs_feed_datasets(
downloaded_date_gte: str,
downloaded_date_lte: str,
sort: str,
- bounding_latitudes: str,
- bounding_longitudes: str,
- bounding_filter_method: str,
) -> List[GtfsDataset]:
"""Get a list of datasets related to a feed."""
# getting the bounding box as JSON to make it easier to process
@@ -315,9 +330,6 @@ def get_gtfs_feed_datasets(
download_date__lte=downloaded_date_lte,
download_date__gte=downloaded_date_gte,
).filter(DatasetsApiImpl.create_dataset_query().filter(Feed.stable_id == id))
- query = DatasetsApiImpl.apply_bounding_filtering(
- query, bounding_latitudes, bounding_longitudes, bounding_filter_method
- )
if latest:
query = query.filter(Gtfsdataset.latest)
@@ -334,8 +346,8 @@ def get_gtfs_feeds(
subdivision_name: str,
municipality: str,
sort: str,
- bounding_latitudes: str,
- bounding_longitudes: str,
+ dataset_latitudes: str,
+ dataset_longitudes: str,
bounding_filter_method: str,
order_by: list[str],
) -> List[GtfsFeed]:
@@ -354,8 +366,8 @@ def get_gtfs_feeds(
feed_filter,
limit=limit,
offset=offset,
- bounding_latitudes=bounding_latitudes,
- bounding_longitudes=bounding_longitudes,
+ bounding_latitudes=dataset_latitudes,
+ bounding_longitudes=dataset_longitudes,
bounding_filter_method=bounding_filter_method,
order_by=order_by,
)
diff --git a/api/tests/test_database.py b/api/tests/test_database.py
index b6fbc9e9c..3358e9100 100644
--- a/api/tests/test_database.py
+++ b/api/tests/test_database.py
@@ -29,11 +29,11 @@ def assert_bounding_box_found(latitudes, longitudes, method, expected_found, tes
@pytest.mark.parametrize(
"latitudes,longitudes,method,expected_found",
[
- ("37.7, 38", "-84.7,-84.6", "completely_enclosed", True),
- ("37, 38", "-84.7,-84.6", "completely_enclosed", False), # min latitude is too low
- ("37.7, 39", "-84.7,-84.6", "completely_enclosed", False), # max latitude is too high
- ("37.7, 38", "-85,-84.6", "completely_enclosed", False), # min longitude is too low
- ("37.7, 38", "-84.7,-83", "completely_enclosed", False), # max longitude is too high
+ ("37, 39", "-85,-84", "completely_enclosed", True), # completely enclosed
+ ("37.7, 39", "-85,-84", "completely_enclosed", False), # min latitude is too high
+ ("37, 38", "-85,-84", "completely_enclosed", False), # max latitude is too low
+ ("37, 39", "-84.7,-84", "completely_enclosed", False), # min longitude is too low
+ ("37, 39", "-85,-84.5", "completely_enclosed", False), # max longitude is too high
],
)
def test_bounding_box_completed_closed(latitudes, longitudes, method, expected_found, test_database):
diff --git a/api/tests/unittest/test_feeds.py b/api/tests/unittest/test_feeds.py
index 2b37da40f..2949809ba 100644
--- a/api/tests/unittest/test_feeds.py
+++ b/api/tests/unittest/test_feeds.py
@@ -1,3 +1,5 @@
+import json
+
from fastapi.testclient import TestClient
from tests.test_utils.token import authHeaders
@@ -86,8 +88,32 @@ def test_gtfs_feeds_get(client: TestClient, mocker):
subdivision_name="test_subdivision_name",
municipality="test_municipality",
)
+ mock_bounding_box = json.dumps(
+ {
+ "type": "Polygon",
+ "coordinates": [
+ [
+ [-70.248666, 43.655373],
+ [-70.248666, 43.71619],
+ [-70.11018, 43.71619],
+ [-70.11018, 43.655373],
+ [-70.248666, 43.655373],
+ ]
+ ],
+ }
+ )
mock_select.return_value = [
- [(mock_feed, redirect_target_id, mock_external_id, redirect_comment, mock_latest_datasets, mock_locations)]
+ [
+ (
+ mock_feed,
+ redirect_target_id,
+ mock_external_id,
+ redirect_comment,
+ mock_latest_datasets,
+ mock_bounding_box,
+ mock_locations,
+ )
+ ]
]
response = client.request(
@@ -146,8 +172,32 @@ def test_gtfs_feed_get(client: TestClient, mocker):
subdivision_name="test_subdivision_name",
municipality="test_municipality",
)
+ mock_bounding_box = json.dumps(
+ {
+ "type": "Polygon",
+ "coordinates": [
+ [
+ [-70.248666, 43.655373],
+ [-70.248666, 43.71619],
+ [-70.11018, 43.71619],
+ [-70.11018, 43.655373],
+ [-70.248666, 43.655373],
+ ]
+ ],
+ }
+ )
mock_select.return_value = [
- [(mock_feed, redirect_target_id, mock_external_id, redirect_comment, mock_latest_datasets, mock_locations)]
+ [
+ (
+ mock_feed,
+ redirect_target_id,
+ mock_external_id,
+ redirect_comment,
+ mock_latest_datasets,
+ mock_bounding_box,
+ mock_locations,
+ )
+ ]
]
response = client.request(
diff --git a/config/.env.local b/config/.env.local
index a54b3721c..b63187c4d 100644
--- a/config/.env.local
+++ b/config/.env.local
@@ -5,4 +5,5 @@ POSTGRES_PORT=5432
PGUSER=postgres
POSTGRES_HOST=localhost
ENV=local
-SCHEMA_SPY_DOC=schemaspy-dev
\ No newline at end of file
+SCHEMA_SPY_DOC=schemaspy-dev
+FEEDS_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/MobilityDatabase
\ No newline at end of file
diff --git a/docs/DatabaseCatalogAPI.yaml b/docs/DatabaseCatalogAPI.yaml
index 1360607e3..91c351edc 100644
--- a/docs/DatabaseCatalogAPI.yaml
+++ b/docs/DatabaseCatalogAPI.yaml
@@ -93,8 +93,8 @@ paths:
- $ref: "#/components/parameters/subdivision_name"
- $ref: "#/components/parameters/municipality"
- $ref: "#/components/parameters/sort"
- - $ref: "#/components/parameters/boundingLatitudes"
- - $ref: "#/components/parameters/boundingLongitudes"
+ - $ref: "#/components/parameters/datasetLatitudes"
+ - $ref: "#/components/parameters/datasetLongitudes"
- $ref: "#/components/parameters/boundingFilterMethod"
- $ref: "#/components/parameters/order_by"
@@ -185,9 +185,6 @@ paths:
- $ref: "#/components/parameters/downloaded_date_gte"
- $ref: "#/components/parameters/downloaded_date_lte"
- $ref: "#/components/parameters/sort"
- - $ref: "#/components/parameters/boundingLatitudes"
- - $ref: "#/components/parameters/boundingLongitudes"
- - $ref: "#/components/parameters/boundingFilterMethod"
security:
- Authentication: []
@@ -375,6 +372,8 @@ components:
type: string
format: url
example: https://storage.googleapis.com/storage/v1/b/mdb-latest/o/us-maine-casco-bay-lines-gtfs-1.zip?alt=media
+ bounding_box:
+ $ref: "#/components/schemas/BoundingBox"
# Have to put the enum inline because of a bug in openapi-generator
# EntityTypes:
@@ -672,23 +671,25 @@ components:
type: string
example: +status
- boundingLatitudes:
- name: boundingLatitudes
+ datasetLatitudes:
+ name: datasetLatitudes
in: query
description: >
- Specify the minimum and maximum latitudes of the bounding box to use for filtering.
- Must be specified alongside `boundingLongitudes`.
+ Specify the minimum and maximum latitudes of the bounding box to use for filtering.
+ Filters by the bounding box of the `LatestDataset` for a feed.
+ Must be specified alongside `datasetLongitudes`.
required: False
schema:
type: string
example: 41.46,42.67
- boundingLongitudes:
- name: boundingLongitudes
+ datasetLongitudes:
+ name: datasetLongitudes
in: query
description: >
- Specify the minimum and maximum longitudes of the bounding box to use for filtering.
- Must be specified alongside `boundingLatitudes`.
+ Specify the minimum and maximum longitudes of the bounding box to use for filtering.
+ Filters by the bounding box of the `LatestDataset` for a feed.
+ Must be specified alongside `datasetLatitudes`.
required: False
schema:
type: string
@@ -706,7 +707,7 @@ components:
- disjoint
default: completely_enclosed
description: >
- Specify the filtering method to use with the boundingLatitudes and boundingLongitudes parameters.
+ Specify the filtering method to use with the datasetLatitudes and datasetLongitudes parameters.
* `completely_enclosed` - Get resources that are completely enclosed in the specified bounding box.
* `partially_enclosed` - Get resources that are partially enclosed in the specified bounding box.
* `disjoint` - Get resources that are completely outside the specified bounding box.
diff --git a/docs/DatabaseCatalogAPI_IAP.yaml b/docs/DatabaseCatalogAPI_IAP.yaml
deleted file mode 100644
index 195d69c53..000000000
--- a/docs/DatabaseCatalogAPI_IAP.yaml
+++ /dev/null
@@ -1,806 +0,0 @@
-openapi: 3.0.0
-info:
- version: 0.1.0
- title: Mobility Data Catalog API
- description: API as required in the _Proposed Version 1_ from the _Product Requirement Document for the Mobility Database_
- termsOfService: https://mobilitydata.org/terms-of-use
- contact:
- name: MobilityData
- url: https://mobilitydata.org/
- email: it@mobilitydata.org
- license:
- name: MobilityData License
- url: https://mobilitydata.org/license
-
-servers:
-# Replace beta address with prod address when ready
- - url: https://beta.mobilitydatabase.org/
- description: Beta release environment
- - url: https://api-qa.mobilitydatabase.org/
- description: Pre-prod environment
- - url: https://api-dev.mobilitydatabase.org/
- description: Development environment
- - url: http://localhost:8080/
- description: Local development environment
-
-tags:
- - name: "feeds"
- description: "Feeds of the Mobility Database"
- - name: "datasets"
- description: "Datasets of the Mobility Database"
- - name: "metadata"
- description: "Metadata about the API"
-
-paths:
- /v1/feeds:
- get:
- description: Get some (or all) feeds from the Mobility Database.
- tags:
- - "feeds"
- operationId: getFeeds
- parameters:
- - $ref: "#/components/parameters/limitQueryParam"
- - $ref: "#/components/parameters/offset"
- - $ref: "#/components/parameters/status"
- - $ref: "#/components/parameters/provider"
- - $ref: "#/components/parameters/producer_url"
- - $ref: "#/components/parameters/sort"
-
- security:
- - Authentication: [ ]
- responses:
- 200:
- description: >
- Successful pull of the feeds common info.
- This info has a reduced set of fields that are common to all types of feeds.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/BasicFeeds"
-
- /v1/feeds/{id}:
- parameters:
- - $ref: "#/components/parameters/feedIdPathParam"
- get:
- description: Get the specified feed from the Mobility Database.
- tags:
- - "feeds"
- operationId: getFeed
- security:
- - Authentication: []
- responses:
- 200:
- description: >
- Successful pull of the feeds common info for the provided ID.
- This info has a reduced set of fields that are common to all types of feeds.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/BasicFeed"
-
- /v1/gtfs_feeds:
- get:
- description: Get some (or all) GTFS feeds from the Mobility Database.
- tags:
- - "feeds"
- operationId: getGtfsFeeds
- parameters:
- - $ref: "#/components/parameters/limitQueryParam"
- - $ref: "#/components/parameters/offset"
- - $ref: "#/components/parameters/provider"
- - $ref: "#/components/parameters/producer_url"
- - $ref: "#/components/parameters/country_code"
- - $ref: "#/components/parameters/subdivision_name"
- - $ref: "#/components/parameters/municipality"
- - $ref: "#/components/parameters/sort"
- - $ref: "#/components/parameters/boundingLatitudes"
- - $ref: "#/components/parameters/boundingLongitudes"
- - $ref: "#/components/parameters/boundingFilterMethod"
- - $ref: "#/components/parameters/order_by"
-
- security:
- - Authentication: []
- responses:
- 200:
- description: Successful pull of the GTFS feeds info.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/GtfsFeeds"
-
- /v1/gtfs_rt_feeds:
- get:
- description: Get some (or all) GTFS Realtime feeds from the Mobility Database.
- tags:
- - "feeds"
- operationId: getGtfsRtFeeds
- parameters:
- - $ref: "#/components/parameters/limitQueryParam"
- - $ref: "#/components/parameters/offset"
- - $ref: "#/components/parameters/provider"
- - $ref: "#/components/parameters/producer_url"
- - $ref: "#/components/parameters/entity_types"
- - $ref: "#/components/parameters/sort"
- security:
- - Authentication: []
- responses:
- 200:
- description: Successful pull of the GTFS Realtime feeds info.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/GtfsRTFeeds"
-
- /v1/gtfs_feeds/{id}:
- parameters:
- - $ref: "#/components/parameters/feedIdPathParam"
- get:
- description: Get the specified GTFS feed from the Mobility Database.
- tags:
- - "feeds"
- operationId: getGtfsFeed
-
- security:
- - Authentication: []
- responses:
- 200:
- description: Successful pull of the requested feed.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/GtfsFeed"
-
- /v1/gtfs_rt_feeds/{id}:
- parameters:
- - $ref: "#/components/parameters/feedIdPathParam"
- get:
- description: Get the specified GTFS Realtime feed from the Mobility Database.
- tags:
- - "feeds"
- operationId: getGtfsRtFeed
-
- security:
- - Authentication: []
- responses:
- 200:
- description: Successful pull of the requested feed.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/GtfsRTFeed"
-
-
- /v1/gtfs_feeds/{id}/datasets:
- parameters:
- - $ref: "#/components/parameters/feedIdOfDatasetsPathParam"
- get:
- description: Get a list of datasets related to a GTFS feed.
- tags:
- - "feeds"
- operationId: getGtfsFeedDatasets
- parameters:
- - $ref: "#/components/parameters/latestQueryParam"
- - $ref: "#/components/parameters/limitQueryParam"
- - $ref: "#/components/parameters/offset"
- - $ref: "#/components/parameters/downloaded_date_gte"
- - $ref: "#/components/parameters/downloaded_date_lte"
- - $ref: "#/components/parameters/sort"
- - $ref: "#/components/parameters/boundingLatitudes"
- - $ref: "#/components/parameters/boundingLongitudes"
- - $ref: "#/components/parameters/boundingFilterMethod"
-
- security:
- - Authentication: []
- responses:
- 200:
- description: Successful pull of the requested datasets.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/GtfsDatasets"
-
- /v1/datasets/gtfs/{id}:
- get:
- description: Get the specified dataset from the Mobility Database.
- tags:
- - "datasets"
- operationId: getDatasetGtfs
- parameters:
- - $ref: "#/components/parameters/datasetIdPathParam"
- security:
- - Authentication: []
- responses:
- 200:
- description: Successful pull of the requested dataset.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/GtfsDataset"
-
- /v1/metadata:
- get:
- description: Get metadata about this API.
- tags:
- - "metadata"
- operationId: getMetadata
- security:
- - Authentication: []
- GoogleIap: []
- responses:
- 200:
- description: Successful pull of the metadata.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/Metadata"
-
-components:
- schemas:
- Redirect:
- type: object
- properties:
- target_id:
- description: The feed ID that should be used in replacement of the current one.
- type: string
- example: mdb-10
- comment:
- description: A comment explaining the redirect.
- type: string
- example: Redirected because of a change of URL.
- BasicFeed:
- type: object
- properties:
- id:
- description: Unique identifier used as a key for the feeds table.
- type: string
- example: mdb-10
- data_type:
- type: string
- enum:
- - gtfs
- - gtfs_rt
- example: gtfs
-# Have to put the enum inline because of a bug in openapi-generator
-# $ref: "#/components/schemas/DataType"
- status:
- description: >
- Describes status of the Feed. Should be one of
- * `active` Feed should be used in public trip planners.
- * `deprecated` Feed is explicitly deprecated and should not be used in public trip planners.
- * `inactive` Feed hasn't been recently updated and should be used at risk of providing outdated information.
- * `development` Feed is being used for development purposes and should not be used in public trip planners.
- type: string
- enum:
- - active
- - deprecated
- - inactive
- - development
- example: active
-# Have to put the enum inline because of a bug in openapi-generator
-# $ref: "#/components/schemas/FeedStatus"
-
- external_ids:
- $ref: "#/components/schemas/ExternalIds"
- provider:
- description: A commonly used name for the transit provider included in the feed.
- type: string
- example: London Transit Commission
- feed_name:
- description: >
- An optional description of the data feed, e.g to specify if the data feed is an aggregate of
- multiple providers, or which network is represented by the feed.
- type: string
- example: Atlantic Station Shuttle (FREE RIDE)
- note:
- description: A note to clarify complex use cases for consumers.
- type: string
- feed_contact_email:
- description: Use to contact the feed producer.
- type: string
- source_info:
- $ref: "#/components/schemas/SourceInfo"
- redirects:
- type: array
- items:
- $ref: "#/components/schemas/Redirect"
-
- GtfsFeed:
- allOf:
- - $ref: "#/components/schemas/BasicFeed"
- - type: object
- properties:
- locations:
- $ref: "#/components/schemas/Locations"
- - type: object
- properties:
- latest_dataset:
- $ref: "#/components/schemas/LatestDataset"
-
- GtfsRTFeed:
- allOf:
- - $ref: "#/components/schemas/BasicFeed"
- - type: object
- properties:
- entity_types:
- type: array
- items:
- type: string
- enum:
- - vp
- - tu
- - sa
- example: vp
- description: >
- The type of realtime entry:
- * vp - vehicle positions
- * tu - trip updates
- * sa - service alerts
-# Have to put the enum inline because of a bug in openapi-generator
-# $ref: "#/components/schemas/EntityTypes"
- feed_references:
- description:
- A list of the GTFS feeds that the real time source is associated with, represented by their MDB source IDs.
- type: array
- items:
- type: string
- example: "mdb-20"
-
- BasicFeeds:
- type: array
- items:
- $ref: "#/components/schemas/BasicFeed"
-
- GtfsFeeds:
- type: array
- items:
- $ref: "#/components/schemas/GtfsFeed"
-
- GtfsRTFeeds:
- type: array
- items:
- $ref: "#/components/schemas/GtfsRTFeed"
-
- LatestDataset:
- type: object
- properties:
- id:
- description: Identifier of the latest dataset for this feed.
- type: string
- example: dataset_0
- hosted_url:
- description: >
- As a convenience, the URL of the latest uploaded dataset hosted by MobilityData.
- It should be the same URL as the one found in the latest dataset id dataset.
- An alternative way to find this is to use the latest dataset id to obtain the dataset and then use its hosted_url.
- type: string
- format: url
- example: https://storage.googleapis.com/storage/v1/b/mdb-latest/o/us-maine-casco-bay-lines-gtfs-1.zip?alt=media
-
-# Have to put the enum inline because of a bug in openapi-generator
-# EntityTypes:
-# type: array
-# items:
-# $ref: "#/components/schemas/EntityType"
-
-# EntityType:
-# type: string
-# enum:
-# - vp
-# - tu
-# - sa
-# example: vp
-# description: >
-# The type of realtime entry:
-# * vp - vehicle positions
-# * tu - trip updates
-# * sa - service alerts
-
- ExternalIds:
- type: array
- items:
- $ref: "#/components/schemas/ExternalId"
-
- ExternalId:
- type: object
- properties:
- external_id:
- description: The ID that can be use to find the feed data in an external or legacy database.
- type: string
- example: 411
- source:
- description: The source of the external ID, e.g. the name of the database where the external ID can be used.
- type: string
- example: MobilityDataSpreadsheetCatalog
-
- SourceInfo:
- type: object
- properties:
- producer_url:
- description: >
- URL where the producer is providing the dataset.
- Refer to the authentication information to know how to access this URL.
- type: string
- format: url
- example: http://www.londontransit.ca/gtfsfeed/google_transit.zip
- authentication_type:
- description: >
- Defines the type of authentication required to access the `producer_url`. Valid values for this field are:
- * 0 or (empty) - No authentication required.
- * 1 - The authentication requires an API key, which should be passed as value of the parameter api_key_parameter_name in the URL. Please visit URL in authentication_info_url for more information.
- * 2 - The authentication requires an HTTP header, which should be passed as the value of the header api_key_parameter_name in the HTTP request.
- When not provided, the authentication type is assumed to be 0.
- type: integer
- enum:
- - 0
- - 1
- - 2
- authentication_info_url:
- description: >
- Contains a URL to a human-readable page describing how the authentication should be performed and how credentials can be created.
- This field is required for `authentication_type=1` and `authentication_type=2`.
- type: string
- format: url
- api_key_parameter_name:
- type: string
- description: >
- Defines the name of the parameter to pass in the URL to provide the API key.
- This field is required for `authentication_type=1` and `authentication_type=2`.
- license_url:
- description: A URL where to find the license for the feed.
- type: string
- format: url
- example: https://www.londontransit.ca/open-data/ltcs-open-data-terms-of-use/
-
- Locations:
- type: array
- items:
- $ref: "#/components/schemas/Location"
-
- Location:
- type: object
- properties:
- country_code:
- description: >
- ISO 3166-1 alpha-2 code designating the country where the system is located.
- For a list of valid codes [see here](https://unece.org/trade/uncefact/unlocode-country-subdivisions-iso-3166-2).
- type: string
- example: USA
- subdivision_name:
- description: >
- ISO 3166-2 subdivision name designating the subdivision (e.g province, state, region) where the system is located.
- For a list of valid names [see here](https://unece.org/trade/uncefact/unlocode-country-subdivisions-iso-3166-2).
- type: string
- example: California
- municipality:
- description: Primary municipality in which the transit system is located.
- type: string
- example: Los Angeles
-
-# Have to put the enum inline because of a bug in openapi-generator
-# FeedStatus:
-# description: >
-# Describes status of the Feed. Should be one of
-# * `active` Feed should be used in public trip planners.
-# * `deprecated` Feed is explicitly deprecated and should not be used in public trip planners.
-# * `inactive` Feed hasn't been recently updated and should be used at risk of providing outdated information.
-# * `development` Feed is being used for development purposes and should not be used in public trip planners.
-# type: string
-# enum:
-# - active
-# - deprecated
-# - inactive
-# - development
-# example: active
-
- BasicDataset:
- type: object
- properties:
- id:
- description: Unique identifier used as a key for the datasets table.
- type: string
- example: dataset_0
- feed_id:
- description: ID of the feed related to this dataset.
- type: string
- example: mdb-99
-
- GtfsDataset:
- allOf:
- - $ref: "#/components/schemas/BasicDataset"
- - type: object
- properties:
- hosted_url:
- description: The URL of the dataset data as hosted by MobilityData. No authentication required.
- type: string
- example: https://storage.googleapis.com/storage/v1/b/mdb-latest/o/us-maine-casco-bay-lines-gtfs-1.zip?alt=media
- note:
- description: A note to clarify complex use cases for consumers.
- type: string
- downloaded_at:
- description: The date and time the dataset was downloaded from the producer, in ISO 8601 format.
- type: string
- example: 2023-07-10T22:06:00Z
- format: datetime
- hash:
- description: A MD5 hash of the dataset.
- type: string
- example: a_long_sha1_hash
- bounding_box:
- $ref: "#/components/schemas/BoundingBox"
- components:
- description: An array of components for this dataset.
- type: array
- items:
- type: string
- enum:
- - fares-v2
- - fares-v1
- - flex-v1
- - flex-v2
- - pathways
- example: fares-v2
-
- BoundingBox:
- description: Bounding box of the dataset when it was first added to the catalog.
- type: object
- properties:
- minimum_latitude:
- description: The minimum latitude for the dataset bounding box.
- type: number
- example: 43.655373
- maximum_latitude:
- description: The maximum latitude for the dataset bounding box.
- type: number
- example: 43.71619
- minimum_longitude:
- description: The minimum longitude for the dataset bounding box.
- type: number
- example: -70.248666
- maximum_longitude:
- description: The maximum longitude for the dataset bounding box.
- type: number
- example: -70.11018
-
- GtfsDatasets:
- type: array
- items:
- $ref: "#/components/schemas/GtfsDataset"
-
- Metadata:
- type: object
- properties:
- version:
- type: string
- example: 1.0.0
-
-# Have to put the enum inline because of a bug in openapi-generator
-# DataType:
-# type: string
-# enum:
-# - gtfs
-# - gtfs_rt
-# example: gtfs
- parameters:
-
- filter:
- name: filter
- in: query
- description: A filter to apply to the returned data. Exact syntax to be designed
- required: False
- schema:
- type: string
- example: status=active
- status:
- name: status
- in: query
- description: Filter feeds by their status
- required: false
- schema:
- type: string
- enum:
- - active
- - deprecated
- - inactive
- - development
- provider:
- name: provider
- in: query
- description: Filter feeds by provider name. Phrase is matched case insensitively to actual provider names.
- required: false
- schema:
- type: string
- example: London Transit
- producer_url:
- name: producer_url
- in: query
- required: false
- description: >
- Filter feeds by URL where the producer is providing the dataset. Phrase is matched case insensitively.
- schema:
- type: string
- format: url
- example: londontransit.ca/gtfsfeed
- entity_types:
- name: entity_types
- in: query
- description: Filter feeds by their entity type. Expects a comma separated list of all types to fetch.
- required: false
- schema:
- type: string
- example: vp,sa
- country_code:
- name: country_code
- in: query
- description: Filter feeds by their exact country code.
- schema:
- type: string
- example: USA
- subdivision_name:
- name: subdivision_name
- in: query
- description: Filter feeds by subdivision name. Phrase is matched case insensitively to actual subdivision names.
- schema:
- type: string
- example: California
- municipality:
- name: municipality
- in: query
- description: Filter feeds by municipality name. Phrase is matched case insensitively to actual municipality names.
- schema:
- type: string
- example: Los Angeles
- downloaded_date_gte:
- name: downloaded_date_gte
- in: query
- description: Filter feed datasets with downloaded date greater or equal to given date.
- schema:
- type: string
- example: 2023-07-00T22:06:00Z
- downloaded_date_lte:
- name: downloaded_date_lte
- in: query
- description: Filter feed datasets with downloaded date less or equal to given date.
- schema:
- type: string
- example: 2023-07-20T22:06:00Z
- sort:
- name: sort
- in: query
- description: A specification of the sort order of the returned data. Exact syntax to be designed
- required: False
- schema:
- type: string
- example: +status
-
- boundingLatitudes:
- name: boundingLatitudes
- in: query
- description: >
- Specify the minimum and maximum latitudes of the bounding box to use for filtering.
- Must be specified alongside `boundingLongitudes`.
- required: False
- schema:
- type: string
- example: 41.46,42.67
-
- boundingLongitudes:
- name: boundingLongitudes
- in: query
- description: >
- Specify the minimum and maximum longitudes of the bounding box to use for filtering.
- Must be specified alongside `boundingLatitudes`.
- required: False
- schema:
- type: string
- example: -78.58,-87-29
-
- boundingFilterMethod:
- name: boundingFilterMethod
- in: query
- required: False
- schema:
- type: string
- enum:
- - completely_enclosed
- - partially_enclosed
- - disjoint
- default: completely_enclosed
- description: >
- Specify the filtering method to use with the boundingLatitudes and boundingLongitudes parameters.
- * `completely_enclosed` - Get resources that are completely enclosed in the specified bounding box.
- * `partially_enclosed` - Get resources that are partially enclosed in the specified bounding box.
- * `disjoint` - Get resources that are completely outside the specified bounding box.
- example: completely_enclosed
-
- order_by:
- name: order_by
- in: query
- required: False
- schema:
- type: array
- items:
- type: string
- enum:
- - external_id
- - +external_id
- - -external_id
- - country_code
- - +country_code
- - -country_code
- default:
- - "external_id"
- description: >
- Specify the optional field to sort records by.
- example: external_id
-
- insideBoundingBox:
- name: insideBoundingBox
- in: query
- description: Only include datasets where the specified geographical point is inside the bounding box
- required: False
- schema:
- type: string
- example: 43.71,-70.24
-
- latestQueryParam:
- name: latest
- in: query
- description: If true, only return the latest dataset.
- required: False
- schema:
- type: boolean
- default: false
-
- limitQueryParam:
- name: limit
- in: query
- description: The number of items to be returned.
- required: False
- schema:
- type: integer
- minimum: 0
- example: 10
- offset:
- name: offset
- in: query
- description: Offset of the first item to return.
- required: False
- schema:
- type: integer
- minimum: 0
- default: 0
- example: 10
-
- feedIdPathParam:
- name: id
- in: path
- description: The feed ID of the requested feed.
- required: True
- schema:
- type: string
- example: mdb-10
-
- feedIdOfDatasetsPathParam:
- name: id
- in: path
- description: The ID of the feed for which to obtain datasets.
- required: True
- schema:
- type: string
- example: mdb-10
-
- datasetIdPathParam:
- name: id
- in: path
- description: The ID of the requested dataset.
- required: True
- schema:
- type: string
- example: dataset_0
-
- securitySchemes:
- Authentication:
- $ref: "./IAPAuthenticationSchema.yaml#/components/securitySchemes/Authentication"
-
-security:
- - Authentication: []
diff --git a/docs/DB_SELECTION.md b/docs/DatabaseSelection.md
similarity index 100%
rename from docs/DB_SELECTION.md
rename to docs/DatabaseSelection.md
diff --git a/docs/batch-processing/BatchProcessing.md b/docs/batch-processing/BatchProcessing.md
new file mode 100644
index 000000000..077793dbf
--- /dev/null
+++ b/docs/batch-processing/BatchProcessing.md
@@ -0,0 +1,52 @@
+# Historical Datasets Batch Processing Documentation
+
+## Overview
+
+This document describes the workflow for batch processing historical datasets and storing them in a Google Cloud Platform (GCP) bucket. The process chains a GCP Scheduler job, an HTTP-triggered function, and a Pub/Sub-triggered function so that each feed is fetched, compared with its previous version, and stored reliably.
+
+## Workflow Description
+The following schema gives a high-level overview of the workflow:
+
+![Batch processing schema](batch_processing_schema.png)
+
+### Cloud Scheduler: `dataset-batch-job`
+
+The `dataset-batch-job` Cloud Scheduler job initiates the workflow. It runs at an interval that varies by environment and starts the data processing sequence by making an HTTP call to the `batch-datasets` function.
+
+### Function Descriptions
+
+Each cloud function encapsulates a specific part of the workflow:
+- `batch-datasets`: HTTP-triggered function that retrieves information about active feeds and publishes one message per feed to the Pub/Sub topic, queueing each dataset for processing.
+
+- `batch-process-dataset`: Pub/Sub-triggered function that performs the actual data processing. It downloads the feed data, compares it to the previous version, and, if the data changed, stores a new dataset version so that consumers always have access to the most current datasets.
+
+#### HTTP Cloud Function: `batch-datasets`
+
+![batch-datasets function](batch_datasets.png)
+
+This function serves as the starting point for the batch processing workflow. It queries for active feeds and publishes their information to a specified Pub/Sub topic. The messages contain details necessary to process each feed, including URLs, feed IDs, and authentication details if applicable. The structure of the message it sends is detailed as follows:
+
+```json
+{
+ "execution_id": "",
+ "producer_url": "",
+ "feed_stable_id": "",
+ "feed_id": "",
+ "authentication_type": "",
+ "authentication_info_url": "",
+ "api_key_parameter_name": "",
+ "dataset_id": "",
+ "dataset_hash": ""
+}
+```
+
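+As a minimal sketch, the publishing side looks roughly like this (paraphrasing the helpers in `functions-python/batch_datasets/src/main.py`; the payload values below are illustrative placeholders, while `PROJECT_ID` and `PUBSUB_TOPIC_NAME` are the environment variables that module reads):
+
+```python
+import json
+import os
+
+from google.cloud import pubsub_v1
+
+publisher = pubsub_v1.PublisherClient()
+topic_path = publisher.topic_path(os.getenv("PROJECT_ID"), os.getenv("PUBSUB_TOPIC_NAME"))
+
+# One message per active feed; field values here are placeholders.
+payload = {
+    "execution_id": "execution-uuid",
+    "producer_url": "https://example.com/gtfs.zip",
+    "feed_stable_id": "mdb-10",
+    "feed_id": "feed-uuid",
+    "authentication_type": "0",
+    "authentication_info_url": "",
+    "api_key_parameter_name": "",
+    "dataset_id": "dataset-uuid",
+    "dataset_hash": "sha256-of-previous-download",
+}
+
+# publish() returns a Future; the real code adds a done-callback that
+# logs success or failure for each feed's message.
+future = publisher.publish(topic_path, json.dumps(payload).encode("utf-8"))
+print(future.result())  # message ID once the publish settles
+```
+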
+#### Pub/Sub Cloud Function: `batch-process-dataset`
+
+![batch-process-dataset function](batch_process_dataset.png)
+
+Subscribed to the topic set in the `batch-datasets` function, `batch-process-dataset` is triggered once per published message and processes each feed independently. The function performs the following operations:
+
+1. **Download Data**: It retrieves the feed data from the provided URL.
+2. **Compare Hashes**: The SHA256 hash of the downloaded data is compared to the hash of the last stored version to detect changes.
+ - If the hash is unchanged, the dataset is considered up-to-date, and no further action is taken.
+ - If the hash has changed, it is indicative of an update, and a new `Dataset` entity is created and stored with the corresponding feed information.
+
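+
+The change-detection step amounts to the sketch below (an illustration, not the exact implementation; the helper name `dataset_changed` and the timeout are assumptions):
+
+```python
+import hashlib
+from typing import Optional, Tuple
+
+import requests
+
+
+def dataset_changed(producer_url: str, previous_hash: Optional[str]) -> Tuple[bool, str]:
+    """Download the feed and report whether its SHA256 differs from the stored hash."""
+    response = requests.get(producer_url, timeout=300)  # generous timeout for large feeds (assumption)
+    response.raise_for_status()
+    new_hash = hashlib.sha256(response.content).hexdigest()
+    return new_hash != previous_hash, new_hash
+
+
+# A changed hash triggers creation of a new Dataset entity and upload of the
+# file to the GCP bucket; an unchanged hash leaves the stored dataset as-is.
+```
+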
+The URL format for accessing these datasets is standardized as `//.zip`, ensuring a consistent and predictable path for data retrieval.
diff --git a/docs/batch-processing/batch_datasets.png b/docs/batch-processing/batch_datasets.png
new file mode 100644
index 000000000..80627e511
Binary files /dev/null and b/docs/batch-processing/batch_datasets.png differ
diff --git a/docs/batch-processing/batch_process_dataset.png b/docs/batch-processing/batch_process_dataset.png
new file mode 100644
index 000000000..480c41c1f
Binary files /dev/null and b/docs/batch-processing/batch_process_dataset.png differ
diff --git a/docs/batch-processing/batch_processing_schema.png b/docs/batch-processing/batch_processing_schema.png
new file mode 100644
index 000000000..0e648edea
Binary files /dev/null and b/docs/batch-processing/batch_processing_schema.png differ
diff --git a/functions-python/batch_datasets/src/main.py b/functions-python/batch_datasets/src/main.py
index 10c304696..eb9615246 100644
--- a/functions-python/batch_datasets/src/main.py
+++ b/functions-python/batch_datasets/src/main.py
@@ -21,6 +21,7 @@
import functions_framework
from google.cloud import pubsub_v1
+from google.cloud.pubsub_v1 import PublisherClient
from google.cloud.pubsub_v1.futures import Future
from sqlalchemy import or_
from sqlalchemy.orm import Session
@@ -28,11 +29,17 @@
from dataset_service.main import BatchExecutionService, BatchExecution
from helpers.database import start_db_session, close_db_session
-publisher = pubsub_v1.PublisherClient()
pubsub_topic_name = os.getenv("PUBSUB_TOPIC_NAME")
project_id = os.getenv("PROJECT_ID")
+def get_pubsub_client():
+ """
+ Returns a Pub/Sub client.
+ """
+ return pubsub_v1.PublisherClient()
+
+
def publish_callback(future: Future, stable_id: str, topic_path: str):
"""
Callback function for when the message is published to Pub/Sub.
@@ -46,7 +53,7 @@ def publish_callback(future: Future, stable_id: str, topic_path: str):
print(f"Published stable_id={stable_id}.")
-def publish(topic_path: str, data_bytes: bytes) -> Future:
+def publish(publisher: PublisherClient, topic_path: str, data_bytes: bytes) -> Future:
"""
Publishes the given data to the Pub/Sub topic.
"""
@@ -108,6 +115,7 @@ def batch_datasets(request):
close_db_session(session)
print(f"Retrieved {len(active_feeds)} active feeds.")
+ publisher = get_pubsub_client()
topic_path = publisher.topic_path(project_id, pubsub_topic_name)
trace_id = request.headers.get("X-Cloud-Trace-Context")
execution_id = (
@@ -128,7 +136,7 @@ def batch_datasets(request):
}
data_str = json.dumps(payload)
print(f"Publishing {data_str} to {topic_path}.")
- future = publish(topic_path, data_str.encode("utf-8"))
+ future = publish(publisher, topic_path, data_str.encode("utf-8"))
future.add_done_callback(
lambda _: publish_callback(future, active_feed["stable_id"], topic_path)
)
diff --git a/functions-python/batch_datasets/tests/test_batch_datasets_main.py b/functions-python/batch_datasets/tests/test_batch_datasets_main.py
index b512e58fc..e2e5a41cf 100644
--- a/functions-python/batch_datasets/tests/test_batch_datasets_main.py
+++ b/functions-python/batch_datasets/tests/test_batch_datasets_main.py
@@ -16,7 +16,7 @@
import json
import os
from unittest import mock
-from unittest.mock import Mock, patch
+from unittest.mock import Mock, patch, MagicMock
from batch_datasets.src.main import get_active_feeds, batch_datasets
from test_utils.database_utils import get_testing_session, default_db_url
@@ -35,7 +35,9 @@ def test_get_active_feeds():
{"FEEDS_DATABASE_URL": default_db_url, "FEEDS_PUBSUB_TOPIC_NAME": "test_topic"},
)
@patch("batch_datasets.src.main.publish")
-def test_batch_datasets(mock_publish):
+@patch("batch_datasets.src.main.get_pubsub_client")
+def test_batch_datasets(mock_client, mock_publish):
+ mock_client.return_value = MagicMock()
with get_testing_session() as session:
active_feeds = get_active_feeds(session)
with patch(
@@ -50,7 +52,7 @@ def test_batch_datasets(mock_publish):
# active feeds
for i in range(3):
message = json.loads(
- mock_publish.call_args_list[i][0][1].decode("utf-8")
+ mock_publish.call_args_list[i][0][2].decode("utf-8")
)
assert message["feed_stable_id"] in [
feed.stable_id for feed in active_feeds
diff --git a/infra/feed-api/main.tf b/infra/feed-api/main.tf
index 6d376adee..1d92c7f6a 100644
--- a/infra/feed-api/main.tf
+++ b/infra/feed-api/main.tf
@@ -25,46 +25,12 @@ data "google_project" "project" {
locals {
env = {
- "POSTGRES_DB" = {
- secret_id = "${var.environment}_POSTGRES_DB",
- secret_data = var.feed_api_postgres_db
- },
- "POSTGRES_HOST" = {
- secret_id = "${var.environment}_POSTGRES_HOST",
- secret_data = var.feed_api_postgres_host
- },
- "POSTGRES_PASSWORD" = {
- secret_id = "${var.environment}_POSTGRES_PASSWORD"
- secret_data = var.feed_api_postgres_password
- },
- "POSTGRES_PORT" = {
- secret_id = "${var.environment}_POSTGRES_PORT"
- secret_data = var.feed_api_postgres_port
+ "FEEDS_DATABASE_URL" = {
+ secret_id = "${var.environment}_FEEDS_DATABASE_URL"
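+      # No secret_data here: secret versions are created outside Terraform (e.g. by the db-deployer workflow)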
}
- "POSTGRES_USER" = {
- secret_id = "${var.environment}_POSTGRES_USER"
- secret_data = var.feed_api_postgres_user
- }
- }
-}
-
-resource "google_secret_manager_secret" "secret" {
- for_each = local.env
-
- project = var.project_id
- secret_id = each.value.secret_id
- replication {
- automatic = true
}
}
-resource "google_secret_manager_secret_version" "secret_version" {
- for_each = local.env
-
- secret = google_secret_manager_secret.secret[each.key].id
- secret_data = each.value.secret_data
-}
-
# Service account to execute the cloud run service
resource "google_service_account" "containers_service_account" {
account_id = "containers-service-account"
@@ -82,50 +48,14 @@ resource "google_cloud_run_v2_service" "mobility-feed-api" {
containers {
image = "${var.gcp_region}-docker.pkg.dev/${var.project_id}/${var.docker_repository_name}/${var.feed_api_service}:${var.feed_api_image_version}"
env {
- name = "POSTGRES_DB"
+ name = "FEEDS_DATABASE_URL"
value_source {
secret_key_ref {
- secret = google_secret_manager_secret.secret["POSTGRES_DB"].id
+ secret = "${upper(var.environment)}_FEEDS_DATABASE_URL"
version = "latest"
}
}
}
- env {
- name = "POSTGRES_HOST"
- value_source {
- secret_key_ref {
- secret = google_secret_manager_secret.secret["POSTGRES_HOST"].id
- version = "latest"
- }
- }
- }
- env {
- name = "POSTGRES_PASSWORD"
- value_source {
- secret_key_ref {
- secret = google_secret_manager_secret.secret["POSTGRES_PASSWORD"].id
- version = "latest"
- }
- }
- }
- env {
- name = "POSTGRES_PORT"
- value_source {
- secret_key_ref {
- secret = google_secret_manager_secret.secret["POSTGRES_PORT"].id
- version = "latest"
- }
- }
- }
- env {
- name = "POSTGRES_USER"
- value_source {
- secret_key_ref {
- secret = google_secret_manager_secret.secret["POSTGRES_USER"].id
- version = "latest"
- }
- }
- }
}
}
}
@@ -159,7 +89,7 @@ resource "google_secret_manager_secret_iam_policy" "policy" {
for_each = local.env
project = var.project_id
- secret_id = google_secret_manager_secret.secret[each.key].id
+ secret_id = "${upper(var.environment)}_${each.key}"
policy_data = data.google_iam_policy.secret_access.policy_data
}
diff --git a/infra/feed-api/vars.tf b/infra/feed-api/vars.tf
index 1649c53a7..bf79656a1 100644
--- a/infra/feed-api/vars.tf
+++ b/infra/feed-api/vars.tf
@@ -44,27 +44,3 @@ variable "feed_api_image_version" {
description = "Docker image version"
}
-variable "feed_api_postgres_db" {
- type = string
- description = "Feed API DB"
-}
-
-variable "feed_api_postgres_host" {
- type = string
- description = "Feed API DB host"
-}
-
-variable "feed_api_postgres_password" {
- type = string
- description = "Feed API DB password"
-}
-
-variable "feed_api_postgres_port" {
- type = string
- description = "Feed API DB port"
-}
-
-variable "feed_api_postgres_user" {
- type = string
- description = "Feed API DB user"
-}
diff --git a/infra/functions-python/main.tf b/infra/functions-python/main.tf
index 766299f56..9647ca66c 100644
--- a/infra/functions-python/main.tf
+++ b/infra/functions-python/main.tf
@@ -24,7 +24,7 @@ locals {
# Service account to execute the cloud functions
resource "google_service_account" "functions_service_account" {
- account_id = "containers-service-account"
+ account_id = "functions-service-account"
display_name = "Functions Service Account"
}
@@ -43,13 +43,13 @@ data "google_iam_policy" "secret_access" {
binding {
role = "roles/secretmanager.secretAccessor"
members = [
- "serviceAccount:${google_service_account.containers_service_account.email}"
+ "serviceAccount:${google_service_account.functions_service_account.email}"
]
}
}
resource "google_secret_manager_secret_iam_policy" "policy" {
- for_each = { for x in local.function_tokens_config.secret_environment_variables: x.key => x}
+ for_each = { for x in local.function_tokens_config.secret_environment_variables: x.key => x }
project = var.project_id
secret_id = "${upper(var.environment)}_${each.key}"
@@ -86,7 +86,7 @@ resource "google_cloudfunctions2_function" "tokens" {
version = "latest"
}
}
- service_account_email = google_service_account.containers_service_account.email
+ service_account_email = google_service_account.functions_service_account.email
ingress_settings = local.function_tokens_config.ingress_settings
}
}
diff --git a/infra/main.tf b/infra/main.tf
index f829aa152..3092715b9 100644
--- a/infra/main.tf
+++ b/infra/main.tf
@@ -93,12 +93,6 @@ module "feed-api" {
feed_api_service = "feed-api"
feed_api_image_version = var.feed_api_image_version
- feed_api_postgres_db = var.feed_api_postgres_db
- feed_api_postgres_host = var.feed_api_postgres_host
- feed_api_postgres_password = var.feed_api_postgres_password
- feed_api_postgres_port = var.feed_api_postgres_port
- feed_api_postgres_user = var.feed_api_postgres_user
-
source = "./feed-api"
}
diff --git a/infra/vars.tf b/infra/vars.tf
index 41cd90aca..dabc21920 100644
--- a/infra/vars.tf
+++ b/infra/vars.tf
@@ -52,29 +52,4 @@ variable "oauth2_client_secret" {
variable "global_rate_limit_req_per_minute" {
type = string
description = "Global load balancer rate limit"
-}
-
-variable "feed_api_postgres_db" {
- type = string
- description = "Feed API DB"
-}
-
-variable "feed_api_postgres_host" {
- type = string
- description = "Feed API DB host"
-}
-
-variable "feed_api_postgres_password" {
- type = string
- description = "Feed API DB password"
-}
-
-variable "feed_api_postgres_port" {
- type = string
- description = "Feed API DB port"
-}
-
-variable "feed_api_postgres_user" {
- type = string
- description = "Feed API DB user"
-}
+}
\ No newline at end of file
diff --git a/infra/vars.tfvars.rename_me b/infra/vars.tfvars.rename_me
index cbdd4f634..1f191f902 100644
--- a/infra/vars.tfvars.rename_me
+++ b/infra/vars.tfvars.rename_me
@@ -1,4 +1,4 @@
-# This file is a template for populating the terraform-init module variables
+# This file is a template for populating the feed-api module variables
# To set variables:
# - Rename this file to vars.tfvars.
# - Replace variable values.
@@ -13,10 +13,4 @@ feed_api_image_version = {{FEED_API_IMAGE_VERSION}}
oauth2_client_id = {{OAUTH2_CLIENT_ID}}
oauth2_client_secret = {{OAUTH2_CLIENT_SECRET}}
-global_rate_limit_req_per_minute = {{GLOBAL_RATE_LIMIT_REQ_PER_MINUTE}}
-
-feed_api_postgres_db = {{POSTGRES_DB}}
-feed_api_postgres_host = {{POSTGRES_HOST}}
-feed_api_postgres_password = {{POSTGRES_PASSWORD}}
-feed_api_postgres_port = {{POSTGRES_PORT}}
-feed_api_postgres_user = {{POSTGRES_USER}}
\ No newline at end of file
+global_rate_limit_req_per_minute = {{GLOBAL_RATE_LIMIT_REQ_PER_MINUTE}}
\ No newline at end of file
diff --git a/scripts/api-gen.sh b/scripts/api-gen.sh
index 858246f27..eccaf36be 100755
--- a/scripts/api-gen.sh
+++ b/scripts/api-gen.sh
@@ -13,7 +13,13 @@ GENERATOR_VERSION=7.0.1
# relative path
SCRIPT_PATH="$(dirname -- "${BASH_SOURCE[0]}")"
-OPENAPI_SCHEMA=$SCRIPT_PATH/../docs/DatabaseCatalogAPI_IAP.yaml
+OPENAPI_SCHEMA=$SCRIPT_PATH/../docs/DatabaseCatalogAPI.yaml
+OPENAPI_SCHEMA_IAP=$SCRIPT_PATH/../docs/DatabaseCatalogAPI_IAP.yaml
OUTPUT_PATH=$SCRIPT_PATH/../api
CONFIG_FILE=$SCRIPT_PATH/gen-config.yaml
-OPENAPI_GENERATOR_VERSION=$GENERATOR_VERSION $SCRIPT_PATH/bin/openapitools/openapi-generator-cli generate -g python-fastapi -i $OPENAPI_SCHEMA -o $OUTPUT_PATH -c $CONFIG_FILE
\ No newline at end of file
+
+sed 's%$ref: "./BearerTokenSchema.yaml#/components/securitySchemes/Authentication"%$ref: "./IAPAuthenticationSchema.yaml#/components/securitySchemes/Authentication"%g' $OPENAPI_SCHEMA > $OPENAPI_SCHEMA_IAP
+
+OPENAPI_GENERATOR_VERSION=$GENERATOR_VERSION $SCRIPT_PATH/bin/openapitools/openapi-generator-cli generate -g python-fastapi -i $OPENAPI_SCHEMA_IAP -o $OUTPUT_PATH -c $CONFIG_FILE
+
+rm -f $OPENAPI_SCHEMA_IAP
\ No newline at end of file
diff --git a/scripts/api-tests.sh b/scripts/api-tests.sh
index 680f4e725..ace534808 100755
--- a/scripts/api-tests.sh
+++ b/scripts/api-tests.sh
@@ -53,6 +53,8 @@ while [[ $# -gt 0 ]]; do
esac
done
+cat $ABS_SCRIPTPATH/../config/.env.local > $ABS_SCRIPTPATH/../.env
+
execute_tests() {
printf "\nExecuting tests in $1\n"
cd $ABS_SCRIPTPATH/$1/ || exit 1
@@ -70,7 +72,7 @@ execute_tests() {
}
if [[ ! -z "${TEST_FILE}" && ! -z "${FOLDER}" ]]; then
- echo "The parameters -test_file and -folder are mutualy exclusive."
+ echo "The parameters -test_file and -folder are mutually exclusive."
exit 1
fi
diff --git a/web-app/cypress/e2e/changepassword.cy.ts b/web-app/cypress/e2e/changepassword.cy.ts
index e4146dba5..58ca6a6fc 100644
--- a/web-app/cypress/e2e/changepassword.cy.ts
+++ b/web-app/cypress/e2e/changepassword.cy.ts
@@ -12,7 +12,7 @@ describe('Change Password Screen', () => {
cy.get('input[id="password"]').clear().type(currentPassword);
cy.get('button[type="submit"]').click();
// Wait for the user to be redirected to the home page
- cy.location('pathname').should('eq', '/account');
+ cy.location('pathname', { timeout: 30000 }).should('eq', '/account');
// Visit the change password page
cy.visit('/change-password');
});
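Reviewer note: Cypress takes the timeout in the queried command's options object and retries the chained assertion until it passes or time runs out; the Chai-style `.should('eq', ...)` itself accepts no options. A short sketch of the shape:

```ts
// The timeout belongs to the query; the assertion is retried against it.
cy.location('pathname', { timeout: 30000 }).should('eq', '/account');

// Same shape for element queries:
cy.get('[data-testid=home-title]', { timeout: 30000 }).should('exist');
```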
diff --git a/web-app/cypress/e2e/home.cy.ts b/web-app/cypress/e2e/home.cy.ts
index 2deeb3bab..290c89dab 100644
--- a/web-app/cypress/e2e/home.cy.ts
+++ b/web-app/cypress/e2e/home.cy.ts
@@ -9,7 +9,7 @@ describe('Home page', () => {
.contains('Mobility Database');
});
- it('should render signin', () => {
- cy.get('[data-testid=signin]').should('exist');
+ it('should render home page title', () => {
+ cy.get('[data-testid=home-title]').should('exist');
});
});
diff --git a/web-app/cypress/e2e/resetpassword.cy.ts b/web-app/cypress/e2e/resetpassword.cy.ts
index 3eb2352e9..1caca269e 100644
--- a/web-app/cypress/e2e/resetpassword.cy.ts
+++ b/web-app/cypress/e2e/resetpassword.cy.ts
@@ -9,7 +9,7 @@ describe('Reset Password Screen', () => {
it('should show error when an invalid email is provided', () => {
cy.get('input[id="email"]').type('not an email', { force: true });
-
+ cy.get('[type="submit"]').click();
cy.get('[data-testid=emailError]').should('exist');
});
diff --git a/web-app/cypress/e2e/signin.cy.ts b/web-app/cypress/e2e/signin.cy.ts
new file mode 100644
index 000000000..014609a12
--- /dev/null
+++ b/web-app/cypress/e2e/signin.cy.ts
@@ -0,0 +1,15 @@
+describe('Sign In page', () => {
+ beforeEach(() => {
+ cy.visit('/sign-in');
+ });
+
+ it('should render page header', () => {
+ cy.get('[data-testid=websiteTile]')
+ .should('exist')
+ .contains('Mobility Database');
+ });
+
+ it('should render signin', () => {
+ cy.get('[data-testid=signin]').should('exist');
+ });
+});
diff --git a/web-app/cypress/e2e/signup.cy.ts b/web-app/cypress/e2e/signup.cy.ts
index c069d66ec..76e1a169a 100644
--- a/web-app/cypress/e2e/signup.cy.ts
+++ b/web-app/cypress/e2e/signup.cy.ts
@@ -14,7 +14,7 @@ describe('Sign up screen', () => {
it('should show the password error when password length is less than 12', () => {
cy.get('input[id="password"]').type('short', { force: true });
-
+ cy.get('button[id="sign-up-button"]').click();
cy.get('[data-testid=passwordError]')
.should('exist')
.contains('Password must');
@@ -22,7 +22,7 @@ describe('Sign up screen', () => {
it('should show the password error when password does not contain lowercase', () => {
cy.get('input[id="password"]').type('UPPERCASE_10_!', { force: true });
-
+ cy.get('button[id="sign-up-button"]').click();
cy.get('[data-testid=passwordError]')
.should('exist')
.contains('Password must');
@@ -30,7 +30,7 @@ describe('Sign up screen', () => {
it('should show the password error when password does not contain uppercase', () => {
cy.get('input[id="password"]').type('lowercase_10_!', { force: true });
-
+ cy.get('button[id="sign-up-button"]').click();
cy.get('[data-testid=passwordError]')
.should('exist')
.contains('Password must');
@@ -38,13 +38,12 @@ describe('Sign up screen', () => {
it('should not show the password error when password is valid', () => {
cy.get('input[id="password"]').type('UP_lowercase_10_!', { force: true });
-
cy.get('[data-testid=passwordError]').should('not.exist');
});
it('should show the password error when passwords do not match', () => {
cy.get('input[id="password"]').type('UP_lowercase_10_!', { force: true });
-
+ cy.get('button[id="sign-up-button"]').click();
cy.get('input[id="confirmPassword"]').type('UP_lowercase_11_!', {
force: true,
});
diff --git a/web-app/src/app/components/Header.tsx b/web-app/src/app/components/Header.tsx
index 0be36065b..b509df4f8 100644
--- a/web-app/src/app/components/Header.tsx
+++ b/web-app/src/app/components/Header.tsx
@@ -34,6 +34,8 @@ import { useNavigate } from 'react-router-dom';
import { useSelector } from 'react-redux';
import { selectIsAuthenticated } from '../store/selectors';
import LogoutConfirmModal from './LogoutConfirmModal';
+import { OpenInNew } from '@mui/icons-material';
+import '../styles/Header.css';
const drawerWidth = 240;
const websiteTile = 'Mobility Database';
@@ -44,7 +46,12 @@ const DrawerContent: React.FC<{
}> = ({ onClick, onNavigationClick }) => {
return (
-
+
{websiteTile}
@@ -79,7 +86,9 @@ export default function DrawerAppBar(): React.ReactElement {
};
const handleNavigation = (navigationItem: NavigationItem): void => {
- navigateTo(navigationItem.target);
+ if (navigationItem.external === true)
+ window.open(navigationItem.target, '_blank', 'noopener,noreferrer');
+ else navigateTo(navigationItem.target);
};
const handleLogoutClick = (): void => {
@@ -113,24 +122,40 @@ export default function DrawerAppBar(): React.ReactElement {
elevation={0}
sx={{ background: 'white' }}
>
-
-
-
-
-
-
+
- {websiteTile}
-
+
+
+
+
+
+
+ {websiteTile}
+
+
+
{navigationItems.map((item) => (
: null}
>
{item.title}
diff --git a/web-app/src/app/constants/Navigation.ts b/web-app/src/app/constants/Navigation.ts
index 3f8ba1afc..c7b2a2939 100644
--- a/web-app/src/app/constants/Navigation.ts
+++ b/web-app/src/app/constants/Navigation.ts
@@ -15,27 +15,29 @@ export const MOBILITY_DATA_LINKS = {
export const navigationItems: NavigationItem[] = [
{ title: 'About', target: 'about', color: 'inherit', variant: 'text' },
- { title: 'API Docs', target: 'api', color: 'inherit', variant: 'text' },
+ { title: 'FAQ', target: 'faq', color: 'inherit', variant: 'text' },
{
title: 'Contribute',
target: 'contribute',
color: 'inherit',
variant: 'text',
},
+ {
+ title: 'API Docs',
+ target:
+ 'https://mobilitydata.github.io/mobility-feed-api/SwaggerUI/index.html',
+ color: 'inherit',
+ variant: 'text',
+ external: true,
+ },
{
title: 'Contact Us',
- target: 'contact-us',
+ target: 'mailto:api@mobilitydata.org',
color: 'inherit',
variant: 'text',
+ external: true,
},
];
-
-export const navigationSignoutItem: NavigationItem = {
- title: 'Sign Out',
- target: SIGN_OUT_TARGET,
- color: 'inherit',
- variant: 'text',
-};
export const navigationHelpItem: NavigationItem = {
title: 'Help',
target: 'help', // TODO generate help page
diff --git a/web-app/src/app/interface/Navigation.ts b/web-app/src/app/interface/Navigation.ts
index a9002b6a5..4715d7cf7 100644
--- a/web-app/src/app/interface/Navigation.ts
+++ b/web-app/src/app/interface/Navigation.ts
@@ -3,4 +3,5 @@ export default interface NavigationItem {
color: string;
target: string;
variant: 'text' | 'outlined' | 'contained' | undefined;
+ external?: boolean;
}
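Reviewer note: the optional `external` flag added above lets one navigation list mix SPA routes with absolute URLs and mailto: links. A condensed sketch of the dispatch this PR implements in Header.tsx (`navigate` stands in for the value returned by react-router's `useNavigate`):

```ts
interface NavigationItem {
  title: string;
  color: string;
  target: string;
  variant: 'text' | 'outlined' | 'contained' | undefined;
  external?: boolean; // absolute URL or mailto: rather than a router path
}

// External targets open in a new tab without handing the opened page a
// window.opener reference; everything else stays inside the SPA router.
function handleNavigation(
  item: NavigationItem,
  navigate: (to: string) => void,
): void {
  if (item.external === true) {
    window.open(item.target, '_blank', 'noopener,noreferrer');
  } else {
    navigate(item.target);
  }
}
```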
diff --git a/web-app/src/app/router/Router.tsx b/web-app/src/app/router/Router.tsx
index b97f46aab..156c4c450 100644
--- a/web-app/src/app/router/Router.tsx
+++ b/web-app/src/app/router/Router.tsx
@@ -7,12 +7,16 @@ import ContactInformation from '../screens/ContactInformation';
import { ProtectedRoute } from './ProtectedRoute';
import CompleteRegistration from '../screens/CompleteRegistration';
import ChangePassword from '../screens/ChangePassword';
+import Home from '../screens/Home';
import ForgotPassword from '../screens/ForgotPassword';
+import FAQ from '../screens/FAQ';
+import About from '../screens/About';
+import Contribute from '../screens/Contribute';
export const AppRouter: React.FC = () => {
return (
- } />
+ } />
} />
} />
}>
@@ -29,6 +33,9 @@ export const AppRouter: React.FC = () => {
} />
} />
+ } />
+ } />
+ } />
);
};
diff --git a/web-app/src/app/screens/About.tsx b/web-app/src/app/screens/About.tsx
new file mode 100644
index 000000000..5c215baa8
--- /dev/null
+++ b/web-app/src/app/screens/About.tsx
@@ -0,0 +1,102 @@
+import * as React from 'react';
+import CssBaseline from '@mui/material/CssBaseline';
+import Box from '@mui/material/Box';
+import Container from '@mui/material/Container';
+import '../styles/SignUp.css';
+import { Button, Typography } from '@mui/material';
+import { OpenInNew } from '@mui/icons-material';
+
+export default function About(): React.ReactElement {
+ return (
+
+
+
+
+ About{' '}
+
+
+
+ The Mobility Database is hosted and supported by MobilityData, a
+ non-profit organization that improves and extends mobility data
+ formats, including GTFS, GTFS Realtime and GBFS.
+
+ MobilityData is currently working on the Mobility Database because
+ of the need for a sustainable, community-supported hub for
+ international mobility datasets.
+
+
+
+ The History
+
+
+ Discoverability is at the heart of mobility: travelers need to know
+ the mobility options available and understand their intricacies to
+ plan their journey; app creators need simplified access to data to
+ relay to app users. Discoverability is the cement of the community
+ that MobilityData is building around open data formats (such as GTFS
+ and GBFS) and their datasets.
+
+
+ A need to improve discoverability gave rise to the TransitFeeds.com
+ project, which made it easier to find and query accurate and
+ up-to-date GTFS, GTFS Realtime, and GBFS datasets. This project was
+ housed by MobilityData following a transition from ActionFigure
+ (formerly TransitScreen).
+
+
+ MobilityData created a long-term roadmap for the project, taking
+ into account the repeated historic challenges the GTFS repositories
+ have encountered and the need to expand to accommodate additional
+ modes of transport and data formats.
+
+
+ About MobilityData
+
+
+ MobilityData began in 2015 as a Rocky Mountain Institute project and
+ became a Canadian non-profit in 2019 with the mission to improve
+ traveler information. Building on the strength of nearly 20
+ employees, MobilityData brings together and supports mobility
+ stakeholders such as transport agencies, software vendors, mobility
+ apps, and cities to standardize and expand data formats for public
+ transport (GTFS) and shared mobility (GBFS).
+
+ }>
+
+ Learn more about MobilityData
+
+
+
+
+
+ );
+}
diff --git a/web-app/src/app/screens/ChangePassword.tsx b/web-app/src/app/screens/ChangePassword.tsx
index 93d470d3b..edaeb3d4e 100644
--- a/web-app/src/app/screens/ChangePassword.tsx
+++ b/web-app/src/app/screens/ChangePassword.tsx
@@ -37,11 +37,16 @@ export default function ChangePassword(): React.ReactElement {
const navigateTo = useNavigate();
const changePasswordError = useSelector(selectChangePasswordError);
const changePasswordStatus = useSelector(selectChangePasswordStatus);
+ const [isSubmitted, setIsSubmitted] = React.useState(false);
+
const ChangePasswordSchema = Yup.object().shape({
currentPassword: Yup.string().required('Password is required'),
newPassword: Yup.string()
.required('New Password is required')
- .min(12, 'Password is too short - should be 12 chars minimum')
+ .min(
+ 12,
+ 'Password is too short. Password should be 12 characters minimum',
+ )
.matches(passwordValidationRegex, 'Password error'),
confirmNewPassword: Yup.string()
.required('Confirm New Password is required')
@@ -56,6 +61,8 @@ export default function ChangePassword(): React.ReactElement {
confirmNewPassword: '',
},
validationSchema: ChangePasswordSchema,
+ validateOnChange: isSubmitted,
+ validateOnBlur: true,
onSubmit: (values) => {
dispatch(
changePassword({
@@ -229,7 +236,9 @@ export default function ChangePassword(): React.ReactElement {
marginLeft: 'auto',
marginRight: 'auto',
}}
- onClick={() => formik.handleChange}
+ onClick={() => {
+ setIsSubmitted(true);
+ }}
>
Save Changes
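Reviewer note: the `isSubmitted` / `validateOnChange` change above is the same pattern this PR applies to CompleteRegistration, ForgotPassword, SignIn and SignUp below: keep validation quiet until the user first tries to submit, then re-validate on every change. A self-contained sketch with an illustrative field name:

```tsx
import * as React from 'react';
import { useFormik } from 'formik';
import * as Yup from 'yup';

export function ExampleForm(): React.ReactElement {
  // Errors stay hidden until the first submit attempt...
  const [isSubmitted, setIsSubmitted] = React.useState(false);

  const formik = useFormik({
    initialValues: { email: '' },
    validationSchema: Yup.object().shape({
      email: Yup.string()
        .email('Email format is invalid.')
        .required('Email is required'),
    }),
    validateOnChange: isSubmitted, // ...then every change re-validates
    validateOnBlur: true,
    onSubmit: (values) => {
      console.log('submit', values);
    },
  });

  return (
    <form onSubmit={formik.handleSubmit}>
      <input
        id='email'
        name='email'
        value={formik.values.email}
        onChange={formik.handleChange}
      />
      {formik.errors.email != null && <span>{formik.errors.email}</span>}
      {/* type='submit' drives Formik; onClick only flips the flag,
          replacing the old no-op onClick={() => formik.handleChange} */}
      <button type='submit' onClick={() => { setIsSubmitted(true); }}>
        Submit
      </button>
    </form>
  );
}
```

This is also why the Cypress specs above now click the submit button before asserting that an error test id exists.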
diff --git a/web-app/src/app/screens/CompleteRegistration.tsx b/web-app/src/app/screens/CompleteRegistration.tsx
index 5987002dc..5afba5bd6 100644
--- a/web-app/src/app/screens/CompleteRegistration.tsx
+++ b/web-app/src/app/screens/CompleteRegistration.tsx
@@ -33,6 +33,8 @@ export default function CompleteRegistration(): React.ReactElement {
const userProfileStatus = useSelector(selectUserProfileStatus);
const registrationError = useSelector(selectRegistrationError);
+ const [isSubmitted, setIsSubmitted] = React.useState(false);
+
React.useEffect(() => {
if (userProfileStatus === 'registered') {
navigateTo(ACCOUNT_TARGET);
@@ -59,6 +61,8 @@ export default function CompleteRegistration(): React.ReactElement {
agreeToPrivacyPolicy: false,
},
validationSchema: CompleteRegistrationSchema,
+ validateOnChange: isSubmitted,
+ validateOnBlur: true,
onSubmit: async (values) => {
if (user != null) {
dispatch(
@@ -175,7 +179,13 @@ export default function CompleteRegistration(): React.ReactElement {
justifyContent: 'center',
}}
>
-
diff --git a/web-app/src/app/screens/Contribute.tsx b/web-app/src/app/screens/Contribute.tsx
new file mode 100644
index 000000000..43164ba1f
--- /dev/null
+++ b/web-app/src/app/screens/Contribute.tsx
@@ -0,0 +1,328 @@
+import * as React from 'react';
+import CssBaseline from '@mui/material/CssBaseline';
+import Box from '@mui/material/Box';
+import Container from '@mui/material/Container';
+import '../styles/SignUp.css';
+import { Typography } from '@mui/material';
+
+export default function Contribute(): React.ReactElement {
+ return (
+
+
+
+
+ Contribute a Feed{' '}
+
+
+
+ Sharing GTFS and GTFS Realtime feeds means that more journey
+ planning apps can discover the data and share it with travelers.{' '}
+
+ Service planning tools and researchers also rely on data aggregators
+ like the Mobility Database catalogs to evaluate services and plan
+ future ones.{' '}
+
+
+
+
+ What is a GTFS feed?
+
+
+ A GTFS feed is a downloadable set of files that adhere to the{' '}
+
+ General Transit Feed Specification
+
+ .
+
+ A GTFS Schedule feed that includes static information about a
+ transit service is a collection of text (.txt) files that are
+ contained in a single ZIP file. A GTFS Realtime feed that provides
+ realtime updates to riders is formatted as{' '}
+
+ Protocol Buffer data
+ {' '}
+ and shared as a proto file. A GTFS Realtime feed can include a mix
+ of Trip Updates, Vehicle Positions, and Service Alerts or there can
+ be separate feeds for each type of realtime information.
+
+
+ Each direct download URL for a GTFS feed has to open a file. For
+ example, a URL that points to an agency's GTFS explainer page
+ such as{' '}
+
+ https://www.bctransit.com/open-data
+ {' '}
+ is not a valid GTFS feed URL. However,{' '}
+
+ https://www.bctransit.com/data/gtfs/powell-river.zip
+ {' '}
+ is a valid GTFS feed download link because it directly opens the
+ GTFS feed. The same principle is used for GTFS feeds that are
+ accessible via an API: a generic link to the API, such as
+
+ http://api.511.org/transit/datafeeds
+
+ , is invalid. A valid download URL would need to include an API
+ query that returns a GTFS feed, such as
+
+ http://api.511.org/transit/datafeeds?operator_id=3D
+
+ .
+
+
+ Why would I want to add or update a feed?{' '}
+
+
+ Adding a feed means that more journey planning apps can discover the
+ data and share it with travelers. Service planning tools and
+ researchers also rely on data aggregators like the Mobility Database
+ catalogs to evaluate services and plan future ones.
+
+
+ To ensure that travelers have access to the most up-to-date
+ information, transit providers should add a new feed on the catalogs
+ when their feed URL changes. Transit providers should review{' '}
+
+ the spreadsheet of feeds already in the Mobility Database
+ {' '}
+ to see if an old URL of their feed is in the Mobility Database and
+ request that its status be set to deprecated under Issue Type in the
+ form below.
+
+
+ Deprecated is a manually set status within the Mobility
+ Database that indicates that a feed has been replaced with a new
+ URL. MobilityData staff will deprecate the old feed and set a{' '}
+ redirect to indicate that the new feed should be used instead
+ of the deprecated one.
+
+
+ If transit providers would like to share old feed URLs for
+ researchers and analysts to use, please add the feed to the form
+ below and request that its status be set to deprecated.
+
+
+ When should I contribute a feed?
+
+
+ To ensure that travelers have access to the most up-to-date
+ information, transit providers should add a new feed on the catalogs
+ when there are major changes to their URL. Examples of changes
+ include:
+
+
The feed URL changes
+
+ The feed is combined with several other feeds (for example:
+ several providers' feeds are combined together)
+
+
+ The feed is split from a combined/aggregated feed (for example:
+ a provider whose GTFS was only available in an aggregate feed
+ now has their own independent feed)
+
+
+
+
+ Who can contribute a feed?
+
+
+ Anyone can add or update a feed, and it is currently merged manually
+ into the catalogs by the MobilityData team. The name of the person
+ requesting the feed is captured in the PR, either via their GitHub
+ profile or based on the information shared in the form below.
+
+
+ In order to verify the validity of a GTFS schedule source, an
+ automated test is also run to check if the direct download URL
+ provided opens a functional ZIP file.
+
+
+ How do I contribute a feed?
+
+
+ There are two ways to update a feed:
+
+
+
+ 1. If you're not comfortable with GitHub or only have a few
+ feeds to add:
+ {' '}
+ use the form below to request a feed change. The feed will be added
+ as a pull request in GitHub viewable to the public within a week of
+ being submitted. You can verify the change has been made to the
+ catalogs by reviewing this CSV file. In the future, this process
+ will be automated so the PR is automatically created once submitted
+ and merged when tests pass.
+
+
+
+ 2. If you want to add feeds directly:
+ {' '}
+ you can follow{' '}
+
+ the CONTRIBUTING.MD file
+ {' '}
+ in GitHub to add sources.
+
+
+ If you have any questions or concerns about this process, you can
+ email{' '}
+
+ emma@mobilitydata.org
+ {' '}
+ for support in getting your feed added.{' '}
+
+
+ What if I want to remove a feed?
+
+
+ Feeds are only removed in instances when it is requested by the
+ producer of the data because of licensing issues. In all other
+ cases, feeds are set to a status of deprecated so it's possible
+ to include their historical data within the Mobility Database.
+
+
+ Shoutout to our incredible contributors
+
+
+ 🎉 Thanks to all those who have contributed. This includes any
+ organizations or unaffiliated individuals who have added data,
+ updated data, or contributed code since 2021.
+
+
+ Organizations:
+
+
Adelaide Metro
+
Bettendorf Transit
+
Bi-State Regional Commission
+
BreizhGo
+
Cal-ITP
+
Commerce Municipal Bus Lines
+
Corpus Christi Regional Transportation Authority
+
County of Hawai’i Mass Transit Agency
+
DART Delaware
+
+ Department of Municipalities and Transport, Abu Dhabi, United
+ Arab Emirates
+
San Francisco Municipal Transportation Agency (SFMTA)
+
San Luis Obispo Regional Transit Authority
+
Santiago Directorio de Transporte Público Metropolitano
+
Skedgo
+
Société nationale des chemins de fer français (SNCF)
+
Sound Transit
+
Springfield Mass Transit District (SMTD)
+
Ticpoi
+
Transcollines
+
Transport for Cairo
+
Two Sigma Data Clinic
+
UCSC Transportation and Parking Services
+
Unobus
+
Volánbusz
+
Walker Consultants
+
+
+ Individuals:
+ If you are listed here and would like to add your organization,{' '}
+
+ let MobilityData know
+
+ .
+
+
@1-Byte on GitHub
+
Allan Fernando
+
Eloi Torrents
+
Florian Maunier
+
Gábor Kovács
+
Jessica Rapson
+
Joop Kiefte
+
Justin Brooks
+
Kevin Butler
+
Kovács Áron
+
Oliver Hattshire
+
Saipraneeth Devunuri
+
+
+
+
+
+ );
+}
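Reviewer note: the page copy above says a valid GTFS Schedule download URL must directly return a ZIP file, and that contributions are checked by an automated test. The catalogs' actual test is not part of this PR; a hypothetical sketch of such a check:

```ts
// Illustrative only: fetch the URL and verify the payload starts with the
// ZIP local-file-header signature 0x50 0x4b ("PK").
async function looksLikeGtfsScheduleFeed(url: string): Promise<boolean> {
  const response = await fetch(url, { redirect: 'follow' });
  if (!response.ok) return false;
  const bytes = new Uint8Array(await response.arrayBuffer());
  return bytes.length >= 2 && bytes[0] === 0x50 && bytes[1] === 0x4b;
}

// https://www.bctransit.com/data/gtfs/powell-river.zip should pass;
// an HTML page such as https://www.bctransit.com/open-data should not.
```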
diff --git a/web-app/src/app/screens/FAQ.tsx b/web-app/src/app/screens/FAQ.tsx
new file mode 100644
index 000000000..7e1629af8
--- /dev/null
+++ b/web-app/src/app/screens/FAQ.tsx
@@ -0,0 +1,125 @@
+import * as React from 'react';
+import CssBaseline from '@mui/material/CssBaseline';
+import Box from '@mui/material/Box';
+import Container from '@mui/material/Container';
+import '../styles/SignUp.css';
+import '../styles/FAQ.css';
+import { Typography } from '@mui/material';
+
+export default function FAQ(): React.ReactElement {
+ return (
+
+
+
+
+ Frequently Asked Questions (FAQ){' '}
+
+
+
+ Why would I use the Mobility Database?
+
+
+ The Mobility Database catalogs have over 100 feeds that were
+ inaccurate on TransitFeeds, and over 150 new feeds. It's a more
+ accurate and comprehensive resource for ensuring your data is
+ discoverable and for scraping the data you need. The community
+ regularly adds and updates feeds using GitHub.
+
+
+ Why are you making this change?
+
+
+ The mobility community has created several hubs for international
+ GTFS feeds over the years (including the GTFS Data Exchange and
+ legacy TransitFeeds site). There have been consistent issues with
+ sustaining these platforms in the long term, and creating community
+ processes so it's clear how decisions are made and how
+ stakeholders across the mobility industry can contribute to the
+ platform.
+
+ That's the need we're working to meet with the Mobility
+ Database, so more stakeholders can trust the longevity of this
+ platform and it can become an increasingly valuable source for
+ creating and improving mobility data as a community.
+
+ As TransitFeeds becomes increasingly stale and difficult to
+ maintain, it becomes more critical that the consumers have
+ up-to-date data to share with travelers and make planning decisions.
+ The catalogs will be a starting point for providing up-to-date data
+ the community can easily leverage and contribute to while we explore
+ longer term solutions for the architecture that require more
+ community investment.
+
+ What’s coming next?
+
+ The MobilityData team is working to add validation info from the
+
+ Canonical GTFS Schedule Validator
+ {' '}
+ for each feed, and create a user interface.
+
+
+ You can add ideas and vote on our current roadmap
+
+ . We anticipate an influx of new feedback as we transition away from
+ TransitFeeds and intend to adapt our plan to the emerging needs of
+ the community. How quickly we scale the Mobility Database
+ architecture depends on how much engagement and contribution we get
+ from the community in this phase.
+
+
+ What about the TransitFeeds user interface?
+
+
+ We plan to develop a new user interface as part of the Mobility
+ Database by summer 2024, since this is critical for making data
+ discoverable and fostering collaboration on data quality
+ improvements.
+
+ In order to ensure the community has access to more up-to-date data
+ as soon as possible, we've focused on providing a catalog of
+ data without an interface as a first step. How quickly we scale the
+ architecture to build the user interface depends on how much
+ engagement and contribution we get from the community in this phase.
+
+
+ What about the TransitFeeds API?
+
+
+ You can use the Mobility Database API instead to access up-to-date
+ GTFS and GTFS Realtime data. If you need to access historical data
+ from the TransitFeeds API, you are still able to. Your systems will
+ be unaffected until the to-be-determined deprecation date, when the
+ TransitFeeds API will no longer be available. MobilityData will
+ migrate the historical data from TransitFeeds to the Mobility
+ Database before deprecating the old API.
+
+
+
+
+ );
+}
diff --git a/web-app/src/app/screens/ForgotPassword.tsx b/web-app/src/app/screens/ForgotPassword.tsx
index bd1f4d051..b959e799a 100644
--- a/web-app/src/app/screens/ForgotPassword.tsx
+++ b/web-app/src/app/screens/ForgotPassword.tsx
@@ -31,9 +31,12 @@ export default function ForgotPassword(): React.ReactElement {
const userProfileStatus = useSelector(selectUserProfileStatus);
const resetPasswordError = useSelector(selectResetPasswordError);
const resetPasswordSuccess = useSelector(selectIsRecoveryEmailSent);
+ const [isSubmitted, setIsSubmitted] = React.useState(false);
const ForgotPasswordScheme = Yup.object().shape({
- email: Yup.string().email().required('Email is required'),
+ email: Yup.string()
+ .email('Email format is invalid.')
+ .required('Email is required'),
reCaptcha: Yup.string().required('You must verify you are not a robot.'),
});
@@ -42,6 +45,8 @@ export default function ForgotPassword(): React.ReactElement {
email: '',
reCaptcha: null,
},
+ validateOnChange: isSubmitted,
+ validateOnBlur: true,
validationSchema: ForgotPasswordScheme,
onSubmit: (values) => {
dispatch(resetPassword(values.email));
@@ -130,7 +135,9 @@ export default function ForgotPassword(): React.ReactElement {
type='submit'
variant='contained'
sx={{ mt: 3, mb: 2 }}
- onClick={() => formik.handleChange}
+ onClick={() => {
+ setIsSubmitted(true);
+ }}
data-testid='signin'
>
Send Recovery Email
diff --git a/web-app/src/app/screens/Home.tsx b/web-app/src/app/screens/Home.tsx
new file mode 100644
index 000000000..f4e826bba
--- /dev/null
+++ b/web-app/src/app/screens/Home.tsx
@@ -0,0 +1,146 @@
+import * as React from 'react';
+import CssBaseline from '@mui/material/CssBaseline';
+import Box from '@mui/material/Box';
+import Typography from '@mui/material/Typography';
+import Container from '@mui/material/Container';
+import '../styles/SignUp.css';
+import { Button, Grid } from '@mui/material';
+import { Download, Login } from '@mui/icons-material';
+
+export default function Home(): React.ReactElement {
+ return (
+
+
+
+
+
+
+ The Mobility Database
+
+
+
+
+ The Mobility Database catalogs are a repository of 1800+ mobility
+ feeds across the world. It has over 150 updated feeds previously
+ unavailable on TransitFeeds (OpenMobilityData).
+
+
+ We’re in the first phase of building a sustainable, central hub
+ for mobility data internationally.
+
+
+
+
+ Currently serving data from over{' '}
+ 1400 transit providers in{' '}
+ 69 countries.
+
+
+ }>
+
+ Download the entire catalog
+
+
+ }>
+
+ Sign up for the API
+
+
+
+
+
+
+
+ What About TransitFeeds?
+
+ You’ll be able to access transitfeeds.com until a deprecation date is
+ decided
+
+
+ The data on TransitFeeds is becoming increasingly out of date and
+ cannot be updated, which is negatively impacting travelers. That’s
+ why we’re encouraging users to use the Mobility Database instead,
+ which they can actively contribute to and improve.
+
+
+ We will discuss the transition process in greater depth before
+ committing to a specific date to remove access to transitfeeds.com.
+ No decision has been made yet. If you want to participate in a
+ discussion about the deprecation of transitfeeds.com, let us know in
+ the catalogs GitHub repo. We commit to giving 6 months' notice once
+ the decision is finalized.
+
+
+
+
+ );
+}
diff --git a/web-app/src/app/screens/SignIn.tsx b/web-app/src/app/screens/SignIn.tsx
index 671a78ee1..0ef033fbe 100644
--- a/web-app/src/app/screens/SignIn.tsx
+++ b/web-app/src/app/screens/SignIn.tsx
@@ -38,14 +38,20 @@ export default function SignIn(): React.ReactElement {
const navigateTo = useNavigate();
const userProfileStatus = useSelector(selectUserProfileStatus);
const emailLoginError = useSelector(selectEmailLoginError);
+ const [isSubmitted, setIsSubmitted] = React.useState(false);
const [showPassword, setShowPassword] = React.useState(false);
const SignInSchema = Yup.object().shape({
- email: Yup.string().email().required('Email is required'),
+ email: Yup.string()
+ .email('Email format is invalid.')
+ .required('Email is required'),
password: Yup.string()
.required('Password is required')
- .min(12, 'Password is too short - should be 12 chars minimum'),
+ .min(
+ 12,
+ 'Password is too short. Your password should be 12 characters minimum',
+ ),
});
const formik = useFormik({
@@ -54,6 +60,8 @@ export default function SignIn(): React.ReactElement {
password: '',
},
validationSchema: SignInSchema,
+ validateOnChange: isSubmitted,
+ validateOnBlur: true,
onSubmit: (values) => {
const emailLogin: EmailLogin = {
email: values.email,
@@ -196,7 +204,9 @@ export default function SignIn(): React.ReactElement {
type='submit'
variant='contained'
sx={{ mt: 3, mb: 2 }}
- onClick={() => formik.handleChange}
+ onClick={() => {
+ setIsSubmitted(true);
+ }}
data-testid='signin'
>
Sign In
diff --git a/web-app/src/app/screens/SignUp.tsx b/web-app/src/app/screens/SignUp.tsx
index 6366a2882..e1d60229a 100644
--- a/web-app/src/app/screens/SignUp.tsx
+++ b/web-app/src/app/screens/SignUp.tsx
@@ -42,8 +42,12 @@ export default function SignUp(): React.ReactElement {
const dispatch = useAppDispatch();
const signUpError = useSelector(selectSignUpError);
const userProfileStatus = useSelector(selectUserProfileStatus);
+ const [isSubmitted, setIsSubmitted] = React.useState(false);
+
const SignUpSchema = Yup.object().shape({
- email: Yup.string().email().required('Email is required'),
+ email: Yup.string()
+ .email('Email format is invalid.')
+ .required('Email is required'),
confirmEmail: Yup.string().oneOf(
[Yup.ref('email'), ''],
'Emails do not match',
@@ -71,6 +75,8 @@ export default function SignUp(): React.ReactElement {
reCaptcha: null,
},
validationSchema: SignUpSchema,
+ validateOnChange: isSubmitted,
+ validateOnBlur: true,
onSubmit: (values) => {
dispatch(
signUp({
@@ -293,7 +299,9 @@ export default function SignUp(): React.ReactElement {
type='submit'
variant='contained'
sx={{ mt: 3, mb: 2, alignSelf: 'center' }}
- onClick={formik.handleChange}
+ onClick={() => {
+ setIsSubmitted(true);
+ }}
id='sign-up-button'
>
Sign Up
diff --git a/web-app/src/app/styles/FAQ.css b/web-app/src/app/styles/FAQ.css
new file mode 100644
index 000000000..66dc70ff8
--- /dev/null
+++ b/web-app/src/app/styles/FAQ.css
@@ -0,0 +1,32 @@
+.question {
+ color: #000;
+ font-size: 25px !important;
+ font-style: normal;
+ font-weight: 700 !important;
+ line-height: normal;
+ text-decoration-line: underline;
+ margin-bottom: 5px;
+}
+
+.answer {
+ color: #000;
+ font-style: normal;
+ font-weight: 400;
+ line-height: normal;
+ margin-bottom: 40px !important;
+}
+
+a:not(.btn-link) {
+ color: #000;
+ padding: 2px;
+}
+
+a:hover:not(.btn-link) {
+ background-color: #3959fa21 !important;
+ border-radius: 5px;
+}
+
+.btn-link {
+ color: inherit !important;
+ text-decoration: none;
+}
diff --git a/web-app/src/app/styles/Header.css b/web-app/src/app/styles/Header.css
new file mode 100644
index 000000000..504070171
--- /dev/null
+++ b/web-app/src/app/styles/Header.css
@@ -0,0 +1,4 @@
+.website-title {
+ font-weight: 700 !important;
+ color: #3959fa;
+}