diff --git a/__mocks__/auth0-js.js b/__mocks__/auth0-js.js
deleted file mode 100644
index 0eeb36d26..000000000
--- a/__mocks__/auth0-js.js
+++ /dev/null
@@ -1,41 +0,0 @@
-import jwt from 'jsonwebtoken'
-
-module.exports = {
- WebAuth: class WebAuth {
- constructor ({domain, clientID}) {
- if (!domain) {
- throw new Error('Domain required')
- }
- if (!clientID) {
- throw new Error('Client ID required')
- }
- }
-
- renewAuth (
- {
- audience,
- nonce,
- postMessageDataType,
- redirectUri,
- scope,
- usePostMessage
- },
- callback
- ) {
- return callback(null, {
- accessToken: jwt.sign(
- {
- nonce
- },
- 'signingKey'
- ),
- idToken: jwt.sign(
- {
- nonce
- },
- 'signingKey'
- )
- })
- }
- }
-}
diff --git a/__mocks__/auth0-lock.js b/__mocks__/auth0-lock.js
deleted file mode 100644
index 59313dab7..000000000
--- a/__mocks__/auth0-lock.js
+++ /dev/null
@@ -1,3 +0,0 @@
-// TODO: remove. There was an import issue, so this is a temporary hack.
-// perhaps a later version of Auth0 will have negate the need for this file.
-module.exports = function () {}
diff --git a/__tests__/e2e/Dockerfile b/__tests__/e2e/Dockerfile
new file mode 100644
index 000000000..2e252b42b
--- /dev/null
+++ b/__tests__/e2e/Dockerfile
@@ -0,0 +1,23 @@
+# syntax=docker/dockerfile:1
+FROM public.ecr.aws/s2a5w2n9/puppeteer:latest
+WORKDIR /datatools-ui
+
+USER root
+RUN apk add --no-cache git
+
+RUN yarn global add https://github.com/ibi-group/otp-runner.git
+RUN yarn global add miles-grant-ibigroup/mastarm#f61ca541a788e8cae8a0e32b886de754846ea16f
+
+COPY package.json yarn.lock /datatools-ui/
+RUN yarn
+COPY . /datatools-ui/
+
+RUN mkdir -p /opt/otp
+RUN mkdir -p /datatools-ui/e2e-test-results/
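+# Create a default AWS profile in ~/.aws/config for tools that need AWS access inside the container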
+RUN mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=${AWS_ACCESS_KEY_ID}' 'aws_secret_access_key=${AWS_SECRET_ACCESS_KEY}' 'region=${AWS_REGION}' > ~/.aws/config
+
+RUN wget https://raw.githubusercontent.com/ettore26/wait-for-command/master/wait-for-command.sh
+RUN chmod +x ./wait-for-command.sh
+
+ENV TEST_FOLDER_PATH=/datatools-ui/e2e-test-results
+ENV IS_DOCKER=true
\ No newline at end of file
diff --git a/__tests__/e2e/docker-compose.yml b/__tests__/e2e/docker-compose.yml
new file mode 100644
index 000000000..f83daa7dd
--- /dev/null
+++ b/__tests__/e2e/docker-compose.yml
@@ -0,0 +1,88 @@
+version: '3.8'
+
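+# Environment variables shared by all services below (referenced via the *common-variables YAML anchor)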
+x-common-variables: &common-variables
+ - BUGSNAG_KEY=${BUGSNAG_KEY}
+ - E2E_AUTH0_USERNAME=${E2E_AUTH0_USERNAME:?err}
+ - E2E_AUTH0_PASSWORD=${E2E_AUTH0_PASSWORD:?err}
+ - S3_BUCKET=${S3_BUCKET:?err}
+ - LOGS_S3_BUCKET=${LOGS_S3_BUCKET}
+ - MS_TEAMS_WEBHOOK_URL=${MS_TEAMS_WEBHOOK_URL}
+ - MAPBOX_ACCESS_TOKEN=${MAPBOX_ACCESS_TOKEN}
+ - GITHUB_SHA=${GITHUB_SHA}
+ - GITHUB_REF_SLUG=${GITHUB_REF_SLUG}
+ - TRANSITFEEDS_KEY=${TRANSITFEEDS_KEY}
+ - GITHUB_REPOSITORY=${GITHUB_REPOSITORY}
+ - GITHUB_WORKSPACE=${GITHUB_WORKSPACE}
+ - GITHUB_RUN_ID=${GITHUB_RUN_ID}
+ - AUTH0_CLIENT_ID=${AUTH0_CLIENT_ID:?err}
+ - AUTH0_PUBLIC_KEY=${AUTH0_PUBLIC_KEY:?err}
+ - AUTH0_CONNECTION_NAME=${AUTH0_CONNECTION_NAME:?err}
+ - AUTH0_DOMAIN=${AUTH0_DOMAIN:?err}
+ - AUTH0_API_CLIENT=${AUTH0_API_CLIENT:?err}
+ - AUTH0_API_SECRET=${AUTH0_API_SECRET:?err}
+ - OSM_VEX=${OSM_VEX}
+ - SPARKPOST_KEY=${SPARKPOST_KEY}
+ - SPARKPOST_EMAIL=${SPARKPOST_EMAIL}
+ - GTFS_DATABASE_URL=jdbc:postgresql://postgres/dmtest
+ - GTFS_DATABASE_USER=root
+ - GTFS_DATABASE_PASSWORD=e2e
+ - MONGO_DB_NAME=data_manager
+ - MONGO_HOST=mongo:27017
+ - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:?err}
+ - AWS_REGION=${AWS_REGION:?err}
+ - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:?err}
+
+services:
+ datatools-e2e-tests:
+ build:
+ context: ../../
+ args: *common-variables
+ dockerfile: ./__tests__/e2e/Dockerfile
+
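+    # Wait for the UI dev server to respond on port 9966 and give it time to finish bundling before running the e2e suite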
+ command: /bin/sh -c "./wait-for-command.sh -t 15 -c 'nc -z datatools-ui 9966' && sleep 25 && yarn test-end-to-end"
+ shm_size: '2gb'
+ cap_add:
+ - ALL
+ depends_on:
+ - "datatools-server"
+ - "datatools-ui"
+ - "datatools-ui-proxy"
+ - "mongo"
+ - "postgres"
+ environment: *common-variables
+
+ mongo:
+ image: mongo
+ restart: always
+ datatools-server:
+ build:
+ context: ../../
+ dockerfile: ./__tests__/e2e/server/Dockerfile
+ args: *common-variables
+ restart: always
+ environment: *common-variables
+ ports:
+ - "4000:4000"
+ datatools-ui-proxy:
+ build:
+ context: ../../
+ dockerfile: ./__tests__/e2e/ui-proxy/Dockerfile
+ ports:
+ - "443:443"
+ datatools-ui:
+ build:
+ context: ../../
+ dockerfile: ./__tests__/e2e/ui/Dockerfile
+ args: *common-variables
+ restart: always
+ environment: *common-variables
+ ports:
+ - "9966:9966"
+ postgres:
+ environment:
+ POSTGRES_HOST_AUTH_METHOD: trust
+ POSTGRES_USER: root
+ POSTGRES_PASS: e2e
+ POSTGRES_DB: dmtest
+ image: postgres
+ restart: always
\ No newline at end of file
diff --git a/__tests__/e2e/puppeteer/Dockerfile b/__tests__/e2e/puppeteer/Dockerfile
new file mode 100644
index 000000000..9a6aa84d7
--- /dev/null
+++ b/__tests__/e2e/puppeteer/Dockerfile
@@ -0,0 +1,28 @@
+FROM alpine
+
+# Installs latest Chromium (100) package.
+RUN apk add --no-cache \
+ chromium \
+ nss \
+ freetype \
+ harfbuzz \
+ ca-certificates \
+ ttf-freefont \
+ nodejs \
+ yarn
+
+# Tell Puppeteer to skip installing Chrome. We'll be using the installed package.
+ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true \
+ PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser
+
+# Puppeteer v13.5.0 works with Chromium 100.
+RUN yarn add puppeteer
+
+# Add user so we don't need --no-sandbox.
+RUN addgroup -S pptruser && adduser -S -G pptruser pptruser \
+ && mkdir -p /home/pptruser/Downloads /app \
+ && chown -R pptruser:pptruser /home/pptruser \
+ && chown -R pptruser:pptruser /app
+
+# Run everything after as non-privileged user.
+USER pptruser
\ No newline at end of file
diff --git a/__tests__/e2e/server/Dockerfile b/__tests__/e2e/server/Dockerfile
new file mode 100644
index 000000000..68b7af3e6
--- /dev/null
+++ b/__tests__/e2e/server/Dockerfile
@@ -0,0 +1,62 @@
+# syntax=docker/dockerfile:1
+FROM maven:3.8.7-openjdk-18
+
+WORKDIR /datatools
+
+ARG E2E_AUTH0_USERNAME
+ARG E2E_AUTH0_PASSWORD
+ARG E2E_S3_BUCKET
+ARG MS_TEAMS_WEBHOOK_URL
+ARG GITHUB_REF_SLUG
+ARG GITHUB_SHA
+ARG TRANSITFEEDS_KEY
+ARG GITHUB_REPOSITORY
+ARG GITHUB_WORKSPACE
+ARG GITHUB_RUN_ID
+ARG AUTH0_CLIENT_ID
+ARG AUTH0_PUBLIC_KEY
+ARG AUTH0_DOMAIN
+ARG AUTH0_CONNECTION_NAME
+ARG AUTH0_API_CLIENT
+ARG AUTH0_API_SECRET
+ARG OSM_VEX
+ARG SPARKPOST_KEY
+ARG SPARKPOST_EMAIL
+ARG GTFS_DATABASE_URL
+ARG GTFS_DATABASE_USER
+ARG GTFS_DATABASE_PASSWORD
+ARG MONGO_DB_NAME
+ARG MONGO_HOST
+ARG AWS_ACCESS_KEY_ID
+ARG AWS_REGION
+ARG AWS_SECRET_ACCESS_KEY
+
+# Grab latest dev build of Datatools Server
+RUN git clone https://github.com/ibi-group/datatools-server.git
+RUN microdnf install wget
+WORKDIR /datatools/datatools-server
+
+RUN mvn package -DskipTests
+RUN cp target/dt*.jar ./datatools-server-3.8.1-SNAPSHOT.jar
+
+# Grab the OTP v1.4.0 release jar from Maven Central
+RUN wget https://repo1.maven.org/maven2/org/opentripplanner/otp/1.4.0/otp-1.4.0-shaded.jar
+RUN mkdir -p /tmp/otp/graphs
+RUN mkdir -p /var/datatools_gtfs
+
+RUN mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=${AWS_ACCESS_KEY_ID}' 'aws_secret_access_key=${AWS_SECRET_ACCESS_KEY}' 'region=${AWS_REGION}' > ~/.aws/config
+
+# Grab server config
+RUN mkdir /config
+RUN wget https://raw.githubusercontent.com/ibi-group/datatools-server/dev/configurations/default/server.yml.tmp -O /config/server.yml
+
+# The environment variables contain everything needed on the server
+COPY __tests__/e2e/server/datatools.pem /datatools/
+RUN touch /config/env.yml
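+# Convert the build environment (KEY=value) into YAML "KEY: value" pairs that datatools-server reads from env.yml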
+RUN env | sed 's/\=/\: /' > /config/env.yml
+
+COPY __tests__/e2e/server/launch.sh launch.sh
+RUN chmod +x launch.sh
+CMD ./launch.sh
+EXPOSE 8080
+EXPOSE 4000
diff --git a/__tests__/e2e/server/launch.sh b/__tests__/e2e/server/launch.sh
new file mode 100644
index 000000000..63fdc2870
--- /dev/null
+++ b/__tests__/e2e/server/launch.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+# Start the OTP server in the background
+java -jar otp-1.4.0-shaded.jar --server --autoScan --basePath /tmp/otp --insecure --router default &
+
+# Start datatools-server in the background
+java -jar datatools-server-3.8.1-SNAPSHOT.jar /config/env.yml /config/server.yml &
+
+# Wait for any process to exit
+wait -n
+
+# Exit with status of process that exited first
+exit $?
\ No newline at end of file
diff --git a/__tests__/e2e/ui-proxy/Caddyfile b/__tests__/e2e/ui-proxy/Caddyfile
new file mode 100644
index 000000000..41b5d29a0
--- /dev/null
+++ b/__tests__/e2e/ui-proxy/Caddyfile
@@ -0,0 +1,4 @@
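+# Serve HTTPS using Caddy's internal CA and reverse-proxy requests to the datatools-ui dev server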
+datatools-ui-proxy {
+ reverse_proxy datatools-ui:9966
+ tls internal
+}
diff --git a/__tests__/e2e/ui-proxy/Dockerfile b/__tests__/e2e/ui-proxy/Dockerfile
new file mode 100644
index 000000000..e8e7ecf90
--- /dev/null
+++ b/__tests__/e2e/ui-proxy/Dockerfile
@@ -0,0 +1,3 @@
+FROM caddy:latest
+COPY ./__tests__/e2e/ui-proxy/Caddyfile /etc/caddy/Caddyfile
+EXPOSE 443
\ No newline at end of file
diff --git a/__tests__/e2e/ui/Dockerfile b/__tests__/e2e/ui/Dockerfile
new file mode 100644
index 000000000..122ffa3b3
--- /dev/null
+++ b/__tests__/e2e/ui/Dockerfile
@@ -0,0 +1,13 @@
+# syntax=docker/dockerfile:1
+FROM node:14
+WORKDIR /datatools-build
+
+ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD true
+ARG BUGSNAG_KEY
+
+RUN cd /datatools-build
+COPY package.json yarn.lock patches /datatools-build/
+RUN yarn
+COPY . /datatools-build/
+COPY configurations/default /datatools-config/
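+# Serve the UI on port 9966 with mastarm, proxying API requests to datatools-server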
+CMD yarn run mastarm build --env dev --serve --proxy http://datatools-server:4000/api #
\ No newline at end of file
diff --git a/__tests__/end-to-end.js b/__tests__/end-to-end.js
index 7b0280bdd..0f4eba539 100644
--- a/__tests__/end-to-end.js
+++ b/__tests__/end-to-end.js
@@ -4,16 +4,15 @@ import path from 'path'
import fs from 'fs-extra'
import fetch from 'isomorphic-fetch'
-import {safeLoad} from 'js-yaml'
import md5File from 'md5-file/promise'
import moment from 'moment'
import SimpleNodeLogger from 'simple-node-logger'
import uuidv4 from 'uuid/v4'
+// $FlowFixMe we rely on puppeteer being imported externally, as the latest version conflicts with mastarm
+import puppeteer from 'puppeteer'
+import { PuppeteerScreenRecorder } from 'puppeteer-screen-recorder'
-import {collectingCoverage, getTestFolderFilename, isCi} from './test-utils/utils'
-
-// not imported because of weird flow error
-const puppeteer = require('puppeteer')
+import {collectingCoverage, getTestFolderFilename, isCi, isDocker} from './test-utils/utils'
// if the ISOLATED_TEST is defined, only the specifed test (and any dependet
// tests) will be ran and all others will be skipped.
@@ -23,12 +22,13 @@ const ISOLATED_TEST = null // null means run all tests
// TODO: Allow the below options (puppeteer and test) to be enabled via command
// line options parsed by mastarm.
const puppeteerOptions = {
- headless: isCi,
+ // dumpio: true, // dumps all browser console to docker logs
+ headless: isCi || isDocker,
// The following options can be enabled manually to help with debugging.
// dumpio: true, // Logs all of browser console to stdout
- // slowMo: 30 // puts xx milliseconds between events (for easier watching in non-headless)
+ // slowMo: 50, // puts xx milliseconds between events (for easier watching in non-headless)
// NOTE: In order to run on Travis CI, use args --no-sandbox option
- args: isCi ? ['--no-sandbox'] : []
+ args: isCi || isDocker ? ['--no-sandbox', '--disable-setuid-sandbox', '--disable-gpu', '--ignore-certificate-errors'] : []
}
const testOptions = {
// If enabled, failFast will break out of the test script immediately.
@@ -36,14 +36,12 @@ const testOptions = {
}
let failingFast = false
let successfullyCreatedTestProject = false
-let config: {
- password: string,
- username: string
-}
let browser
let page
+let recorder
+let cdpSession
const gtfsUploadFile = './configurations/end-to-end/test-gtfs-to-upload.zip'
-const OTP_ROOT = 'http://localhost:8080/otp/routers/'
+const OTP_ROOT = 'http://datatools-server:8080/otp/routers/'
const testTime = moment().format()
const fileSafeTestTime = moment().format('YYYY-MM-DDTHH-mm-ss')
const testProjectName = `test-project-${testTime}`
@@ -245,9 +243,18 @@ async function sendCoverageToServer () {
* Expect the innerHTML obtained from the given selector to contain the
* given string.
*/
-async function expectSelectorToContainHtml (selector: string, html: string) {
- const innerHTML = await getInnerHTMLFromSelector(selector)
- expect(innerHTML).toContain(html)
+async function expectSelectorToContainHtml (selector: string, html: string, retry: ?boolean) {
+  try {
+    const innerHTML = await getInnerHTMLFromSelector(selector)
+    expect(innerHTML).toContain(html)
+  } catch (err) {
+    // Some parts of datatools can sometimes get stuck and need a refresh
+    if (!retry) {
+      log.warn('failed to find selector on problematic page, attempting page reload before retrying')
+      await page.reload({ waitUntil: 'networkidle0' })
+      await expectSelectorToContainHtml(selector, html, true)
+    } else {
+      // Already reloaded once; surface the original assertion failure.
+      throw err
+    }
+  }
+}
/**
@@ -279,19 +286,22 @@ async function expectFeedVersionValidityDates (startDate: string, endDate: strin
*/
async function createProject (projectName: string) {
log.info(`creating project with name: ${projectName}`)
+ await wait(3000)
await click('#context-dropdown')
+ await wait(5000)
await waitForAndClick('a[href="/project/new"]')
+ await wait(3000)
await waitForSelector('[data-test-id="project-name-input-container"]')
await type('[data-test-id="project-name-input-container"] input', projectName)
await click('[data-test-id="project-settings-form-save-button"]')
log.info('saving new project')
- await wait(2000, 'for project to get saved')
+ await wait(3500, 'for project to get saved')
// verify that the project was created with the proper name
await expectSelectorToContainHtml('.project-header', projectName)
// go back to project list
- await goto('http://localhost:9966/project', {waitUntil: 'networkidle0'})
+ await goto('https://datatools-ui-proxy/project', {waitUntil: 'networkidle0'})
// verify the new project is listed in the project list
await expectSelectorToContainHtml('[data-test-id="project-list-table"]', projectName)
@@ -304,7 +314,7 @@ async function createProject (projectName: string) {
async function deleteProject (projectId: string) {
log.info(`deleting project with id: ${projectId}`)
// navigate to that project's settings
- await goto(`http://localhost:9966/project/${projectId}/settings`)
+ await goto(`https://datatools-ui-proxy/project/${projectId}/settings`)
// delete that project
await waitForAndClick('[data-test-id="delete-project-button"]')
@@ -313,7 +323,8 @@ async function deleteProject (projectId: string) {
log.info('deleted project')
// verify deletion
- await goto(`http://localhost:9966/project/${projectId}`)
+ await goto(`https://datatools-ui-proxy/project/${projectId}`)
+ await wait(3000, 'for project page to load')
await waitForSelector('.project-not-found')
await wait(5000, 'for previously rendered project markup to be removed')
await expectSelectorToContainHtml('.project-not-found', projectId)
@@ -393,7 +404,7 @@ async function createFeedSourceViaProjectHeaderButton (feedSourceName) {
log.info(`create Feed Source with name: ${feedSourceName} via project header button`)
// go to project page
await goto(
- `http://localhost:9966/project/${testProjectId}`,
+ `https://datatools-ui-proxy/project/${testProjectId}`,
{
waitUntil: 'networkidle0'
}
@@ -517,7 +528,7 @@ async function createStop ({
// save
await click('[data-test-id="save-entity-button"]')
- await wait(2000, 'for save to happen')
+ await wait(5000, 'for save to happen')
log.info(`created stop with name: ${name}`)
}
@@ -527,6 +538,7 @@ async function createStop ({
* @param {string} searchText the text to enter into the search input
*/
async function filterUsers (searchText: string) {
+ await wait(8000, 'for user list to load')
// type in text
await type('[data-test-id="search-user-input"]', searchText)
@@ -604,15 +616,18 @@ function formatSecondsElapsed (startTime: number) {
*/
async function waitAndClearCompletedJobs () {
const startTime = new Date()
- // wait for jobs to get completed
- await wait(500, 'for job monitoring to begin')
- // wait for an active job to appear
- await waitForSelector('[data-test-id="possibly-active-jobs"]')
- // All jobs completed span will appear when all jobs are done.
- await waitForSelector(
- '[data-test-id="all-jobs-completed"]',
- {timeout: defaultJobTimeout}
- )
+ try {
+ // wait for an active job to appear
+ await waitForSelector('[data-test-id="possibly-active-jobs"]')
+ // All jobs completed span will appear when all jobs are done.
+ await waitForSelector(
+ '[data-test-id="all-jobs-completed"]',
+ {timeout: defaultJobTimeout}
+ )
+ } catch {
+ console.log("couldn't find active job panel. assuming job completed")
+ }
+
await waitForSelector('[data-test-id="clear-completed-jobs-button"]')
// Clear retired jobs to remove all jobs completed span.
await click('[data-test-id="clear-completed-jobs-button"]')
@@ -674,7 +689,7 @@ async function elementClick (elementHandle: any, selector: string) {
/**
* Waits for a selector to show up and then clicks on it.
*/
-async function waitForAndClick (selector: string, waitOptions?: any) {
+async function waitForAndClick (selector: string, waitOptions?: any, retry?: boolean) {
await waitForSelector(selector, waitOptions)
await click(selector)
}
@@ -685,7 +700,7 @@ async function waitForAndClick (selector: string, waitOptions?: any) {
*/
async function wait (milliseconds: number, reason?: string) {
log.info(`waiting ${milliseconds} ms${reason ? ` ${reason}` : ''}...`)
- await page.waitFor(milliseconds)
+ await page.waitForTimeout(milliseconds)
}
/**
@@ -770,6 +785,7 @@ async function getAllElements (selector: string) {
*/
async function type (selector: string, text: string) {
log.info(`typing text: "${text}" into selector: ${selector}`)
+ await page.focus(selector)
await page.type(selector, text)
}
@@ -795,8 +811,6 @@ async function elementType (elementHandle: any, selector: string, text: string)
describe('end-to-end', () => {
beforeAll(async () => {
- config = (safeLoad(fs.readFileSync('configurations/end-to-end/env.yml')): any)
-
// Ping the otp endpoint to ensure the server is running.
try {
log.info(`Pinging OTP at ${OTP_ROOT}`)
@@ -813,6 +827,9 @@ describe('end-to-end', () => {
log.info('Launching chromium for testing...')
browser = await puppeteer.launch(puppeteerOptions)
page = await browser.newPage()
+ cdpSession = await page.target().createCDPSession()
+ recorder = new PuppeteerScreenRecorder(page)
+ await recorder.start('/datatools-ui/e2e-test-results/recording.mp4')
// setup listeners for various events that happen in the browser. In each of
// the following instances, write to the browser events log that will be
@@ -821,6 +838,9 @@ describe('end-to-end', () => {
// log everything that was logged to the browser console
page.on('console', msg => { browserEventLogs.info(msg.text()) })
// log all errors that were logged to the browser console
+ page.on('warn', warn => {
+ browserEventLogs.error(warn)
+ })
page.on('error', error => {
browserEventLogs.error(error)
browserEventLogs.error(error.stack)
@@ -832,12 +852,12 @@ describe('end-to-end', () => {
browserEventLogs.error(`Request failed: ${req.method()} ${req.url()}`)
})
// log all successful requests
- page.on('requestfinished', req => {
- browserEventLogs.info(`Request finished: ${req.method()} ${req.url()}`)
- })
+ // page.on('requestfinished', req => {
+ // browserEventLogs.info(`Request finished: ${req.method()} ${req.url()}`)
+ // })
// set the default download behavior to download files to the cwd
- page._client.send(
+ cdpSession.send(
'Page.setDownloadBehavior',
{ behavior: 'allow', downloadPath: './' }
)
@@ -857,6 +877,7 @@ describe('end-to-end', () => {
}
}
// close browser
+ await recorder.stop()
await page.close()
await browser.close()
log.info('Chromium closed.')
@@ -872,19 +893,23 @@ describe('end-to-end', () => {
// ---------------------------------------------------------------------------
makeTest('should load the page', async () => {
- await goto('http://localhost:9966')
+ await goto('https://datatools-ui-proxy')
await waitForSelector('h1')
await expectSelectorToContainHtml('h1', 'Data Tools')
testResults['should load the page'] = true
})
makeTest('should login', async () => {
- await goto('http://localhost:9966', { waitUntil: 'networkidle0' })
+ const username = process.env.E2E_AUTH0_USERNAME
+ const password = process.env.E2E_AUTH0_PASSWORD
+ if (!username || !password) throw Error('E2E username and password must be set!')
+
+ await goto('https://datatools-ui-proxy', { waitUntil: 'networkidle0' })
await waitForAndClick('[data-test-id="header-log-in-button"]')
await waitForSelector('button[class="auth0-lock-submit"]', { visible: true })
await waitForSelector('input[class="auth0-lock-input"][name="email"]')
- await type('input[class="auth0-lock-input"][name="email"]', config.username)
- await type('input[class="auth0-lock-input"][name="password"]', config.password)
+ await type('input[class="auth0-lock-input"][name="email"]', username)
+ await type('input[class="auth0-lock-input"][name="password"]', password)
await click('button[class="auth0-lock-submit"]')
await waitForSelector('#context-dropdown')
await wait(2000, 'for projects to load')
@@ -895,7 +920,7 @@ describe('end-to-end', () => {
const testUserSlug = testUserEmail.split('@')[0]
makeTestPostLogin('should allow admin user to create another user', async () => {
// navigage to admin page
- await goto('http://localhost:9966/admin/users', { waitUntil: 'networkidle0' })
+ await goto('https://datatools-ui-proxy/admin/users', { waitUntil: 'networkidle0' })
// click on create user button
await waitForAndClick('[data-test-id="create-user-button"]')
@@ -967,7 +992,7 @@ describe('end-to-end', () => {
describe('project', () => {
makeTestPostLogin('should create a project', async () => {
- await goto('http://localhost:9966/home', { waitUntil: 'networkidle0' })
+ await goto('https://datatools-ui-proxy/home', { waitUntil: 'networkidle0' })
await createProject(testProjectName)
// go into the project page and verify that it looks ok-ish
@@ -994,7 +1019,7 @@ describe('end-to-end', () => {
makeTestPostLogin('should update a project by adding an otp server', async () => {
// navigate to server admin page
await goto(
- `http://localhost:9966/admin/servers`,
+ `https://datatools-ui-proxy/admin/servers`,
{
waitUntil: 'networkidle0'
}
@@ -1014,12 +1039,12 @@ describe('end-to-end', () => {
await elementType(
newServerPanel,
'input[name="otpServers.$index.publicUrl"]',
- 'http://localhost:8080'
+ 'http://datatools-server:8080'
)
await elementType(
newServerPanel,
'input[name="otpServers.$index.internalUrl"]',
- 'http://localhost:8080/otp'
+ 'http://datatools-server:8080/otp'
)
await elementClick(newServerPanel, '[data-test-id="save-item-button"]')
@@ -1034,7 +1059,7 @@ describe('end-to-end', () => {
// navigate to home project view
await goto(
- `http://localhost:9966/home/${testProjectId}`,
+ `https://datatools-ui-proxy/home/${testProjectId}`,
{
waitUntil: 'networkidle0'
}
@@ -1074,7 +1099,7 @@ describe('end-to-end', () => {
makeTestPostLogin('should create feed source', async () => {
// go to project page
await goto(
- `http://localhost:9966/project/${testProjectId}`,
+ `https://datatools-ui-proxy/project/${testProjectId}`,
{
waitUntil: 'networkidle0'
}
@@ -1215,17 +1240,18 @@ describe('end-to-end', () => {
describe('feed version', () => {
makeTestPostFeedSource('should download a feed version', async () => {
- await goto(`http://localhost:9966/feed/${feedSourceId}`)
+ await goto(`https://datatools-ui-proxy/feed/${feedSourceId}`)
// Select previous version
await waitForAndClick('[data-test-id="decrement-feed-version-button"]')
await wait(2000, 'for previous version to be active')
// Download version
await click('[data-test-id="download-feed-version-button"]')
- await wait(5000, 'for file to download')
+ await wait(15000, 'for file to download')
// file should get saved to the current root directory, go looking for it
// verify that file exists
const downloadsDir = './'
+ // $FlowFixMe old version of flow doesn't know latest fs methods
const files = await fs.readdir(downloadsDir)
let feedVersionDownloadFile = ''
// assume that this file will be the only one matching the feed source ID
@@ -1244,6 +1270,7 @@ describe('end-to-end', () => {
expect(await md5File(filePath)).toEqual(await md5File(gtfsUploadFile))
// delete file
+ // $FlowFixMe old version of flow doesn't know latest fs methods
await fs.remove(filePath)
}, defaultTestTimeout)
@@ -1252,7 +1279,7 @@ describe('end-to-end', () => {
// feed versions after this test takes place
makeTestPostFeedSource('should delete a feed version', async () => {
// browse to feed source page
- await goto(`http://localhost:9966/feed/${feedSourceId}`)
+ await goto(`https://datatools-ui-proxy/feed/${feedSourceId}`)
// for whatever reason, waitUntil: networkidle0 was not working with the
// above goto, so wait for a few seconds here
await wait(5000, 'additional time for page to load')
@@ -1334,6 +1361,12 @@ describe('end-to-end', () => {
// all of the following editor tests assume the use of the scratch feed
describe('feed info', () => {
makeEditorEntityTest('should create feed info data', async () => {
+ // If the editor doesn't load properly, reload the page in hopes of fixing it
+ try {
+ await waitForSelector('[data-test-id="editor-feedinfo-nav-button"]:not([disabled])')
+ } catch {
+ await page.reload({ waitUntil: 'networkidle0' })
+ }
// open feed info sidebar
await click('[data-test-id="editor-feedinfo-nav-button"]')
@@ -1564,7 +1597,9 @@ describe('end-to-end', () => {
describe('routes', () => {
makeEditorEntityTest('should create route', async () => {
// open routes sidebar
- await click('[data-test-id="editor-route-nav-button"]')
+ await waitForAndClick('[data-test-id="editor-route-nav-button"]')
+
+ await wait(1500, 'for route page to open')
// wait for route sidebar form to appear and click button to open form
// to create route
@@ -1573,13 +1608,6 @@ describe('end-to-end', () => {
await waitForSelector('[data-test-id="route-route_id-input-container"]')
// fill out form
-
- // set public to yes
- await page.select(
- '[data-test-id="route-publicly_visible-input-container"] select',
- '1'
- )
-
// set route_id
await clearAndType(
'[data-test-id="route-route_id-input-container"] input',
@@ -2027,7 +2055,7 @@ describe('end-to-end', () => {
describe('exceptions', () => {
makeEditorEntityTest('should create exception', async () => {
// open exception sidebar
- await click('[data-test-id="exception-tab-button"]')
+ await waitForAndClick('[data-test-id="exception-tab-button"]')
// wait for exception sidebar form to appear and click button to open
// form to create exception
@@ -2054,6 +2082,8 @@ describe('end-to-end', () => {
await waitForSelector(
'[data-test-id="exception-dates-container"] input'
)
+ await wait(250, 'for date range picker to load')
+ await wait(1000, 'for date range animation to finish')
await clearAndType(
'[data-test-id="exception-dates-container"] input',
'07/04/18'
@@ -2115,6 +2145,7 @@ describe('end-to-end', () => {
)
// set new date
+ await wait(1250, 'for date range picker to load')
await clearAndType(
'[data-test-id="exception-dates-container"] input',
'07/05/18'
@@ -2149,6 +2180,68 @@ describe('end-to-end', () => {
'test exception updated to delete'
)
}, defaultTestTimeout, 'should create calendar')
+
+ makeEditorEntityTest('should create exception range', async () => {
+ // create a new exception
+ await waitForAndClick('[data-test-id="new-scheduleexception-button"]')
+
+ // name
+ await type(
+ '[data-test-id="exception-name-input-container"] input',
+ 'test exception range'
+ )
+
+ // exception type
+ await page.select(
+ '[data-test-id="exception-type-input-container"] select',
+ '7' // no service
+ )
+
+ // add start range exception date
+ await click('[data-test-id="exception-add-date-button"]')
+ await waitForSelector(
+ '[data-test-id="exception-dates-container"] input'
+ )
+ await wait(1050, 'for date range picker to load')
+ await clearAndType(
+ '[data-test-id="exception-dates-container"] input',
+ '08/04/18'
+ )
+
+ await wait(1050, 'for date range picker to load')
+ await click('[data-test-id="exception-add-range"]')
+
+    // add end of range exception date (August 10, 2018)
+ await wait(1050, 'for date range picker to load')
+ await waitForSelector(
+ '[data-test-id="exception-date-range-0-2"] input'
+ )
+
+ await wait(1050, 'for date range picker to load')
+ await clearAndType(
+ '[data-test-id="exception-date-range-0-2"] input',
+ '08/10/18'
+ )
+
+ // save
+ await click('[data-test-id="save-entity-button"]')
+ await wait(2000, 'for save to happen')
+
+ // reload to make sure stuff was saved
+ await page.reload({ waitUntil: 'networkidle0' })
+
+ // wait for exception sidebar form to appear
+ await waitForSelector(
+ '[data-test-id="exception-name-input-container"]'
+ )
+
+ // verify data was saved and retrieved from server
+ // TODO: verify the contents of the range?
+ await expectSelectorToContainHtml(
+ '[data-test-id="exception-name-input-container"]',
+ 'test exception range'
+ )
+ }, defaultTestTimeout, 'should create calendar')
})
// ---------------------------------------------------------------------------
@@ -2341,7 +2434,8 @@ describe('end-to-end', () => {
'should create pattern',
async () => {
// open route sidebar
- await click('[data-test-id="editor-route-nav-button"]')
+ await waitForAndClick('[data-test-id="editor-route-nav-button"]')
+ await wait(2000, 'for page to catch up with itself')
// wait for route sidebar form to appear and select first route
await waitForAndClick('.entity-list-row')
@@ -2357,7 +2451,7 @@ describe('end-to-end', () => {
await wait(2000, 'for page to catch up with itself')
// click add stop by name
- await click('[data-test-id="add-stop-by-name-button"]')
+ await waitForAndClick('[data-test-id="add-stop-by-name-button"]')
// wait for stop selector to show up
await waitForSelector('.pattern-stop-card .Select-control')
@@ -2374,6 +2468,10 @@ describe('end-to-end', () => {
await click('[data-test-id="add-pattern-stop-button"]')
await wait(2000, 'for auto-save to happen')
+ // save
+ await click('[data-test-id="save-entity-button"]')
+ await wait(2000, 'for save to happen')
+
// reload to make sure stuff was saved
await page.reload({ waitUntil: 'networkidle0' })
@@ -2382,6 +2480,7 @@ describe('end-to-end', () => {
'[data-test-id="pattern-title-New Pattern"]'
)
+ await wait(2000, 'for trip pattern list to load')
// verify data was saved and retrieved from server
await expectSelectorToContainHtml(
'.trip-pattern-list',
@@ -2507,22 +2606,22 @@ describe('end-to-end', () => {
await page.keyboard.press('Enter')
// Laurel Dr arrival
- await page.keyboard.type('1234')
+ await page.keyboard.type('12:34')
await page.keyboard.press('Tab')
await page.keyboard.press('Enter')
// Laurel Dr departure
- await page.keyboard.type('1235')
+ await page.keyboard.type('12:35')
await page.keyboard.press('Tab')
await page.keyboard.press('Enter')
// Russell Av arrival
- await page.keyboard.type('1244')
+ await page.keyboard.type('12:44')
await page.keyboard.press('Tab')
await page.keyboard.press('Enter')
// Russell Av departure
- await page.keyboard.type('1245')
+ await page.keyboard.type('12:45')
await page.keyboard.press('Enter')
// save
@@ -2599,11 +2698,11 @@ describe('end-to-end', () => {
await page.keyboard.press('Enter')
await page.keyboard.type('test-trip-to-delete')
await page.keyboard.press('Enter')
- await wait(2000, 'for save to happen')
+ await wait(4000, 'for save to happen')
// save
await click('[data-test-id="save-trip-button"]')
- await wait(2000, 'for save to happen')
+ await wait(6000, 'for save to happen')
// reload to make sure stuff was saved
await page.reload({ waitUntil: 'networkidle0' })
@@ -2627,7 +2726,8 @@ describe('end-to-end', () => {
// confirm delete
await waitForAndClick('[data-test-id="modal-confirm-ok-button"]')
- await wait(2000, 'for delete to happen')
+ await wait(3000, 'for delete to happen')
+ await page.reload({ waitUntil: 'networkidle0' })
// verify that trip to delete is no longer listed
await expectSelectorToNotContainHtml(
@@ -2668,7 +2768,7 @@ describe('end-to-end', () => {
makeEditorEntityTest('should make snapshot active version', async () => {
// go back to feed
// not sure why, but clicking on the nav home button doesn't work
- await goto(`http://localhost:9966/feed/${scratchFeedSourceId}`)
+ await goto(`https://datatools-ui-proxy/feed/${scratchFeedSourceId}`)
// wait for page to be visible and go to snapshots tab
await waitForAndClick('#feed-source-viewer-tabs-tab-snapshots')
@@ -2676,6 +2776,10 @@ describe('end-to-end', () => {
// wait for snapshots tab to load and publish snapshot
await waitForAndClick('[data-test-id="publish-snapshot-button"]')
+
+ // wait for snapshot export modal and click "no" to proprietary file export
+ await waitForAndClick('[data-test-id="export-patterns-modal-no"]')
+
// wait for version to get created
await waitAndClearCompletedJobs()
@@ -2709,6 +2813,7 @@ describe('end-to-end', () => {
await waitForAndClick('[data-test-id="deploy-server-0-button"]')
// wait for deployment dialog to appear
await waitForSelector('[data-test-id="confirm-deploy-server-button"]')
+ await wait(1500, 'for deployment panel to properly load')
// get the router name
const innerHTML = await getInnerHTMLFromSelector(
@@ -2725,7 +2830,7 @@ describe('end-to-end', () => {
// wait for jobs to complete
await waitAndClearCompletedJobs()
- }, defaultTestTimeout + 30000) // Add thirty seconds for deployment job
+ }, defaultTestTimeout + 60000) // Add sixty seconds for deployment job
makeEditorEntityTest('should be able to do a trip plan on otp', async () => {
await wait(15000, 'for OTP to pick up the newly-built graph')
diff --git a/__tests__/test-utils/mock-data/manager.js b/__tests__/test-utils/mock-data/manager.js
index 5db2d031a..ac4fb3129 100644
--- a/__tests__/test-utils/mock-data/manager.js
+++ b/__tests__/test-utils/mock-data/manager.js
@@ -13,6 +13,20 @@ import type {
let COUNTER = 0
+/**
+ * Make a mock deployment summary. Its id reuses the current COUNTER value so that it
+ * matches the main deployment created by makeMockDeployment.
+ */
+export function makeMockDeploymentSummary () {
+ return {
+ dateCreated: 1553292345720,
+ deployedTo: null,
+ // Don't increment counter as we want to match the main deployment
+ id: `mock-deployment-id-${COUNTER}`,
+ lastDeployed: null,
+ name: 'mock-deployment'
+ }
+}
/**
* Make a mock deployment given a project and some FeedVersions. This is a
* function so that circular references can be defined.
@@ -57,6 +71,7 @@ export function makeMockDeployment (
peliasCsvFiles: [],
peliasResetDb: null,
peliasUpdate: null,
+ peliasSynonymsBase64: null,
pinnedfeedVersionIds: [],
projectBounds: {east: 0, west: 0, north: 0, south: 0},
projectId: project.id,
@@ -95,13 +110,14 @@ export const mockProject = {
pinnedDeploymentId: null,
peliasWebhookUrl: null,
routerConfig: {
- carDropoffTime: null,
- numItineraries: null,
+ driveDistanceReluctance: null,
+ itineraryFilters: {nonTransitGeneralizedCostLimit: null},
requestLogFile: null,
stairsReluctance: null,
updaters: null,
walkSpeed: null
},
+ sharedStopsConfig: null,
useCustomOsmBounds: false,
user: null
}
@@ -114,7 +130,6 @@ export const mockFeedWithVersion = {
externalProperties: {},
id: 'mock-feed-with-version-id',
isPublic: false,
- lastFetched: 1543389038810,
lastUpdated: 1543389038810,
latestValidation: {
agencies: null,
@@ -153,6 +168,38 @@ export const mockFeedWithVersion = {
versionCount: 1
}
+// a mock feed source summary
+export const mockFeedSourceSummaryWithVersion = {
+ deployable: false,
+ id: 'mock-feed-with-version-id',
+ isPublic: false,
+ lastUpdated: 1543389038810,
+ latestValidation: {
+ agencies: null,
+ agencyCount: 1,
+ avgDailyRevenueTime: 0,
+ bounds: {
+ north: 39.0486949672717,
+ south: 38.92884,
+ east: -76.481211,
+ west: -76.5673055566884
+ },
+ endDate: '20190801',
+ errorCount: 78,
+ feedVersionId: 'mock-feed-version-id',
+ loadFailureReason: null,
+ loadStatus: 'SUCCESS',
+ routeCount: 10,
+ startDate: '20180801',
+ stopCount: 237,
+ stopTimesCount: 11170,
+ tripCount: 415
+ },
+ labelIds: [],
+ name: 'test feed with a version',
+ projectId: mockProject.id
+}
+
// a mock feed with no versions
export const mockFeedWithoutVersion = {
dateCreated: 1544831411569,
@@ -161,7 +208,6 @@ export const mockFeedWithoutVersion = {
externalProperties: {},
id: 'mock-feed-without-version-id',
isPublic: false,
- lastFetched: null,
name: 'test feed with no version',
labelIds: [],
noteCount: 0,
@@ -326,6 +372,7 @@ export const mockFeedVersion = {
feedVersionId: 'mock-feed-version-id',
loadFailureReason: null,
loadStatus: 'SUCCESS',
+ mobilityDataResult: {},
routeCount: 10,
startDate: '20180801',
stopCount: 237,
@@ -353,6 +400,7 @@ export const mockDeployment = makeMockDeployment(
mockProjectWithDeployment,
[mockFeedVersion]
)
+export const mockDeploymentSummary = makeMockDeploymentSummary()
mockProjectWithDeployment.deployments.push(mockDeployment)
mockProjectWithDeployment.feedSources.push(mockFeedWithVersion)
@@ -370,7 +418,6 @@ function makeUser (profile) {
profile,
permissions: new UserPermissions(profile.app_metadata.datatools),
recentActivity: null,
- redirectOnSuccess: null,
subscriptions: new UserSubscriptions(profile.app_metadata.datatools)
}
}
diff --git a/__tests__/test-utils/mock-data/store.js b/__tests__/test-utils/mock-data/store.js
index a99f04611..e59464c12 100644
--- a/__tests__/test-utils/mock-data/store.js
+++ b/__tests__/test-utils/mock-data/store.js
@@ -4,7 +4,7 @@ import multi from '@conveyal/woonerf/store/multi'
import promise from '@conveyal/woonerf/store/promise'
import {middleware as fetchMiddleware} from '@conveyal/woonerf/fetch'
import Enzyme, {mount} from 'enzyme'
-import EnzymeReactAdapter from 'enzyme-adapter-react-15.4'
+import EnzymeReactAdapter from '@wojtekmaj/enzyme-adapter-react-17'
import {mountToJson} from 'enzyme-to-json'
import clone from 'lodash/cloneDeep'
import {get} from 'object-path'
@@ -43,10 +43,10 @@ import {defaultState as defaultManagerProjectsState} from '../../../lib/manager/
import {defaultState as defaultManagerStatusState} from '../../../lib/manager/reducers/status'
import {defaultState as defaultManagerUiState} from '../../../lib/manager/reducers/ui'
import {defaultState as defaultManagerUserState} from '../../../lib/manager/reducers/user'
-import * as manager from './manager'
-
import type {AppState} from '../../../lib/types/reducers'
+import * as manager from './manager'
+
Enzyme.configure({ adapter: new EnzymeReactAdapter() })
const defaultManagerState = {
diff --git a/__tests__/test-utils/setup-e2e.js b/__tests__/test-utils/setup-e2e.js
index 995892a19..353ace053 100644
--- a/__tests__/test-utils/setup-e2e.js
+++ b/__tests__/test-utils/setup-e2e.js
@@ -12,6 +12,7 @@ const {
downloadFile,
getTestFolderFilename,
isCi,
+ isDocker,
isUiRepo,
loadYamlFile,
requireEnvVars,
@@ -25,16 +26,18 @@ const otpJarForOtpRunner = '/opt/otp/otp-v1.4.0'
const ENV_YML_VARIABLES = [
'AUTH0_CLIENT_ID',
'AUTH0_DOMAIN',
- 'AUTH0_SECRET',
'AUTH0_API_CLIENT',
'AUTH0_API_SECRET',
'GTFS_DATABASE_PASSWORD',
'GTFS_DATABASE_USER',
'GTFS_DATABASE_URL',
+ 'MONGO_HOST',
+ 'MONGO_DB_NAME',
'OSM_VEX',
'SPARKPOST_KEY',
'SPARKPOST_EMAIL'
]
+
/**
* download, configure and start an instance of datatools-server
*/
@@ -130,7 +133,7 @@ async function startBackendServer () {
} = process.env
const serverEnv = results.readServerTemplate
- serverEnv.application.client_assets_url = 'http://localhost:4000'
+ serverEnv.application.client_assets_url = 'http://datatools-server:4000'
serverEnv.application.data.gtfs_s3_bucket = S3_BUCKET
serverEnv.application.data.use_s3_storage = true
serverEnv.extensions.transitfeeds.key = TRANSITFEEDS_KEY
@@ -385,24 +388,17 @@ function recreateEndToEndTestResultDirectory () {
async function verifySetupForLocalEnvironment () {
const errors = []
- // make sure e2e.yml exists
- try {
- await fs.stat('configurations/end-to-end/env.yml')
- } catch (e) {
- errors.push(new Error('Failed to detect file `configurations/end-to-end/env.yml`'))
- }
-
// make sure services are running
const endpointChecks = [
{
name: 'Front-end server',
- url: 'http://localhost:9966'
+ url: 'http://datatools-ui:9966'
}, {
name: 'Back-end server',
- url: 'http://localhost:4000'
+ url: 'http://datatools-server:4000'
}, {
name: 'OTP server',
- url: 'http://localhost:8080'
+ url: 'http://datatools-server:8080'
}
]
@@ -415,7 +411,7 @@ async function verifySetupForLocalEnvironment () {
// Download OTP jar into /opt/otp/ if not already present.
const otpJarExists = await fs.exists(otpJarForOtpRunner)
- if (!otpJarExists) {
+ if (!otpJarExists && !isDocker) {
await downloadFile(otpJarMavenUrl, otpJarForOtpRunner)
}
@@ -499,7 +495,7 @@ module.exports = async function () {
// do different setup depending on runtime environment
const setupItems = []
- if (isCi) {
+ if (isCi && !isDocker) {
setupItems.push(setupForContinuousIntegrationEnvironment())
} else {
setupItems.push(verifySetupForLocalEnvironment())
diff --git a/__tests__/test-utils/teardown-e2e.js b/__tests__/test-utils/teardown-e2e.js
index 232de18b7..aefbd5c29 100644
--- a/__tests__/test-utils/teardown-e2e.js
+++ b/__tests__/test-utils/teardown-e2e.js
@@ -22,7 +22,7 @@ const logsZipfile = 'logs.zip'
const repo = process.env.GITHUB_WORKSPACE
? process.env.GITHUB_WORKSPACE.split(path.sep).pop()
: ''
-const buildNum = process.env.GITHUB_RUN_ID
+const buildNum = process.env.GITHUB_RUN_ID || 'localhost'
const uploadedLogsFilename = `${repo}-build-${buildNum}-e2e-logs.zip`
const {LOGS_S3_BUCKET} = process.env
@@ -155,9 +155,14 @@ async function uploadToMicrosoftTeams () {
console.log('posting message to MS Teams')
- const testResults = require(
- path.resolve(`./${getTestFolderFilename('results.json')}`)
- )
+ let testResults = {success: false, numPassedTests: 0, numTotalTests: 0}
+ try {
+ testResults = require(
+ path.resolve('/datatools-ui/e2e-test-results/results.json')
+ )
+ } catch {
+ console.warn("Couldn't read results.json!")
+ }
const actions = [{
'@type': 'OpenUri',
name: `View GitHub Action Build #${buildNum}`,
@@ -183,7 +188,7 @@ async function uploadToMicrosoftTeams () {
}
let fetchResponse
- const commit = process.env.GITHUB_SHA
+ const commit = process.env.GITHUB_SHA || 'localhost'
const baseRepoUrl = `https://github.com/ibi-group/datatools-${isUiRepo ? 'ui' : 'server'}`
const commitUrl = `${baseRepoUrl}/commit/${commit}`
try {
@@ -196,7 +201,7 @@ async function uploadToMicrosoftTeams () {
'@type': 'MessageCard',
themeColor: '0072C6',
title: `${repo} e2e test ${testResults.success ? 'passed. ✅' : 'failed. ❌'}`,
- text: `**branch:** ${process.env.GITHUB_REF_SLUG}\n
+ text: `**branch:** ${process.env.GITHUB_REF_SLUG || 'branch not detected'}\n
**commit:** [${commit.slice(0, 6)}](${commitUrl})\n
**result:** ${testResults.numPassedTests} / ${testResults.numTotalTests} tests passed\n
`,
@@ -247,10 +252,10 @@ async function uploadToSlack () {
* slack or MS Teams channel (if defined)
*/
function uploadLogs () {
- if (!(slackConfigured || msTeamsConfigured)) {
- console.warn('Log upload environment variables undefined, not uploading logs anywhere!')
- return
- }
+ // if (!(slackConfigured || msTeamsConfigured)) {
+ // console.warn('Log upload environment variables undefined, not uploading logs anywhere!')
+ // return
+ // }
const output = fs.createWriteStream(logsZipfile)
const archive = archiver('zip')
@@ -273,6 +278,7 @@ function uploadLogs () {
})
.catch(err => {
if (err) {
+ console.log(err)
return makeUploadFailureHandler(
'An error occurred while uploading the logs'
)(err)
diff --git a/__tests__/test-utils/utils.js b/__tests__/test-utils/utils.js
index e7ca9774f..40d928e6d 100644
--- a/__tests__/test-utils/utils.js
+++ b/__tests__/test-utils/utils.js
@@ -8,8 +8,9 @@ const request = require('request')
const collectingCoverage = process.env.COLLECT_COVERAGE
const isCi = !!process.env.CI
+const isDocker = !!process.env.IS_DOCKER
const isUiRepo = process.env.GITHUB_REPOSITORY === 'ibi-group/datatools-ui'
-const testFolderPath = 'e2e-test-results'
+const testFolderPath = process.env.TEST_FOLDER_PATH || 'e2e-test-results'
/**
* Download a file using a stream
@@ -148,6 +149,7 @@ module.exports = {
downloadFile,
getTestFolderFilename,
isCi,
+ isDocker,
isUiRepo,
killDetachedProcess,
loadYamlFile,
diff --git a/configurations/default/env.yml.tmp b/configurations/default/env.yml.tmp
index d05c954af..03d0c7c68 100644
--- a/configurations/default/env.yml.tmp
+++ b/configurations/default/env.yml.tmp
@@ -1,13 +1,27 @@
AUTH0_CLIENT_ID: your-auth0-client-id
+AUTH0_CONNECTION_NAME: your-auth0-connection-name
AUTH0_DOMAIN: your-auth0-domain
BUGSNAG_KEY: optional-bugsnag-key
MAPBOX_ACCESS_TOKEN: your-mapbox-access-token
MAPBOX_MAP_ID: mapbox/outdoors-v11
MAPBOX_ATTRIBUTION: © Mapbox © OpenStreetMap Improve this map
+MAP_BASE_URL: optional-map-tile-url
+# MAP_BASE_URL: http://tile.openstreetmap.org/{z}/{x}/{y}.png # Uncomment if maps appear gray
SLACK_CHANNEL: optional-slack-channel
SLACK_WEBHOOK: optional-slack-webhook
GRAPH_HOPPER_KEY: your-graph-hopper-key
# Optional override to use a custom service instead of the graphhopper.com hosted service.
# GRAPH_HOPPER_URL: http://localhost:8989/
-GOOGLE_ANALYTICS_TRACKING_ID: optional-ga-key
+# Optional overrides to use custom service or different api key for certain bounding boxes.
+# (uncomment below to enable)
+# GRAPH_HOPPER_ALTERNATES:
+# - URL: http://localhost:8989/
+# KEY: your-localhost-graph-hopper-key
+# BBOX:
+# - -170
+# - 6
+# - -46
+# - 83
# GRAPH_HOPPER_POINT_LIMIT: 10 # Defaults to 30
+GOOGLE_ANALYTICS_TRACKING_ID: optional-ga-key
+DISABLE_AUTH: true
diff --git a/configurations/mtc-docs/.readthedocs.yaml b/configurations/mtc-docs/.readthedocs.yaml
new file mode 100644
index 000000000..3b057f68f
--- /dev/null
+++ b/configurations/mtc-docs/.readthedocs.yaml
@@ -0,0 +1,20 @@
+# Read the Docs configuration file for MkDocs projects
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the version of Python and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.13"
+
+# Install mkdocs macros and other plugins
+# Per readthedocs: paths are relative to the root of the project.
+python:
+ install:
+ - requirements: docs/requirements.txt
+
+mkdocs:
+ configuration: configurations/mtc-docs/mkdocs-mtc.yml
diff --git a/configurations/mtc-docs/mkdocs-mtc.yml b/configurations/mtc-docs/mkdocs-mtc.yml
new file mode 100644
index 000000000..df89de75f
--- /dev/null
+++ b/configurations/mtc-docs/mkdocs-mtc.yml
@@ -0,0 +1,29 @@
+INHERIT: ../../mkdocs.yml
+site_name: Transit Data Manager Docs
+docs_dir: ../../docs
+
+extra:
+ product_name: MTC Transit Data Manager (TDM)
+ support_email: transitdata@511.org
+
+nav:
+- Home: 'index.md'
+- Data Manager:
+ - 'Introduction': 'user/introduction.md'
+ - 'Managing Projects & Feeds': 'user/managing-projects-feeds.md'
+ - 'Publishing Feeds': 'user/publishing-feeds.md'
+ - 'Merging Feeds': 'user/merging-feeds.md'
+ - 'Feed Version Summary': 'user/feed-version-summary.md'
+ - 'Managing Users': 'user/managing-users.md'
+ - 'Service Alerts Manager': 'user/service-alerts.md'
+ - 'Searching for Routes and Stops': 'user/searching-routes-stops.md'
+ - 'GTFS+ Editor': 'user/gtfs-plus-editor.md'
+- 'GTFS Editor':
+ - Getting Started: 'user/editor/getting-started.md'
+ - Stops: 'user/editor/stops.md'
+ - Routes: 'user/editor/routes.md'
+ - Patterns: 'user/editor/patterns.md'
+ - Schedules: 'user/editor/schedules.md'
+ - Fares: 'user/editor/fares.md'
+- Appendices:
+ - GTFS Validation Warnings: 'user/appendix-gtfs-warnings.md'
diff --git a/configurations/test/env.yml b/configurations/test/env.yml
index 373ab9582..1b41ff506 100644
--- a/configurations/test/env.yml
+++ b/configurations/test/env.yml
@@ -1,4 +1,5 @@
AUTH0_CLIENT_ID: mock-client-id
+AUTH0_CONNECTION_NAME: auth0-connection-name
AUTH0_DOMAIN: test.domain.com
GRAPH_HOPPER_KEY: test
MAPBOX_ACCESS_TOKEN: test
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
new file mode 100644
index 000000000..9055475a6
--- /dev/null
+++ b/docker/docker-compose.yml
@@ -0,0 +1,71 @@
+version: "3.8"
+
+x-common-variables: &common-variables
+ - BUGSNAG_KEY=${BUGSNAG_KEY}
+ - S3_BUCKET=${S3_BUCKET}
+ - LOGS_S3_BUCKET=${LOGS_S3_BUCKET}
+ - MS_TEAMS_WEBHOOK_URL=${MS_TEAMS_WEBHOOK_URL}
+ - MAPBOX_ACCESS_TOKEN=${MAPBOX_ACCESS_TOKEN}
+ - GITHUB_SHA=${GITHUB_SHA}
+ - GITHUB_REF_SLUG=${GITHUB_REF_SLUG}
+ - TRANSITFEEDS_KEY=${TRANSITFEEDS_KEY}
+ - GITHUB_REPOSITORY=${GITHUB_REPOSITORY}
+ - GITHUB_WORKSPACE=${GITHUB_WORKSPACE}
+ - GITHUB_RUN_ID=${GITHUB_RUN_ID}
+ - AUTH0_CLIENT_ID=${AUTH0_CLIENT_ID}
+ - AUTH0_PUBLIC_KEY=${AUTH0_PUBLIC_KEY}
+ - AUTH0_CONNECTION_NAME=${AUTH0_CONNECTION_NAME}
+ - AUTH0_DOMAIN=${AUTH0_DOMAIN}
+ - AUTH0_API_CLIENT=${AUTH0_API_CLIENT}
+ - AUTH0_API_SECRET=${AUTH0_API_SECRET}
+ - OSM_VEX=${OSM_VEX}
+ - SPARKPOST_KEY=${SPARKPOST_KEY}
+ - SPARKPOST_EMAIL=${SPARKPOST_EMAIL}
+ - GTFS_DATABASE_URL=jdbc:postgresql://postgres/dmtest
+ - GTFS_DATABASE_USER=root
+ - GTFS_DATABASE_PASSWORD=e2e
+ - MONGO_DB_NAME=data_manager
+ - MONGO_HOST=mongo:27017
+ - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
+ - AWS_REGION=${AWS_REGION}
+ - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
+ - DISABLE_AUTH=true
+
+services:
+ datatools-server:
+ image: ghcr.io/ibi-group/datatools-server:dev
+ restart: always
+ environment: *common-variables
+ volumes:
+ - type: bind
+ source: ./server/
+ target: /config
+ ports:
+ - "4000:4000"
+ datatools-ui:
+ build:
+ context: ../
+ dockerfile: ./docker/ui/Dockerfile
+ args: *common-variables
+ restart: always
+ environment: *common-variables
+ ports:
+ - "9966:9966"
+ mongo:
+ image: mongo
+ restart: always
+ volumes:
+ - dt-mongo:/data/db
+ postgres:
+ environment:
+ POSTGRES_HOST_AUTH_METHOD: trust
+ POSTGRES_USER: root
+ POSTGRES_PASS: e2e
+ POSTGRES_DB: dmtest
+ image: postgres
+ restart: always
+ volumes:
+ - dt-postgres:/var/lib/postgresql/data
+volumes:
+ dt-postgres:
+ dt-mongo:
diff --git a/docker/server/env.yml b/docker/server/env.yml
new file mode 100644
index 000000000..9fe4c3cbb
--- /dev/null
+++ b/docker/server/env.yml
@@ -0,0 +1,5 @@
+DISABLE_AUTH: TRUE
+GTFS_DATABASE_URL: jdbc:postgresql://postgres/dmtest
+MONGO_DB_NAME: data_manager
+MONGO_HOST: mongo
+AUTH0_CLIENT_ID: disable_auth
\ No newline at end of file
diff --git a/docker/server/server.yml b/docker/server/server.yml
new file mode 100644
index 000000000..f212cbe7e
--- /dev/null
+++ b/docker/server/server.yml
@@ -0,0 +1,67 @@
+application:
+ title: Data Tools
+ logo: https://d2tyb7byn1fef9.cloudfront.net/ibi_group-128x128.png
+ logo_large: https://d2tyb7byn1fef9.cloudfront.net/ibi_group_black-512x512.png
+ client_assets_url: https://example.com
+ shortcut_icon_url: https://d2tyb7byn1fef9.cloudfront.net/ibi-logo-original%402x.png
+ public_url: http://localhost:9966
+ notifications_enabled: false
+ docs_url: http://conveyal-data-tools.readthedocs.org
+ support_email: support@ibigroup.com
+ port: 4000
+ data:
+ gtfs: /tmp
+ use_s3_storage: false
+ s3_region: us-east-1
+ gtfs_s3_bucket: bucket-name
+modules:
+ enterprise:
+ enabled: false
+ editor:
+ enabled: true
+ deployment:
+ enabled: true
+ ec2:
+ enabled: false
+ default_ami: ami-your-ami-id
+ tag_key: a-tag-key-to-add-to-all-instances
+ tag_value: a-tag-value-to-add-to-all-instances
+ # Note: using a cloudfront URL for these download URLs will greatly
+ # increase download/deploy speed.
+ otp_download_url: https://optional-otp-repo.com
+ user_admin:
+ enabled: false
+ gtfsapi:
+ enabled: true
+ load_on_fetch: false
+ # use_extension: mtc
+ # update_frequency: 30 # in seconds
+ manager:
+ normalizeFieldTransformation:
+ # Enter capitalization exceptions (e.g. acronyms), in the desired case, and separated by commas.
+ defaultCapitalizationExceptions:
+ - ACE
+ - BART
+ # Enter substitutions (e.g. substitute '@' with 'at'), one dashed entry for each substitution, with:
+ # - pattern: the regex string pattern that will be replaced,
+ # - replacement: the replacement string for that pattern,
+ # - normalizeSpace: if true, the resulting field value will include one space before and after the replacement string.
+ # Note: if the replacement must be blank, then normalizeSpace should be set to false
+ # and whitespace management should be handled in pattern instead.
+ # Substitutions are executed in order they appear in the list.
+ defaultSubstitutions:
+ - description: "Replace '@' with 'at', and normalize space."
+ pattern: "@"
+ replacement: at
+ normalizeSpace: true
+ - description: "Replace '+' (\\+ in regex) and '&' with 'and', and normalize space."
+ pattern: "[\\+&]"
+ replacement: and
+ normalizeSpace: true
+extensions:
+ transitland:
+ enabled: true
+ api: https://transit.land/api/v1/feeds
+ transitfeeds:
+ enabled: true
+ api: http://api.transitfeeds.com/v1/getFeeds
\ No newline at end of file
diff --git a/docker/ui/Dockerfile b/docker/ui/Dockerfile
new file mode 100644
index 000000000..a3f1bc811
--- /dev/null
+++ b/docker/ui/Dockerfile
@@ -0,0 +1,16 @@
+FROM node:14
+WORKDIR /datatools-build
+
+ARG BUGSNAG_KEY
+
+RUN cd /datatools-build
+COPY package.json yarn.lock patches /datatools-build/
+RUN yarn
+COPY . /datatools-build/
+COPY configurations/default /datatools-config/
+
+
+# Copy the tmp file to the env.yml if no env.yml is present
+RUN cp -R -u -p /datatools-config/env.yml.tmp /datatools-config/env.yml
+
+CMD yarn run mastarm build --env dev --serve --proxy http://datatools-server:4000/api #
\ No newline at end of file
diff --git a/docs/dev/api_interaction.md b/docs/dev/api_interaction.md
index 75537569b..2916742c7 100644
--- a/docs/dev/api_interaction.md
+++ b/docs/dev/api_interaction.md
@@ -1,4 +1,4 @@
-# API Interaction Transcript
+# API Interaction
The following is a set of instructions on API calls needed to upload and validate
a feed, wait for the tasks' completion, and then browse its contents. All of the
endpoints needed to load and process a GTFS file are REST-based. The endpoints
diff --git a/docs/dev/deployment.md b/docs/dev/deployment.md
index cde4c3ffb..7bbda2f57 100644
--- a/docs/dev/deployment.md
+++ b/docs/dev/deployment.md
@@ -110,7 +110,7 @@ Auth0 is used for authentication in the application. If you don't need authentic
- Application level
- Allowed Callback URLs
- Allowed Origins (CORS)
- - keep all other default settings
+ - Keep all other default settings
#### Creating your first user
Create your first Auth0 user through Auth0 web console (Users > Create User). In
@@ -141,9 +141,9 @@ AUTH0_DOMAIN: your-auth0-domain.auth.com
AUTH0_CLIENT_ID: your-auth0-client-id
```
-Update the following properties in `datatools-server` `env.yml` to reflect the secure Auth0 application settings.
+Update the following properties in the `datatools-server` `env.yml` file to reflect the secure Auth0 application settings.
-**Note:** for older Auth0 accounts/tenants, it is possible to use the Auth0 secret token, which uses the HS256 algorithm, but newer Auth0 tenants will need to specify the absolute path of their `.pem` file in the `AUTH0_PUBLIC_KEY` property. This public key only needs to be downloaded one time for your Auth0 tenant at `https://[your_domain].auth0.com/pem`.
+**Note:** For older Auth0 accounts or tenants, it is possible to use the Auth0 secret token, which uses the HS256 algorithm. However, newer Auth0 tenants will need to specify the absolute path of their `.pem` file in the `AUTH0_PUBLIC_KEY` property. This public key only needs to be downloaded one time for your Auth0 tenant at `https://[your_domain].auth0.com/pem`.
```yaml
AUTH0_SECRET: your-auth0-client-secret # used for pre-September 2017 Auth0 accounts
@@ -151,34 +151,39 @@ AUTH0_PUBLIC_KEY: /location/of/auth0-account.pem # used for post-September 2017
AUTH0_TOKEN: your-auth0-api-token
```
-**Note**: to generate the `api_token`, go to Documentation > Management API. After adding scopes, your token will appear in the input field.
+**Note**: To generate the `api_token`, go to Documentation > Management API. After adding scopes, your token will appear in the input field.
-
+
To allow for the creation, deletion and editing of users you must generate a token for the following scopes:
- **users**:
- read, update, create and delete
- **users_app_metadata**:
- - read, update, create and delete`
+ - read, update, create and delete
-#### Auth0 Rule Configuration: making app_metadata and user_metadata visible via token (only required for "new" Auth0 accounts/tenants)
-If using OIDC-conformant clients/APIs (which appears to be mandatory for new Auth0 tenants), you must set up a custom Auth0 rule to add app_metadata and user_metadata to the user's token (Note: this is not the default for older, "legacy" Auth0 accounts). Go to Rules > Create Rule > empty rule and add the following code snippet. If you'd like the rule to only apply to certain clients, you can keep the conditional block that checks for `context.clientID` value. Otherwise, this conditional block is unnecessary.
+#### Auth0 Post-Login Action Configuration: making `app_metadata` and `user_metadata` visible via token
-```
-function (user, context, callback) {
- if (context.clientID === 'YOUR_CLIENT_ID') {
- var namespace = 'http://datatools/';
- if (context.idToken && user.user_metadata) {
- context.idToken[namespace + 'user_metadata'] = user.user_metadata;
- }
- if (context.idToken && user.app_metadata) {
- context.idToken[namespace + 'app_metadata'] = user.app_metadata;
- }
+If using OIDC-conformant clients/APIs (which appears to be mandatory for new Auth0 tenants), you must set up a custom Auth0 action to add `app_metadata` and `user_metadata` to the user's id token (Note: this is not the default for older, "legacy" Auth0 accounts).
+
+To set up the action, go to Actions > Flows > Login, then under Add action > Custom, click `Create Action`. Fill in the action name and pick a recommended runtime, and click `Create`. Modify the function `onExecutePostLogin` as follows, then click `Save Draft`:
+
+```js
+exports.onExecutePostLogin = async (event, api) => {
+ if (event.authorization) {
+ const namespace = 'http://datatools';
+ api.idToken.setCustomClaim(`${namespace}/user_metadata`, event.user.user_metadata);
+ api.idToken.setCustomClaim(`${namespace}/app_metadata`, event.user.app_metadata);
}
- callback(null, user, context);
-}
+};
```
+If you want the action to apply only to specific applications, add a conditional check on the client ID (available on the Actions event as `event.client.client_id`, as shown in the sketch below); otherwise, no such check is needed.
+This action ensures that `app_metadata` and `user_metadata` are included in the user's token, as required for OIDC-conformant clients/APIs in new Auth0 tenants.
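+
+For illustration only, here is a minimal sketch of the action above restricted to a single application via `event.client.client_id`; `YOUR_CLIENT_ID` is a placeholder for the application's client ID:
+
+```js
+exports.onExecutePostLogin = async (event, api) => {
+  // Placeholder: replace with the client ID of the application that should receive the claims.
+  if (event.client.client_id !== 'YOUR_CLIENT_ID') return;
+  if (event.authorization) {
+    const namespace = 'http://datatools';
+    api.idToken.setCustomClaim(`${namespace}/user_metadata`, event.user.user_metadata);
+    api.idToken.setCustomClaim(`${namespace}/app_metadata`, event.user.app_metadata);
+  }
+};
+```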
+
+You can test the action with mock token data using the Test tab. Once ready, click `Deploy`, then click `Back to Flow`.
+In the diagram, drag the action between the Start and Complete steps, then click `Apply`.
+You can verify that the action is executed correctly by logging in to Datatools with an admin user
+and checking that the Admin functionality is available.
## Building and Running the Application
@@ -239,6 +244,8 @@ Enables the GTFS Editor module.
- `MAPBOX_ACCESS_TOKEN`
- `R5_URL` (optional parameter for r5 routing in editor pattern drawing)
+**Note:** If maps are gray, add the property `MAP_BASE_URL: http://tile.openstreetmap.org/{z}/{x}/{y}.png` into `env.yml`.
+
### R5 network validation
While the application handles basic validation even without the `r5_network`
@@ -304,6 +311,7 @@ extensions:
```
### Integration with [TransitFeeds](http://transitfeeds.com/)
+**Note**: TransitFeeds is not regularly updated and is being replaced by the [MobilityData Database](https://database.mobilitydata.org/).
Ensure that the `extensions:transitfeeds:enabled` flag is set to `true` in
`config.yml`, and provide your API key:
diff --git a/docs/dev/development.md b/docs/dev/development.md
index 21988c174..8e8ae5221 100644
--- a/docs/dev/development.md
+++ b/docs/dev/development.md
@@ -1,5 +1,5 @@
# Development
-These instructions should allow you to get Data Tools / Editor / Catalogue up and running within an integrated development environment, allowing you to work on the code and debug it. We all use IntelliJ so instructions will currently be only for that environment.
+These instructions should allow you to get TRANSIT-data-tools / Editor / Catalogue up and running within an integrated development environment, allowing you to work on the code and debug it. We all use IntelliJ so instructions will currently be only for that environment.
## Components
The system is made up of two different projects:
@@ -44,3 +44,9 @@ To specify your own configuration that overrides the defaults:
```
yarn start -- --config /path/to/config
```
+
+## E2E tests
+
+The e2e tests have been Dockerized, which allows them to be run easily anywhere `docker compose` works. To run them on localhost, first create a `.env` file in the `__tests__/e2e` directory. `docker compose` will alert you to any required variables that are missing.
+
+To run the tests, run `docker compose -f docker-compose.yml up --abort-on-container-exit` in the `__tests__/e2e/` directory.
diff --git a/docs/dev/localization.md b/docs/dev/localization.md
new file mode 100644
index 000000000..49f385314
--- /dev/null
+++ b/docs/dev/localization.md
@@ -0,0 +1,34 @@
+# Localization
+
+## Adding translations for a new language
+To add support for a new language, you need to perform the following steps:
+
+1. Create a new language file in folder `i18n`, e.g. by copying the `english.yml` file.
+2. In the newly created `
+
+- **Zoom to pattern extents:**
+Clicking the search (🔍) button (in the top toolbar) with a pattern selected adjusts the map view to show the entire pattern you are editing.
+- **Duplicating pattern:**
+Used to create a similar, but different trip pattern. When duplicating the active pattern, its name becomes `[Pattern name] copy`.
+- **Reverse pattern:**
+To reverse the sequence of stops for a pattern, click the button with opposing arrows. Note: this is a destructive action and should usually only be used after duplicating a pattern.
+- **Delete pattern:** Deletes the active pattern. Note: deleted patterns cannot be recovered.
+
+
## Stop sequence
### Adding stops
To begin constructing a pattern, click `+ Add stop`. You can then search for stops to add by name in the dropdown that appears or zoom into the map and add stops by clicking on them and clicking `+ Add stop`.
@@ -45,30 +49,25 @@ To begin constructing a pattern, click `+ Add stop`. You can then search for sto
As you add stops, the pattern shape will update according to the street network. Keep selecting and adding stops in the sequence the route passes through them until the route is complete.
### Inserting stops mid-sequence
-If you realize you have missed a stop from the sequence it can be added in after the fact. When adding via the dropdown, stops can only be added to the end of the sequence; however, if you're adding via the map, you can choose where exactly to insert the stop.
+First, click the `From Stops` button so that Datatools knows the pattern can be regenerated by editing stops. If you realize that you have missed a stop from the sequence, it can be added after the fact. When adding via the dropdown, stops can only be added to the end of the sequence; however, if you're adding via the map, you can choose where exactly to insert the stop.
-
+
### Pattern stop toolbar
The pattern stop toolbar can be found by clicking the pattern stop in the map view.
-
-
-- **Save** - saves any edits to the travel and dwell times.
-- **Edit** - allows user to begin editing the stop's details (clicking the browser **Back** button will bring you back to editing the pattern). **Note**: be sure you have saved any pattern information before jumping to edit a stop.
-- **Remove** - removes the stop from the pattern.
-- **Add** - add the stop to the sequence at an additional location. This is useful for patterns that might visit the same stop more than once (e.g., loop routes).
+- **Save:** saves any edits to the travel and dwell times.
+- **Edit:** allows the user to begin editing the stop's details (clicking the browser **Back** button will bring you back to editing the pattern). **Note**: be sure you have saved any pattern information before jumping to edit a stop.
+- **Remove:** removes the stop from the pattern.
+- **Add:** adds the stop to the sequence at an additional location. This is useful for patterns that might visit the same stop more than once (e.g., loop routes).
### Changing the order of stops
-If a stop is in the wrong order, you can click and drag the stop from the list view into the correct sequence. The sequence displayed on the stop being dragged will only update after being dropped into place.
+First, click the `From Stops` button so that Datatools knows the pattern can be regenerated by editing stops. If a stop is in the wrong order, you can drag and drop the stop from the list view into the correct sequence. The sequence displayed on the stop being dragged will only update after being dropped into place.
-
-
-**Note**: if inserting stops mid-sequence or changing the order of stops, the pattern shape will not update automatically. You must edit the pattern manually or click the `Create` button to regenerate the pattern shape from the stops.
+
### Removing stops
-Stops can be removed from the pattern by clicking the red đ button found in the expanded stop card or by clicking the pattern stop on the map view.
-
+Stops can be removed from the pattern by clicking the red trash (🗑) icon found in the expanded stop card or by clicking the pattern stop on the map view.
## Stop timings
### Pattern stop colors
@@ -86,26 +85,41 @@ Once you have adjusted the stop timings, another warning will appear prompting y

### Calculate timings
-The average speed for the route can be used to calculate all the time gaps between stops in one go. Once the stops have been added and the pattern alignment is drawn simply click **Calc. Times** and the time between stops will automatically fill in.
+The average speed for the route can be used to calculate all the time gaps between stops in one go. A few parameters can be specified before calculating times:
- **Average speed** - average speed (in km/hr) to use when calculating timings
- **Dwell time** – allows you to assign the same dwell time to all stops within a trip
pattern
-### Manual timings
-Individual timings for each pattern stop can also be assigned either through expanded the stop card in the list of stops or via clicking the pattern stop on the map view.
+Once the stops have been added and the pattern alignment is drawn, simply click `Calc. times` and the time between stops will automatically fill in.
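+
+As a rough illustration of the arithmetic involved (a hedged sketch with made-up names, not Datatools' actual implementation), the travel time to each stop is essentially the segment distance divided by the average speed, with the dwell time applied at every stop:
+
+```js
+// Hedged sketch: derive per-stop travel and dwell times from the
+// `Calc. times` parameters. All function and field names are illustrative.
+function calcTimes (segmentsKm, averageSpeedKmh, dwellTimeSeconds) {
+  return segmentsKm.map((segmentKm, index) => ({
+    stopSequence: index + 1,
+    // The first stop has no travel time; others take distance / speed.
+    travelTimeSeconds: index === 0 ? 0 : Math.round((segmentKm / averageSpeedKmh) * 3600),
+    dwellTimeSeconds
+  }))
+}
+
+// Example: three stops, 1.2 km then 0.8 km apart, 30 km/h, 20 s dwell
+// gives travel times of 0 s, 144 s and 96 s, with 20 s of dwell at each stop.
+console.log(calcTimes([0, 1.2, 0.8], 30, 20))
+```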
-- Travel time – the time it took to get from the previous station (should be 00:00 on the first stop of the sequence)
-- Dwell time – the time the vehicle rests at the stop
+Another option is to click the `Normalize stop times` button above all the stops to make all arrival and departure times on the pattern match a default travel time for each stop.
+### Manual timings
+Specific timings for each pattern stop can be set by either clicking on the stop on the map view or expanding the stop card in the list view. The two types of times that can be edited are:
+
+- **Travel time:** the time it took to get from the previous station (should be 00:00 on the first stop of the sequence)
+- **Dwell time:** the time the vehicle rests at the stop
+
+### Tutorial Video: Editing/Creating Patterns
+The following video demonstrates, step by step, how to create patterns as outlined above.
+
+
-Choose a route from the list or search by route name in the dropdown. To create a new route, click `+ New route`. **Note:** as with all newly created items (except patterns), the new route will not be saved until the save icon (💾) is clicked.
+Choose a route from the list or search by route name in the dropdown. To create a new route, click `New route` or `+ Create first route` if this is the first route being created.
-## Zoom to route extents
+### Zoom to route extents
Clicking the 🔍 button (in the top toolbar) with a route selected adjusts the map view to show the entire route (i.e., all patterns) you are editing.
## Route details
-Some of the fields to be filled in are required before you can “Save and Continue” and move to the next step, these are:
+The following fields are required before you can hit `Save and Continue`:
-- **Short name** – name of the service/route, this may just be a number
-- **Long name** – the full name of the route, often including the origin and destination
-- **Route Type** – the type of vehicle/transport used on the route
-Other fields in this section are optional and do not have to be filled in, these are:
-- **Description** – a description of the route, do not simply repeat the information in “Long name”
-- **URL** – a link to a webpage with information on the specific route, such as the timetable
-- **Color** – if a route has a color (for use in trip planners etc) it can be assigned here
-- **Text color** – if a route has a text color (for use in trip planners etc) it can be assigned here
-- **Comments** – any additional information about the route can be placed here
+- **Status:** Takes the following values:
+ - **In-Progress:** Indicates that a route has not been completely entered
+ - **Pending Approval:** A route has all the information entered and is awaiting sign-off from a senior person
+ - **Approved:** All the above stages have been completed
+- **Publicly Visible?** This must be set to "Yes" for the route to be included in a GTFS output.
+- **route_id:** An identifier for the route. A randomly generated ID is provided by default.
+- **route_short_name:** Name of the service/route; this may just be a number
+- **route_long_name:** The full name of the route, often including the origin and destination
+- **route_type:** The type of vehicle/transport used on the route
-Once all the required fields, and any of the desired optional fields, are filled in click “Save and continue”.
+The following fields are optional:
+- **agency_id:** The agency identifier from the defined agencies. Generally this field is automatically populated.
+- **route_desc:** A description of the route; do not simply repeat the information in `route_long_name`
+- **route_sort_order:** Orders the routes for presentation to GTFS consumers. Routes with smaller route_sort_order values should be displayed first
+- **route_url:** A link to a webpage with information on the specific route, such as the timetable
+- **route_color:** If a route has a color (for use in trip planners etc) it can be assigned here
+- **route_text_color:** If a route has a text color (for use in trip planners etc) it can be assigned here
+- **Is route wheelchair accessible?** Either "Yes", "No", or "No Information"
+- **Route branding URL:** A link to a webpage with route branding information
+- **Route branding asset:** A route image
-## Review
+Once all the required fields and any of the desired optional fields are filled in, click `Save`.
-This final page allows you to show if a route has been completely entered, and if it has whether it has been checked and approved for inclusion in the GTFS feed.
+**Note:** as with all newly created items (except patterns), the new route will not be saved until the save icon (💾) is clicked.
### Status
@@ -54,3 +64,15 @@ Routes in the Pending Approval or In Progress phase will not be publicly visible
Once you've created and saved a route, you can then begin creating trip patterns for the route.
[Learn about editing trip patterns Âģ](patterns)
+
+### Tutorial Video: Editing/Creating Routes
+This video provides a step-by-step demonstration of how to edit or create a route.
+
+
diff --git a/docs/user/editor/schedules.md b/docs/user/editor/schedules.md
index d46c7f8b4..d845ae9e2 100644
--- a/docs/user/editor/schedules.md
+++ b/docs/user/editor/schedules.md
@@ -1,4 +1,9 @@
-# Trips
+# Schedules/Calendars
+
+## Schedule and Calendar Overview
+The schedule editor allows for the creation of trips/frequencies for any combination of route, pattern, and/or calendar. To manage or edit schedules or exceptions, navigate to the `Calendar` tab located in the left-hand menu:
+
+
## Keyboard shortcuts
There are a number of keyboard shortcuts for jumping between and modifying trips. To view these, click the `?` in the top righthand corner of the timetable editor. You can also view these at any time while editing trips by typing `?`. The shortcuts are shown below:
@@ -8,7 +13,85 @@ There are a number of keyboard shortcuts for jumping between and modifying trips
## Selecting schedules
The schedule editor allows users to create trips/frequencies for any route, pattern, and calendar combination. The selectors at the top of the page allow users to navigate between calendars for a given pattern or switch between patterns for a route or even routes within the feed.
-Each selection has a set of statistics associated with it to give you a sense of how it fits in with the feed:
+### Calendars
+Transit calendars in GTFS are used to define the days of the week on which transit services are available. See the [GTFS specification calendar reference](https://gtfs.org/schedule/reference/#calendartxt) for more information.
+
+### Exceptions
+Exceptions are deviations from the regular transit service schedule, such as holidays, special events, cancellations and service disruptions. See the [GTFS specification calendar dates reference](https://gtfs.org/schedule/reference/#calendar_datestxt) for more information.
+
+### Schedules/Timetable-based routes
+Timetable-based routes follow a fixed schedule in which the start time, end time, and all the intermediate stops are pre-defined.
+
+### Frequency-based routes
+Unlike the fixed nature of timetable-based trips, frequency-based trips run at regular intervals, with a fixed amount of time between consecutive trips. Frequency-based service offers more flexibility and easier adjustment to changing demand. Visit [GTFS specification frequency reference](https://gtfs.org/schedule/reference/#frequenciestxt) for more information.
+
+## Editing/Creating Calendars
+To start editing a calendar, click `+ Create first calendar` if this is the first calendar being added, or click an existing calendar to begin adding/editing its properties, which include:
+
+- **Service ID:** Unique ID for the calendar
+- **Description:** Optional description for calendar (defaults to initial days of week specified)
+- **Days of service:** Days of week on which the service operates
+- **Start/End dates:** The first and last day on which the service assigned to the calendar should run
+
+**Note: Be sure to click the save button (💾) after any changes to calendars are made.**
+
+### Tutorial Video: Creating/Editing Calendars
+
+
+
+## Editing/Creating Exceptions
+You will be able to add or edit properties such as:
+
+- **Name:** Name of schedule exception
+- **Schedule to run:** The chosen schedule that should replace the regularly operating calendars (see Exception types below)
+- **Dates:** One or more dates to which the schedule exception applies
+
+### Exception types
+
+There are a number of built-in exception types (or available schedules to run) that allow for a high degree of flexibility when assigning special services.
+
+- **[Su, M, Tu, W, Th, F, Sa]** - replaces all service for the specified dates with the calendar(s) that operate on the chosen day of the week
+- **No service** - indicates that no service of any kind should operate on the specified dates
+- **Custom** - replaces all service for the specified dates with trips operating on one or more calendars specified with this option. E.g., run only the `holiday` and `holiday-plus` calendars on Thanksgiving Day.
+- **Swap** - similar to the **Custom** option; however, this option allows for removing one or more specific calendars for the specified dates and/or adding one or more specific calendars. This option is especially useful if only certain routes have altered service on specific dates. For example, a user could remove the `weekday-route-1` calendar and add the `special-route-1` calendar.
+
+**Note: Be sure to click the save button (💾) after any changes to exceptions are made.**
+### Tutorial Video: Creating/Editing Exceptions
+
+
+
+**Note**: At least one route, pattern and calendar must have been created to edit schedules.
+
+The selectors located at the top of the page allow users to navigate between calendars for a specific pattern, or to switch between patterns for a route or between routes within the feed. Different combinations of route, pattern, and calendar can be selected to generate the desired timetable.
+
+
+
+Each selection has a set of statistics associated with it, shown as a number in a grey or green box, which, when hovered over, provides the following information:
- **Route**
- \# of trips for the entire route
@@ -18,11 +101,22 @@ Each selection has a set of statistics associated with it to give you a sense of
- \# of trips for selected pattern
- \# of trips in calendar for entire feed
-
+Once a route, pattern and calendar are selected, a timetable with the following trip details will appear:
-## Schedule toolbar
+- **Block ID** - identifies the vehicle used for the trip
+- **Trip ID** - unique identifier for the trip
+- **Trip Headsign** - headsign for the trip
+- **Arrival/Departure Times** - arrival and departure times (departures shown in grey text) for each stop along the pattern
+
+
+
+To select trips to offset, duplicate or delete, click the row number on the lefthand side of the row. To toggle selection of all trips, click the box in the upper lefthand corner.
-
+
+
+After trips are selected, navigate to the schedule toolbar at the top right of the screen.
+
+## Schedule toolbar
- **Add trip** - add blank trip (first timepoint is `00:00:00`)
- **Duplicate trip(s)** - duplicate the last trip in the spreadsheet or whichever rows are selected
@@ -31,40 +125,48 @@ Each selection has a set of statistics associated with it to give you a sense of
- **Save** - save all changes
- **Offset trip(s)** - specify an offset (`HH:MM`) to offset the last trip in the spreadsheet or whichever rows are selected
-## Selecting trips
-To select trips to offset, duplicate or delete, click the row number on the lefthand side of the row. To toggle selection of all trips, click the box in the upper lefthand corner.
-
+
+
+**Note:** When entering times manually into the schedule editor, they will automatically be converted to a standardized format, e.g. `13:00:00`.
-## Recognized time entry formats
-When entering times manually into the schedule editor they will automatically be converted to a standardized format `13:00:00`. The following time formats are automatically recognized and converted:
+The following time formats are automatically recognized and converted:
- 24-hr
- `13:00:00`
- `13:00`
+ - `1300`
- 12-hr
- - `1:00p`
+ - `1pm`
- `1:00pm`
- `1:00 pm`
+ - `1:00:00pm`
- `1:00:00 pm`
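+
+For reference, here is a hedged sketch (not the editor's actual parsing code; the function name is made up) of how the formats listed above could be normalized to `HH:MM:SS`:
+
+```js
+// Minimal sketch of normalizing the recognized time formats to HH:MM:SS.
+function normalizeTime (input) {
+  const s = input.trim().toLowerCase()
+  // 12-hour formats, e.g. "1pm", "1:00pm", "1:00:00 pm"
+  const twelveHr = s.match(/^(\d{1,2})(?::(\d{2}))?(?::(\d{2}))?\s*(am|pm)$/)
+  if (twelveHr) {
+    let hours = parseInt(twelveHr[1], 10) % 12
+    if (twelveHr[4] === 'pm') hours += 12
+    return `${String(hours).padStart(2, '0')}:${twelveHr[2] || '00'}:${twelveHr[3] || '00'}`
+  }
+  // 24-hour formats, e.g. "1300", "13:00", "13:00:00"
+  const twentyFourHr = s.match(/^(\d{1,2}):?(\d{2})(?::(\d{2}))?$/)
+  if (twentyFourHr) {
+    return `${twentyFourHr[1].padStart(2, '0')}:${twentyFourHr[2]}:${twentyFourHr[3] || '00'}`
+  }
+  return null // unrecognized format
+}
+
+console.log(normalizeTime('1:00 pm')) // "13:00:00"
+console.log(normalizeTime('1300'))    // "13:00:00"
+```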
-## Editing timetables
-Trip details include:
+### Tutorial Video: Editing/Creating Timetables
+The following video demonstrates the creation and editing of timetables described above.
-- **Block ID** - identifies the vehicle used for the trip
-- **Trip ID** - unique identifier for the trip
-- **Trip Headsign** - headsign for the trip
-- **Arrival/Departure Times** - arrival and departure times (departures shown in grey text) for each stop along the pattern
+
-
+
\ No newline at end of file
diff --git a/docs/user/editor/stops.md b/docs/user/editor/stops.md
index 054b9ee36..86134a0f6 100644
--- a/docs/user/editor/stops.md
+++ b/docs/user/editor/stops.md
@@ -2,37 +2,66 @@
## Editing stops
-To begin editing stops, click the map marker icon button on the lefthand navigation bar.
+To begin editing stops, click the map marker icon button on the lefthand navigation bar (outlined in red).
-
+
-## Selecting a stop
+### Selecting a stop
Choose a stop from the list or search by stop name in the dropdown.
You can also **zoom into the map** while the stop list is visible and once you're close enough you'll begin to see stops displayed. Click one to begin editing its details.
-## Creating a stop: right-click on map
+### Creating a stop
-To create a new stop, **right-click on the map** in the location you would like to place the stop. **Note:** as with all newly created items (except patterns), the new stop will not be saved until the save icon (đž) is clicked.
+To create a new stop, **right-click on the map** in the location you would like to place the stop.
-## Moving a stop
+**Note:** as with all newly created items (except patterns), the new stop will not be saved until the save icon (💾) is clicked.
-To move a selected stop simply **click and drag the stop to the new location**. Or, if already you know the latitude and longitude coordinates, you can copy these into the text fields. After moving the stop, click save to keep the changes.
+### Editing a stop
+Once a stop is created or selected, the following parameters are required:
+- **Stop ID (`stop_id`):** Identifies a stop, station, or station entrance.
+- **Location (`stop_lat/stop_lon`):** These are defined by latitude and longitude. **Note:** Stop locations should have an error of no more than four meters when compared to the actual stop position.
-
+### Moving a stop
-## View all stops for feed
+To move a selected stop, **click and drag the stop to the new location**. Or, if you already know the latitude and longitude coordinates, you can copy these into the text fields. After moving the stop, click save to keep the changes.
+
+### View All Stops for a Feed
To view all stops for a feed, hover over the map layers icon (in the top, lefthand corner of the map) and turn on the `Stop locations` layer. When you do, you'll see all of the stops (which appear as grey circles) for the feed even at wide zoom levels. This layer can be viewed whether or not the stop list is visible, so it can be helpful for users who would like to view stop locations alongside routes or trip patterns.
-
+
Clicking on a stop shown in this layer will select the stop for editing, but be careful—it can be tricky to select the right stop from very far away!
+### Tutorial Video: Editing/Creating Stops
+The following video demonstrates, step by step, the creation and editing of stops as outlined above. The video covers:
+- Adding stops
+- Editing stop positions
+- Editing stop details
+- Showing all stops on map interface
+
+
+
OLD POSITION --X--> O
+ // O --------------------------> O
+ if (!movedFromEnd) {
+ // Delete old "to" segment control points and segments, no "to" segment if we're moving from the end
+ // $FlowFixMe
+ const previousToSegments = previousToStopControlPoint.cpIndex - movedStopControlPoint.cpIndex; // Semi-colon for babel parsing.
+ [deletedControlPoints, deletedSegments] = updateControlPointsAndSegments(deletedControlPoints, deletedSegments, movedStopControlPoint.cpIndex, previousToSegments)
+ }
+
+ if (!movedFromStart) {
+ // Delete old "from" segment control points and segments, no "from" segment if we're moving from the start
+ // $FlowFixMe
+ const previousFromSegments = movedStopControlPoint.cpIndex - previousFromStopControlPoint.cpIndex; // Semi-colon for babel parsing.
+ // $FlowFixMe
+ [deletedControlPoints, deletedSegments] = updateControlPointsAndSegments(deletedControlPoints, deletedSegments, previousFromStopControlPoint.cpIndex, previousFromSegments)
+ }
+
+ return [deletedControlPoints, deletedSegments]
+}
+
+/**
+ * Method to remove a segment and insert a new one that points to the new stop that is being inserted.
+ * TODO: Refactor this method to make things cleaner.
+ */
+const removeNewSegments = (
+ deletedControlPoints: Array{this.messages('help.body.0')}
-{this.messages('help.body.1')}
+{this.messages('help.body.0')}
+{this.messages('help.body.1')}
+