Changes from all commits
26 commits
9efc8fe
refactor(Update to include fares v2 data manager refs and tests):
br648 Aug 20, 2024
8e42038
refactor(Bumped GTFS-Lib version):
br648 Oct 18, 2024
c81ffe4
fix(Fixed merge conflicts):
br648 Jan 28, 2025
468a586
improvement(pom.xml): Bumped GTFS-lib version to latest fares v2 dev…
br648 Jan 28, 2025
8f0474f
improvement(pom.xml): Bumped GTFS lib version
br648 Jan 31, 2025
b4deec9
improvement(pom.xml): Bumped GTFS-Lib version
br648 Feb 5, 2025
df16b7e
improvement(Unit test): Added a simple test to update a fare product
br648 Feb 6, 2025
7b446d1
fix(pom.xml): Removed unwanted GTFS lib dependency.
br648 Feb 6, 2025
fe4e278
improvement(EditorControllerTest.java): Updated fare product test to …
br648 Feb 7, 2025
0cff428
improvement(Fixed merge conflict):
br648 Jun 17, 2025
e9bc793
improvement(pom.xml): Updated the GTFS lib version
br648 Jun 17, 2025
88b5a79
Merge branch 'dev' into feature/DT-448-fares-v2
br648 Jun 17, 2025
b42bd84
Merge branch 'dev' into feature/DT-448-fares-v2
br648 Jul 4, 2025
d2df5ca
Merge branch 'dev' into feature/DT-448-fares-v2
br648 Jul 15, 2025
6f6e617
improvement(Bumped GTFS-Lib version):
br648 Jul 31, 2025
bc3426f
improvement(Reverted gtfs-lib version): Newer version had breaking ch…
br648 Aug 1, 2025
14e116d
feat(Address changes in gtfs-lib): Merge stop areas into stops update
br648 Aug 1, 2025
3f867f9
improvement(GTFS-Lib version): Bumped to latest for merge stop areas
br648 Aug 5, 2025
9cd803c
Merge branch 'dev' into feature/DT-448-fares-v2
br648 Aug 5, 2025
3b7d073
Merge branch 'dev' into feature/DT-448-fares-v2
br648 Aug 7, 2025
f36adff
improvement(pom.xml): Bumped GTFS-Lib version to include entry in zip…
br648 Aug 11, 2025
adb8ebd
improvement(GTFS-Lib): Bumped version to include export for stop areas
br648 Aug 14, 2025
8c9091f
improvement(Bumped GTFS-Lib version):
br648 Aug 22, 2025
bb5b3fb
improvement(Bumped GTFS-Lib version):
br648 Aug 26, 2025
30f3fbf
improvement(Bumped GTFS-Lib version):
br648 Aug 27, 2025
78aa3f2
Merge pull request #632 from ibi-group/merge-stop-areas
br648 Aug 27, 2025
2 changes: 1 addition & 1 deletion pom.xml
@@ -272,7 +272,7 @@
<groupId>com.github.ibi-group</groupId>
<artifactId>gtfs-lib</artifactId>
<!-- Latest dev build on jitpack.io -->
<version>5e004388b2473c391412fdd4954068c35186e231</version>
<version>bf7aaa904bbc0a8705295914baebc0675253e487</version>
<!-- Exclusions added in order to silence SLF4J warnings about multiple bindings:
http://www.slf4j.org/codes.html#multiple_bindings
-->
8 changes: 8 additions & 0 deletions src/main/java/com/conveyal/datatools/manager/DataManager.java
@@ -207,19 +207,27 @@ static void registerRoutes() throws IOException {
SnapshotController.register(EDITOR_API_PREFIX);
EditorLockController.register(EDITOR_API_PREFIX);

new EditorControllerImpl(EDITOR_API_PREFIX, Table.AREAS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.AGENCY, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.ATTRIBUTIONS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.CALENDAR, DataManager.GTFS_DATA_SOURCE);
// NOTE: fare_attributes controller handles updates to nested table fare_rules.
new EditorControllerImpl(EDITOR_API_PREFIX, Table.FARE_ATTRIBUTES, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.FARE_LEG_RULES, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.FARE_MEDIAS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.FARE_PRODUCTS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.FARE_TRANSFER_RULES, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.FEED_INFO, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.NETWORKS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.ROUTES, DataManager.GTFS_DATA_SOURCE);
// NOTE: Patterns controller handles updates to nested tables shapes, pattern stops, and frequencies.
new EditorControllerImpl(EDITOR_API_PREFIX, Table.PATTERNS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.SCHEDULE_EXCEPTIONS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.STOPS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.TIME_FRAMES, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.TRANSLATIONS, DataManager.GTFS_DATA_SOURCE);
new EditorControllerImpl(EDITOR_API_PREFIX, Table.TRIPS, DataManager.GTFS_DATA_SOURCE);

// TODO: Add transfers.txt controller?
}
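The registrations above wire the new GTFS Fares V2 tables (areas, networks, timeframes, fare media, fare products, fare leg rules, fare transfer rules) into the same generic editor controller that already serves agency, routes, stops, and the other tables. A minimal sketch of what each registration exposes, assuming the endpoint shape exercised by EditorControllerTest later in this diff (Table.FARE_PRODUCTS maps to the fareproduct path segment; FEED_SOURCE_ID is a hypothetical placeholder):

import io.restassured.http.Method;
import static io.restassured.RestAssured.given;

// Illustrative only, not part of this PR: create a fare product through the
// endpoint registered above for Table.FARE_PRODUCTS.
String response = given()
    .port(DataManager.PORT)
    .body("{\"fare_product_id\":\"DAY_PASS\",\"amount\":\"5.0\",\"currency\":\"USD\"}")
    .request(Method.POST, "/api/editor/secure/fareproduct?feedId=" + FEED_SOURCE_ID)
    .then()
    .extract()
    .response()
    .asString();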

@@ -7,6 +7,7 @@
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.ReferenceTracker;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -131,7 +132,7 @@ public void startNewFeed(int feedIndex) throws IOException {
keyFieldMissing = false;

idScope = makeIdScope(version);
csvReader = table.getCsvReader(feed.zipFile, null);
csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
// If csv reader is null, the table was not found in the zip file. There is no need
// to handle merging this table for this zip file.
// No need to iterate over second (active) file if strategy is to simply extend the future GTFS
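This hunk is the first of several call sites this PR migrates from table.getCsvReader(...) to the new gtfs-lib helper. A minimal sketch of the recurring pattern, assuming only what the surrounding hunks show (the helper returns null when the table's file is absent from the archive, so every caller guards before reading):

import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;

import java.io.IOException;
import java.util.zip.ZipFile;

class CsvReaderPatternSketch {
    static void processTableIfPresent(Table table, ZipFile zipFile) throws IOException {
        // Null means the table's file was not found in the GTFS zip.
        CsvReader reader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, zipFile, null);
        if (reader == null) {
            return; // Nothing to merge or validate for this feed.
        }
        while (reader.readRecord()) {
            // ... process one CSV record.
        }
    }
}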
@@ -4,6 +4,7 @@
import com.conveyal.gtfs.error.NewGTFSError;
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -74,7 +75,7 @@ private void checkThatStopCodesArePopulatedWhereRequired() throws IOException {
int stopCodeIndex = getFieldIndex("stop_code");
// Get special stops reader to iterate over every stop and determine if stop_code values
// are present.
CsvReader stopsReader = table.getCsvReader(feed.zipFile, null);
CsvReader stopsReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
while (stopsReader.readRecord()) {
stopsCount++;
// Special stop records (i.e., a station, entrance, or anything with
@@ -6,6 +6,7 @@
import com.conveyal.datatools.manager.utils.json.JsonUtil;
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.WordUtils;
@@ -199,7 +200,11 @@ public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status st
status.fail(String.format("Unsupported GTFS file '%s'", tableName));
return;
}
CsvReader csvReader = gtfsTable.getCsvReader(new ZipFile(tempZipPath.toAbsolutePath().toString()), null);
CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(
gtfsTable,
new ZipFile(tempZipPath.toAbsolutePath().toString()),
null
);
if (csvReader == null) {
status.fail(String.format("'Normalize Field' failed because file '%s' was not found in the GTFS archive", tableName));
return;
@@ -5,6 +5,7 @@
import com.conveyal.datatools.manager.utils.GtfsUtils;
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,7 +59,11 @@ public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status st
Files.copy(originalZipPath, tempZipPath, StandardCopyOption.REPLACE_EXISTING);

Table gtfsTable = GtfsUtils.getGtfsTable("stop_times");
CsvReader csvReaderForStopTimes = gtfsTable.getCsvReader(new ZipFile(tempZipPath.toAbsolutePath().toString()), null);
CsvReader csvReaderForStopTimes = CsvReaderUtil.getCsvReaderAccordingToFileName(
gtfsTable,
new ZipFile(tempZipPath.toAbsolutePath().toString()),
null
);
final String[] headersForStopTime = csvReaderForStopTimes.getHeaders();
Field[] fieldsFoundInStopTimes = gtfsTable.getFieldsFromFieldHeaders(headersForStopTime, null);
Map<String, Integer> fieldIndexes = getFieldIndexes(fieldsFoundInStopTimes);
@@ -73,7 +78,11 @@ public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status st
);

gtfsTable = GtfsUtils.getGtfsTable("trips");
CsvReader csvReaderForTrips = gtfsTable.getCsvReader(new ZipFile(tempZipPath.toAbsolutePath().toString()), null);
CsvReader csvReaderForTrips = CsvReaderUtil.getCsvReaderAccordingToFileName(
gtfsTable,
new ZipFile(tempZipPath.toAbsolutePath().toString()),
null
);
final String[] headersForTrips = csvReaderForTrips.getHeaders();
Field[] fieldsFoundInStopTrips = gtfsTable.getFieldsFromFieldHeaders(headersForTrips, null);
int tripIdFieldIndex = getFieldIndex(fieldsFoundInStopTrips, TRIP_ID_FIELD_NAME);
@@ -15,6 +15,7 @@
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.model.StopTime;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -45,7 +46,7 @@ public class MergeFeedUtils {
public static Set<String> getIdsForTable(ZipFile zipFile, Table table) throws IOException {
Set<String> ids = new HashSet<>();
String keyField = table.getKeyFieldName();
CsvReader csvReader = table.getCsvReader(zipFile, null);
CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, zipFile, null);
if (csvReader == null) {
LOG.warn("Table {} not found in zip file: {}", table.name, zipFile.getName());
return ids;
@@ -117,7 +118,7 @@ public static Set<Field> getAllFields(List<FeedToMerge> feedsToMerge, Table tabl
Set<Field> sharedFields = new HashSet<>();
// First, iterate over each feed to collect the shared fields that need to be output in the merged table.
for (FeedToMerge feed : feedsToMerge) {
CsvReader csvReader = table.getCsvReader(feed.zipFile, null);
CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
// If csv reader is null, the table was not found in the zip file.
if (csvReader == null) {
continue;
@@ -14,6 +14,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.restassured.http.Method;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -27,10 +28,13 @@
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Objects;
import java.util.stream.Stream;

import static com.conveyal.datatools.TestUtils.assertThatSqlCountQueryYieldsExpectedCount;
import static com.conveyal.datatools.TestUtils.createFeedVersion;
import static com.conveyal.datatools.TestUtils.createFeedVersionFromGtfsZip;
import static com.conveyal.datatools.TestUtils.zipFolderFiles;
import static com.conveyal.datatools.manager.auth.Auth0Users.USERS_API_PATH;
import static com.conveyal.datatools.manager.controllers.api.UserController.TEST_AUTH0_DOMAIN;
import static io.restassured.RestAssured.given;
@@ -45,8 +49,10 @@ public class EditorControllerTest extends UnitTest {
private static Project project;
private static FeedSource feedSource;
private static FeedSource feedSourceCascadeDelete;
private static FeedSource faresV2FeedSource;
private static FeedVersion feedVersion;
private static FeedVersion feedVersionCascadeDelete;
private static FeedVersion faresV2Version;
private static final ObjectMapper mapper = new ObjectMapper();

/**
@@ -72,12 +78,18 @@ public static void setUp() throws Exception {
feedSourceCascadeDelete.projectId = project.id;
Persistence.feedSources.create(feedSourceCascadeDelete);

faresV2FeedSource = new FeedSource("FaresV2");
faresV2FeedSource.projectId = project.id;
Persistence.feedSources.create(faresV2FeedSource);

feedVersion = createFeedVersionFromGtfsZip(feedSource, "bart_old.zip");
feedVersionCascadeDelete = createFeedVersionFromGtfsZip(feedSourceCascadeDelete, "bart_old.zip");
faresV2Version = createFeedVersion(faresV2FeedSource, zipFolderFiles("fake-agency-with-fares-v2"));

// Create and run snapshot jobs
crateAndRunSnapshotJob(feedVersion.name, feedSource.id, feedVersion.namespace);
crateAndRunSnapshotJob(feedVersionCascadeDelete.name, feedSourceCascadeDelete.id, feedVersionCascadeDelete.namespace);
createAndRunSnapshotJob(feedVersion.name, feedSource.id, feedVersion.namespace);
createAndRunSnapshotJob(feedVersionCascadeDelete.name, feedSourceCascadeDelete.id, feedVersionCascadeDelete.namespace);
createAndRunSnapshotJob(faresV2Version.name, faresV2FeedSource.id, faresV2Version.namespace);
LOG.info("{} setup completed in {} ms", EditorControllerTest.class.getSimpleName(), System.currentTimeMillis() - startTime);
}

Expand All @@ -86,15 +98,22 @@ public static void tearDown() {
project.delete();
feedSource.delete();
feedSourceCascadeDelete.delete();
faresV2FeedSource.delete();
}

/**
* Create and run a snapshot job in the current thread (so tests do not run until this is complete).
*/
private static void crateAndRunSnapshotJob(String feedVersionName, String feedSourceId, String namespace) {
private static void createAndRunSnapshotJob(String feedVersionName, String feedSourceId, String namespace) {
Snapshot snapshot = new Snapshot("Snapshot of " + feedVersionName, feedSourceId, namespace);
CreateSnapshotJob createSnapshotJob =
new CreateSnapshotJob(Auth0UserProfile.createTestAdminUser(), snapshot, true, false, false);
new CreateSnapshotJob(
Auth0UserProfile.createTestAdminUser(),
snapshot,
true,
false,
false
);
createSnapshotJob.run();
}

@@ -111,14 +130,7 @@ private static Stream<Arguments> createPatchTableTests() {
*/
@ParameterizedTest
@MethodSource("createPatchTableTests")
public void canPatchTableTests(
String field,
String entity,
int expectedCount,
String graphQLQueryFile,
String table
) throws IOException {

void canPatchTableTests(String field, String entity, int expectedCount, String graphQLQueryFile, String table) throws IOException {
LOG.info("Making patch {} request", table);
String value = "NEW";
ObjectNode jsonBody = mapper.createObjectNode();
Expand All @@ -138,7 +150,7 @@ public void canPatchTableTests(
* Make sure the patch table endpoint can patch stops conditionally with query.
*/
@Test
public void canPatchStopsConditionally() throws IOException {
void canPatchStopsConditionally() throws IOException {
LOG.info("Making conditional patch stops request");
ObjectNode jsonBody = mapper.createObjectNode();
String field = "stop_desc";
@@ -187,13 +199,7 @@ void canCascadeDeleteStop() throws IOException, SQLException {
stopId,
feedVersionCascadeDelete.feedSourceId
);
String response = given()
.port(DataManager.PORT)
.delete(path)
.then()
.extract()
.response()
.asString();
String response = makeRequest(Method.DELETE, path, "");
JsonNode json = mapper.readTree(response);
assertEquals(OK_200, json.get("code").asInt());

@@ -204,20 +210,63 @@ void canCascadeDeleteStop() throws IOException, SQLException {
}

/**
* Perform patch table request on the feed source ID with the requested query and patch JSON. A null query will
* apply the patch JSON to the entire table.
* Confirm that a fare product can be created, updated and deleted.
*/
private static int patchTableRequest(String entity, String feedId, String query, JsonNode oatchJSON) throws IOException {
String path = String.format("/api/editor/secure/%s?feedId=%s", entity, feedId);
if (query != null) path += "&" + query;
String response = given()
@Test
void canCreateUpdateAndDeleteFareProduct() throws IOException {
String fareProductId = "AERIAL_TRAM_ROUND_TRIP";
String fareProductIdUpdated = "AERIAL_TRAM_ROUND_TRIP_UPDATED";
String urlPrefix = "/api/editor/secure/fareproduct";
String urlSuffix = String.format("?feedId=%s&sessionId=test", faresV2Version.feedSourceId);
String payload =
"{" +
"\"fare_product_id\":\"" + fareProductId + "\"," +
"\"fare_product_name\":\"Portland Aerial Tram Single Round Trip\"," +
"\"fare_media_id\":\"1\"," +
"\"amount\":\"13.5\"," +
"\"currency\":\"USD\"" +
"}";

// Create.
String response = makeRequest(Method.POST, String.format("%s%s", urlPrefix, urlSuffix), payload);
JsonNode json = mapper.readTree(response);
String id = json.get("id").asText();
assertEquals(fareProductId, json.get("fare_product_id").asText());

// Update.
payload = payload.replace(fareProductId, fareProductIdUpdated);
response = makeRequest(Method.PUT, String.format("%s/%s%s", urlPrefix, id, urlSuffix), payload);
json = mapper.readTree(response);
assertEquals(fareProductIdUpdated, json.get("fare_product_id").asText());

// Delete.
response = makeRequest(Method.DELETE, String.format("%s/%s%s", urlPrefix, id, urlSuffix), "");
json = mapper.readTree(response);
assertEquals(200, json.get("code").asInt());
}

/**
* Make a request and return the response.
*/
private static String makeRequest(Method method, String path, Object payload) {
return given()
.port(DataManager.PORT)
.body(oatchJSON)
.patch(path)
.body(payload)
.request(method, path)
.then()
.extract()
.response()
.asString();
}
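Usage sketch (illustrative): the cascade-delete request earlier in this file reduces to a single call. The stop path segment and query shape are assumed from the other editor endpoints in this test, not confirmed by the collapsed lines above.

String response = makeRequest(
    Method.DELETE,
    String.format("/api/editor/secure/stop/%s?feedId=%s", stopId, feedVersionCascadeDelete.feedSourceId),
    ""
);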

/**
* Perform patch table request on the feed source ID with the requested query and patch JSON. A null query will
* apply the patch JSON to the entire table.
*/
private static int patchTableRequest(String entity, String feedId, String query, JsonNode patchJSON) throws IOException {
String path = String.format("/api/editor/secure/%s?feedId=%s", entity, feedId);
if (query != null) path += "&" + query;
String response = makeRequest(Method.PATCH, path, patchJSON);
JsonNode json = mapper.readTree(response);
return json.get("count").asInt();
}
@@ -230,16 +279,9 @@ private static JsonNode graphqlQuery (String namespace, String graphQLQueryFile)
ObjectNode variables = mapper.createObjectNode();
variables.put("namespace", namespace);
graphQLBody.set("variables", variables);
String query = IOUtils.toString(EditorControllerTest.class.getClassLoader().getResourceAsStream(graphQLQueryFile));
String query = IOUtils.toString(Objects.requireNonNull(EditorControllerTest.class.getClassLoader().getResourceAsStream(graphQLQueryFile)));
graphQLBody.put("query", query);
String graphQLString = given()
.port(DataManager.PORT)
.body(graphQLBody)
.post("api/manager/secure/gtfs/graphql")
.then()
.extract()
.response()
.asString();
String graphQLString = makeRequest(Method.POST, "api/manager/secure/gtfs/graphql", graphQLBody);
return mapper.readTree(graphQLString);
}
