
Commit 7a1cb7b

Update gtfs2lc dependency and adjust to it
julianrojas87 committed Jan 3, 2025
1 parent 59eb43d commit 7a1cb7b
Showing 2 changed files with 42 additions and 43 deletions.
79 changes: 39 additions & 40 deletions lib/manager/dataset_manager.js
@@ -216,46 +216,45 @@ export class DatasetManager {
             const connsPath = `${this.storage}/linked_connections/${companyName}`;
             logger.info(`Creating ${companyName} Linked Connections...`);
 
-            converter.resultStream(uncompressed_feed, connsPath, async rawConns => {
-
-                logger.info(`Sorting and fragmenting ${companyName} Linked Connections...`);
-                // Create folder for Linked Connection fragments
-                fs.mkdirSync(`${this.storage}/linked_pages/${companyName}/${file_name}`);
-
-                // Proceed to sort and fragment the Linked Connections graph
-                const sorted = await this.sortLCByDepartureTime(rawConns);
-
-                sorted.pipe(JsonLParser.parser())
-                    .pipe(new PageWriterStream(`${this.storage}/linked_pages/${companyName}/${file_name}`,
-                        dataset.fragmentSize || 300))
-                    .on('finish', async () => {
-                        const t1 = (new Date().getTime() - t0) / 1000;
-                        logger.info(`Dataset conversion for ${companyName} completed successfully (took ${t1} seconds)`);
-
-                        // Update Catalog, Stops and Routes
-                        const [stops, routes] = await Promise.all([
-                            new Stops(uncompressed_feed).createStopList(companyName),
-                            new Routes(uncompressed_feed).createRouteList(companyName),
-                        ]);
-                        await Promise.all([
-                            writeFile(`${this.storage}/stops/${companyName}/stops.json`, JSON.stringify(stops), 'utf8'),
-                            writeFile(`${this.storage}/routes/${companyName}/routes.json`, JSON.stringify(routes), 'utf8'),
-                        ]);
-
-                        const catalog = await new Catalog().createCatalog(companyName);
-                        await writeFile(`${this.storage}/catalog/${companyName}/catalog.json`, JSON.stringify(catalog), 'utf8'),
-
-                        logger.info('DCAT catalog updated correctly');
-                        logger.info(`Stop dataset for ${companyName} updated`);
-                        logger.info(`Route dataset for ${companyName} updated`);
-
-                        // Clean up
-                        await del([
-                            lockPath,
-                            uncompressed_feed,
-                        ], { force: true });
-                    });
-            });
+            const rawConns = await converter.convert(uncompressed_feed, connsPath);
+
+            logger.info(`Sorting and fragmenting ${companyName} Linked Connections...`);
+            // Create folder for Linked Connection fragments
+            fs.mkdirSync(`${this.storage}/linked_pages/${companyName}/${file_name}`);
+
+            // Proceed to sort and fragment the Linked Connections graph
+            const sorted = await this.sortLCByDepartureTime(rawConns);
+
+            sorted.pipe(JsonLParser.parser())
+                .pipe(new PageWriterStream(`${this.storage}/linked_pages/${companyName}/${file_name}`,
+                    dataset.fragmentSize || 300))
+                .on('finish', async () => {
+                    const t1 = (new Date().getTime() - t0) / 1000;
+                    logger.info(`Dataset conversion for ${companyName} completed successfully (took ${t1} seconds)`);
+
+                    // Update Catalog, Stops and Routes
+                    const [stops, routes] = await Promise.all([
+                        new Stops(uncompressed_feed).createStopList(companyName),
+                        new Routes(uncompressed_feed).createRouteList(companyName),
+                    ]);
+                    await Promise.all([
+                        writeFile(`${this.storage}/stops/${companyName}/stops.json`, JSON.stringify(stops), 'utf8'),
+                        writeFile(`${this.storage}/routes/${companyName}/routes.json`, JSON.stringify(routes), 'utf8'),
+                    ]);
+
+                    const catalog = await new Catalog().createCatalog(companyName);
+                    await writeFile(`${this.storage}/catalog/${companyName}/catalog.json`, JSON.stringify(catalog), 'utf8'),
+
+                    logger.info('DCAT catalog updated correctly');
+                    logger.info(`Stop dataset for ${companyName} updated`);
+                    logger.info(`Route dataset for ${companyName} updated`);
+
+                    // Clean up
+                    await del([
+                        lockPath,
+                        uncompressed_feed,
+                    ], { force: true });
+                });
         } else {
             logger.warn(companyName + " dataset was already downloaded");
         }
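In short, the adjustment swaps gtfs2lc's callback-based resultStream API for an awaitable convert call, which removes one level of nesting: the sorting, fragmenting and catalog/stop/route updates now run as straight-line async/await code. A minimal before/after sketch, built only from the names visible in the diff above (not standalone code; the surrounding DatasetManager method, imports and error handling are omitted):

    // Before: the raw Linked Connections were delivered through a callback.
    converter.resultStream(uncompressed_feed, connsPath, async rawConns => {
        // sort, fragment and publish rawConns here
    });

    // After: convert() returns a promise, so the result is simply awaited
    // and the rest of the method continues without extra nesting.
    const rawConns = await converter.convert(uncompressed_feed, connsPath);
    // sort, fragment and publish rawConns here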
6 changes: 3 additions & 3 deletions package-lock.json

Some generated files are not rendered by default.
