Skip to content

Commit

Permalink
chore: job de découpage des isochrones
Browse files Browse the repository at this point in the history
  • Loading branch information
K4ST0R committed Aug 28, 2024
1 parent 9e646cf commit c0c81a5
Show file tree
Hide file tree
Showing 6 changed files with 117,748 additions and 116,431 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -109,3 +109,6 @@ dist
trajectoires-pro.Rproj
.Rhistory
.Rdata

server/data/isochrones/
server/data/isochrones_output/
233,811 changes: 117,471 additions & 116,340 deletions server/data/acce_etablissements.csv

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
"geotoolbox": "2.0.3",
"iconv-lite": "0.6.3",
"joi": "17.6.0",
"kysely": "0.27.4",
"lodash-es": "4.17.21",
"luxon": "2.3.2",
"migrate-mongo": "10.0.0",
Expand All @@ -51,6 +52,7 @@
"node-cache": "5.1.2",
"node-xml-stream": "1.0.2",
"oleoduc": "0.8.0",
"pg": "8.12.0",
"pretty-ms": "7.0.1",
"rate-limiter-flexible": "2.3.7",
"remove-accents": "0.5.0",
Expand Down
32 changes: 32 additions & 0 deletions server/src/cli.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import { importIndicateurEntree } from "./jobs/formations/importIndicateurEntree
import { computeFormationTag } from "./jobs/formations/tag/computeFormationTag.js";
import { importIndicateurPoursuite } from "./jobs/formations/importIndicateurPoursuite.js";
import { importIdeoFichesFormations } from "./jobs/formations/importIdeoFichesFormations.js";
import { splitIsochrones } from "./jobs/isochrones/splitIsochrones.ts";

const cli = new Command();

Expand Down Expand Up @@ -131,4 +132,35 @@ cli
});
});

cli
  .command("splitIsochrones")
  .description(
    `Simplifie et découpe un ensemble d'isochrones correspondant à plusieurs durées en utilisant PostGIS\n` +
      `Le dossier d'entrée contenant les isochrones doit avoir la structure suivante :\n` +
      `folder/[duration]/[name].json\n` +
      `Exemple : \n folder/5400/0010001W.json \n folder/3600/0010001W.json`
  )
  .requiredOption("-d, --db <db>", "URI de connexion PostgreSQL (nécessite PostGIS activé)")
  .requiredOption("-i, --input <input>", "Dossier contenant les isochrones")
  .requiredOption("-o, --output <output>", "Dossier de sortie")
  .requiredOption(
    "-k, --key <key>",
    "Geometry key path (lodash path format). \n Exemple pour Graphhopper : polygons[0].geometry"
  )
  .requiredOption(
    // Bug fix: without the <buckets> placeholder, commander treats -b as a
    // boolean flag and a user-supplied value could never override the default.
    "-b, --buckets <buckets>",
    "Liste des durées des différents buckets en ordre décroissant (séparés par des virgules)",
    "5400,3600,2700,1800,900"
  )
  .action((options) => {
    const { input, output, buckets, key, db } = options;
    return splitIsochrones({
      input,
      output,
      key,
      // Explicit radix so values are always parsed as base-10 integers.
      buckets: buckets.split(",").map((b) => parseInt(b, 10)),
      connectionString: db,
    });
  });

cli.parse(process.argv);
144 changes: 144 additions & 0 deletions server/src/jobs/isochrones/splitIsochrones.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
import fs from "fs";
import { filterData, oleoduc, writeData, transformData } from "oleoduc";
import { Readable } from "stream";
import { get, flow } from "lodash-es";
import { getLoggerWithContext } from "#src/common/logger.js";

import { ExpressionWrapper, Kysely, PostgresDialect, RawBuilder, sql } from "kysely";
import pg from "pg";
import path from "path";

// Module-scoped logger tagged with the "isochrones" job context.
const logger = getLoggerWithContext("isochrones");

/**
 * Creates one output sub-folder per bucket duration (e.g. `output/5400`).
 * Folders that already exist are left untouched (`recursive: true`).
 *
 * @param output  Root output folder.
 * @param buckets Bucket durations in seconds; each becomes a sub-folder name.
 */
async function createOutputFolder(output: string, buckets: number[]) {
  for (const bucket of buckets) {
    const dir = path.join(output, bucket.toString());
    await fs.promises.mkdir(dir, { recursive: true });
  }
}

/**
 * Reads the isochrone file of a given bucket and extracts its geometry.
 *
 * @param input  Root input folder.
 * @param bucket Bucket duration in seconds (name of the sub-folder).
 * @param file   File name inside the bucket folder (e.g. "0010001W.json").
 * @param key    Lodash path of the geometry inside the parsed JSON
 *               (e.g. "polygons[0].geometry" for Graphhopper output).
 * @returns The bucket and the extracted geometry.
 * @throws Error when the geometry key is absent from the file content.
 */
async function getGeometry(input: string, bucket: number, file: string, key: string) {
  const data = JSON.parse(await fs.promises.readFile(path.join(input, bucket.toString(), file), "utf8"));
  const geometry = get(data, key);
  if (geometry === undefined) {
    throw new Error(`Le fichier ${bucket}/${file} est invalide.`);
  }
  return { bucket, geometry };
}

/**
 * Wraps an initial SQL value in a chain of function calls, innermost first.
 * Each entry's extra arguments are appended after the accumulated expression,
 * e.g. `[{fn: "a", args: []}, {fn: "b", args: [x]}]` applied to `v`
 * produces `b(a(v), x)`.
 */
function kyselyChainFn(
  eb,
  fns: { fn: string; args: (ExpressionWrapper<unknown, never, any> | string)[] }[],
  val: RawBuilder<unknown> | string
) {
  let wrapped = val;
  for (const { fn, args } of fns) {
    wrapped = eb.fn(fn, [wrapped, ...args]);
  }
  return wrapped;
}

/**
 * Simplifies and splits a set of isochrones using PostGIS, then writes the
 * resulting GeoJSON pieces to the output folder.
 *
 * The input folder must contain one sub-folder per duration bucket
 * (`input/[bucket]/[name].json`). For each file, every bucket geometry is
 * cleaned up (buffer + simplify + make-valid), the next smaller bucket is
 * subtracted from it so buckets do not overlap, and the result is subdivided
 * into pieces of at most `divideMaxVertices` vertices, written out as
 * `output/[bucket]/[name]_[index].json`.
 *
 * @param input            Folder containing `[bucket]/[name].json` files.
 * @param output           Destination folder (one sub-folder per bucket).
 * @param buckets          Bucket durations in decreasing order; the file list
 *                         is taken from the first (largest) bucket.
 * @param key              Lodash path of the geometry inside each JSON file.
 * @param connectionString PostgreSQL URI (PostGIS must be enabled).
 */
export async function splitIsochrones({
  input,
  output,
  buckets,
  key,
  connectionString,
}: {
  input: string;
  output: string;
  buckets: number[];
  key: string;
  connectionString: string;
}) {
  const bufferPrecision = 0.001; // ~100m
  const simplifyPrecision = 0.0001; // ~10m
  const divideMaxVertices = 2048;

  // The file list comes from the largest bucket; files missing from another
  // bucket are reported and skipped during the verification step below.
  // NOTE: the leftover debug `console.log` was removed — it printed the
  // PostgreSQL connection string (credentials) to stdout.
  const files = (await fs.promises.readdir(path.join(input, buckets[0].toString()))).filter((s) => s.match(/\.json/));
  await createOutputFolder(output, buckets);

  const dialect = new PostgresDialect({
    pool: new pg.Pool({
      connectionString,
    }),
  });
  const db = new Kysely({ dialect });

  await oleoduc(
    Readable.from(files),
    // Load and validate the geometry of each bucket for this file.
    transformData(async (file) => {
      const name = file.replace(/\.json$/, "");
      try {
        const data = await Promise.all(buckets.map(async (bucket) => getGeometry(input, bucket, file, key)));
        return { uai: name, data };
      } catch (err) {
        logger.error(err.message);
        return null; // dropped by the filter below
      }
    }),
    filterData((d) => d),
    // Build one query per file: clean each bucket geometry in a sub-select,
    // then subtract the next smaller bucket and subdivide the result.
    transformData(async ({ uai, data }) => {
      const result = await db
        .selectFrom((eb) => {
          return db
            .selectNoFrom((eb) => {
              return data.map(({ geometry, bucket }) => {
                return kyselyChainFn(
                  eb,
                  [
                    { fn: "ST_GeomFromGeoJSON", args: [] },
                    { fn: "ST_Buffer", args: [eb.val(bufferPrecision)] },
                    { fn: "ST_Simplify", args: [eb.val(simplifyPrecision), eb.val(true)] },
                    { fn: "ST_MakeValid", args: [eb.val("method=structure")] },
                  ],
                  sql`${geometry}`
                ).as(bucket.toString());
              });
            })
            .as("buckets");
        })
        .select(({ eb, fn }) => {
          return data.map(({ bucket }, index) => {
            return kyselyChainFn(
              eb,
              [
                // Subtract the next (smaller) bucket so rings do not overlap;
                // the last (smallest) bucket is kept whole.
                ...(index < data.length - 1
                  ? [
                      {
                        fn: "ST_Difference",
                        args: [`buckets.${data[index + 1].bucket}`],
                      },
                    ]
                  : []),
                { fn: "ST_Subdivide", args: [eb.val(divideMaxVertices)] },
                { fn: "ST_AsGeoJSON", args: [] },
              ],
              `buckets.${bucket}`
            ).as(bucket.toString());
          });
        })
        .execute();
      return { result, uai };
    }),
    // Write each subdivided piece as output/[bucket]/[uai]_[index].json.
    writeData(
      async ({ result, uai }) => {
        // Indexed loop instead of for..in: avoids string keys and any
        // enumerable prototype properties on the result array.
        for (let index = 0; index < result.length; index++) {
          const r = result[index];
          for (const bucket of Object.keys(r)) {
            if (!r[bucket]) {
              continue;
            }
            await fs.promises.writeFile(
              path.join(output, bucket, `${uai}_${String(index).padStart(4, "0")}.json`),
              r[bucket]
            );
          }
        }
        logger.info(`Découpage et simplification de ${uai} fini.`);
      },
      { parallel: 10 }
    )
  );

  await db.destroy();
}
Loading

0 comments on commit c0c81a5

Please sign in to comment.