Migrated from got to node-fetch package
antoineludeau committed Jul 5, 2023
1 parent f55c61f commit a3c86a6
Showing 13 changed files with 100 additions and 233 deletions.
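In short, every call site that used got's combined request-and-parse API now goes through a small node-fetch wrapper and reads the body explicitly. A minimal sketch of the new pattern, assuming the wrapper added below in lib/util/fetch.cjs (the URL here is illustrative, not from the diff):

const fetch = require('./lib/util/fetch.cjs')

async function example() {
  // With got: const {body, statusCode} = await got(url, {responseType: 'buffer'})
  // With node-fetch, the Response exposes a numeric status and explicit body readers.
  const response = await fetch('https://example.org/resource') // illustrative URL
  console.log(response.status) // was response.statusCode with got
  return response.json() // or response.buffer() for binary payloads
}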
44 changes: 24 additions & 20 deletions .env.sample
@@ -1,33 +1,37 @@
MONGODB_URL=mongodb://127.0.0.1:27017
MONGODB_PORT=27017
# Mongo DB
MONGODB_URL=mongodb://127.0.0.1:27017 # Not used for deployment with Docker Compose
MONGODB_PORT=27017 # Used only for deployment with Docker Compose
MONGODB_DBNAME=ban

REDIS_URL=redis://127.0.0.1:6379
REDIS_PORT=6379
# Redis
REDIS_URL=redis://127.0.0.1:6379 # Not used for deployment with Docker Compose
REDIS_PORT=6379 # Used only for Docker Compose

# APIs
# API BAN
BAN_API_URL=https://plateforme.adresse.data.gouv.fr
BAN_API_AUTHORIZED_TOKENS=
ADMIN_TOKEN= # Used for legacy routes
BAN_API_AUTHORIZED_TOKENS= # Used for new ban-id api routes
PORT=5000

# relative path from the "root" directory for all the following path variables :
# API de dépôt
API_DEPOT_URL=https://plateforme.adresse.data.gouv.fr/api-depot

# API ID-Fix
API_IDFIX_URL=https://plateforme.adresse.data.gouv.fr/api-idfix
API_IDFIX_TOKEN=

# Path to data files
# Relative path from the "root" directory for all the following path variables :
FANTOIR_PATH=data/fantoir.sqlite
GAZETTEER_DB_PATH=data/gazetteer.sqlite
MAJIC_PATH=/data/majic.sqlite
CONTOURS_DATA_PATH=data/communes-50m.sqlite
COMMUNES_LOCAUX_ADRESSES_DATA_PATH=data/communes-locaux-adresses.json

DEPARTEMENTS=

# Others
DEPARTEMENTS= # Comma-separated list of departements (dev only)
JOB_STATUS_LIMIT_DURATION='90d' # Maximum duration a job status is kept in the database
MAX_CONCURRENT_WORKERS=1

DATAGOUV_API_KEY=

API_DEPOT_URL=https://plateforme.adresse.data.gouv.fr/api-depot

API_IDFIX_URL=https://plateforme.adresse.data.gouv.fr/api-idfix
API_IDFIX_TOKEN=

ADMIN_TOKEN=

JOB_STATUS_LIMIT_DURATION='90d'

PORT=5000
PROXY_URL= # Set only if you are behind a proxy
2 changes: 1 addition & 1 deletion lib/api/legacy-routes.cjs
@@ -2,7 +2,7 @@ const fs = require('node:fs')
const express = require('express')
const Papa = require('papaparse')
const {snakeCase, mapKeys} = require('lodash')
const fetch = require('node-fetch')
const fetch = require('../util/fetch.cjs')
const currentDate = require('../util/local-current-date.cjs')
const {computeFilteredStats} = require('../models/ban.cjs')
const {
17 changes: 9 additions & 8 deletions lib/import/source-part-updater.cjs
@@ -1,6 +1,6 @@
const got = require('got')
const revisionHash = require('rev-hash')
const {omit} = require('lodash')
const fetch = require('../util/fetch.cjs')
const source = require('../models/source.cjs')

async function fetchResource(resourceInfo, options = {}) {
@@ -17,26 +17,27 @@ async function fetchResource(resourceInfo, options = {}) {
}

try {
const response = await got(resourceInfo.url, {responseType: 'buffer', headers: requestHeaders})
const {body, headers, statusCode} = response
const response = await fetch(resourceInfo.url, {headers: requestHeaders})
const {headers, status} = response

if (statusCode === 304) {
if (status === 304) {
return
}

const revision = revisionHash(body)
const buffer = await response.buffer()
const revision = revisionHash(buffer)

if (!options.force && resourceInfo.revision && resourceInfo.revision === revision) {
return
}

resourceInfo.headers = headers
resourceInfo.revision = revision
resourceInfo.data = body
resourceInfo.size = body.length
resourceInfo.data = buffer
resourceInfo.size = buffer.length
resourceInfo.updatedAt = new Date()
} catch (error) {
if (error && error.response && error.response.statusCode === 404 && options.allowNotFound) {
if (error && error.response && error.response.status === 404 && options.allowNotFound) {
resourceInfo.notFound = true
return
}
13 changes: 8 additions & 5 deletions lib/util/api-depot.cjs
@@ -1,13 +1,14 @@
const process = require('process')
const got = require('got')
const fetch = require('./fetch.cjs')

const API_DEPOT_URL = process.env.API_DEPOT_URL || 'https://plateforme.adresse.data.gouv.fr/api-depot'

async function getCurrentRevision(codeCommune) {
try {
return await got(`${API_DEPOT_URL}/communes/${codeCommune}/current-revision`).json()
const response = await fetch(`${API_DEPOT_URL}/communes/${codeCommune}/current-revision`)
return await response.json()
} catch (error) {
if (error.response?.statusCode === 404) {
if (error.response?.status === 404) {
return
}

@@ -16,11 +17,13 @@ async function getCurrentRevision(codeCommune) {
}

async function getRevisionFile(revisionId) {
return got(`${API_DEPOT_URL}/revisions/${revisionId}/files/bal/download`).buffer()
const response = await fetch(`${API_DEPOT_URL}/revisions/${revisionId}/files/bal/download`)
return response.buffer()
}

async function getCurrentRevisons() {
return got(`${API_DEPOT_URL}/current-revisions`).json()
const response = await fetch(`${API_DEPOT_URL}/current-revisions`)
return response.json()
}

module.exports = {getCurrentRevision, getRevisionFile, getCurrentRevisons}
5 changes: 3 additions & 2 deletions lib/util/contours.cjs
@@ -3,8 +3,8 @@ const Keyv = require('keyv')
const Cache = require('lru-cache')
const {throttle} = require('lodash')
const ms = require('ms')
const got = require('got')
const ora = require('ora')
const fetch = require('./fetch.cjs')

const contoursDataPath = process.env.CONTOURS_DATA_PATH || 'data/communes-50m.sqlite'

@@ -19,7 +19,8 @@ function db() {
}

async function getCommunesFeatures() {
const communesFile = await got('https://adresse.data.gouv.fr/data/contours-administratifs/2023/geojson/communes-50m.geojson').json()
const response = await fetch('https://adresse.data.gouv.fr/data/contours-administratifs/2023/geojson/communes-50m.geojson')
const communesFile = await response.json()

return communesFile.features
}
13 changes: 7 additions & 6 deletions lib/util/datagouv.cjs
@@ -1,5 +1,5 @@
const got = require('got')
const FormData = require('form-data')
const fetch = require('./fetch.cjs')

async function replaceResourceFile(datasetId, resourceId, fileName, fileContent) {
const url = `https://www.data.gouv.fr/api/1/datasets/${datasetId}/resources/${resourceId}/upload/`
@@ -9,15 +9,16 @@ async function replaceResourceFile(datasetId, resourceId, fileName, fileContent)
const form = new FormData()
form.append('file', fileBuffer, {filename: fileName})

const gotOptions = {
const fetchOptions = {
method: 'POST',
headers: {
'X-API-Key': process.env.DATAGOUV_API_KEY
'X-API-Key': process.env.DATAGOUV_API_KEY,
...form.getHeaders()
},
body: form,
responseType: 'json'
body: form
}

await got.post(url, gotOptions)
await fetch(url, fetchOptions)
}

module.exports = {replaceResourceFile}
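For reference, a minimal sketch of the multipart upload pattern used above, with the form-data headers (including the multipart boundary) merged explicitly next to the API key; the function name and parameters are illustrative:

const FormData = require('form-data')
const fetch = require('./fetch.cjs')

async function uploadFile(url, fileName, fileBuffer, apiKey) {
  const form = new FormData()
  form.append('file', fileBuffer, {filename: fileName})

  return fetch(url, {
    method: 'POST',
    headers: {'X-API-Key': apiKey, ...form.getHeaders()}, // boundary header from form-data
    body: form
  })
}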
35 changes: 35 additions & 0 deletions lib/util/fetch.cjs
@@ -0,0 +1,35 @@
const fetch = require('node-fetch')
const HttpsProxyAgent = require('https-proxy-agent')

const PROXY_URL = process.env.PROXY_URL || ''

class HTTPResponseError extends Error {
constructor(response) {
super(`HTTP Error Response: ${response.status} ${response.statusText}`)
this.response = response
}
}

const fetchWithProxy = async (url, options) => {
try {
let response
if (PROXY_URL) {
const agent = new HttpsProxyAgent(PROXY_URL)
response = await fetch(url, {...options, agent})
} else {
response = await fetch(url, options)
}

if (response.status >= 400) {
throw new HTTPResponseError(response)
}

return response
} catch (error) {
// Log and rethrow both HTTP errors (status >= 400) and network errors
console.error(`Request failed: ${error.message}`)
throw error
}
}

module.exports = fetchWithProxy
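Because the wrapper throws HTTPResponseError for any status >= 400 and attaches the failed Response, callers can keep branching on the status code, as lib/util/api-depot.cjs does above. A minimal sketch of that pattern (the helper name and URL parameter are illustrative):

const fetch = require('./fetch.cjs')

async function getJsonOrUndefined(url) {
  try {
    const response = await fetch(url) // throws HTTPResponseError when status >= 400
    return await response.json()
  } catch (error) {
    if (error.response?.status === 404) { // the wrapper exposes the failed Response
      return undefined
    }

    throw error
  }
}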
34 changes: 0 additions & 34 deletions lib/util/get-as-stream.cjs

This file was deleted.

1 change: 0 additions & 1 deletion package.json
@@ -55,7 +55,6 @@
"geojson-stream": "^0.1.0",
"geojson-vt": "^3.2.1",
"get-stream": "^6.0.1",
"got": "^11.8.5",
"gunzip-stream": "^1.0.1",
"iconv-lite": "^0.6.3",
"into-stream": "^6.0.0",
11 changes: 6 additions & 5 deletions scripts/build-geo.cjs
@@ -1,20 +1,21 @@
#!/usr/bin/env node
/* eslint unicorn/prefer-object-from-entries: off */
require('dotenv').config()
const {createGunzip} = require('zlib')
const {join} = require('path')
const got = require('got')
const {center, bbox} = require('@turf/turf')
const getStream = require('get-stream')
const {outputJson} = require('fs-extra')
const fetch = require('../lib/util/fetch.cjs')

const communes = 'http://etalab-datasets.geo.data.gouv.fr/contours-administratifs/2023/geojson/communes-100m.geojson.gz'

async function getFeatures(url) {
const buffer = await getStream.buffer(
got.stream(url).pipe(createGunzip())
)
const response = await fetch(url)
const unzippedStream = response.body.pipe(createGunzip())
const bufferData = await getStream.buffer(unzippedStream)

return JSON.parse(buffer.toString()).features
return JSON.parse(bufferData.toString()).features
}

function toPrecision(float, precision) {
6 changes: 4 additions & 2 deletions scripts/download-datasets.cjs
@@ -1,17 +1,19 @@
#!/usr/bin/env node
require('dotenv').config()
const path = require('path')
const {createWriteStream} = require('fs')
const {pipeline} = require('stream/promises')
const {mkdirp} = require('fs-extra')
const got = require('got')
const ora = require('ora')
const fetch = require('../lib/util/fetch.cjs')

const dataDir = path.join(__dirname, '..', 'data')

async function downloadFile(url, fileName) {
const spinner = ora(`Téléchargement du fichier ${fileName}`).start()
const response = await fetch(url)
await pipeline(
got.stream(url, {responseType: 'buffer'}),
response.body,
createWriteStream(path.join(dataDir, fileName))
)
spinner.succeed()
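The pipeline and pipe calls in build-geo.cjs and download-datasets.cjs assume response.body is a Node.js Readable stream, which holds for node-fetch v2 (the CommonJS line required here); a minimal sketch of the same idea, with the URL and target path left as illustrative parameters:

const {createWriteStream} = require('fs')
const {pipeline} = require('stream/promises')
const fetch = require('../lib/util/fetch.cjs')

async function downloadTo(url, filePath) {
  const response = await fetch(url)
  await pipeline(response.body, createWriteStream(filePath)) // stream straight to disk
}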
1 change: 1 addition & 0 deletions scripts/prepare-contours.cjs
@@ -1,4 +1,5 @@
#!/usr/bin/env node
require('dotenv').config()
const path = require('path')
const {mkdirp} = require('fs-extra')
const {prepareContours} = require('../lib/util/contours.cjs')