Skip to content

Commit

Permalink
upgrade to node22; update dependencies
Browse files Browse the repository at this point in the history
  • Loading branch information
pleary committed Jul 3, 2024
1 parent b149195 commit 62773c9
Show file tree
Hide file tree
Showing 16 changed files with 9,029 additions and 8,196 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/CI-build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
- name: Use Node.js
uses: actions/setup-node@v3
with:
node-version: '16.x'
node-version: '22.x'

- name: Configure sysctl limits
run: |
Expand Down
5 changes: 2 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
.DS_Store
.coveralls.yml
.ruby-gemset
.nyc_output
coverage
node_modules
config.js
Expand All @@ -14,15 +15,13 @@ public/taxa.txt
public/uploads/
lib/vision/**
!lib/vision/cache_images.js
!lib/vision/cv_stats.js
!lib/vision/file_cache.js
!lib/vision/image_cache.js
!lib/vision/photos_csv_transformer.js
!lib/vision/run_stats.js
cache/**
log/**
openapi/uploads/**
lib/tasks/**.csv
env.list
build
.vscode
.vscode
2 changes: 1 addition & 1 deletion .nvmrc
Original file line number Diff line number Diff line change
@@ -1 +1 @@
16.13.2
22.4.0
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
# Platform should be forced to amd64
# because node-mapnik is not available in arm64
FROM --platform=linux/amd64 node:16 as base
FROM --platform=linux/amd64 node:22 as base

RUN apt-get update -qq && apt-get install -y postgresql-client-11
RUN apt-get update -qq && apt-cache search postgresql && apt-get install -y postgresql-client-15

ENV NODE_ENV=development

Expand Down
16 changes: 9 additions & 7 deletions lib/controllers/v1/computervision_controller.js
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
const { readFile } = require( "node:fs/promises" );
const _ = require( "lodash" );
const fs = require( "fs" );
const fetch = require( "node-fetch" );
const FormData = require( "form-data" );
const path = require( "path" );
const squel = require( "safe-squel" );
const md5 = require( "md5" );
const PromisePool = require( "es6-promise-pool" );
const csv = require( "fast-csv" );
const crypto = require( "crypto" );
const pgClient = require( "../../pg_client" );
const TaxaController = require( "./taxa_controller" );
const InaturalistAPI = require( "../../inaturalist_api" );
Expand Down Expand Up @@ -131,7 +130,11 @@ const ComputervisionController = class ComputervisionController {
}
// download the JPG
const parsedPhotoURL = path.parse( photoURL );
const tmpFilename = `${md5( photoURL )}${parsedPhotoURL.ext.replace( /\?.+/, "" )}`;
const md5PhotoURL = crypto
.createHash( "md5" )
.update( photoURL, "utf8" )
.digest( "hex" );
const tmpFilename = `${md5PhotoURL}${parsedPhotoURL.ext.replace( /\?.+/, "" )}`;
const tmpPath = path.resolve( config.imageProcesing.uploadsDir, tmpFilename );

const imageRequestAbortController = new AbortController( );
Expand Down Expand Up @@ -206,9 +209,8 @@ const ComputervisionController = class ComputervisionController {

static async scoreImagePath( uploadPath, req ) {
const formData = new FormData();
formData.append( "image", fs.createReadStream( uploadPath ), {
type: req.file.mimetype,
knownLength: fs.statSync( uploadPath ).size
formData.append( "image", new Blob( [await readFile( uploadPath )] ), {
type: req.file.mimetype
} );
formData.append( "geomodel", "true" );
if ( req.body.delegate_ca || req.query.delegate_ca
Expand Down
4 changes: 1 addition & 3 deletions lib/controllers/v1/observations_controller.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
const _ = require( "lodash" );
const moment = require( "moment" );
const extend = require( "node.extend" );
const squel = require( "safe-squel" );
const { observations } = require( "inaturalistjs" );
const RedisCacheClient = require( "../../redis_cache_client" );
Expand Down Expand Up @@ -1207,8 +1206,7 @@ ObservationsController.observers = async req => {
ObservationsController.observationsObserversResponse = async (
req, observers, speciesObservers
) => {
// using the node.extend package for a deep clone to merge these objects
const userIndexedCounts = extend( true, { }, observers.counts, speciesObservers.counts );
const userIndexedCounts = _.merge( {}, observers.counts, speciesObservers.counts );
const orderField = ( req.query.order_by === "species_count" )
? "species_count" : "observation_count";
const { page, perPage } = InaturalistAPI.paginationData( req,
Expand Down
11 changes: 5 additions & 6 deletions lib/inaturalist_api.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
require( "intl" );
const { readFile } = require( "node:fs/promises" );
const _ = require( "lodash" );
const jwt = require( "jsonwebtoken" );
const fs = require( "fs" );
Expand Down Expand Up @@ -700,7 +700,7 @@ InaturalistAPI.lookupInstancesMiddleware = ( req, res, next ) => {
// most other contexts an iNaturalistAPI instance will be the host for GET
// requests. If it seems like iNaturalistAPI should just be calling itself in
// an infinite loop here, that's why it isn't.
InaturalistAPI.iNatJSWrap = ( method, req ) => {
InaturalistAPI.iNatJSWrap = async ( method, req ) => {
const params = { ...req.body, ...req.query };
_.each( req.params, ( v, k ) => {
params[k] = req.params[k];
Expand All @@ -719,14 +719,13 @@ InaturalistAPI.iNatJSWrap = ( method, req ) => {
// multipart files are handled by multer
// pass a custom parameter that will handled with FormData
// FormData seems to need the knownLength to compute proper offsets
_.each( req.files, ( files, key ) => {
await Promise.all( _.map( req.files, async ( files, key ) => {
const file = files[0];
params[key] = {
type: "custom",
value: fs.createReadStream( file.path ),
options: { knownLength: file.size }
value: new Blob( [await readFile( file.path )] )
};
} );
} ) );
if ( !options.remote_ip && req.connection && req.connection.remoteAddress ) {
options.remote_ip = req.connection.remoteAddress;
}
Expand Down
6 changes: 3 additions & 3 deletions lib/models/observation_query_builder.js
Original file line number Diff line number Diff line change
Expand Up @@ -582,10 +582,10 @@ ObservationQueryBuilder.reqToElasticQueryComponents = async req => {
// if there's only one set of collection project filters, then apply that
// projects filters directly to the current query
if ( shoulds.length === 1 ) {
if ( !_.isEmpty( shoulds[0].bool.filter ) ) {
if ( shoulds[0].bool && !_.isEmpty( shoulds[0].bool.filter ) ) {
searchFilters = searchFilters.concat( shoulds[0].bool.filter );
}
if ( !_.isEmpty( shoulds[0].bool.must_not ) ) {
if ( shoulds[0].bool && !_.isEmpty( shoulds[0].bool.must_not ) ) {
inverseFilters = inverseFilters.concat( shoulds[0].bool.must_not );
}
} else {
Expand Down Expand Up @@ -1515,7 +1515,7 @@ ObservationQueryBuilder.applyCollectionProjectRules = async ( req, options = { }
// the user requested to filter by a new-style project, but for some reason
// none of them have search parameters. Return an unmatchable filter
// indicating no observations match the rules of these projects
req.query.collectionProjectFilters = [{ term: { id: -1 } }];
req.query.collectionProjectFilters = [{ bool: { filter: [{ term: { id: -1 } }] } }];
// override the project_id to exclude IDs of new-style projects
if ( _.isEmpty( queryProjectIDs ) ) {
delete req.query.project_id;
Expand Down
16 changes: 0 additions & 16 deletions lib/models/place.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,22 +24,6 @@ const Place = class Place extends Model {
? null : getResponse.docs[0]._source;
}

static async findByLocaleString( locale ) {
let localeObj;
try {
localeObj = new Intl.Locale( locale );
} catch ( err ) {
// continue if locale is invalid
return null;
}
const localeRegionCode = localeObj.region;
if ( !localeRegionCode ) {
return null;
}
// lookup the place by code and admin-level = country
return Place.findByLocaleCode( localeRegionCode );
}

static async findByLocaleCode( code ) {
const query = squel.select( )
.field( "id, name, ancestry" )
Expand Down
1 change: 0 additions & 1 deletion lib/test_helper.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
const fs = require( "fs" );
const _ = require( "lodash" );
const timersPromises = require( "timers/promises" );
const Promise = require( "bluebird" );
const { expect } = require( "chai" ); // eslint-disable-line import/no-extraneous-dependencies
const sinon = require( "sinon" ); // eslint-disable-line import/no-extraneous-dependencies
const pgClient = require( "./pg_client" );
Expand Down
10 changes: 7 additions & 3 deletions lib/util.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
/* eslint-disable no-console */
const _ = require( "lodash" );
const moment = require( "moment" );
const md5 = require( "md5" );
const crypto = require( "crypto" );
const config = require( "../config" );
const Logstasher = require( "./logstasher" );

Expand Down Expand Up @@ -373,11 +373,15 @@ const util = class util {
// to the non-query data to consider. Results from users with different name
// priorities will be cached separated, even if the original URL is the same
if ( req.userSession && !_.isEmpty( req.userSession.taxonNamePriorities ) ) {
reqInatDup.taxonNamePrioritiesHash = md5( _.map(
const taxonNamePrioritiesSettings = _.map(
_.sortBy( req.userSession.taxonNamePriorities, "position" ), tnp => (
[tnp.lexicon, tnp.place_id].join( ":" )
)
).join( "," ) );
).join( "," );
reqInatDup.taxonNamePrioritiesHash = crypto
.createHash( "md5" )
.update( taxonNamePrioritiesSettings, "utf8" )
.digest( "hex" );
}

if ( prefix === "ObservationsController.identifiers"
Expand Down
Loading

0 comments on commit 62773c9

Please sign in to comment.