diff --git a/.deploy/default/settings.json b/.deploy/default/settings.json index febd8fc51..ab469f44e 100644 --- a/.deploy/default/settings.json +++ b/.deploy/default/settings.json @@ -1,5 +1,15 @@ { "public": { + }, + "packages": { + "mongo": { + "options": { + "minPoolSize": 100, + "maxPoolSize": 1000 + }, + "oplogUrl": "", + "oplogIncludeCollections": [] + } } } diff --git a/.deploy/ghcr.io/settings.json b/.deploy/ghcr.io/settings.json new file mode 100644 index 000000000..ab469f44e --- /dev/null +++ b/.deploy/ghcr.io/settings.json @@ -0,0 +1,15 @@ +{ + "public": { + + }, + "packages": { + "mongo": { + "options": { + "minPoolSize": 100, + "maxPoolSize": 1000 + }, + "oplogUrl": "", + "oplogIncludeCollections": [] + } + } +} diff --git a/.env b/.env index 85c792594..aa068e278 100644 --- a/.env +++ b/.env @@ -3,5 +3,6 @@ PORT=3000 MONGO_VERSION=5.0 MONGO_URL=mongodb://patient-db:27017/meteor +MONGO_OPLOG_URL="" HTTP_FORWARDED_COUNT=1 diff --git a/.github/workflows/ci:build.yml b/.github/workflows/ci:build.yml index b7821a82f..f4a09ca18 100644 --- a/.github/workflows/ci:build.yml +++ b/.github/workflows/ci:build.yml @@ -88,6 +88,7 @@ jobs: - uses: actions/checkout@v4 with: sparse-checkout: | + settings.json scripts/healthcheck.cjs scripts/assert-replica-set.js .github/actions/docker/container/is-healthy/action.yml @@ -176,11 +177,18 @@ jobs: container: mongodb timeout: 60 + - name: Configure server + id: configure-server + run: | + echo "settings=$(tr -d '\r\n' < settings.json)" >> "${GITHUB_OUTPUT}" + - name: Run server id: server env: ROOT_URL: http://localhost MONGO_URL: mongodb://localhost:27017/meteor + MONGO_OPLOG_URL: "" + METEOR_SETTINGS: ${{ steps.configure-server.outputs.settings }} PORT: 3000 run: | cd dist diff --git a/.github/workflows/ci:build:image.yml b/.github/workflows/ci:build:image.yml index 7cc7518e2..9fb0a61a4 100644 --- a/.github/workflows/ci:build:image.yml +++ b/.github/workflows/ci:build:image.yml @@ -84,6 +84,7 @@ jobs: - uses: 
actions/checkout@v4 with: sparse-checkout: | + settings.json scripts/assert-replica-set.js .github/actions/docker/container/ip-address/action.yml .github/actions/docker/container/is-healthy/action.yml @@ -138,10 +139,17 @@ jobs: container: mongodb timeout: 60 + - name: Configure server + id: configure-server + run: | + echo "settings=$(tr -d '\r\n' < settings.json)" >> "${GITHUB_OUTPUT}" + - name: Run server env: ROOT_URL: http://localhost MONGO_URL: mongodb://mongodb:27017/meteor + MONGO_OPLOG_URL: "" + METEOR_SETTINGS: ${{ steps.configure-server.outputs.settings }} PORT: 3000 run: | docker container run \ @@ -151,6 +159,8 @@ jobs: --publish "${PORT}:${PORT}" \ --env "ROOT_URL=${ROOT_URL}" \ --env "MONGO_URL=${MONGO_URL}" \ + --env "MONGO_OPLOG_URL=${MONGO_OPLOG_URL}" \ + --env "METEOR_SETTINGS=${METEOR_SETTINGS}" \ --env "PORT=${PORT}" \ --name server \ ${{ steps.server-image-url.outputs.url }} diff --git a/Dockerfile b/Dockerfile index 0740afe95..568efee45 100644 --- a/Dockerfile +++ b/Dockerfile @@ -104,7 +104,8 @@ EXPOSE 3000 ENV \ ROOT_URL="http://localhost" \ PORT="3000" \ - MONGO_URL="mongodb://localhost:27017/meteor" + MONGO_URL="mongodb://localhost:27017/meteor" \ + MONGO_OPLOG_URL="" HEALTHCHECK \ --interval=21s \ diff --git a/codecov.yml b/codecov.yml index acb9b1e20..1fca4c0fa 100644 --- a/codecov.yml +++ b/codecov.yml @@ -2,7 +2,7 @@ coverage: status: project: default: - target: 67% + target: 71% threshold: 1% patch: default: diff --git a/compose.yaml b/compose.yaml index 54934088c..c6b8ec735 100644 --- a/compose.yaml +++ b/compose.yaml @@ -8,6 +8,8 @@ services: - ROOT_URL=${ROOT_URL} - PORT=${PORT} - MONGO_URL=${MONGO_URL} + - MONGO_OPLOG_URL=${MONGO_OPLOG_URL} + - METEOR_SETTINGS=${METEOR_SETTINGS} - HTTP_FORWARDED_COUNT=${HTTP_FORWARDED_COUNT} depends_on: patient-db: diff --git a/imports/_test/fixtures.ts b/imports/_test/fixtures.ts index f65648c23..580ad3761 100644 --- a/imports/_test/fixtures.ts +++ b/imports/_test/fixtures.ts @@ -3,11
+3,18 @@ import 'regenerator-runtime/runtime.js'; import {assert, expect} from 'chai'; -import {cleanup as unmount} from '@testing-library/react'; +import {Meteor} from 'meteor/meteor'; + import totalOrder from 'total-order'; import {sorted} from '@iterable-iterator/sorted'; +import sleep from '../lib/async/sleep'; + +// eslint-disable-next-line import/no-unassigned-import +import '../api/endpoint/_dev/_disableRateLimiting'; import logout from '../api/user/logout'; +import define from '../api/collection/define'; +import {removeCollection} from '../api/collection/registry'; import invoke from '../api/endpoint/invoke'; import call from '../api/endpoint/call'; import reset from '../api/endpoint/_dev/reset'; @@ -16,6 +23,10 @@ import type Document from '../api/Document'; import type Selector from '../api/query/Selector'; import appIsReady from '../app/isReady'; import isAppTest from '../app/isAppTest'; +import {_router} from '../ui/App'; +import {getWatchStreamCount} from '../api/query/watch'; + +import {unmount as _unmount} from './react'; export { default as randomId, @@ -25,6 +36,28 @@ export { let resolving = 0; +export const waitFor = async (condition: () => boolean) => { + while (!condition()) { + // eslint-disable-next-line no-await-in-loop + await sleep(50); + } +}; + +export const withMockCollection = + ( + callback: (collection: Collection) => Promise | void, + ) => + async () => { + const collectionName = '__mocks__'; + const collection = define(collectionName); + try { + await callback(collection); + } finally { + await collection.dropCollectionAsync(); + removeCollection(collectionName); + } + }; + const resolveOnPopstate = async () => new Promise((resolve) => { let pos = ++resolving; @@ -77,17 +110,43 @@ const forgetHistory = async () => { return history.length - 1; }; +const mount = isAppTest() + ? () => { + _router.navigate('/'); + } + : // eslint-disable-next-line @typescript-eslint/no-empty-function + () => {}; + +const unmount = isAppTest() + ? 
() => { + _router.navigate('/_test/unmount'); + } + : () => { + _unmount(); + }; + +const assertChangeStreamWatchersAreOff = () => { + const n = getWatchStreamCount(); + if (n !== 0) { + console.warn(`ChangeStream watch count is different from 0 (got ${n})!`); + } +}; + export const client = (title, fn) => { if (Meteor.isClient) { const cleanup = async () => { await logout(); unmount(); + assertChangeStreamWatchersAreOff(); + mount(); await call(reset); }; let original; const prepare = async () => { + // @ts-expect-error TODO + await import('./mocha.css'); await import('../../client/polyfill'); original = await forgetHistory(); @@ -174,23 +233,71 @@ export const dropId = ({_id, ...rest}) => { export const dropIds = (x) => x.map(dropId); -export const create = (template, extra) => { - if (typeof template === 'function') return extra ?? template(); +export const dropOwner = ({owner, ...rest}) => { + assert(typeof owner === 'string'); + return rest; +}; + +export const dropOwners = (x) => x.map(dropOwner); + +type Created = T extends {[K in keyof T]: T[K]} + ? {[K in keyof T]: Created} + : T extends () => any + ? ReturnType + : never; + +type Extra = T extends any[] + ? Created + : T extends {[K in keyof T]: T[K]} + ? Partial> + : T extends () => any + ? ReturnType + : never; + +export function create any>( + template: T, + extra?: Extra, + hasExtra?: boolean, +): Created; +export function create( + template: T, + extra?: Extra, + hasExtra?: boolean, +): Created; + +export function create( + template: T, + extra?: Extra, + hasExtra?: boolean, +): Created { + if (typeof template === 'function') return hasExtra ? extra : template(); if (Array.isArray(template)) { - return template - .map((x, i) => create(x, extra?.[i])) - .concat(extra?.slice(template.length) ?? []); + return (template as Array) + .map((x, i) => + create( + x, + extra?.[i], + Object.prototype.hasOwnProperty.call(extra ?? [], i), + ), + ) + .concat(extra?.slice(template.length) ?? 
[]) as Created; } return Object.fromEntries( (extra === undefined ? [] : Object.entries(extra)).concat( - Object.entries(template).map(([key, value]) => [ - key, - create(value, extra?.[key]), - ]), + Object.entries(template as {[K in keyof T]: T[K]}).map( + ([key, value]: [string, V]) => [ + key, + create( + value, + extra?.[key], + Object.prototype.hasOwnProperty.call(extra ?? {}, key), + ), + ], + ), ), - ); -}; + ) as Created; +} export const findOneOrThrow = async ( collection: Collection, @@ -204,4 +311,7 @@ export const findOneOrThrow = async ( return result!; }; -export const makeTemplate = (template) => (extra?) => create(template, extra); +export const makeTemplate = (template) => (extra?) => + create(template, extra, extra !== undefined); + +export const isNode = () => Meteor.isServer; diff --git a/imports/_test/mocha.css b/imports/_test/mocha.css new file mode 100644 index 000000000..ef83d8ddf --- /dev/null +++ b/imports/_test/mocha.css @@ -0,0 +1,38 @@ +div#mocha { + z-index: 99999999; + opacity: 0; + position: fixed; + top: 64px; + left: 73px; + right: 0; + background: white; + margin: 0; + padding: 1em; + overflow: auto; + height: calc(100% - 64px); +} + +div#mocha:hover { + opacity: 0.9; +} + +div#mocha > ul#mocha-report > li.suite > ul > li.test::before { + font-size: 18px; + line-height: 1em; +} + +div#mocha > ul#mocha-report > li.suite > ul > li.test.pass::before { + color: #0b9; +} + +div#mocha > ul#mocha-report > li.suite > ul > li.test.fail::before { + color: #b00; +} + +div#mocha > ul#mocha-stats { + position: initial; +} + +div#mocha > ul#mocha-report > li.suite > h1 { + font-size: 0.8em; +} diff --git a/imports/_test/react.ts b/imports/_test/react.ts new file mode 100644 index 000000000..a6afaa619 --- /dev/null +++ b/imports/_test/react.ts @@ -0,0 +1,13 @@ +import {configure as configureReact} from '@testing-library/react'; + +configureReact({ + reactStrictMode: true, +}); + +export { + render, + renderHook, + waitFor, + 
waitForElementToBeRemoved, + cleanup as unmount, +} from '@testing-library/react'; diff --git a/imports/api/DocumentUpdate.ts b/imports/api/DocumentUpdate.ts index f55d8f04a..13003256b 100644 --- a/imports/api/DocumentUpdate.ts +++ b/imports/api/DocumentUpdate.ts @@ -1,9 +1,23 @@ import schema from '../lib/schema'; +export type RequiredKeys = { + [K in keyof T]-?: {} extends {[P in K]: T[K]} ? never : K; +}[keyof T]; + +export type OptionalKeys = { + [K in keyof T]-?: {} extends {[P in K]: T[K]} ? K : never; +}[keyof T]; + export type DocumentUpdate = { - [K in keyof T]?: T[K] | null; + [K in RequiredKeys]?: T[K]; +} & { + [K in OptionalKeys]?: T[K] | null; }; +export type DocumentUpdateEntry = + | [RequiredKeys, T[RequiredKeys]] + | [OptionalKeys, T[OptionalKeys] | null]; + export const documentUpdate = ( tSchema: S, ): schema.ZodType>> => { @@ -12,7 +26,9 @@ export const documentUpdate = ( Object.fromEntries( Object.entries(tSchema.shape).map(([key, value]) => [ key, - (value as any).nullable().optional(), + value instanceof schema.ZodOptional + ? 
schema.nullable(value) + : schema.optional(value as schema.ZodTypeAny), ]), ), ) as schema.ZodType>>; diff --git a/imports/api/GenericQueryHook.ts b/imports/api/GenericQueryHook.ts index e646f4a2f..e8d86df2b 100644 --- a/imports/api/GenericQueryHook.ts +++ b/imports/api/GenericQueryHook.ts @@ -9,7 +9,7 @@ type GenericQueryHookReturnType = { }; type GenericQueryHook = ( - query: UserQuery, + query: UserQuery | null, deps: DependencyList, ) => GenericQueryHookReturnType; diff --git a/imports/api/ObserveChangesCallbacks.ts b/imports/api/ObserveChangesCallbacks.ts deleted file mode 100644 index ed54f4fb5..000000000 --- a/imports/api/ObserveChangesCallbacks.ts +++ /dev/null @@ -1,5 +0,0 @@ -import {type Mongo} from 'meteor/mongo'; - -type ObserveChangesCallbacks = Mongo.ObserveChangesCallbacks; - -export default ObserveChangesCallbacks; diff --git a/imports/api/ObserveSequenceChangesCallbacks.ts b/imports/api/ObserveSequenceChangesCallbacks.ts new file mode 100644 index 000000000..d8a718889 --- /dev/null +++ b/imports/api/ObserveSequenceChangesCallbacks.ts @@ -0,0 +1,12 @@ +type ObserveSequenceChangesCallbacks = { + addedBefore?( + id: string, + fields: Partial>, + before: string | null, + ): Promise | void; + changed?(id: string, fields: Partial>): Promise | void; + movedBefore?(id: string, before: string | null): Promise | void; + removed?(id: string): Promise | void; +}; + +export default ObserveSequenceChangesCallbacks; diff --git a/imports/api/ObserveSetChangesCallbacks.ts b/imports/api/ObserveSetChangesCallbacks.ts new file mode 100644 index 000000000..84411a994 --- /dev/null +++ b/imports/api/ObserveSetChangesCallbacks.ts @@ -0,0 +1,7 @@ +type ObserveSetChangesCallbacks = { + added?(id: string, fields: Partial>): Promise | void; + changed?(id: string, fields: Partial>): Promise | void; + removed?(id: string): Promise | void; +}; + +export default ObserveSetChangesCallbacks; diff --git a/imports/api/_apply.ts b/imports/api/_apply.ts index c9933e97b..83b44d25a 
100644 --- a/imports/api/_apply.ts +++ b/imports/api/_apply.ts @@ -3,10 +3,14 @@ import {DDP} from 'meteor/ddp'; import promisify from '../lib/async/promisify'; +import {AsyncLock} from '../lib/async/lock'; + import type Options from './endpoint/Options'; import type Args from './Args'; import type Serializable from './Serializable'; +const _lock = new AsyncLock(); + const __meteor_apply_promisified = promisify( // @ts-expect-error Private access. Meteor.applyAsync.bind(Meteor), @@ -24,18 +28,19 @@ const _apply = async ( isFromCallAsync: true, ...options, }; - // @ts-expect-error Private access. - DDP._CurrentMethodInvocation._set(); - // @ts-expect-error Private access. - DDP._CurrentMethodInvocation._setCallAsyncMethodRunning(true); - const result = (await __meteor_apply_promisified( - name, - args, - applyOptions, - )) as Promise; - // @ts-expect-error Private access. - DDP._CurrentMethodInvocation._setCallAsyncMethodRunning(false); - return result; + const handle = await _lock.acquire(); + try { + // @ts-expect-error Private access. + DDP._CurrentMethodInvocation._set(); + // @ts-expect-error Private access. + DDP._CurrentMethodInvocation._setCallAsyncMethodRunning(true); + const result = await __meteor_apply_promisified(name, args, applyOptions); + // @ts-expect-error Private access. 
+ DDP._CurrentMethodInvocation._setCallAsyncMethodRunning(false); + return result as R; + } finally { + _lock.release(handle); + } }; export default _apply; diff --git a/imports/api/appointments.ts b/imports/api/appointments.ts index c24d584aa..fa1a29c35 100644 --- a/imports/api/appointments.ts +++ b/imports/api/appointments.ts @@ -61,8 +61,8 @@ export const appointmentUpdate = schema.union([ schema.object({ patient: schema.object({ _id: schema.string(), - firstname: schema.string(), - lastname: schema.string(), + firstname: schema.string().optional(), + lastname: schema.string().optional(), }), phone: schema.string().optional(), datetime: schema.date().optional(), diff --git a/imports/api/books.ts b/imports/api/books.ts index 462d3a797..03c71dd3f 100644 --- a/imports/api/books.ts +++ b/imports/api/books.ts @@ -3,8 +3,6 @@ import addYears from 'date-fns/addYears'; import schema from '../lib/schema'; -import makeQuery from './makeQuery'; -import makeObservedQueryHook from './makeObservedQueryHook'; import { type NormalizedLine, normalizedLine, @@ -13,17 +11,8 @@ import { } from './string'; import {Books, collection} from './collection/books'; -import {BooksCache} from './collection/books/cache'; - -import publication from './publication/books/find'; -import cachePublication from './publication/books/observe'; import type TransactionDriver from './transaction/TransactionDriver'; -export const useBooks = makeQuery(Books, publication); - -// TODO rename to useObservedBooks -export const useBooksFind = makeObservedQueryHook(BooksCache, cachePublication); - const sanitizeInput = normalizedLineInput; const sanitize = normalizedLine; diff --git a/imports/api/collection/define.ts b/imports/api/collection/define.ts index 482362ca4..45943a7b7 100644 --- a/imports/api/collection/define.ts +++ b/imports/api/collection/define.ts @@ -1,11 +1,21 @@ +import assert from 'assert'; + import type Document from '../Document'; import Collection from '../Collection'; +import 
{hasCollection, addCollection} from './registry'; + const define = (name: string) => { - return new Collection(name, { + assert(!hasCollection(name)); + + const collection = new Collection(name, { idGeneration: 'STRING', defineMutationMethods: false, }); + + addCollection(name, collection); + + return collection; }; export default define; diff --git a/imports/api/collection/patients.ts b/imports/api/collection/patients.ts index b9578bd59..474199f18 100644 --- a/imports/api/collection/patients.ts +++ b/imports/api/collection/patients.ts @@ -88,6 +88,17 @@ export const patientFields = patientIdFields .merge(patientTagFields); export type PatientFields = schema.infer; +export const patientUpdate = patientFields + .merge( + schema.object({ + sex: patientFields.shape.sex.nullable(), + deathdateModifiedAt: patientFields.shape.deathdateModifiedAt.nullable(), + deathdate: patientFields.shape.deathdate.nullable(), + }), + ) + .partial(); +export type PatientUpdate = schema.infer; + export const patientComputedFields = schema.object({ normalizedName: schema.string(), }); diff --git a/imports/api/collection/registry.ts b/imports/api/collection/registry.ts new file mode 100644 index 000000000..d1d079487 --- /dev/null +++ b/imports/api/collection/registry.ts @@ -0,0 +1,31 @@ +import assert from 'assert'; + +import type Document from '../Document'; +import type Collection from '../Collection'; + +const _registry = new Map>(); + +export const getCollection = ( + name: string, +): Collection => { + const collection = _registry.get(name); + assert(collection !== undefined); + return collection; +}; + +export const hasCollection = (name: string) => { + return _registry.has(name); +}; + +export const addCollection = ( + name: string, + collection: Collection, +) => { + assert(!hasCollection(name)); + _registry.set(name, collection); +}; + +export const removeCollection = (name: string) => { + assert(hasCollection(name)); + _registry.delete(name); +}; diff --git 
a/imports/api/collection/settings.ts b/imports/api/collection/settings.ts index fe14ef1ca..d44b62bf7 100644 --- a/imports/api/collection/settings.ts +++ b/imports/api/collection/settings.ts @@ -1,6 +1,6 @@ import define from './define'; -type SettingDocument = { +export type SettingDocument = { owner: string; key: string; value: any; diff --git a/imports/api/consultations.ts b/imports/api/consultations.ts index 9f33058e1..1b96868a9 100644 --- a/imports/api/consultations.ts +++ b/imports/api/consultations.ts @@ -32,6 +32,7 @@ import findOneSync from './publication/findOneSync'; import type Selector from './query/Selector'; import {type AuthenticatedContext} from './publication/Context'; import {type DocumentUpdate} from './DocumentUpdate'; +import observeSetChanges from './query/observeSetChanges'; export const DEFAULT_DURATION_IN_MINUTES = 15; export const DEFAULT_DURATION_IN_SECONDS = DEFAULT_DURATION_IN_MINUTES * 60; @@ -127,18 +128,18 @@ export const filterBookPrefill = () => ({ }, }); -export function setupConsultationsStatsPublication( +export async function setupConsultationsStatsPublication( this: AuthenticatedContext, collectionName: string, filter: Filter, ) { // Generate unique key depending on parameters const key = statsKey(filter); - const selector = { + const scopedFilter = { ...filter, isDone: true, owner: this.userId, - } as Selector; + } as Filter; const options = {fields: {_id: 1, price: 1, datetime: 1}}; const minHeap = new PairingHeap(increasing); @@ -158,48 +159,53 @@ export function setupConsultationsStatsPublication( // Until then, we don't want to send a lot of `changed` messages—hence // tracking the `initializing` state. 
let initializing = true; - const handle = Consultations.find(selector, options).observeChanges({ - added: (_id, {price, datetime}) => { - count += 1; - if (price) total += price; - const minRef = minHeap.push(datetime); - const maxRef = maxHeap.push(datetime); - refs.set(_id, [price, minRef, maxRef]); - - if (!initializing) { - this.changed(collectionName, key, state()); - } - }, - - changed: (_id, fields) => { - const [oldPrice, minRef, maxRef] = refs.get(_id); - let newPrice: number = oldPrice; - if (Object.prototype.hasOwnProperty.call(fields, 'price')) { - newPrice = fields.price!; - if (oldPrice) total -= oldPrice; - if (newPrice) total += newPrice; - refs.set(_id, [newPrice, minRef, maxRef]); - } - - if (Object.prototype.hasOwnProperty.call(fields, 'datetime')) { - const datetime = fields.datetime; - minHeap.update(minRef, datetime); - maxHeap.update(maxRef, datetime); - } - - this.changed(collectionName, key, state()); - }, + const handle = await observeSetChanges( + Consultations, + scopedFilter, + options, + { + added: (_id, {price, datetime}) => { + count += 1; + if (price) total += price; + const minRef = minHeap.push(datetime); + const maxRef = maxHeap.push(datetime); + refs.set(_id, [price, minRef, maxRef]); + + if (!initializing) { + this.changed(collectionName, key, state()); + } + }, + + changed: (_id, fields) => { + const [oldPrice, minRef, maxRef] = refs.get(_id); + if (Object.prototype.hasOwnProperty.call(fields, 'price')) { + const newPrice = fields.price; + if (oldPrice) total -= oldPrice; + if (newPrice) total += newPrice; + refs.set(_id, [newPrice, minRef, maxRef]); + } + + if (Object.prototype.hasOwnProperty.call(fields, 'datetime')) { + const datetime = fields.datetime; + minHeap.update(minRef, datetime); + maxHeap.update(maxRef, datetime); + } - removed: (_id) => { - count -= 1; - const [price, minRef, maxRef] = refs.get(_id); - if (price) total -= price; - minHeap.delete(minRef); - maxHeap.delete(maxRef); - refs.delete(_id); - 
this.changed(collectionName, key, state()); + this.changed(collectionName, key, state()); + }, + + removed: (_id) => { + count -= 1; + const [price, minRef, maxRef] = refs.get(_id); + if (price) total -= price; + minHeap.delete(minRef); + maxHeap.delete(maxRef); + refs.delete(_id); + this.changed(collectionName, key, state()); + }, }, - }); + ({price, datetime}) => ({price, datetime}), + ); // Instead, we'll send one `added` message right after `observeChanges` has // returned, and mark the subscription as ready. diff --git a/imports/api/createTagCollection.ts b/imports/api/createTagCollection.ts index 4d07eccda..074d1f0c4 100644 --- a/imports/api/createTagCollection.ts +++ b/imports/api/createTagCollection.ts @@ -32,7 +32,7 @@ import type CacheItem from './CacheItem'; import {type TagComputedFields, type TagNameFields} from './tags/TagDocument'; import type TagDocument from './tags/TagDocument'; import makeItem from './tags/makeItem'; -import type Publication from './publication/Publication'; +import type PublicationEndpoint from './publication/PublicationEndpoint'; import type TransactionDriver from './transaction/TransactionDriver'; import type Filter from './query/Filter'; import type Collection from './Collection'; @@ -43,6 +43,7 @@ import type UserQuery from './query/UserQuery'; import type UserFilter from './query/UserFilter'; import type Options from './query/Options'; import type Projection from './query/Projection'; +import observeSetChanges from './query/observeSetChanges'; export const STATS_SUFFIX = '.stats'; export const FIND_CACHE_SUFFIX = '.find.cache'; @@ -84,7 +85,7 @@ type TagCollectionOptions< publication: string; singlePublication: string; Parent: Collection

; - parentPublication: Publication<[UserQuery

]>; + parentPublication: PublicationEndpoint<[UserQuery

]>; key: string; }; @@ -171,14 +172,16 @@ const createTagCollection = < ...rest, }; - const isLoading = useSubscription(parentPublication, query); - const loading = isLoading(); + const isLoading = useSubscription(parentPublication, [query]); + const loadingSubscription = isLoading(); - const results = useCursor( + const {loading: loadingResults, results} = useCursor( () => Parent.find(filter as Selector

, options), [name, JSON.stringify(options)], ); + const loading = loadingSubscription || loadingResults; + return { loading, results, @@ -190,12 +193,12 @@ const createTagCollection = < name: statsPublication, authentication: AuthenticationLoggedIn, schema: schema.tuple([schema.string()]), - handle(name) { + async handle(name) { const uid = JSON.stringify({name, owner: this.userId}); const query = { [key]: {$elemMatch: {name}}, owner: this.userId, - } as Selector

; + } as Filter

; // We only include relevant fields const options = {fields: {_id: 1, [key]: 1}}; @@ -205,22 +208,28 @@ const createTagCollection = < // `observeChanges` only returns after the initial `added` callbacks have run. // Until then, we don't want to send a lot of `changed` messages—hence // tracking the `initializing` state. - const handle = Parent.find(query, options).observeChanges({ - added: () => { - count += 1; + const handle = await observeSetChanges( + Parent, + query, + options, + { + added: () => { + count += 1; - if (!initializing) { + if (!initializing) { + this.changed(stats, uid, {count}); + } + }, + + removed: () => { + count -= 1; this.changed(stats, uid, {count}); - } - }, + }, - removed: () => { - count -= 1; - this.changed(stats, uid, {count}); + // We don't care about `changed` events. }, - - // We don't care about `changed` events. - }); + (_fields) => ({}), + ); // Instead, we'll send one `added` message right after `observeChanges` has // returned, and mark the subscription as ready. @@ -231,20 +240,22 @@ const createTagCollection = < // Stop observing the cursor when the client unsubscribes. Stopping a // subscription automatically takes care of sending the client any `removed` // messages. 
- this.onStop(() => { - handle.stop(); + this.onStop(async (error?: Error) => { + await handle.emit('stop', error); }); }, }); const useTagStats = (name) => { - const isLoading = useSubscription(_statsPublication, name); - const loading = isLoading(); + const isLoading = useSubscription(_statsPublication, [name]); + const loadingSubscription = isLoading(); - const result = useItem(Stats, {name}, undefined, [name]); + const {loading: loadingResult, result} = useItem(Stats, {name}, undefined, [ + name, + ]); return { - loading, + loading: loadingSubscription || loadingResult, result, }; }; diff --git a/imports/api/endpoint/_dev/_disableRateLimiting.ts b/imports/api/endpoint/_dev/_disableRateLimiting.ts new file mode 100644 index 000000000..c164dad5b --- /dev/null +++ b/imports/api/endpoint/_dev/_disableRateLimiting.ts @@ -0,0 +1,9 @@ +import {Meteor} from 'meteor/meteor'; +import {Accounts} from 'meteor/accounts-base'; + +import isTest from '../../../app/isTest'; + +if (Meteor.isServer && isTest()) { + // @ts-expect-error Missing from type definitions. 
+ Accounts.removeDefaultRateLimit(); +} diff --git a/imports/api/endpoint/patients/update.ts b/imports/api/endpoint/patients/update.ts index 4f985577b..74e2c3d7b 100644 --- a/imports/api/endpoint/patients/update.ts +++ b/imports/api/endpoint/patients/update.ts @@ -1,7 +1,7 @@ import {AuthenticationLoggedIn} from '../../Authentication'; import schema from '../../../lib/schema'; -import {patientFields, Patients} from '../../collection/patients'; +import {patientUpdate, Patients} from '../../collection/patients'; import {computeUpdate, patients} from '../../patients'; import type TransactionDriver from '../../transaction/TransactionDriver'; @@ -13,7 +13,7 @@ const {sanitize, updateIndex, updateTags} = patients; export default define({ name: '/api/patients/update', authentication: AuthenticationLoggedIn, - schema: schema.tuple([schema.string(), patientFields.partial().strict()]), + schema: schema.tuple([schema.string(), patientUpdate.strict()]), async transaction(db: TransactionDriver, patientId, newfields) { const owner = this.userId; diff --git a/imports/api/events.ts b/imports/api/events.ts index 4e4dfdf06..b0524153c 100644 --- a/imports/api/events.ts +++ b/imports/api/events.ts @@ -11,12 +11,16 @@ import { } from './collection/consultations'; import {DEFAULT_DURATION_IN_MINUTES} from './consultations'; +import {documentDiffApply} from './update'; import {Patients} from './collection/patients'; import {type EventDocument, events} from './collection/events'; -import findOneSync from './publication/findOneSync'; -export const event = ( +import {type Options} from './transaction/TransactionDriver'; +import observeSetChanges from './query/observeSetChanges'; +import type Filter from './query/Filter'; + +export const event = async ( _id: string, { owner, @@ -28,8 +32,9 @@ export const event = ( doneDatetime, createdAt, }: Omit, -): EventDocument => { - const patient = findOneSync(Patients, {_id: patientId}); // TODO Make reactive (maybe)? 
+): Promise => { + // TODO Maybe some sort of joined view? + const patient = await Patients.findOneAsync({_id: patientId}); // TODO Make reactive (maybe)? assert(patient !== undefined); const begin = datetime; const end = isDone @@ -59,17 +64,31 @@ export const event = ( }; }; -export const publishEvents = function (query, options) { - const handle = Consultations.find(query, options).observe({ - added: ({_id, ...fields}) => { - this.added(events, _id, event(_id, fields)); +export const publishEvents = async function ( + this: Subscription, + query: Filter, + options: Options, +) { + const docs = new Map(); + const handle = await observeSetChanges(Consultations, query, options, { + added: async (_id, document: Omit) => { + docs.set(_id, document); + const entry = await event(_id, document); + this.added(events, _id, entry); }, - changed: ({_id, ...fields}) => { - this.changed(events, _id, event(_id, fields)); + changed: async (_id, changes) => { + const document = docs.get(_id); + assert(document !== undefined); + + const updatedDoc = documentDiffApply(document, changes); + + const entry = await event(_id, updatedDoc); + this.changed(events, _id, entry); }, - removed: ({_id}) => { + removed: (_id) => { + docs.delete(_id); this.removed(events, _id); }, }); @@ -80,8 +99,8 @@ export const publishEvents = function (query, options) { // Stop observing the cursor when the client unsubscribes. Stopping a // subscription automatically takes care of sending the client any `removed` // messages. 
- this.onStop(() => { - handle.stop(); + this.onStop(async (error?: Error) => { + await handle.emit('stop', error); }); }; diff --git a/imports/api/makeCachedFindOne.ts b/imports/api/makeCachedFindOne.ts index 621199c15..f056fab93 100644 --- a/imports/api/makeCachedFindOne.ts +++ b/imports/api/makeCachedFindOne.ts @@ -1,21 +1,22 @@ import {type DependencyList, useRef} from 'react'; -import type Publication from './publication/Publication'; +import type PublicationEndpoint from './publication/PublicationEndpoint'; import useSubscription from './publication/useSubscription'; import useItem from './publication/useItem'; import type Collection from './Collection'; import type Document from './Document'; import type UserQuery from './query/UserQuery'; import queryToSelectorOptionsPair from './query/queryToSelectorOptionsPair'; +import {documentDiff, documentDiffApply} from './update'; type ReturnValue = | {loading: boolean; found: false; fields: I & Partial} | {loading: boolean; found: true; fields: I & U}; const makeCachedFindOne = - ( + ( collection: Collection, - publication: Publication<[UserQuery]>, + publication: PublicationEndpoint<[UserQuery]>, ) => >( init: I, @@ -24,20 +25,26 @@ const makeCachedFindOne = ): ReturnValue => { const ref = useRef(init); - const isLoading = useSubscription(publication, query); - const loading = isLoading(); + const isLoading = useSubscription(publication, [query]); + const loadingSubscription = isLoading(); const [selector, options] = queryToSelectorOptionsPair(query); - const upToDate = useItem(loading ? 
null : collection, selector, options, [ - loading, - ...deps, - ]); - - const found = Boolean(upToDate); - const fields = {...ref.current, ...upToDate}; - ref.current = fields; - - return {loading, found, fields} as ReturnValue; + const { + loading: loadingResult, + found, + result: upToDate, + } = useItem(collection, selector, options, deps); + + if (upToDate !== undefined) { + const _diff = documentDiff(ref.current, upToDate); + ref.current = documentDiffApply(ref.current, _diff); + } + + return { + loading: loadingSubscription || loadingResult, + found, + fields: ref.current, + } as ReturnValue; }; export default makeCachedFindOne; diff --git a/imports/api/makeCachedFindOneOpt.ts b/imports/api/makeCachedFindOneOpt.ts deleted file mode 100644 index bd2075988..000000000 --- a/imports/api/makeCachedFindOneOpt.ts +++ /dev/null @@ -1,81 +0,0 @@ -import {type DependencyList, useState, useEffect} from 'react'; -import {type Meteor} from 'meteor/meteor'; - -import useRandom from '../ui/hooks/useRandom'; - -import type Publication from './publication/Publication'; -import subscribe, {type SubscriptionError} from './publication/subscribe'; -import type Options from './query/Options'; -import type Selector from './query/Selector'; -import type Collection from './Collection'; -import type Document from './Document'; - -type LiveQueryHandle = Meteor.LiveQueryHandle; - -/** - * WARNING: Does not work properly if used multiple times with the same - * parameters on the same page. 
- */ -const makeCachedFindOneOpt = - ( - collection: Collection, - publication: Publication<[Selector, Options]>, - ) => - ( - init: Partial, - selector: Selector, - options: Options, - deps: DependencyList, - ) => { - console.debug({init, query: selector, options, deps}); - - const [loading, setLoading] = useState(true); - const [found, setFound] = useState(false); - const [fields, setFields] = useState(init); - const [key, reset] = useRandom(); - - console.debug({loading, found, fields}); - - useEffect(() => { - setLoading(true); - setFound(false); - setFields(init); - let current = init; - - let queryHandle: LiveQueryHandle; - const handle = subscribe(publication, selector, options, { - onStop(e: SubscriptionError) { - console.debug('onStop()', {e, queryHandle}); - if (queryHandle) queryHandle.stop(); - else reset(); - }, - onReady() { - console.debug('onReady()'); - setLoading(false); - queryHandle = collection.find(selector, options).observeChanges({ - added(_id, upToDate) { - setFound(true); - current = {...init, ...upToDate}; - setFields(current); - }, - changed(_id, upToDate) { - current = {...current, ...upToDate}; - setFields(current); - }, - removed(_id) { - setFound(false); - }, - }); - }, - }); - - return () => { - console.debug('handle.stop()'); - handle.stop(); - }; - }, [key, ...deps]); - - return {loading, found, fields}; - }; - -export default makeCachedFindOneOpt; diff --git a/imports/api/makeDebouncedResultsQuery.ts b/imports/api/makeDebouncedResultsQuery.ts index 2225009a7..ece754075 100644 --- a/imports/api/makeDebouncedResultsQuery.ts +++ b/imports/api/makeDebouncedResultsQuery.ts @@ -1,29 +1,29 @@ import {type DependencyList, useEffect, useRef} from 'react'; -import type Publication from './publication/Publication'; -import useSubscription from './publication/useSubscription'; -import useCursor from './publication/useCursor'; +import type PublicationEndpoint from './publication/PublicationEndpoint'; import type Collection from 
'./Collection'; import type Document from './Document'; import type UserQuery from './query/UserQuery'; import queryToSelectorOptionsPair from './query/queryToSelectorOptionsPair'; +import useQuery from './publication/useQuery'; const init = []; const makeDebouncedResultsQuery = ( collection: Collection, - publication: Publication<[UserQuery]>, + publication: PublicationEndpoint<[UserQuery]>, ) => (query: UserQuery, deps: DependencyList) => { const lastValue = useRef(init); - const isLoading = useSubscription(publication, query); - const loading = isLoading(); - - const [selector, options] = queryToSelectorOptionsPair(query); - const currentValue = useCursor( - () => collection.find(selector, options), + const {loading, results: currentValue} = useQuery( + publication, + [query], + () => { + const [selector, options] = queryToSelectorOptionsPair(query); + return collection.find(selector, options); + }, deps, ); diff --git a/imports/api/makeFilteredCollection.ts b/imports/api/makeFilteredCollection.ts index 283031e42..5a512330d 100644 --- a/imports/api/makeFilteredCollection.ts +++ b/imports/api/makeFilteredCollection.ts @@ -14,6 +14,9 @@ import useSubscription from './publication/useSubscription'; import {AuthenticationLoggedIn} from './Authentication'; import {userFilter} from './query/UserFilter'; import type UserFilter from './query/UserFilter'; +import observeSetChanges from './query/observeSetChanges'; +import type Filter from './query/Filter'; +import {publishCursorObserver} from './publication/publishCursors'; const makeFilteredCollection = < S extends schema.ZodTypeAny, @@ -32,15 +35,15 @@ const makeFilteredCollection = < userFilter(tSchema).nullable(), options(tSchema).nullable(), ]), - handle( + async handle( publicationFilter: UserFilter> | null, publicationOptions: Options> | null, ) { - const selector = { + const scopedFilter = { ...filterSelector, ...publicationFilter, owner: this.userId, - } as Selector>; + } as Filter>; const options = { 
...filterOptions, @@ -49,22 +52,15 @@ const makeFilteredCollection = < limit: 0, }; - const handle = collection.find(selector, options).observeChanges({ - added: (_id, fields) => { - this.added(name, _id, fields); - }, + const handle = await observeSetChanges( + collection, + scopedFilter, + options, + publishCursorObserver(this, name), + ); - changed: (_id, fields) => { - this.changed(name, _id, fields); - }, - - removed: (_id) => { - this.removed(name, _id); - }, - }); - - this.onStop(() => { - handle.stop(); + this.onStop(async (error?: Error) => { + await handle.emit('stop', error); }); this.ready(); }, @@ -77,9 +73,13 @@ const makeFilteredCollection = < options: Options> | undefined = undefined, deps: DependencyList = [], ) => { - const isLoading = useSubscription(publication, null, options ?? null); - const loading = isLoading(); - const results = useCursor(() => Filtered.find(hookSelector, options), deps); + const isLoading = useSubscription(publication, [null, options ?? null]); + const loadingSubscription = isLoading(); + const {loading: loadingResults, results} = useCursor( + () => Filtered.find(hookSelector, options), + deps, + ); + const loading = loadingSubscription || loadingResults; return {loading, results}; }; }; diff --git a/imports/api/makeFindOne.ts b/imports/api/makeFindOne.ts index 2bfd69db7..b1dd7217b 100644 --- a/imports/api/makeFindOne.ts +++ b/imports/api/makeFindOne.ts @@ -1,6 +1,6 @@ import {type DependencyList} from 'react'; -import type Publication from './publication/Publication'; +import type PublicationEndpoint from './publication/PublicationEndpoint'; import useItem from './publication/useItem'; import useSubscription from './publication/useSubscription'; import type Collection from './Collection'; @@ -15,27 +15,30 @@ type ReturnValue = const makeFindOne = ( collection: Collection, - publication: Publication<[UserQuery]>, + publication: PublicationEndpoint<[UserQuery]>, ) => >( init: I, query: UserQuery, deps: DependencyList, ): 
ReturnValue => { - const isLoading = useSubscription(publication, query); - const loading = isLoading(); + const isLoading = useSubscription(publication, [query]); + const loadingSubscription = isLoading(); const [selector, options] = queryToSelectorOptionsPair(query); - const upToDate = useItem(loading ? null : collection, selector, options, [ - loading, - ...deps, - ]); - - const found = Boolean(upToDate); + const { + loading: loadingResult, + found, + result: upToDate, + } = useItem(collection, selector, options, deps); const fields = {...init, ...upToDate}; - return {loading, found, fields} as ReturnValue; + return { + loading: loadingSubscription || loadingResult, + found, + fields, + } as ReturnValue; }; export default makeFindOne; diff --git a/imports/api/makeObservedQueryHook.ts b/imports/api/makeObservedQueryHook.ts index dbb08f539..d67528d20 100644 --- a/imports/api/makeObservedQueryHook.ts +++ b/imports/api/makeObservedQueryHook.ts @@ -1,75 +1,84 @@ -import {type DependencyList, useEffect, useRef} from 'react'; +import {type DependencyList, useEffect, useRef, useState} from 'react'; -import useForceUpdate from '../ui/hooks/useForceUpdate'; import useChanged from '../ui/hooks/useChanged'; import type ObservedQueryCacheCollection from './ObservedQueryCacheCollection'; -import type Publication from './publication/Publication'; +import type PublicationEndpoint from './publication/PublicationEndpoint'; import subscribe from './publication/subscribe'; import type GenericQueryHook from './GenericQueryHook'; -import findOneSync from './publication/findOneSync'; import type UserQuery from './query/UserQuery'; -import type SubscriptionHandle from './publication/SubscriptionHandle'; import {type ObserveOptions} from './makeObservedQueryPublication'; +import stopSubscription from './publication/stopSubscription'; +import {subscription} from './publication/Subscription'; const makeObservedQueryHook = ( Collection: ObservedQueryCacheCollection, - publication: 
Publication<[string, UserQuery, ObserveOptions | null]>, + publication: PublicationEndpoint< + [string, UserQuery, ObserveOptions | null] + >, + observe: ObserveOptions | null = null, ): GenericQueryHook => - (query: UserQuery, deps: DependencyList) => { - const loading = useRef(true); - const results = useRef([]); - const dirty = useRef(false); - const handleRef = useRef(null); - const forceUpdate = useForceUpdate(); + (query: UserQuery | null, deps: DependencyList) => { + const [loading, setLoading] = useState(query !== null); + const [results, setResults] = useState([]); + const [dirty, setDirty] = useState(false); + const handleRef = useRef(null); const effectWillTrigger = useChanged(deps); - if (effectWillTrigger) { - // This is to make sure we return the correct values on first - // render. - // TODO Find a better way to do this. It may cause problems in - // future concurrent mode. - dirty.current = false; - loading.current = true; - } - useEffect(() => { - dirty.current = false; - loading.current = true; + if (query === null) { + setLoading(false); + setResults([]); + setDirty(false); + return; + } + + const id = {}; + handleRef.current = id; + setDirty(false); + setLoading(true); const timestamp = Date.now(); const key = JSON.stringify({timestamp, query}); - const handle = subscribe(publication, key, query, null, { - onStop() { - if (handleRef.current === handle) { - dirty.current = true; - loading.current = false; - forceUpdate(); - } - }, - onReady() { - if (handleRef.current === handle) { - results.current = findOneSync(Collection, {key})?.results ?? 
[]; - loading.current = false; - forceUpdate(); - } - }, - }); - handleRef.current = handle; + const handle = subscribe( + subscription(publication, [key, query, observe], { + onStop() { + if (handleRef.current === id) { + setDirty(true); + setLoading(false); + } + }, + async onReady() { + if (handleRef.current === id) { + const response = await Collection.findOneAsync({key}); + if (handleRef.current === id) { + setResults(response?.results ?? []); + setLoading(false); + } + } + }, + }), + ); return () => { - handle.stop(); + stopSubscription(handle); }; }, deps); - return { - loading: loading.current, - results: results.current, - dirty: dirty.current, - }; + return query === null + ? { + loading: false, + results: [], + dirty: false, + } + : { + loading: effectWillTrigger || loading, + results, + dirty: !effectWillTrigger && dirty, + }; }; export default makeObservedQueryHook; diff --git a/imports/api/makeObservedQueryPublication.ts b/imports/api/makeObservedQueryPublication.ts index 2c2ac97a6..ffc4fbc9a 100644 --- a/imports/api/makeObservedQueryPublication.ts +++ b/imports/api/makeObservedQueryPublication.ts @@ -2,14 +2,19 @@ import schema from '../lib/schema'; import type Collection from './Collection'; import type Document from './Document'; -import type ObserveChangesCallbacks from './ObserveChangesCallbacks'; +import type Filter from './query/Filter'; import queryToSelectorOptionsPair from './query/queryToSelectorOptionsPair'; import {userQuery} from './query/UserQuery'; import type UserQuery from './query/UserQuery'; +import watch from './query/watch'; +import {type Context} from './publication/Context'; +import {diffSequences} from './query/diffSequences'; +import type ObserveSequenceChangesCallbacks from './ObserveSequenceChangesCallbacks'; const observeOptions = schema .object({ - added: schema.boolean().optional(), + addedBefore: schema.boolean().optional(), + movedBefore: schema.boolean().optional(), removed: schema.boolean().optional(), changed: 
schema.boolean().optional(), }) @@ -28,58 +33,74 @@ const makeObservedQueryPublication = ( QueriedCollection: Collection, observedQueryCacheCollectionName: string, ) => - function (key: string, query: UserQuery, observe: ObserveOptions | null) { + async function ( + this: Context, + key: string, + query: UserQuery, + observe: ObserveOptions | null, + ) { let [selector, options] = queryToSelectorOptionsPair(query); selector = { ...selector, owner: this.userId, }; + const callbacks: ObserveOptions = { - added: true, + addedBefore: true, + movedBefore: true, removed: true, ...observe, }; + const uid = JSON.stringify({ key, selector, options, observe, }); - const results: T[] = []; - let initializing = true; const stop = () => { this.stop(); }; - const observers: ObserveChangesCallbacks = { - added(_id, fields) { - if (initializing) results.push({_id, ...fields} as unknown as T); - else if (callbacks.added) stop(); - }, - }; + // NOTE: We diff ids only if we do not care about change events. + const projection = callbacks.changed ? undefined : (_fields: T) => ({}); - if (callbacks.removed) observers.removed = stop; - if (callbacks.changed) observers.changed = stop; + const observer: ObserveSequenceChangesCallbacks = {}; - const handle = QueriedCollection.find(selector, options).observeChanges( - observers, + if (callbacks.addedBefore) observer.addedBefore = stop; + if (callbacks.movedBefore) observer.movedBefore = stop; + if (callbacks.removed) observer.removed = stop; + if (callbacks.changed) observer.changed = stop; + + const handle = await watch( + QueriedCollection, + selector as Filter, + options, ); - // Instead, we'll send one `added` message right after `observeChanges` has - // returned, and mark the subscription as ready. 
- initializing = false; - this.added(observedQueryCacheCollectionName, uid, { - key, - results, - }); - this.ready(); + handle.once('change').then( + (init) => { + handle.on('change', async (next) => { + return diffSequences(init, next, observer, projection); + }); + + this.added(observedQueryCacheCollectionName, uid, { + key, + results: init, + }); + this.ready(); + }, + (error: Error) => { + this.error(error); + }, + ); + + await handle.emit('start'); - // Stop observing the cursor when the client unsubscribes. Stopping a - // subscription automatically takes care of sending the client any `removed` - // messages. - this.onStop(() => { - handle.stop(); + // NOTE: Stop observing the cursor when the client unsubscribes. + this.onStop(async (error?: Error) => { + await handle.emit('stop', error); }); }; diff --git a/imports/api/makeQuery.ts b/imports/api/makeQuery.ts index 06d6c545d..ab1e4716f 100644 --- a/imports/api/makeQuery.ts +++ b/imports/api/makeQuery.ts @@ -1,8 +1,9 @@ +import assert from 'assert'; + import {type DependencyList} from 'react'; -import type Publication from './publication/Publication'; -import useSubscription from './publication/useSubscription'; -import useCursor from './publication/useCursor'; +import type PublicationEndpoint from './publication/PublicationEndpoint'; +import useQuery from './publication/useQuery'; import type Collection from './Collection'; import type Document from './Document'; @@ -12,14 +13,20 @@ import queryToSelectorOptionsPair from './query/queryToSelectorOptionsPair'; const makeQuery = ( collection: Collection, - publication: Publication<[UserQuery]>, + publication: PublicationEndpoint<[UserQuery]>, ) => - (query: UserQuery, deps: DependencyList) => { - const isLoading = useSubscription(publication, query); - const loading = isLoading(); - const [selector, options] = queryToSelectorOptionsPair(query); - const results = useCursor(() => collection.find(selector, options), deps); - return {loading, results}; + (query: 
UserQuery | null, deps: DependencyList) => { + return useQuery( + publication, + [query], + () => { + assert(query !== null); + const [selector, options] = queryToSelectorOptionsPair(query); + return collection.find(selector, options); + }, + deps, + query !== null, + ); }; export default makeQuery; diff --git a/imports/api/patients/virtualFields.ts b/imports/api/patients/virtualFields.ts index d76fd01e4..1a24d93c5 100644 --- a/imports/api/patients/virtualFields.ts +++ b/imports/api/patients/virtualFields.ts @@ -1,7 +1,9 @@ import {type PatientDocument} from '../collection/patients'; import eidParseBirthdate from '../eidParseBirthdate'; -const virtualFields = (patient: PatientDocument) => { +const virtualFields = ( + patient: Omit & {deathdate?: Date | null}, +) => { const birthdate = eidParseBirthdate(patient.birthdate ?? ''); const deathdateModifiedAt = patient.deathdateModifiedAt ?? undefined; const deathdateLegal = patient.deathdate ?? undefined; diff --git a/imports/api/publication/Publication.ts b/imports/api/publication/Publication.ts deleted file mode 100644 index 2d5068da4..000000000 --- a/imports/api/publication/Publication.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type Args from '../Args'; - -type Publication<_ extends Args> = { - readonly name: string; -}; - -export default Publication; diff --git a/imports/api/publication/PublicationEndpoint.ts b/imports/api/publication/PublicationEndpoint.ts new file mode 100644 index 000000000..21536e0cf --- /dev/null +++ b/imports/api/publication/PublicationEndpoint.ts @@ -0,0 +1,7 @@ +import type Args from '../Args'; + +type PublicationEndpoint<_ extends Args> = { + readonly name: string; +}; + +export default PublicationEndpoint; diff --git a/imports/api/publication/Subscription.ts b/imports/api/publication/Subscription.ts index 7dd6ecc97..47b2ec277 100644 --- a/imports/api/publication/Subscription.ts +++ b/imports/api/publication/Subscription.ts @@ -1,5 +1,16 @@ -import {type Subscription as 
MeteorPublicationThisType} from 'meteor/meteor'; +import type Args from '../Args'; -type Subscription = MeteorPublicationThisType & {userId: string}; +import type PublicationEndpoint from './PublicationEndpoint'; +import type SubscriptionCallbacks from './SubscriptionCallbacks'; -export default Subscription; +export type Subscription = { + publication: PublicationEndpoint; + args: A; + callbacks?: SubscriptionCallbacks; +}; + +export const subscription = ( + publication: PublicationEndpoint, + args: A, + callbacks?: SubscriptionCallbacks, +): Subscription => ({publication, args, callbacks}); diff --git a/imports/api/publication/SubscriptionCallbacks.ts b/imports/api/publication/SubscriptionCallbacks.ts new file mode 100644 index 000000000..1a1a75b59 --- /dev/null +++ b/imports/api/publication/SubscriptionCallbacks.ts @@ -0,0 +1,14 @@ +import type SubscriptionError from './SubscriptionError'; +import {type SubscriptionId} from './subscriptionId'; + +type SubscriptionCallbacks = { + onSubscribe?: (id: SubscriptionId) => Promise | void; + onLoading?: (id: SubscriptionId) => Promise | void; + onReady?: (id: SubscriptionId) => Promise | void; + onStop?: ( + id: SubscriptionId, + error?: SubscriptionError, + ) => Promise | void; +}; + +export default SubscriptionCallbacks; diff --git a/imports/api/publication/SubscriptionError.ts b/imports/api/publication/SubscriptionError.ts new file mode 100644 index 000000000..b1f89ab09 --- /dev/null +++ b/imports/api/publication/SubscriptionError.ts @@ -0,0 +1,5 @@ +import {type Meteor} from 'meteor/meteor'; + +type SubscriptionError = Meteor.Error; + +export default SubscriptionError; diff --git a/imports/api/publication/SubscriptionHandle.ts b/imports/api/publication/SubscriptionHandle.ts deleted file mode 100644 index 489b3d3ff..000000000 --- a/imports/api/publication/SubscriptionHandle.ts +++ /dev/null @@ -1,5 +0,0 @@ -import {type Meteor} from 'meteor/meteor'; - -type SubscriptionHandle = Meteor.SubscriptionHandle; - -export 
default SubscriptionHandle; diff --git a/imports/api/publication/SubscriptionRegistryEntry.ts b/imports/api/publication/SubscriptionRegistryEntry.ts new file mode 100644 index 000000000..fdc20560e --- /dev/null +++ b/imports/api/publication/SubscriptionRegistryEntry.ts @@ -0,0 +1,14 @@ +import type SubscriptionError from './SubscriptionError'; +import {type SubscriptionId} from './subscriptionId'; + +type SubscriptionRegistryEntry = { + id: SubscriptionId; + key: string; + onReady?: (id: SubscriptionId) => Promise | void; + onStop?: ( + id: SubscriptionId, + error?: SubscriptionError, + ) => Promise | void; +}; + +export default SubscriptionRegistryEntry; diff --git a/imports/api/publication/consultations/stats.ts b/imports/api/publication/consultations/stats.ts index 3c521de0a..606d5f1ae 100644 --- a/imports/api/publication/consultations/stats.ts +++ b/imports/api/publication/consultations/stats.ts @@ -14,10 +14,10 @@ export default define({ name: stats, authentication: AuthenticationLoggedIn, schema: schema.tuple([userFilter(consultationDocument)]), - handle(filter: UserFilter) { + async handle(filter: UserFilter) { const collection = stats; - const handle = setupConsultationsStatsPublication.call( + const handle = await setupConsultationsStatsPublication.call( this, collection, filter, @@ -27,8 +27,8 @@ export default define({ // Stop observing the cursor when the client unsubscribes. Stopping a // subscription automatically takes care of sending the client any `removed` // messages. 
- this.onStop(() => { - handle.stop(); + this.onStop(async (error?: Error) => { + await handle.emit('stop', error); }); }, }); diff --git a/imports/api/publication/define.ts b/imports/api/publication/define.ts index 90633024f..c4ce6ed88 100644 --- a/imports/api/publication/define.ts +++ b/imports/api/publication/define.ts @@ -2,8 +2,8 @@ import assert from 'assert'; import {Meteor} from 'meteor/meteor'; -import {map} from '@iterable-iterator/map'; -import {sum} from '@iterable-iterator/reduce'; +import {filter} from '@iterable-iterator/filter'; +import {next} from '@iterable-iterator/next'; import authorized from '../authorized'; import {type Authentication} from '../Authentication'; @@ -11,12 +11,12 @@ import type InferArgs from '../InferArgs'; import type ArgsSchema from '../ArgsSchema'; import type Params from './Params'; -import type Publication from './Publication'; -import PublicationError from './PublicationError'; +import type PublicationEndpoint from './PublicationEndpoint'; +import type Cursor from './Cursor'; +import publishCursors from './publishCursors'; -// TODO early branch out -const exactlyOne = (array: any[]) => - sum(map((x: any) => (x ? 1 : 0), array)) === 1; +import PublicationError from './PublicationError'; +import {type Context} from './Context'; const define = < S extends ArgsSchema, @@ -29,43 +29,81 @@ const define = < authentication, schema, ...rest -}: Params): Publication => { +}: Params): PublicationEndpoint => { if (Meteor.isServer) { // @ts-expect-error Ignore this for now. - const fns = [rest.cursor, rest.cursors, rest.handle]; - assert(exactlyOne(fns)); - for (const fn of fns) { - if (fn) { - Meteor.publish(name, function (...args) { - if (!authorized(authentication, this)) { - this.ready(); - return; - } - - let parsedArgs: A; - - try { - schema.parse(args); - parsedArgs = args as A; // TODO Use parsed value once it does not reorder object keys. 
- } catch (error: unknown) { - console.debug({ - publication: name, - args: JSON.stringify(args), - error, - }); - throw new PublicationError( - 'schema validation of publication args failed', - ); - } - - return Reflect.apply(fn, this, parsedArgs); - }); - break; + const {cursor, cursors, handle} = rest; + const defined = filter( + (x: unknown) => x !== undefined, + [cursor, cursors, handle], + ); + const callback = next(defined); + assert(next(defined, null) === null); + + const postProcess = _getPostProcess({cursor, cursors, handle}, callback); + + Meteor.publish(name, async function (...args) { + if (!authorized(authentication, this)) { + this.ready(); + return; } - } + + const parsedArgs = _parse(name, schema, args); + + const result = await Reflect.apply(callback, this, parsedArgs); + return postProcess(this, result); + }); } return {name}; }; +const _getPostProcess = ( + {cursor, cursors, handle}: Record, + callback: T, +) => { + switch (callback) { + case cursor: { + return postProcessCursor; + } + + case cursors: { + return postProcessCursors; + } + + default: { + assert(callback === handle); + return postProcessHandle; + } + } +}; + +const _parse = = InferArgs>( + name: string, + schema: S, + args: unknown[], +): A => { + try { + schema.parse(args); + return args as A; // TODO Use parsed value once it does not reorder object keys. 
+ } catch (error: unknown) { + console.debug({ + publication: name, + args: JSON.stringify(args), + error, + }); + throw new PublicationError('schema validation of publication args failed'); + } +}; + +const postProcessCursor = async ( + context: Context, + cursor: Cursor, +) => publishCursors(context, [cursor]); +const postProcessCursors = async ( + context: Context, + cursors: Array>, +) => publishCursors(context, cursors); +const postProcessHandle = (_context: Context, result: any) => result; + export default define; diff --git a/imports/api/publication/events/intersects.ts b/imports/api/publication/events/intersects.ts index 697af0ccc..e4dc5edfd 100644 --- a/imports/api/publication/events/intersects.ts +++ b/imports/api/publication/events/intersects.ts @@ -4,13 +4,14 @@ import schema from '../../../lib/schema'; import {AuthenticationLoggedIn} from '../../Authentication'; import {intersectsInterval, publishEvents} from '../../events'; import define from '../define'; +import PublicationError from '../PublicationError'; export default define({ name: 'events.intersects', authentication: AuthenticationLoggedIn, schema: schema.tuple([schema.date(), schema.date()]), - handle(begin, end) { - if (isAfter(begin, end)) throw new Error('begin is after end'); + async handle(begin, end) { + if (isAfter(begin, end)) throw new PublicationError('begin is after end'); const query = { ...intersectsInterval(begin, end), @@ -32,6 +33,6 @@ export default define({ }, }; - publishEvents.call(this, query, options); + return publishEvents.call(this, query, options); }, }); diff --git a/imports/api/publication/events/interval.ts b/imports/api/publication/events/interval.ts index cd6803323..dc3e9884a 100644 --- a/imports/api/publication/events/interval.ts +++ b/imports/api/publication/events/interval.ts @@ -7,7 +7,7 @@ export default define({ name: 'events.interval', authentication: AuthenticationLoggedIn, schema: schema.tuple([schema.date(), schema.date()]), - handle(begin, end) { + 
async handle(begin, end) { const query = { ...beginsInInterval(begin, end), owner: this.userId, @@ -28,6 +28,6 @@ export default define({ }, }; - publishEvents.call(this, query, options); + return publishEvents.call(this, query, options); }, }); diff --git a/imports/api/publication/patient/noShows.ts b/imports/api/publication/patient/noShows.ts index b6547d74f..3b1a69d15 100644 --- a/imports/api/publication/patient/noShows.ts +++ b/imports/api/publication/patient/noShows.ts @@ -7,16 +7,17 @@ import {noShows, type State} from '../../collection/noShows'; import define from '../define'; import {AuthenticationLoggedIn} from '../../Authentication'; import schema from '../../../lib/schema'; +import observeSetChanges from '../../query/observeSetChanges'; export default define({ name: 'patient.noShows', authentication: AuthenticationLoggedIn, schema: schema.tuple([schema.string()]), - handle(patientId) { + async handle(patientId) { const Collection = Appointments; const collection = noShows; const key = patientId; - const selector = { + const filter = { owner: this.userId, patientId, isDone: false, @@ -29,23 +30,29 @@ export default define({ const state = (): State => ({count}); let initializing = true; - const handle = Collection.find(selector, options).observeChanges({ - added: (_id, _fields) => { - count += 1; - if (!initializing) { + const handle = await observeSetChanges( + Collection, + filter, + options, + { + added: (_id, _fields) => { + count += 1; + if (!initializing) { + this.changed(collection, key, state()); + } + }, + removed: (_id) => { + count -= 1; this.changed(collection, key, state()); - } + }, }, - removed: (_id) => { - count -= 1; - this.changed(collection, key, state()); - }, - }); + (_fields) => ({}), + ); initializing = false; this.added(collection, key, state()); - this.onStop(() => { - handle.stop(); + this.onStop(async (error?: Error) => { + await handle.emit('stop', error); }); this.ready(); }, diff --git 
a/imports/api/publication/publishCursors.tests.ts b/imports/api/publication/publishCursors.tests.ts new file mode 100644 index 000000000..13aacddf0 --- /dev/null +++ b/imports/api/publication/publishCursors.tests.ts @@ -0,0 +1,247 @@ +import {chain} from '@iterable-iterator/chain'; +import {filter} from '@iterable-iterator/filter'; +import {map} from '@iterable-iterator/map'; +import {list} from '@iterable-iterator/list'; + +import {assert} from 'chai'; + +import {randomUserId, server} from '../../_test/fixtures'; +import {newPatient} from '../_dev/populate/patients'; +import {Patients} from '../collection/patients'; + +import type Document from '../Document'; +import {newConsultation} from '../_dev/populate/consultations'; +import {Consultations} from '../collection/consultations'; +import {Appointments} from '../collection/appointments'; +import {Attachments} from '../collection/attachments'; + +import publishCursors from './publishCursors'; +import {type Context} from './Context'; + +type ReadyCall = ['ready']; +type StopCall = ['stop']; +type OnStopCall = ['onStop', () => Promise | void]; +type UnblockCall = ['unblock']; +type ErrorCall = ['error', Error]; +type AddedCall = ['added', string, string, Document]; +type ChangedCall = ['changed', string, string, Document]; +type RemovedCall = ['removed', string, string]; + +type Call = + | ReadyCall + | StopCall + | OnStopCall + | UnblockCall + | ErrorCall + | AddedCall + | ChangedCall + | RemovedCall; + +type ContextState = { + calls: { + push: (call: Call) => void; + all: () => Call[]; + ready: () => ReadyCall[]; + stop: () => StopCall[]; + onStop: () => OnStopCall[]; + unblock: () => UnblockCall[]; + error: () => ErrorCall[]; + added: () => AddedCall[]; + changed: () => ChangedCall[]; + removed: () => RemovedCall[]; + }; +}; + +const makeMockedContextState = (): ContextState => { + const _calls: Call[] = []; + return { + calls: { + all: () => list(_calls), + ready: () => list(filter(([method]) => method === 
'ready', _calls)), + stop: () => list(filter(([method]) => method === 'stop', _calls)), + onStop: () => list(filter(([method]) => method === 'onStop', _calls)), + unblock: () => list(filter(([method]) => method === 'unblock', _calls)), + error: () => list(filter(([method]) => method === 'error', _calls)), + added: () => list(filter(([method]) => method === 'added', _calls)), + changed: () => list(filter(([method]) => method === 'changed', _calls)), + removed: () => list(filter(([method]) => method === 'removed', _calls)), + push(call: Call) { + _calls.push(call); + }, + }, + }; +}; + +const makeMockedContext = ( + state: ContextState, + userId: string | null, +): Context => { + return { + connection: { + id: '', + clientAddress: '', + httpHeaders: {}, + _subscriptions: {}, + // eslint-disable-next-line @typescript-eslint/no-empty-function + close() {}, + // eslint-disable-next-line @typescript-eslint/no-empty-function + onClose() {}, + }, + userId, + error(error: Error) { + state.calls.push(['error', error]); + }, + ready() { + state.calls.push(['ready']); + }, + stop() { + state.calls.push(['stop']); + }, + onStop(callback: () => Promise | void) { + state.calls.push(['onStop', callback]); + }, + unblock() { + state.calls.push(['unblock']); + }, + added(collection: string, id: string, fields: Document) { + // TODO: Cover async callbacks. 
+ state.calls.push(['added', collection, id, fields]); + }, + changed(collection: string, id: string, fields: Document) { + state.calls.push(['changed', collection, id, fields]); + }, + removed(collection: string, id: string) { + state.calls.push(['removed', collection, id]); + }, + }; +}; + +server(__filename, () => { + it('should handle one cursor', async () => { + const userId = randomUserId(); + await newPatient({userId}); + await newPatient({userId}); + await newPatient({userId}); + const filter = {}; + const cursor = Patients.find(filter); + const patients = await Patients.find(filter).fetchAsync(); + + const state = makeMockedContextState(); + const context = makeMockedContext(state, userId); + await publishCursors(context, [cursor]); + + const {calls} = state; + assert.lengthOf(calls.onStop(), 1); + assert.lengthOf(calls.ready(), 1); + + assert.deepEqual(calls.error(), []); + assert.deepEqual(calls.stop(), []); + + assert.deepEqual(calls.unblock(), []); + + assert.deepEqual(calls.removed(), []); + assert.deepEqual(calls.changed(), []); + assert.sameDeepMembers( + calls.added(), + list( + chain( + map(({_id, ...rest}) => ['added', 'patients', _id, rest], patients), + ), + ), + ); + }); + + it('should handle two cursors', async () => { + const userId = randomUserId(); + + const patientId = await newPatient({userId}); + await newPatient({userId}); + await newPatient({userId}); + const filter = {}; + + const patientsCursor = Patients.find(filter); + const patients = await Patients.find(filter).fetchAsync(); + + await newConsultation({userId}, {patientId}); + await newConsultation({userId}, {patientId}); + + const consultationsCursor = Consultations.find(filter); + const consultations = await Consultations.find(filter).fetchAsync(); + + const state = makeMockedContextState(); + const context = makeMockedContext(state, userId); + await publishCursors(context, [patientsCursor, consultationsCursor]); + + const {calls} = state; + assert.lengthOf(calls.onStop(), 2); 
+ assert.lengthOf(calls.ready(), 1); + + assert.deepEqual(calls.error(), []); + assert.deepEqual(calls.stop(), []); + + assert.deepEqual(calls.unblock(), []); + + assert.deepEqual(calls.removed(), []); + assert.deepEqual(calls.changed(), []); + assert.sameDeepMembers( + calls.added(), + list( + chain( + map(({_id, ...rest}) => ['added', 'patients', _id, rest], patients), + map( + ({_id, ...rest}) => ['added', 'consultations', _id, rest], + consultations, + ), + ), + ), + ); + }); + + it('should error if cursors cannot be merged', async () => { + const filter = {}; + const a = Consultations.find(filter); + const b = Appointments.find(filter); + + const state = makeMockedContextState(); + const context = makeMockedContext(state, null); + await publishCursors(context, [a, b]); + + const {calls} = state; + assert.lengthOf(calls.error(), 1); + + assert.deepEqual(calls.onStop(), []); + assert.deepEqual(calls.ready(), []); + + assert.deepEqual(calls.stop(), []); + + assert.deepEqual(calls.unblock(), []); + + assert.deepEqual(calls.removed(), []); + assert.deepEqual(calls.changed(), []); + assert.deepEqual(calls.added(), []); + }); + + it('should not error if cursors can be merged', async () => { + const filter = {}; + const a = Patients.find(filter); + const b = Appointments.find(filter); + const c = Attachments.find(filter); + + const state = makeMockedContextState(); + const context = makeMockedContext(state, null); + await publishCursors(context, [a, b, c]); + + const {calls} = state; + assert.lengthOf(calls.onStop(), 3); + assert.lengthOf(calls.ready(), 1); + + assert.deepEqual(calls.error(), []); + assert.deepEqual(calls.stop(), []); + + assert.deepEqual(calls.unblock(), []); + + assert.deepEqual(calls.removed(), []); + assert.deepEqual(calls.changed(), []); + assert.deepEqual(calls.added(), []); + }); +}); diff --git a/imports/api/publication/publishCursors.ts b/imports/api/publication/publishCursors.ts new file mode 100644 index 000000000..a59ddaec8 --- /dev/null 
+++ b/imports/api/publication/publishCursors.ts @@ -0,0 +1,97 @@ +import observeSetChanges from '../query/observeSetChanges'; +import type Filter from '../query/Filter'; +import type Document from '../Document'; + +import duplicates from '../../lib/iterable-iterator/duplicates'; +import unique from '../../lib/iterable-iterator/unique'; + +import {getCollection} from '../collection/registry'; + +import type ObserveSetChangesCallbacks from '../ObserveSetChangesCallbacks'; + +import {type Context} from './Context'; +import type Cursor from './Cursor'; + +const _assertCursorsCanBeMerged = ( + cursors: Array>, +): void => { + const collections = cursors.map((cursor) => cursor._getCollectionName()); + const duplicated = Array.from(unique(duplicates(collections))); + if (duplicated.length > 0) { + throw new Error( + `Publish function returned multiple cursors for collections in ${JSON.stringify( + duplicated, + )}`, + ); + } +}; + +const publishCursors = async ( + subscription: Context, + cursors: Array>, +): Promise => { + try { + _assertCursorsCanBeMerged(cursors); + } catch (error: unknown) { + subscription.error(error as Error); + return; + } + + return _publishCursors(subscription, cursors); +}; + +const _publishCursors = async ( + subscription: Context, + cursors: Array>, +): Promise => { + try { + const pipes = await Promise.all( + cursors.map(async (cursor) => _pipe(subscription, cursor)), + ); + + for (const pipe of pipes) { + subscription.onStop(async (error?: Error) => pipe.emit('stop', error)); + } + + subscription.ready(); + } catch (error: unknown) { + subscription.error(error as Error); + } +}; + +const _pipe = async ( + subscription: Context, + cursor: Cursor, +) => { + const collection = cursor._getCollectionName(); + const QueriedCollection = getCollection(collection); + const { + _cursorDescription: {selector, options}, + } = cursor; + + const filter = selector as Filter; + + return observeSetChanges( + QueriedCollection, + filter, + options, + 
publishCursorObserver(subscription, collection), + ); +}; + +export const publishCursorObserver = ( + subscription: Context, + collection: string, +): ObserveSetChangesCallbacks => ({ + added(id, fields) { + subscription.added(collection, id, fields); + }, + changed(id, fields) { + subscription.changed(collection, id, fields); + }, + removed(id) { + subscription.removed(collection, id); + }, +}); + +export default publishCursors; diff --git a/imports/api/publication/stats/frequencyBySex.ts b/imports/api/publication/stats/frequencyBySex.ts index c33204972..9cac59f2f 100644 --- a/imports/api/publication/stats/frequencyBySex.ts +++ b/imports/api/publication/stats/frequencyBySex.ts @@ -1,3 +1,5 @@ +import assert from 'assert'; + import schema from '../../../lib/schema'; import {AuthenticationLoggedIn} from '../../Authentication'; import { @@ -7,10 +9,11 @@ import { } from '../../collection/consultations'; import {Patients} from '../../collection/patients'; import {countCollection, type PollResult} from '../../collection/stats'; -import type Selector from '../../query/Selector'; import define from '../define'; import {userFilter} from '../../query/UserFilter'; import type UserFilter from '../../query/UserFilter'; +import observeSetChanges from '../../query/observeSetChanges'; +import type Filter from '../../query/Filter'; export const frequencySexKey = (query) => `frequencySex-${JSON.stringify(query ?? 
{})}`; @@ -24,26 +27,33 @@ export type GenderCount = { undefined?: number; }; +type Consultation = string; +type Patient = { + consultations: Set; + sex: keyof GenderCount; + removed: boolean; +}; + export default define({ name: frequencySexPublication, authentication: AuthenticationLoggedIn, schema: schema.tuple([userFilter(consultationDocument).nullable()]), - handle(filter: UserFilter | null) { + async handle(filter: UserFilter | null) { const collection = countCollection; const key = frequencySexKey(filter); const selector = { ...filter, isDone: true, owner: this.userId, - } as Selector; + } as Filter; const options = { fields: { patientId: 1, }, }; let total = 0; - const refs = new Map(); - const pRefs = new Map(); + const cRefs = new Map(); + const pRefs = new Map(); const count: [GenderCount, ...GenderCount[]] = [{}]; const state = (): PollResult => ({ @@ -51,30 +61,29 @@ export default define({ count, }); - const inc = (patientId: string | undefined) => { - if (patientId === undefined || !pRefs.has(patientId)) - throw new Error(`inc: patientId ${patientId} does not exist`); - const patient = pRefs.get(patientId)!; - count[patient.freq]![patient.sex ?? 'undefined'] -= 1; - patient.freq += 1; - if (count[patient.freq] === undefined) count[patient.freq] = {}; - if (count[patient.freq]![patient.sex ?? 'undefined'] === undefined) - count[patient.freq]![patient.sex ?? 'undefined'] = 0; - // eslint-disable-next-line @typescript-eslint/restrict-plus-operands - count[patient.freq]![patient.sex ?? 'undefined'] += 1; + const _erase = (freq: number, sex: keyof GenderCount) => { + const {[sex]: current, ...rest} = count[freq]!; + assert(current !== undefined); + count[freq] = current === 1 ? 
rest : {[sex]: current - 1, ...rest}; }; - const dec = (patientId: string | undefined) => { - if (patientId === undefined || !pRefs.has(patientId)) - throw new Error(`dec: patientId ${patientId} does not exist`); - const patient = pRefs.get(patientId)!; - count[patient.freq]![patient.sex ?? 'undefined'] -= 1; - patient.freq -= 1; - if (count[patient.freq] === undefined) count[patient.freq] = {}; - if (count[patient.freq]![patient.sex ?? 'undefined'] === undefined) - count[patient.freq]![patient.sex ?? 'undefined'] = 0; + const _record = (freq: number, sex: string) => { + if (count[freq] === undefined) count[freq] = {}; + if (count[freq]![sex] === undefined) count[freq]![sex] = 0; // eslint-disable-next-line @typescript-eslint/restrict-plus-operands - count[patient.freq]![patient.sex ?? 'undefined'] += 1; + count[freq]![sex] += 1; + }; + + const inc = (patient: Patient) => { + total += 1; + _erase(patient.consultations.size, patient.sex); + _record(patient.consultations.size + 1, patient.sex); + }; + + const dec = (patient: Patient) => { + total -= 1; + _erase(patient.consultations.size, patient.sex); + _record(patient.consultations.size - 1, patient.sex); }; let initializing = true; @@ -84,66 +93,114 @@ export default define({ } }; - const pHandle = Patients.find( + const _ensurePatient = (patientId: string) => { + const existing = pRefs.get(patientId); + if (existing !== undefined) return existing; + + const removed: Patient = { + consultations: new Set(), + sex: 'undefined', + removed: true, + }; + pRefs.set(patientId, removed); + return removed; + }; + + const addConsultation = (patientId: string, consultationId: string) => { + const patient = _ensurePatient(patientId); + if (!patient.removed) inc(patient); + patient.consultations.add(consultationId); + cRefs.set(consultationId, patientId); + }; + + const removeConsultation = (patientId: string, consultationId: string) => { + const patient = pRefs.get(patientId); + assert(patient !== undefined); + if 
(!patient.removed) dec(patient); + patient.consultations.delete(consultationId); + cRefs.delete(consultationId); + }; + + const pHandle = await observeSetChanges( + Patients, {owner: this.userId}, {fields: {sex: 1}}, - ).observeChanges({ - added(_id, {sex}) { - const sexKey = `${sex}`; - pRefs.set(_id, {freq: 0, sex: sexKey}); - if (count[0][sexKey] === undefined) count[0][sexKey] = 0; - // eslint-disable-next-line @typescript-eslint/restrict-plus-operands - count[0][sexKey] += 1; - commit(); - }, - changed(_id, {sex}) { - const {freq, sex: prev} = pRefs.get(_id)!; - const prevKey = `${prev}`; - count[freq]![prevKey] -= 1; - const sexKey = `${sex}`; - if (count[freq]![sexKey] === undefined) count[freq]![sexKey] = 0; - // eslint-disable-next-line @typescript-eslint/restrict-plus-operands - count[freq]![sexKey] += 1; - pRefs.set(_id, {freq, sex: sexKey}); - commit(); - }, - removed(_id) { - pRefs.delete(_id); - // everything should be commited by the consultations observer - }, - }); + { + added(_id, {sex}) { + const sexKey: keyof GenderCount = `${sex}`; + const previous = pRefs.get(_id); + assert(previous === undefined || previous.removed); + const consultations = previous?.consultations ?? 
new Set(); + pRefs.set(_id, {consultations, sex: sexKey, removed: false}); + + _record(consultations.size, sexKey); + + total += consultations.size; + commit(); + }, + changed(_id, {sex}) { + const {consultations, sex: prevKey, removed} = pRefs.get(_id)!; + assert(!removed); + const freq = consultations.size; + _erase(freq, prevKey); + const sexKey: keyof GenderCount = `${sex}`; + _record(freq, sexKey); + pRefs.set(_id, {consultations, sex: sexKey, removed: false}); + commit(); + }, + removed(_id) { + const patient = pRefs.get(_id); + assert(patient !== undefined); + assert(!patient.removed); + const sexKey = patient.sex; + if (patient.consultations.size === 0) { + _erase(0, sexKey); + pRefs.delete(_id); + } else { + pRefs.set(_id, {...patient, removed: true}); + total -= patient.consultations.size; + _erase(patient.consultations.size, patient.sex); + } - const cHandle = Consultations.find(selector, options).observeChanges({ - added(_id, {patientId}) { - if (patientId === undefined) - throw new Error( - `added: consultation ${_id} is not linked to a patient.`, - ); - total += 1; - inc(patientId); - refs.set(_id, patientId); - commit(); + commit(); + }, }, + ({sex}) => ({sex}), + ); - // changed: ... // TODO We assume a consultation does not change - // patientId. Handle that. + const cHandle = await observeSetChanges( + Consultations, + selector, + options, + { + added(_id, {patientId}) { + assert(patientId !== undefined); + addConsultation(patientId, _id); + commit(); + }, - removed(_id) { - total -= 1; - const patientId = refs.get(_id)!; - dec(patientId); - refs.delete(_id); - commit(); + // changed: ... // TODO We assume a consultation does not change + // patientId. Handle that. 
+ + removed(_id) { + const patientId = cRefs.get(_id); + assert(patientId !== undefined); + removeConsultation(patientId, _id); + commit(); + }, }, - }); + ({patientId}) => ({patientId}), + ); initializing = false; this.added(collection, key, state()); this.ready(); - this.onStop(() => { - cHandle.stop(); - pHandle.stop(); + this.onStop(async (error?: Error) => { + await Promise.all([ + cHandle.emit('stop', error), + pHandle.emit('stop', error), + ]); }); }, }); diff --git a/imports/api/publication/stopSubscription.ts b/imports/api/publication/stopSubscription.ts new file mode 100644 index 000000000..37340e85d --- /dev/null +++ b/imports/api/publication/stopSubscription.ts @@ -0,0 +1,51 @@ +import assert from 'assert'; + +import {defer, type Deferred} from '../../lib/async/defer'; + +import type SubscriptionRegistryEntry from './SubscriptionRegistryEntry'; +import {get, set} from './subscriptionRegistry'; + +const _gcQueue = new Map(); + +const stopSubscription = ( + {id, key, onReady, onStop}: SubscriptionRegistryEntry, + delay = 0, +) => { + const entry = get(key); + if (entry === undefined) { + return; + } + + --entry.refCount; + assert(entry.refCount >= 0, `Negative refCount for ${key}.`); + if (onReady !== undefined) entry.onReady.delete(id); + if (onStop !== undefined) { + entry.onStop.delete(id); + const maybePromise = onStop(id); + if (maybePromise instanceof Promise) { + maybePromise.catch((error: unknown) => { + console.error({error}); + }); + } + } + + if (entry.refCount === 0) { + const sub = entry.internals; + sub.inactive = true; + const prev = _gcQueue.get(sub.id); + if (prev !== undefined) prev.cancel(); + + const next = defer(() => { + if (sub.inactive) { + set(key, undefined); + entry.handle.stop(); + } + + _gcQueue.delete(sub.id); + }, delay); + + _gcQueue.set(sub.id, next); + } +}; + +export default stopSubscription; diff --git a/imports/api/publication/subscribe.ts b/imports/api/publication/subscribe.ts index 0456bcf2f..0eba76453 100644 --- 
a/imports/api/publication/subscribe.ts +++ b/imports/api/publication/subscribe.ts @@ -1,19 +1,90 @@ import {Meteor} from 'meteor/meteor'; +import {map} from '@iterable-iterator/map'; + import type Args from '../Args'; -import type Publication from './Publication'; +import type SubscriptionRegistryEntry from './SubscriptionRegistryEntry'; +import subscriptionInternals from './subscriptionInternals'; +import {get, identify, set} from './subscriptionRegistry'; +import type SubscriptionError from './SubscriptionError'; +import {subscriptionId} from './subscriptionId'; +import {type Subscription} from './Subscription'; + +const _runCallbacks = ( + run: (callback: [K, (...args: A) => R]) => R, + callbacks: Iterable<[K, (...a: A) => R]>, +) => { + Promise.allSettled(map(run, callbacks)) + .then((outcomes) => { + for (const outcome of outcomes) { + if (outcome.status === 'rejected') { + console.error({error: outcome.reason}); + } + } + }) + .catch((error: unknown) => { + console.error({error}); + }); +}; -export type SubscriptionError = Meteor.Error; +const _callbacks = (id: K, init: V | undefined) => + new Map(init === undefined ? [] : [[id, init]]); -export type SubscriptionCallbacks = { - onReady?: () => void; - onStop?: (error: SubscriptionError) => void; +const _safeMaybePromise = (maybePromise: unknown) => { + Promise.resolve(maybePromise).catch((error: unknown) => { + console.error({error}); + }); }; const subscribe = ( - {name}: Publication, - ...args: [...A, SubscriptionCallbacks?] 
-) => Meteor.subscribe(name, ...args); + {publication: {name}, args, callbacks}: Subscription, + enabled = true, +): SubscriptionRegistryEntry => { + const id = subscriptionId(); + _safeMaybePromise(callbacks?.onSubscribe?.(id)); + const key = identify(name, args); + if (enabled) { + const entry = get(key); + if (entry === undefined) { + const onReady = _callbacks(id, callbacks?.onReady); + const onStop = _callbacks(id, callbacks?.onStop); + const handle = Meteor.subscribe(name, ...args, { + onReady() { + _runCallbacks(async ([id, callback]) => callback(id), onReady); + onReady.clear(); + }, + onStop(error: SubscriptionError) { + set(key, undefined); + _runCallbacks(async ([id, callback]) => callback(id, error), onStop); + onStop.clear(); + }, + }); + const internals = subscriptionInternals(handle); + const ready = internals.ready; + if (!ready) _safeMaybePromise(callbacks?.onLoading?.(id)); + set(key, {handle, internals, refCount: 1, onReady, onStop}); + } else { + ++entry.refCount; + entry.internals.inactive = false; + const ready = entry.internals.ready; + if (!ready) _safeMaybePromise(callbacks?.onLoading?.(id)); + if (callbacks?.onReady !== undefined) { + if (ready) { + _safeMaybePromise(callbacks.onReady(id)); + } else { + entry.onReady.set(id, callbacks.onReady); + } + } + + if (callbacks?.onStop !== undefined) + entry.onStop.set(id, callbacks.onStop); + } + } else { + _safeMaybePromise(callbacks?.onLoading?.(id)); + } + + return {id, key, onReady: callbacks?.onReady, onStop: callbacks?.onStop}; +}; export default subscribe; diff --git a/imports/api/publication/subscriptionId.ts b/imports/api/publication/subscriptionId.ts new file mode 100644 index 000000000..7301d477b --- /dev/null +++ b/imports/api/publication/subscriptionId.ts @@ -0,0 +1,5 @@ +export type SubscriptionId = { + __brand: 'SUBSCRIPTION_ID'; +}; + +export const subscriptionId = () => ({} as SubscriptionId); diff --git a/imports/api/publication/subscriptionInternals.ts 
b/imports/api/publication/subscriptionInternals.ts new file mode 100644 index 000000000..a6f4e986e --- /dev/null +++ b/imports/api/publication/subscriptionInternals.ts @@ -0,0 +1,38 @@ +import assert from 'assert'; + +import {Meteor} from 'meteor/meteor'; + +let prev = ''; + +export const debugMeteorSubscriptions = () => { + const subscriptions = Meteor.connection._subscriptions; + const subs = Object.values(subscriptions).filter(Boolean); + const next = JSON.stringify( + { + subCount: subs.length, + subs: subs.map(({name, params}) => ({name, params})), + }, + undefined, + 2, + ); + + if (next !== prev) { + console.debug(next); + prev = next; + } +}; + +const subscriptionInternals = ( + handle: Meteor.SubscriptionHandle, +): Meteor.InternalSubscriptionHandle => { + const id = handle.subscriptionId; + const subscriptions = Meteor.connection._subscriptions; + const internals = subscriptions[id]; + assert( + internals !== undefined, + `Cannot find internals for subscription ${id}.`, + ); + return internals; +}; + +export default subscriptionInternals; diff --git a/imports/api/publication/subscriptionRegistry.ts b/imports/api/publication/subscriptionRegistry.ts new file mode 100644 index 000000000..85ca0ffb3 --- /dev/null +++ b/imports/api/publication/subscriptionRegistry.ts @@ -0,0 +1,35 @@ +import {Meteor} from 'meteor/meteor'; +import {EJSON} from 'meteor/ejson'; + +import type Args from '../Args'; + +import type SubscriptionError from './SubscriptionError'; +import {type SubscriptionId} from './subscriptionId'; + +type MetaHandle = { + handle: Meteor.SubscriptionHandle; + internals: Meteor.InternalSubscriptionHandle; + refCount: number; + onReady: Map Promise | void>; + onStop: Map< + SubscriptionId, + (id: SubscriptionId, error: SubscriptionError) => Promise | void + >; +}; + +const _registry = new Map(); + +export const identify = (name: string, args: A) => + EJSON.stringify( + {userId: Meteor.userId(), name, args}, + {indent: '', canonical: false}, + ); + 
+export const get = (key: string) => _registry.get(key); +export const set = (key: string, value: MetaHandle | undefined) => { + if (value === undefined) { + _registry.delete(key); + } else { + _registry.set(key, value); + } +}; diff --git a/imports/api/publication/useCursor.ts b/imports/api/publication/useCursor.ts index ef9bf8585..b473b762c 100644 --- a/imports/api/publication/useCursor.ts +++ b/imports/api/publication/useCursor.ts @@ -1,14 +1,15 @@ import {type DependencyList} from 'react'; -import {useFind} from 'meteor/react-meteor-data'; + +import type Document from '../Document'; import type Cursor from './Cursor'; +import useFind from './useFind'; -const useCursor = ( - factory: () => Cursor | undefined | null, +const useCursor = ( + factory: () => Cursor | null, deps: DependencyList, -): U[] => { - // @ts-expect-error useFind types are wrong or incomplete. - return useFind(factory, deps) ?? []; +): {loading: boolean; results: U[]} => { + return useFind(factory, deps); }; export default useCursor; diff --git a/imports/api/publication/useFind.ts b/imports/api/publication/useFind.ts new file mode 100644 index 000000000..f4bd1f482 --- /dev/null +++ b/imports/api/publication/useFind.ts @@ -0,0 +1,125 @@ +import assert from 'assert'; + +import {type Mongo} from 'meteor/mongo'; +import { + useMemo, + useEffect, + type DependencyList, + useState, + type SetStateAction, + type Dispatch, +} from 'react'; + +import type Document from '../Document'; + +export const findClientEffect = ( + cursor: Mongo.Cursor | null, + setLoading: Dispatch>, + setResults: Dispatch>, +) => { + if (cursor === null) { + setLoading(false); + return; + } + + setLoading(true); + + let initializing = true; + let stopped = false; + const init: U[] = []; + let handle: Mongo.ObserveHandle; + + cursor + .observeAsync({ + addedAt(document, atIndex, _before) { + assert(!stopped, 'addedAt called after stop'); + if (initializing) { + assert( + atIndex === init.length, + `incorrect atIndex during 
init: ${atIndex} !== ${init.length}`, + ); + init.push(document); + } else { + setResults((results) => [ + ...results.slice(0, atIndex), + document, + ...results.slice(atIndex), + ]); + } + }, + + changedAt(newDocument, _oldDocument, atIndex) { + assert(!initializing, `changedAt called during init`); + assert(!stopped, 'changedAt called after stop'); + setResults((data) => [ + ...data.slice(0, atIndex), + newDocument, + ...data.slice(atIndex + 1), + ]); + }, + + removedAt(_oldDocument, atIndex) { + assert(!initializing, `removedAt called during init`); + assert(!stopped, 'removedAt called after stop'); + setResults((data) => [ + ...data.slice(0, atIndex), + ...data.slice(atIndex + 1), + ]); + }, + + movedTo(_document, fromIndex, toIndex, _before) { + assert(!initializing, `movedTo called during init`); + assert(!stopped, 'movedTo called after stop'); + setResults((data) => { + const doc = data[fromIndex]!; + const copy = [ + ...data.slice(0, fromIndex), + ...data.slice(fromIndex + 1), + ]; + copy.splice(toIndex, 0, doc); + return copy; + }); + }, + }) + .then((_handle) => { + initializing = false; + if (stopped) { + _handle.stop(); + return; + } + + setResults(init); + setLoading(false); + handle = _handle; + }) + .catch((error: unknown) => { + setLoading(false); + console.error({error}); + }); + + return () => { + stopped = true; + if (handle !== undefined) handle.stop(); + }; +}; + +const useFindClient = ( + factory: () => Mongo.Cursor | null, + deps: DependencyList = [], +) => { + const cursor = useMemo(factory, deps); + + const [loading, setLoading] = useState(cursor !== null); + const [results, setResults] = useState([]); + + useEffect( + () => findClientEffect(cursor, setLoading, setResults), + [cursor, setLoading, setResults], + ); + + return {loading, results}; +}; + +const useFind = useFindClient; + +export default useFind; diff --git a/imports/api/publication/useItem.ts b/imports/api/publication/useItem.ts index 6ce2df4bd..6794de0d2 100644 --- 
a/imports/api/publication/useItem.ts +++ b/imports/api/publication/useItem.ts @@ -1,3 +1,5 @@ +import assert from 'assert'; + import {type DependencyList} from 'react'; import type Collection from '../Collection'; @@ -5,21 +7,26 @@ import type Document from '../Document'; import type Selector from '../query/Selector'; import type Options from '../query/Options'; -import useReactive from './useReactive'; -import findOneSync from './findOneSync'; +import useCursor from './useCursor'; const useItem = ( collection: Collection | null, selector: Selector, options: Options | undefined, deps: DependencyList, -): U | undefined => - useReactive( +) => { + const {loading, results: items} = useCursor( () => collection === null - ? undefined - : findOneSync(collection, selector, options), + ? null + : collection.find(selector, {...options, limit: 1}), deps, ); + assert(items.length <= 1, `useItem got items.length === ${items.length}`); + const result = items[0]; + const found = Boolean(result); + return {loading, found, result}; +}; + export default useItem; diff --git a/imports/api/publication/useQuery.ts b/imports/api/publication/useQuery.ts new file mode 100644 index 000000000..2b7d82a28 --- /dev/null +++ b/imports/api/publication/useQuery.ts @@ -0,0 +1,53 @@ +import {useMemo, useState, type DependencyList} from 'react'; + +import type Document from '../Document'; + +import type Args from '../Args'; + +import type Cursor from './Cursor'; + +import {subscription} from './Subscription'; +import {findClientEffect} from './useFind'; +import useSubscriptionEffect from './useSubscriptionEffect'; +import type PublicationEndpoint from './PublicationEndpoint'; + +const useQuery = ( + publication: PublicationEndpoint, + args: A, + factory: () => Cursor, + deps: DependencyList, + enabled = true, +) => { + const cursor = useMemo(factory, deps); + + const [loading, setLoading] = useState(enabled); + const [results, setResults] = useState([]); + + let cleanup: (() => void) | undefined; + 
+ useSubscriptionEffect( + subscription(publication, args, { + onLoading() { + setLoading(true); + }, + + onReady() { + cleanup = findClientEffect(cursor, setLoading, setResults); + }, + + onStop() { + setLoading(!enabled); + cleanup?.(); + }, + }), + [enabled, cursor, setLoading, setResults], + enabled, + ); + + return { + loading, + results, + }; +}; + +export default useQuery; diff --git a/imports/api/publication/useSubscription.tests.ts b/imports/api/publication/useSubscription.tests.ts new file mode 100644 index 000000000..9edd9d6c2 --- /dev/null +++ b/imports/api/publication/useSubscription.tests.ts @@ -0,0 +1,29 @@ +import {assert} from 'chai'; + +import {renderHook, waitFor} from '../../_test/react'; + +import {client} from '../../_test/fixtures'; + +import publication from './consultations/find'; +import useSubscription from './useSubscription'; + +client(__filename, () => { + it('should load immediately on second call', async () => { + const query = {filter: {}}; + const {result: resultA} = renderHook(() => + useSubscription(publication, [query]), + ); + + assert(resultA.current()); + + await waitFor(() => { + assert(!resultA.current()); + }); + + const {result: resultB} = renderHook(() => + useSubscription(publication, [query]), + ); + + assert(!resultB.current()); + }); +}); diff --git a/imports/api/publication/useSubscription.ts b/imports/api/publication/useSubscription.ts index 2fee139a2..bd68b4974 100644 --- a/imports/api/publication/useSubscription.ts +++ b/imports/api/publication/useSubscription.ts @@ -1,37 +1,50 @@ import {Meteor} from 'meteor/meteor'; -import {Tracker} from 'meteor/tracker'; -import {useState, useEffect} from 'react'; +import {useState, useRef, useMemo} from 'react'; import useChanged from '../../ui/hooks/useChanged'; import type Args from '../Args'; -import subscribe from './subscribe'; -import type Publication from './Publication'; +import type PublicationEndpoint from './PublicationEndpoint'; +import 
{useSubscriptionEffectClient} from './useSubscriptionEffect'; +import {subscription} from './Subscription'; +import {type SubscriptionId} from './subscriptionId'; const useSubscriptionClient = ( - publication?: Publication | null, - ...args: A + publication: PublicationEndpoint, + args: A, + enabled = true, ): (() => boolean) => { const [loading, setLoading] = useState(true); + const handleRef = useRef(null); - const deps = [publication, JSON.stringify(args)]; + const deps = [handleRef, setLoading, publication, JSON.stringify(args)]; - useEffect(() => { - const computation = Tracker.nonreactive(() => - Tracker.autorun(() => { - const ready = !publication || subscribe(publication, ...args).ready(); - setLoading(!ready); - }), - ); - - // Stop the computation on when publication changes or unmount. - return () => { - computation.stop(); + const sub = useMemo(() => { + const setNotLoading = (id: SubscriptionId) => { + setLoading((prev) => (handleRef.current === id ? false : prev)); }; + + return subscription(publication, args, { + onSubscribe(id: SubscriptionId) { + handleRef.current = id; + }, + onLoading(id: SubscriptionId) { + // NOTE `setLoading(true)` is called: + // - on first execution, + // - on subsequent executions, if the subscription is not ready yet + // (e.g. double-render in strict mode in development, concurrent mode) + // - when restarting a stopped or errored subscription + setLoading((prev) => (handleRef.current === id ? true : prev)); + }, + onReady: setNotLoading, + onStop: setNotLoading, + }); }, deps); - const effectWillTrigger = useChanged(deps); + useSubscriptionEffectClient(sub, [sub, enabled], enabled); + + const effectWillTrigger = useChanged([sub, enabled]); const userFacingLoadingState = effectWillTrigger || loading; return () => userFacingLoadingState; @@ -40,9 +53,11 @@ const useSubscriptionClient = ( const useSubscriptionServer = ( // @ts-expect-error Those parameters are not used. 
- publication?: Publication | null, + publication: PublicationEndpoint | null, + // @ts-expect-error Those parameters are not used. + args: A, // @ts-expect-error Those parameters are not used. - ...args: A + enabled = true, ): (() => boolean) => () => false; diff --git a/imports/api/publication/useSubscriptionEffect.ts b/imports/api/publication/useSubscriptionEffect.ts new file mode 100644 index 000000000..a9095dd50 --- /dev/null +++ b/imports/api/publication/useSubscriptionEffect.ts @@ -0,0 +1,36 @@ +import {Meteor} from 'meteor/meteor'; +import {type DependencyList, useEffect} from 'react'; + +import type Args from '../Args'; + +import stopSubscription from './stopSubscription'; +import subscribe from './subscribe'; +import {type Subscription} from './Subscription'; + +export const useSubscriptionEffectClient = ( + subscription: Subscription, + deps: DependencyList, + enabled = true, +): void => { + useEffect(() => { + const handle = subscribe(subscription, enabled); + + return enabled + ? () => { + stopSubscription(handle); + } + : undefined; + }, deps); +}; + +const useSubscriptionEffectServer = ( + // @ts-expect-error Those parameters are not used. + subscription: Subscription | null, + // eslint-disable-next-line @typescript-eslint/no-empty-function +): void => {}; + +const useSubscriptionEffect = Meteor.isServer + ? 
useSubscriptionEffectServer + : useSubscriptionEffectClient; + +export default useSubscriptionEffect; diff --git a/imports/api/query/diffSequences.tests.ts b/imports/api/query/diffSequences.tests.ts new file mode 100644 index 000000000..ade83006f --- /dev/null +++ b/imports/api/query/diffSequences.tests.ts @@ -0,0 +1,187 @@ +import {assert} from 'chai'; + +import {filter} from '@iterable-iterator/filter'; +import {list} from '@iterable-iterator/list'; + +import {isomorphic} from '../../_test/fixtures'; + +import type ObserveSequenceChangesCallbacks from '../ObserveSequenceChangesCallbacks'; + +import {diffSequences} from './diffSequences'; + +type AddedBeforeCall = [ + 'addedBefore', + K, + K | null, + Partial>, +]; +type MovedBeforeCall = ['movedBefore', K, K | null]; +type ChangedCall = ['changed', K, Partial>]; +type RemovedCall = ['removed', K]; + +export type Call = + | AddedBeforeCall + | MovedBeforeCall + | ChangedCall + | RemovedCall; + +type MockObserver = { + calls: { + all: () => Array>; + addedBefore: () => Array>; + movedBefore: () => Array>; + changed: () => Array>; + removed: () => Array>; + }; +}; + +export type MockedDocument = {_id: string; satellite: string}; + +export const mockedDocument = ( + _id: string, + satellite: string = _id, +): MockedDocument => ({_id, satellite}); + +export const makeMockedObserver = (): MockObserver & + ObserveSequenceChangesCallbacks => { + const _calls: Array> = []; + return { + calls: { + all: () => list(_calls), + addedBefore: () => + list(filter(([method]) => method === 'addedBefore', _calls)), + movedBefore: () => + list(filter(([method]) => method === 'movedBefore', _calls)), + changed: () => list(filter(([method]) => method === 'changed', _calls)), + removed: () => list(filter(([method]) => method === 'removed', _calls)), + }, + addedBefore( + id: string, + fields: Partial>, + before: string | null, + ) { + _calls.push(['addedBefore', id, before, fields]); + }, + changed(id: string, fields: Partial>) { + 
_calls.push(['changed', id, fields]); + }, + movedBefore(id: string, before: string | null) { + _calls.push(['movedBefore', id, before]); + }, + removed(id: string) { + _calls.push(['removed', id]); + }, + }; +}; + +isomorphic(__filename, () => { + it('works with no changes', async () => { + const mockedObserver = makeMockedObserver(); + await diffSequences( + [mockedDocument('a'), mockedDocument('b')], + [mockedDocument('a'), mockedDocument('b')], + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), []); + }); + + it('works with only added items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSequences( + [], + [mockedDocument('a'), mockedDocument('b')], + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['addedBefore', 'a', null, {satellite: 'a'}], + ['addedBefore', 'b', null, {satellite: 'b'}], + ]); + }); + + it('works with only removed items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSequences( + [mockedDocument('a'), mockedDocument('b')], + [], + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['removed', 'a'], + ['removed', 'b'], + ]); + }); + + it('works with only changed items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSequences( + [mockedDocument('a'), mockedDocument('b')], + [mockedDocument('a', 'A'), mockedDocument('b', 'B')], + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['changed', 'a', {satellite: 'A'}], + ['changed', 'b', {satellite: 'B'}], + ]); + }); + + it('works with only moved items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSequences( + [mockedDocument('a'), mockedDocument('b')], + [mockedDocument('b'), mockedDocument('a')], + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), [['movedBefore', 'b', 'a']]); + }); + + it('works with projection function', async () => { + const mockedObserver = 
makeMockedObserver(); + await diffSequences( + [mockedDocument('a'), mockedDocument('b')], + [mockedDocument('a', 'A'), mockedDocument('b', 'B')], + mockedObserver, + () => ({}), + ); + + assert.deepEqual(mockedObserver.calls.all(), []); + }); + + it('works in complex case (1)', async () => { + const mockedObserver = makeMockedObserver(); + await diffSequences( + [mockedDocument('a'), mockedDocument('b'), mockedDocument('c')], + [mockedDocument('c'), mockedDocument('b', 'x'), mockedDocument('d')], + mockedObserver, + ({satellite}) => ({satellite: satellite.toUpperCase()}), + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['removed', 'a'], + ['movedBefore', 'c', 'b'], + ['changed', 'b', {satellite: 'X'}], + ['addedBefore', 'd', null, {satellite: 'D'}], + ]); + }); + + it('works in complex case (2)', async () => { + const mockedObserver = makeMockedObserver(); + await diffSequences( + [mockedDocument('a'), mockedDocument('b'), mockedDocument('c')], + [mockedDocument('c'), mockedDocument('d'), mockedDocument('b', 'x')], + mockedObserver, + ({satellite}) => ({satellite: satellite.toUpperCase()}), + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['removed', 'a'], + ['movedBefore', 'c', 'b'], + ['addedBefore', 'd', 'b', {satellite: 'D'}], + ['changed', 'b', {satellite: 'X'}], + ]); + }); +}); diff --git a/imports/api/query/diffSequences.ts b/imports/api/query/diffSequences.ts new file mode 100644 index 000000000..65c4c856d --- /dev/null +++ b/imports/api/query/diffSequences.ts @@ -0,0 +1,146 @@ +import assert from 'assert'; + +import {AsyncQueue} from '../../lib/async/queue'; + +import type Document from '../Document'; + +import type ObserveSequenceChangesCallbacks from '../ObserveSequenceChangesCallbacks'; +import {documentDiff, isDiffEmpty} from '../update'; +import {lis} from '../../lib/lcs/lis'; + +const identity = (x: T) => x; +export type Project = (document: T) => Partial; + +export const diffSequences = async ( + prev: T[], + next: T[], + 
observer: ObserveSequenceChangesCallbacks, + projection: Project = identity, +): Promise => { + const queue = new AsyncQueue(); + _diffSequences(queue, prev, next, observer, projection); + return queue.drain(); +}; + +const _diffSequences = ( + queue: AsyncQueue, + prev: T[], + next: T[], + { + addedBefore, + removed, + movedBefore, + changed, + }: ObserveSequenceChangesCallbacks, + projection: Project, +) => { + // NOTE: First, we handle all removed items. + if (removed !== undefined) { + const nextIds = new Set(); + for (const {_id} of next) { + assert(!nextIds.has(_id), `Duplicate _id ${_id} in next`); + nextIds.add(_id); + } + + for (const {_id} of prev) { + if (!nextIds.has(_id)) { + // NOTE: Item is in prev, but not in next. + queue.enqueue(async () => removed(_id)); + } + } + } + + // NOTE: Then, we handle changed, moved, and added items. + // Suppose we have identified a sequence of "unmoved" items: items that + // have the same relative positions in prev and next. + // All we need to do then is to move the other items, interleave the + // added items with respect to those, and notify of any item changes. + // For this purpose, we will map next items to prev items, so we need an + // index: + const prevIndex = new Map(); + let i = -1; + for (const {_id} of prev) { + assert(!prevIndex.has(_id), `Duplicate _id ${_id} in prev`); + prevIndex.set(_id, ++i); + } + + // NOTE: A common subsequence of the item ids in prev and + // next corresponds to a set of elements that can be considered unmoved, + // since their relative positions have not changed. + // Since each item appears only once in each of prev and next, this can be + // simplified to mapping their indices. We can take next as the reference + // identity permutation, and thus this reduces to computing + // an increasing subsequence of the indices in prev of the items in + // next. + // We compute the longest such sequence to minimize resource consumption. 
+ const intersection = next + .map(({_id}: T, index: number) => ({_id, index})) + .filter(({_id}) => prevIndex.has(_id)); + + const unmoved = lis( + intersection.length, + // NOTE: Sequence of prev indices of next items. + Int32Array.from(intersection, ({_id}) => prevIndex.get(_id)!), + ).map((i: number) => intersection[i]!.index); + + // NOTE: The last group is anchored at the end. + const groups = [...unmoved, next.length]; + + const processChanges = + changed === undefined + ? undefined + : (prevItem: T, nextItem: T) => { + const changes = documentDiff( + projection(prevItem), + projection(nextItem), + ); + if (!isDiffEmpty(changes)) { + queue.enqueue(async () => + changed( + nextItem._id, + Object.fromEntries( + Object.entries(changes).map(([key, value]) => [ + key, + value === null ? undefined : value, + ]), + ) as Partial, + ), + ); + } + }; + + // NOTE: Iterate though each group anchored by an unmoved item at its end. + let start = 0; + for (const end of groups) { + const last = next[end]; + // NOTE: This handles the last group. + const groupId = last !== undefined ? last._id : null; + for (let i = start; i < end; ++i) { + const nextItem = next[i]!; + const oldIndex = prevIndex.get(nextItem._id); + if (oldIndex === undefined) { + // NOTE: Added items. + if (addedBefore !== undefined) { + const {_id, ...fields} = projection(nextItem); + queue.enqueue(async () => addedBefore(nextItem._id, fields, groupId)); + } + } else { + // NOTE: Moved items. + if (processChanges !== undefined) { + processChanges(prev[oldIndex]!, nextItem); + } + + if (movedBefore !== undefined) { + queue.enqueue(async () => movedBefore(nextItem._id, groupId)); + } + } + } + + if (last !== undefined && processChanges !== undefined) { + // NOTE: Last (unmoved) item. 
+ processChanges(prev[prevIndex.get(last._id)!]!, last); + } + + start = end + 1; + } +}; diff --git a/imports/api/query/diffSets.tests.ts b/imports/api/query/diffSets.tests.ts new file mode 100644 index 000000000..042b61ca1 --- /dev/null +++ b/imports/api/query/diffSets.tests.ts @@ -0,0 +1,172 @@ +import {assert} from 'chai'; + +import {filter} from '@iterable-iterator/filter'; +import {list} from '@iterable-iterator/list'; + +import {isomorphic} from '../../_test/fixtures'; + +import type ObserveSetChangesCallbacks from '../ObserveSetChangesCallbacks'; + +import {diffSets} from './diffSets'; + +type AddedCall = ['added', K, Partial>]; +type ChangedCall = ['changed', K, Partial>]; +type RemovedCall = ['removed', K]; + +export type Call = AddedCall | ChangedCall | RemovedCall; + +type MockObserver = { + calls: { + all: () => Array>; + added: () => Array>; + changed: () => Array>; + removed: () => Array>; + }; +}; + +export type MockedDocument = {_id: string; satellite: string}; + +export const mockedDocument = ( + _id: string, + satellite: string = _id, +): [string, MockedDocument] => [_id, {_id, satellite}]; + +export const makeMockedObserver = (): MockObserver & + ObserveSetChangesCallbacks => { + const _calls: Array> = []; + return { + calls: { + all: () => list(_calls), + added: () => list(filter(([method]) => method === 'addedBefore', _calls)), + changed: () => list(filter(([method]) => method === 'changed', _calls)), + removed: () => list(filter(([method]) => method === 'removed', _calls)), + }, + added(id: string, fields: Partial>) { + _calls.push(['added', id, fields]); + }, + changed(id: string, fields: Partial>) { + _calls.push(['changed', id, fields]); + }, + removed(id: string) { + _calls.push(['removed', id]); + }, + }; +}; + +isomorphic(__filename, () => { + it('works with no changes', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map([mockedDocument('a'), mockedDocument('b')]), + new Map([mockedDocument('a'), 
mockedDocument('b')]), + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), []); + }); + + it('works with only added items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map(), + new Map([mockedDocument('a'), mockedDocument('b')]), + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['added', 'a', {satellite: 'a'}], + ['added', 'b', {satellite: 'b'}], + ]); + }); + + it('works with only removed items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map([mockedDocument('a'), mockedDocument('b')]), + new Map([]), + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['removed', 'a'], + ['removed', 'b'], + ]); + }); + + it('works with only changed items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map([mockedDocument('a'), mockedDocument('b')]), + new Map([mockedDocument('a', 'A'), mockedDocument('b', 'B')]), + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['changed', 'a', {satellite: 'A'}], + ['changed', 'b', {satellite: 'B'}], + ]); + }); + + it('works with only moved items', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map([mockedDocument('a'), mockedDocument('b')]), + new Map([mockedDocument('b'), mockedDocument('a')]), + mockedObserver, + ); + + assert.deepEqual(mockedObserver.calls.all(), []); + }); + + it('works with projection function', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map([mockedDocument('a'), mockedDocument('b')]), + new Map([mockedDocument('a', 'A'), mockedDocument('b', 'B')]), + mockedObserver, + () => ({}), + ); + + assert.deepEqual(mockedObserver.calls.all(), []); + }); + + it('works in complex case (1)', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map([mockedDocument('a'), mockedDocument('b'), 
mockedDocument('c')]), + new Map([ + mockedDocument('c'), + mockedDocument('b', 'x'), + mockedDocument('d'), + ]), + mockedObserver, + ({satellite}) => ({satellite: satellite.toUpperCase()}), + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['changed', 'b', {satellite: 'X'}], + ['added', 'd', {satellite: 'D'}], + ['removed', 'a'], + ]); + }); + + it('works in complex case (2)', async () => { + const mockedObserver = makeMockedObserver(); + await diffSets( + new Map([mockedDocument('a'), mockedDocument('b'), mockedDocument('c')]), + new Map([ + mockedDocument('c'), + mockedDocument('d'), + mockedDocument('b', 'x'), + ]), + mockedObserver, + ({satellite}) => ({satellite: satellite.toUpperCase()}), + ); + + assert.deepEqual(mockedObserver.calls.all(), [ + ['added', 'd', {satellite: 'D'}], + ['changed', 'b', {satellite: 'X'}], + ['removed', 'a'], + ]); + }); +}); diff --git a/imports/api/query/diffSets.ts b/imports/api/query/diffSets.ts new file mode 100644 index 000000000..0d11aab4a --- /dev/null +++ b/imports/api/query/diffSets.ts @@ -0,0 +1,67 @@ +import {AsyncQueue} from '../../lib/async/queue'; + +import type Document from '../Document'; + +import type ObserveSetChangesCallbacks from '../ObserveSetChangesCallbacks'; +import {documentDiff, isDiffEmpty} from '../update'; + +const identity = (x: T) => x; +export type Project = (document: T) => Partial; + +export const diffSets = async ( + prev: Map, + next: Map, + observer: ObserveSetChangesCallbacks, + projection: Project = identity, +): Promise => { + const queue = new AsyncQueue(); + _diffSets(queue, prev, next, observer, projection); + return queue.drain(); +}; + +const _diffSets = ( + queue: AsyncQueue, + prev: Map, + next: Map, + {added, changed, removed}: ObserveSetChangesCallbacks, + projection: Project, +) => { + if (changed !== undefined || added !== undefined) { + for (const [id, nextItem] of next) { + const prevItem = prev.get(id); + if (prevItem !== undefined) { + if (changed !== undefined) { + 
const changes = documentDiff( + projection(prevItem), + projection(nextItem), + ); + if (!isDiffEmpty(changes)) { + // TODO: DRY. + queue.enqueue(async () => + changed( + id, + Object.fromEntries( + Object.entries(changes).map(([key, value]) => [ + key, + value === null ? undefined : value, + ]), + ) as Partial, + ), + ); + } + } + } else if (added !== undefined) { + const {_id, ...fields} = projection(nextItem); + queue.enqueue(async () => added(nextItem._id, fields)); + } + } + } + + if (removed) { + for (const id of prev.keys()) { + if (!next.has(id)) { + queue.enqueue(async () => removed(id)); + } + } + } +}; diff --git a/imports/api/query/observeSequenceChanges.tests.ts b/imports/api/query/observeSequenceChanges.tests.ts new file mode 100644 index 000000000..859e3639d --- /dev/null +++ b/imports/api/query/observeSequenceChanges.tests.ts @@ -0,0 +1,109 @@ +import {assert} from 'chai'; + +import {server, waitFor, withMockCollection} from '../../_test/fixtures'; + +import { + type Call, + makeMockedObserver, + type MockedDocument, +} from './diffSequences.tests'; +import observeSequenceChanges from './observeSequenceChanges'; + +server(__filename, () => { + it( + 'works', + withMockCollection>(async (documents) => { + const observer = makeMockedObserver(); + + const expected: Array> = []; + + const assertExpected = async () => { + await waitFor(() => observer.calls.all().length === expected.length); + + assert.deepEqual(observer.calls.all(), expected); + }; + + const doc1 = await documents.insertAsync({satellite: 'x'}); + await assertExpected(); + + const handle = await observeSequenceChanges(documents, {}, {}, observer); + try { + expected.push(['addedBefore', doc1, null, {satellite: 'x'}]); + await assertExpected(); + + const doc2 = await documents.insertAsync({satellite: 'z'}); + expected.push(['addedBefore', doc2, null, {satellite: 'z'}]); + await assertExpected(); + + const doc3 = await documents.insertAsync({satellite: 'y'}); + expected.push(['addedBefore', 
doc3, null, {satellite: 'y'}]); + await assertExpected(); + + await documents.removeAsync(doc2); + expected.push(['removed', doc2]); + await assertExpected(); + + await documents.updateAsync(doc1, {satellite: 'X'}); + expected.push(['changed', doc1, {satellite: 'X'}]); + await assertExpected(); + } finally { + await handle.emit('stop', undefined); + } + }), + ); + + it( + 'works with sort', + withMockCollection>(async (documents) => { + const observer = makeMockedObserver(); + + const expected: Array> = []; + + const assertExpected = async () => { + await waitFor(() => observer.calls.all().length === expected.length); + + assert.deepEqual(observer.calls.all(), expected); + }; + + const doc1 = await documents.insertAsync({satellite: 'a'}); + await assertExpected(); + + const handle = await observeSequenceChanges( + documents, + {}, + { + sort: { + satellite: 1, + }, + }, + observer, + ); + + try { + expected.push(['addedBefore', doc1, null, {satellite: 'a'}]); + await assertExpected(); + + const doc2 = await documents.insertAsync({satellite: 'c'}); + expected.push(['addedBefore', doc2, null, {satellite: 'c'}]); + await assertExpected(); + + const doc3 = await documents.insertAsync({satellite: 'b'}); + expected.push(['addedBefore', doc3, doc2, {satellite: 'b'}]); + await assertExpected(); + + await documents.removeAsync(doc2); + expected.push(['removed', doc2]); + await assertExpected(); + + await documents.updateAsync(doc1, {satellite: 'd'}); + expected.push( + ['movedBefore', doc3, doc1], + ['changed', doc1, {satellite: 'd'}], + ); + await assertExpected(); + } finally { + await handle.emit('stop', undefined); + } + }), + ); +}); diff --git a/imports/api/query/observeSequenceChanges.ts b/imports/api/query/observeSequenceChanges.ts new file mode 100644 index 000000000..4584fc49b --- /dev/null +++ b/imports/api/query/observeSequenceChanges.ts @@ -0,0 +1,46 @@ +import {AsyncLock} from '../../lib/async/lock'; +import type Collection from '../Collection'; +import type 
Document from '../Document'; + +import type ObserveSequenceChangesCallbacks from '../ObserveSequenceChangesCallbacks'; +import {type Options} from '../transaction/TransactionDriver'; + +import {diffSequences, type Project} from './diffSequences'; + +import type Filter from './Filter'; +import watch from './watch'; + +const _makeOnChange = ( + observer: ObserveSequenceChangesCallbacks, + projection?: Project, +) => { + let previous: T[] = []; + + // TODO: Use an async queue to be able to cancel previous tasks. + const lock = new AsyncLock(); + + return async (next: T[]) => { + const handle = await lock.acquire(); + try { + await diffSequences(previous, next, observer, projection); + previous = next; + } finally { + lock.release(handle); + } + }; +}; + +const observeSequenceChanges = async ( + collection: Collection, + filter: Filter, + options: Options, + observer: ObserveSequenceChangesCallbacks, + projection?: Project, +) => { + const handle = await watch(collection, filter, options); + handle.on('change', _makeOnChange(observer, projection)); + await handle.emit('start'); + return handle; +}; + +export default observeSequenceChanges; diff --git a/imports/api/query/observeSetChanges.tests.ts b/imports/api/query/observeSetChanges.tests.ts new file mode 100644 index 000000000..670c7df58 --- /dev/null +++ b/imports/api/query/observeSetChanges.tests.ts @@ -0,0 +1,55 @@ +import {assert} from 'chai'; + +import {server, waitFor, withMockCollection} from '../../_test/fixtures'; + +import { + type Call, + makeMockedObserver, + type MockedDocument, +} from './diffSets.tests'; +import observeSetChanges from './observeSetChanges'; + +server(__filename, () => { + it( + 'works', + withMockCollection>(async (documents) => { + const observer = makeMockedObserver(); + + const expected: Array> = []; + + const assertExpected = async () => { + await waitFor(() => observer.calls.all().length === expected.length); + + assert.deepEqual(observer.calls.all(), expected); + }; + + const 
doc1 = await documents.insertAsync({satellite: 'x'}); + await assertExpected(); + + const handle = await observeSetChanges(documents, {}, {}, observer); + + try { + expected.push(['added', doc1, {satellite: 'x'}]); + await assertExpected(); + + const doc2 = await documents.insertAsync({satellite: 'z'}); + expected.push(['added', doc2, {satellite: 'z'}]); + await assertExpected(); + + const doc3 = await documents.insertAsync({satellite: 'y'}); + expected.push(['added', doc3, {satellite: 'y'}]); + await assertExpected(); + + await documents.removeAsync(doc2); + expected.push(['removed', doc2]); + await assertExpected(); + + await documents.updateAsync(doc1, {satellite: 'X'}); + expected.push(['changed', doc1, {satellite: 'X'}]); + await assertExpected(); + } finally { + await handle.emit('stop', undefined); + } + }), + ); +}); diff --git a/imports/api/query/observeSetChanges.ts b/imports/api/query/observeSetChanges.ts new file mode 100644 index 000000000..5f88ea7e7 --- /dev/null +++ b/imports/api/query/observeSetChanges.ts @@ -0,0 +1,51 @@ +import {AsyncLock} from '../../lib/async/lock'; +import type Collection from '../Collection'; +import type Document from '../Document'; + +import type ObserveSetChangesCallbacks from '../ObserveSetChangesCallbacks'; +import {type Options} from '../transaction/TransactionDriver'; + +import {type Project, diffSets} from './diffSets'; + +import type Filter from './Filter'; +import watch from './watch'; + +const _toSet = (items: T[]): Map => + new Map(items.map((item) => [item._id, item])); + +const _makeOnChange = ( + observer: ObserveSetChangesCallbacks, + projection?: Project, +) => { + let previous = _toSet([]); + + // TODO: Use an async queue to be able to cancel previous tasks. 
+ const lock = new AsyncLock(); + + return async (items: T[]) => { + const next = _toSet(items); + const handle = await lock.acquire(); + + try { + await diffSets(previous, next, observer, projection); + previous = next; + } finally { + lock.release(handle); + } + }; +}; + +const observeSetChanges = async ( + collection: Collection, + filter: Filter, + options: Options, + observer: ObserveSetChangesCallbacks, + projection?: Project, +) => { + const handle = await watch(collection, filter, options); + handle.on('change', _makeOnChange(observer, projection)); + await handle.emit('start'); + return handle; +}; + +export default observeSetChanges; diff --git a/imports/api/query/watch.ts b/imports/api/query/watch.ts new file mode 100644 index 000000000..7ff9c6ebe --- /dev/null +++ b/imports/api/query/watch.ts @@ -0,0 +1,402 @@ +import assert from 'assert'; + +import {type Filter as MongoFilter} from 'mongodb'; + +import { + type ClientSessionOptions, + type ChangeStream, + type ChangeStreamOptions, + type Timestamp, +} from 'mongodb'; + +import debounce from 'debounce'; + +import {isObject} from '@functional-abstraction/type'; + +import type Collection from '../Collection'; +import type Document from '../Document'; + +import {type Options} from '../transaction/TransactionDriver'; +import withSession from '../transaction/withSession'; + +import {EventEmitter, eventEmitter} from '../../lib/events'; + +import {AsyncQueue} from '../../lib/async/queue'; + +import type Filter from './Filter'; + +const _watchInit = async ( + collection: Collection, + filter: Filter, + options: Options, + sessionOptions?: ClientSessionOptions, +) => + withSession(async (session) => { + // TODO Reuse session for subsequent polling, see: + // https://www.mongodb.com/docs/manual/core/read-isolation-consistency-recency/#std-label-causal-consistency-examples + // https://vkontech.com/causal-consistency-guarantees-in-mongodb-lamport-clock-cluster-time-operation-time-and-causally-consistent-sessions/ 
+ // I guess one should use advanceClusterTime using the clusterTime + // returned by the last change stream event + const init = await collection + .rawCollection() + .find(filter as MongoFilter, {...options, session}) + .toArray(); + const {operationTime} = session; + assert(operationTime !== undefined, 'operationTime is undefined'); + return {init, operationTime}; + }, sessionOptions); + +const _filterToFullDocumentFilter = ( + operationKey: string, + filter: Filter, +) => + Object.fromEntries( + Object.entries(filter).map(([key, value]) => [ + key.startsWith('$') ? key : `${operationKey}.${key}`, + isObject(value) + ? _filterToFullDocumentFilter(operationKey, value as Filter) + : value, + ]), + ); + +type Match = { + $match: {}; +}; + +type Pipeline = { + pipeline: Match[]; + isSuperset: boolean; +}; + +const _fullDocumentMissingFilter = {fullDocument: undefined}; +const _fullDocumentBeforeChangeMissingFilter = { + fullDocumentBeforeChange: undefined, +}; + +const _filterToMatch = (filter: Filter): Match => ({ + $match: { + $or: [ + // TODO Correctly configure collections to define fullDocument* + // Currently not possible because this requires top-level await + // Should be doable in Meteor 3.0 + // SEE + // https://www.mongodb.com/docs/manual/reference/command/collMod/#std-label-collMod-change-stream-pre-and-post-images + _filterToFullDocumentFilter('fullDocument', filter), + _filterToFullDocumentFilter('fullDocumentBeforeChange', filter), + _fullDocumentMissingFilter, + _fullDocumentBeforeChangeMissingFilter, + ], + }, +}); + +const _filterToPipeline = ({$text, ...rest}: Filter): Pipeline => { + return { + pipeline: [_filterToMatch(rest as Filter)], + // TODO Any occurrence of $text should yield this, not just top-level. + isSuperset: $text !== undefined, + }; +}; + +const _noFullDocumentMatch = (): Match => ({ + $match: { + // NOTE This matches everything if pre- or post- images are not + // configured, which is very inefficient. 
+ $or: [_fullDocumentMissingFilter, _fullDocumentBeforeChangeMissingFilter], + }, +}); + +// @ts-expect-error TODO +const _noFullDocumentPipeline = (): Pipeline => { + return { + pipeline: [_noFullDocumentMatch()], + isSuperset: true, + }; +}; + +const _optionsToPipeline = (options: Options) => + options.project === undefined ? [] : [{$project: options.project}]; + +let _watchStreamCount = 0; +let _maxWatchStreamCount = 0; + +export const getWatchStreamCount = () => _watchStreamCount; + +const _watchStream = ( + collection: Collection, + filterPipeline: Match[], + options: Options, + startAtOperationTime: Timestamp, + changeStreamOptions?: ChangeStreamOptions, +) => { + const pipeline = [ + ...filterPipeline, + {$match: {clusterTime: {$gt: startAtOperationTime}}}, + ..._optionsToPipeline(options), + // SEE + // https://www.mongodb.com/docs/manual/reference/operator/aggregation/changeStreamSplitLargeEvent/ + {$changeStreamSplitLargeEvent: {}}, + ]; + + const rawCollection = collection.rawCollection(); + + const stream = rawCollection.watch(pipeline, { + startAtOperationTime, + fullDocument: 'whenAvailable', + fullDocumentBeforeChange: 'whenAvailable', + ...changeStreamOptions, + }); + + let open = true; + ++_watchStreamCount; + if (_watchStreamCount > _maxWatchStreamCount) { + _maxWatchStreamCount = _watchStreamCount; + console.debug({_watchStreamCount}); + } + + stream.on('close', () => { + if (open) { + open = false; + --_watchStreamCount; + } + }); + + return _groupFragments(stream); +}; + +const _groupFragments = (stream: ChangeStream) => { + const emitter = eventEmitter<{entry: ChangeStreamEvent; close: undefined}>(); + + let event: Fragment = { + _id: { + _data: '', + }, + splitEvent: { + fragment: 1, + of: 1, + }, + }; + + stream.on('change', (fragment: ChangeStreamEvent | Fragment) => { + if (fragment.splitEvent === undefined) { + assert(fragment._id._data !== event._id._data); + assert(event.splitEvent.fragment === event.splitEvent.of); + event = 
{...fragment, splitEvent: {fragment: 1, of: 1}}; + } else if (fragment.splitEvent.fragment === 1) { + assert(fragment._id._data !== event._id._data); + assert(event.splitEvent.fragment === event.splitEvent.of); + assert(fragment.splitEvent.fragment === 1); + event = fragment; + } else { + assert(fragment._id._data === event._id._data); + assert(fragment.splitEvent.fragment === event.splitEvent.fragment + 1); + assert(fragment.splitEvent.of === event.splitEvent.of); + assert(fragment.splitEvent.fragment <= fragment.splitEvent.of); + event = {...event, ...fragment}; + } + + if (event.splitEvent.fragment !== event.splitEvent.of) return; + + const {splitEvent, ...rest} = event; + + emitter.emitSerial('entry', rest).catch((error: unknown) => { + console.error({error}); + }); + }); + + emitter + .once('close') + .then(async () => stream.close()) + .catch((error: unknown) => { + console.error({error}); + }); + + return emitter; +}; + +const _watchSetup = async ( + collection: Collection, + filter: Filter, + options: Options, + changeStreamOptions?: ChangeStreamOptions, + sessionOptions?: ClientSessionOptions, +) => { + const {init, operationTime} = await _watchInit( + collection, + filter, + options, + sessionOptions, + ); + + const {pipeline: filterPipeline, isSuperset: filterIsSuperset} = + _filterToPipeline(filter); + + const filteredStream = _watchStream( + collection, + filterPipeline, + options, + operationTime, + changeStreamOptions, + ); + + // TODO: Watch multiple streams concurrently, and recycle them. 
+ // const unfilteredStream = _watchStream( + // collection, + // _noFullDocumentPipeline(), + // options, + // operationTime, + // changeStreamOptions, + // ); + + return {init, stream: filteredStream, filterIsSuperset}; +}; + +type Fragment = { + _id: { + _data: string; + }; + splitEvent: { + fragment: number; + of: number; + }; +}; + +type ChangeStreamEvent = { + _id: { + _data: string; + }; + splitEvent?: undefined; +}; + +export type FilteredOplogHandle = EventEmitter<{ + entry: ChangeStreamEvent; + close: undefined; +}>; + +export type WatchHandle = EventEmitter<{ + change: T[]; + start: undefined; + stop?: Error; +}>; + +class Watch extends EventEmitter { + collection: Collection; + filter: Filter; + options: Options; + changeStreamOptions?: ChangeStreamOptions; + sessionOptions?: ClientSessionOptions; + + constructor(collection, filter, options, sessionOptions) { + super(); + this.collection = collection; + this.filter = filter; + this.options = options; + this.sessionOptions = sessionOptions; + } + + get init() { + return []; + } + + stop() { + // TODO: ??? + } +} + +const PIPE_DEBOUNCE = 50; + +const _pipe = async ( + handle: WatchHandle, + emitter: FilteredOplogHandle, + w: Watch, +) => { + const queue = new AsyncQueue(); + + const onEntry = debounce(() => { + if (queue.length > 0) return; + queue.enqueue(async () => { + const {init} = await _watchInit( + w.collection, + w.filter, + w.options, + w.sessionOptions, + ); + + await handle.emitSerial('change', init); + }); + }, PIPE_DEBOUNCE); + + emitter.on('entry', onEntry); + + // TODO: stream.on('stop', ???) 
+}; + +const _watch = async ( + handle: WatchHandle, + collection: Collection, + filter: Filter, + options: Options, + changeStreamOptions?: ChangeStreamOptions, + sessionOptions?: ClientSessionOptions, +) => { + const {init, stream} = await _watchSetup( + collection, + filter, + options, + changeStreamOptions, + sessionOptions, + ); + + await handle.emitSerial('change', init); + + const w = new Watch(collection, filter, options, sessionOptions); + await _pipe(handle, stream, w); + + const stop = async () => stream.emitSerial('close'); + + return stop; +}; + +const watch = async ( + collection: Collection, + filter: Filter, + options: Options, + changeStreamOptions?: ChangeStreamOptions, + sessionOptions?: ClientSessionOptions, +) => { + const handle: WatchHandle = eventEmitter(); + + let stopped = false; + + handle + .once('stop') + .then(() => { + stopped = true; + }) + .catch((error: unknown) => { + console.error({error}); + }); + + handle.on('start', async () => { + if (stopped) return; + const stop = await _watch( + handle, + collection, + filter, + options, + changeStreamOptions, + sessionOptions, + ); + if (stopped) await stop(); + else + handle + .once('stop') + .then(stop) + .catch((error: unknown) => { + console.error({error}); + }); + }); + + return handle; +}; + +export default watch; diff --git a/imports/api/stats.ts b/imports/api/stats.ts index 106021b94..75d84b5ae 100644 --- a/imports/api/stats.ts +++ b/imports/api/stats.ts @@ -1,15 +1,16 @@ -import {type Subscription} from 'meteor/meteor'; - import schema from '../lib/schema'; import type Optional from '../lib/types/Optional'; +import {type Context} from './publication/Context'; + import {countCollection, type PollResult} from './collection/stats'; import define from './publication/define'; import type Collection from './Collection'; import type Document from './Document'; import {AuthenticationLoggedIn} from './Authentication'; -import type Selector from './query/Selector'; import type UserFilter 
from './query/UserFilter'; +import observeSetChanges from './query/observeSetChanges'; +import type Filter from './query/Filter'; export const countPublicationName = ( QueriedCollection: Collection, @@ -53,10 +54,10 @@ const countPublication = ( QueriedCollection: Collection, {fields, discretize, values}: CountOptions, ) => - function (this: Subscription, filter: UserFilter | null) { + async function (this: Context, filter: UserFilter | null) { const collection = countCollection; const key = countPublicationKey(QueriedCollection, {values}, filter); - const selector = {...filter, owner: this.userId} as Selector; + const selector = {...filter, owner: this.userId} as Filter; const options = { fields: Object.fromEntries(fields.map((field) => [field, 1])), }; @@ -96,33 +97,38 @@ const countPublication = ( }; let initializing = true; - const handle = QueriedCollection.find(selector, options).observeChanges({ - added: (_id, object) => { - total += 1; - inc(object); - refs.set(_id, {...object}); - if (!initializing) { + const handle = await observeSetChanges( + QueriedCollection, + selector, + options, + { + added: (_id, object) => { + total += 1; + inc(object); + refs.set(_id, {...object}); + if (!initializing) { + this.changed(collection, key, state()); + } + }, + + changed: (_id, changes) => { + const previousObject = refs.get(_id); + dec(previousObject); + const newObject = {...previousObject, ...changes}; + inc(newObject); + refs.set(_id, newObject); this.changed(collection, key, state()); - } - }, + }, - changed: (_id, changes) => { - const previousObject = refs.get(_id); - dec(previousObject); - const newObject = {...previousObject, ...changes}; - inc(newObject); - refs.set(_id, newObject); - this.changed(collection, key, state()); - }, - - removed: (_id) => { - total -= 1; - const previousObject = refs.get(_id); - dec(previousObject); - refs.delete(_id); - this.changed(collection, key, state()); + removed: (_id) => { + total -= 1; + const previousObject = 
refs.get(_id); + dec(previousObject); + refs.delete(_id); + this.changed(collection, key, state()); + }, }, - }); + ); // Instead, we'll send one `added` message right after `observeChanges` has // returned, and mark the subscription as ready. @@ -133,8 +139,8 @@ const countPublication = ( // Stop observing the cursor when the client unsubscribes. Stopping a // subscription automatically takes care of sending the client any `removed` // messages. - this.onStop(() => { - handle.stop(); + this.onStop(async (error?: Error) => { + await handle.emit('stop', error); }); }; diff --git a/imports/api/tags/makeItem.ts b/imports/api/tags/makeItem.ts index 6033e45c9..6cc3cfefd 100644 --- a/imports/api/tags/makeItem.ts +++ b/imports/api/tags/makeItem.ts @@ -5,7 +5,7 @@ import type Selector from '../query/Selector'; import useSubscription from '../publication/useSubscription'; import useItem from '../publication/useItem'; -import type Publication from '../publication/Publication'; +import type PublicationEndpoint from '../publication/PublicationEndpoint'; import type TagDocument from './TagDocument'; @@ -17,22 +17,22 @@ type ReturnType = { const makeItem = ( collection: Collection, - singlePublication: Publication<[string]>, + singlePublication: PublicationEndpoint<[string]>, ) => (name: string, deps: DependencyList): ReturnType => { - const isLoading = useSubscription(singlePublication, name); - const loading = isLoading(); + const isLoading = useSubscription(singlePublication, [name]); + const loadingSubscription = isLoading(); - const item = useItem( - loading ? null : collection, + const {loading: loadingResult, result} = useItem( + loadingSubscription ? 
null : collection, {name} as Selector, undefined, - [loading, ...deps], + [loadingSubscription, ...deps], ); return { - loading, - item, + loading: loadingSubscription || loadingResult, + item: result, }; }; diff --git a/imports/api/update.ts b/imports/api/update.ts index 4252978f2..2769ec966 100644 --- a/imports/api/update.ts +++ b/imports/api/update.ts @@ -1,8 +1,16 @@ +import {EJSON} from 'meteor/ejson'; + import {asyncIterableToArray} from '@async-iterable-iterator/async-iterable-to-array'; import type schema from '../lib/schema'; -import {type DocumentUpdate} from './DocumentUpdate'; +import type Document from './Document'; +import { + type DocumentUpdateEntry as UpdateEntry, + type DocumentUpdate, + type OptionalKeys, + type RequiredKeys, +} from './DocumentUpdate'; import type TransactionDriver from './transaction/TransactionDriver'; const id = (x: T): T => x; @@ -11,9 +19,9 @@ export type Entry = { [K in keyof T]: [K, T[K]]; }[keyof T]; -export type UpdateEntry = { - [K in keyof T]: [K, T[K] | null]; -}[keyof T]; +const _hasOwn = Object.prototype.hasOwnProperty; +const hasOwn = (object: T, property: string | number | symbol) => + _hasOwn.call(object, property); export const yieldKey = function* ( fields: T, @@ -23,7 +31,7 @@ export const yieldKey = function* ( x: Exclude, ) => Exclude = id>, ): IterableIterator<[K, Exclude]> { - if (Object.prototype.hasOwnProperty.call(fields, key)) { + if (hasOwn(fields, key)) { const value = fields[key] as Exclude; type.parse(value); yield [key, transform(value)]; @@ -36,7 +44,7 @@ export const yieldResettableKey = function* ( type: schema.ZodType, transform: (x: T[K]) => T[K] = id, ): IterableIterator<[K, T[K]]> { - if (Object.prototype.hasOwnProperty.call(fields, key)) { + if (hasOwn(fields, key)) { type.nullable().optional().parse(fields[key]); yield [key, transform(fields[key])]; } @@ -111,26 +119,21 @@ export const makeComputeUpdate = }; }; -type SanitizeUpdate = ( +type SanitizeUpdate = ( fields: DocumentUpdate, ) 
=> IterableIterator>; -const fromEntries = ( - entries: Array<[K, V]>, -): undefined | Record => - entries.length === 0 - ? undefined - : (Object.fromEntries(entries) as Record); - export const makeSanitize = - (sanitizeUpdate: SanitizeUpdate) => + ( + sanitizeUpdate: SanitizeUpdate, + ) => (fields: DocumentUpdate) => { const update = Array.from(sanitizeUpdate(fields)); return { - $set: fromEntries( + $set: Object.fromEntries( update.filter(([, value]) => value !== undefined && value !== null), - ) as Partial, - $unset: fromEntries( + ), + $unset: Object.fromEntries( update .filter(([, value]) => value === undefined || value === null) .map(([key]) => [key, true]), @@ -138,19 +141,70 @@ export const makeSanitize = }; }; -const documentDiffGen = function* ( +const documentDiffGen = function* ( prevState: T, - newState: Required extends U ? U : never, -): IterableIterator> { - for (const [key, newValue] of Object.entries(newState)) { - if (JSON.stringify(newValue) !== JSON.stringify(prevState[key])) { - yield [key as keyof U, newValue as U[keyof U]]; + newState: U, +): IterableIterator> { + const newIndex = new Map(Object.entries(newState)); + const prevIndex = new Map(Object.entries(prevState)); + for (const [key, newValue] of newIndex) { + if ( + !EJSON.equals( + newValue, + // @ts-expect-error Typing of EJSON.equals is incorrect. + prevIndex.get(key), + ) + ) { + yield [key as RequiredKeys & OptionalKeys, newValue]; + } + } + + for (const key of prevIndex.keys()) { + if (!newIndex.has(key)) { + yield [key as OptionalKeys, null]; } } }; -export const documentDiff = ( +export const documentDiff = ( + prevState: T, + newState: U, +): DocumentUpdate => + Object.fromEntries(documentDiffGen(prevState, newState)) as DocumentUpdate; + +export const documentDiffApplyGen = function* ( prevState: T, - newState: Required extends U ? 
U : never, -): Partial => - Object.fromEntries(documentDiffGen(prevState, newState)) as Partial; + changes: DocumentUpdate, +): IterableIterator> { + const index = new Map(Object.entries(changes)); + for (const [key, value] of Object.entries(prevState)) { + if (index.has(key)) { + const newValue = index.get(key); + if (newValue !== undefined && newValue !== null) { + yield [key, newValue as T[keyof T]]; + } + + index.delete(key); + } else { + yield [key, value]; + } + } + + for (const [key, newValue] of index) { + if (newValue !== undefined && newValue !== null) { + yield [key, newValue as T[keyof T]]; + } + } +}; + +export const documentDiffApply = ( + prevState: T, + changes: DocumentUpdate, +): T => { + return Object.fromEntries(documentDiffApplyGen(prevState, changes)) as T; +}; + +export const isDiffEmpty = (obj: DocumentUpdate) => + Object.keys(obj).length === 0; + +export {type DocumentUpdateEntry as UpdateEntry} from './DocumentUpdate'; diff --git a/imports/api/useNoShowsForPatient.tests.ts b/imports/api/useNoShowsForPatient.tests.ts new file mode 100644 index 000000000..66283ffba --- /dev/null +++ b/imports/api/useNoShowsForPatient.tests.ts @@ -0,0 +1,219 @@ +import {range} from '@iterable-iterator/range'; + +import {assert} from 'chai'; + +import {startOfTomorrow, startOfYesterday} from 'date-fns'; + +import {renderHook, waitFor} from '../_test/react'; + +import {client, randomPassword, randomUserId} from '../_test/fixtures'; + +import createUserWithPassword from './user/createUserWithPassword'; +import loginWithPassword from './user/loginWithPassword'; +import call from './endpoint/call'; +import {newAppointmentFormData} from './_dev/populate/appointments'; + +import appointmentsSchedule from './endpoint/appointments/schedule'; +import appointmentsCancel from './endpoint/appointments/cancel'; +import randomId from './randomId'; +import useNoShowsForPatient from './useNoShowsForPatient'; +import {newPatientFormData} from './_dev/populate/patients'; 
+import patientsInsert from './endpoint/patients/insert'; +import beginConsultation from './endpoint/appointments/beginConsultation'; +import appointmentsReschedule from './endpoint/appointments/reschedule'; + +client(__filename, () => { + it('should render when logged out', async () => { + const patientId = randomId(); + const {result} = renderHook(() => useNoShowsForPatient(patientId)); + assert.deepEqual(result.current, { + loading: true, + found: false, + value: undefined, + }); + + await waitFor(() => { + assert(!result.current.loading); + }); + + assert.deepEqual(result.current, { + loading: false, + found: false, + value: undefined, + }); + }); + + it('should render when logged in', async () => { + const username = randomUserId(); + const password = randomPassword(); + await createUserWithPassword(username, password); + await loginWithPassword(username, password); + + const patientId = randomId(); + const {result} = renderHook(() => useNoShowsForPatient(patientId)); + assert.deepEqual(result.current, { + loading: true, + found: false, + value: undefined, + }); + + await waitFor(() => { + assert(!result.current.loading); + }); + + assert.deepEqual(result.current, { + loading: false, + found: true, + value: 0, + }); + }); + + it('should have aggregate on load', async () => { + const username = randomUserId(); + const password = randomPassword(); + await createUserWithPassword(username, password); + await loginWithPassword(username, password); + + const patientId = await call(patientsInsert, newPatientFormData()); + + const yesterday = startOfYesterday(); + const tomorrow = startOfTomorrow(); + + for (const _ of range(3)) { + // eslint-disable-next-line no-await-in-loop + await call( + appointmentsSchedule, + newAppointmentFormData({ + datetime: yesterday, + patient: {_id: patientId}, + }), + ); + } + + for (const _ of range(2)) { + // eslint-disable-next-line no-await-in-loop + await call( + appointmentsSchedule, + newAppointmentFormData({ + datetime: 
tomorrow, + patient: {_id: patientId}, + }), + ); + } + + const {result} = renderHook(() => useNoShowsForPatient(patientId)); + await waitFor(() => { + assert(!result.current.loading); + }); + + assert.deepEqual(result.current, { + loading: false, + found: true, + value: 3, + }); + }); + + it('should react to changes', async () => { + const username = randomUserId(); + const password = randomPassword(); + await createUserWithPassword(username, password); + await loginWithPassword(username, password); + + const patientId = await call(patientsInsert, newPatientFormData()); + + const otherPatientId = await call(patientsInsert, newPatientFormData()); + + const yesterday = startOfYesterday(); + const tomorrow = startOfTomorrow(); + const {result} = renderHook(() => useNoShowsForPatient(patientId)); + await waitFor(() => { + assert(!result.current.loading); + }); + + assert.deepEqual(result.current, { + loading: false, + found: true, + value: 0, + }); + + const _insertOneNoShow = async () => { + await call( + appointmentsSchedule, + newAppointmentFormData({ + datetime: tomorrow, + patient: {_id: patientId}, + }), + ); + + const {_id} = await call( + appointmentsSchedule, + newAppointmentFormData({ + datetime: yesterday, + patient: {_id: patientId}, + }), + ); + + await call( + appointmentsSchedule, + newAppointmentFormData({ + datetime: tomorrow, + patient: {_id: patientId}, + }), + ); + + return _id; + }; + + const a = await _insertOneNoShow(); + + await waitFor(() => { + assert.strictEqual(result.current.value, 1); + }); + + const b = await _insertOneNoShow(); + + await waitFor(() => { + assert.strictEqual(result.current.value, 2); + }); + + const c = await _insertOneNoShow(); + + await waitFor(() => { + assert.strictEqual(result.current.value, 3); + }); + + const d = await _insertOneNoShow(); + + await waitFor(() => { + assert.deepEqual(result.current, { + loading: false, + found: true, + value: 4, + }); + }); + + await call(appointmentsReschedule, d, {patient: {_id: 
otherPatientId}}); + + await waitFor(() => { + assert.strictEqual(result.current.value, 3); + }); + + await call(appointmentsCancel, b, '', ''); + + await waitFor(() => { + assert.strictEqual(result.current.value, 2); + }); + + await call(beginConsultation, a); + + await waitFor(() => { + assert.strictEqual(result.current.value, 1); + }); + + await call(appointmentsReschedule, c, {datetime: tomorrow, duration: 1}); + + await waitFor(() => { + assert.strictEqual(result.current.value, 0); + }); + }); +}); diff --git a/imports/api/useNoShowsForPatient.ts b/imports/api/useNoShowsForPatient.ts index 8b65e3967..4f1a13e22 100644 --- a/imports/api/useNoShowsForPatient.ts +++ b/imports/api/useNoShowsForPatient.ts @@ -4,7 +4,7 @@ import useSubscription from './publication/useSubscription'; import useReactive from './publication/useReactive'; const useNoShowsForPatient = (patientId: string) => { - const isLoading = useSubscription(noShows, patientId); + const isLoading = useSubscription(noShows, [patientId]); const loading = isLoading(); const upToDate = useReactive( diff --git a/imports/api/user/createUserWithPassword.ts b/imports/api/user/createUserWithPassword.ts index 6671272ad..50b50c284 100644 --- a/imports/api/user/createUserWithPassword.ts +++ b/imports/api/user/createUserWithPassword.ts @@ -1,11 +1,8 @@ import {Accounts} from 'meteor/accounts-base'; -const createUserWithPassword = async (username: string, password: string) => - new Promise((resolve, reject) => { - Accounts.createUser({username, password}, (err) => { - if (err) reject(err); - else resolve(); - }); - }); +const createUserWithPassword = async ( + username: string, + password: string, +): Promise => Accounts.createUserAsync({username, password}); export default createUserWithPassword; diff --git a/imports/lib/array/removeUndefined.ts b/imports/lib/array/removeUndefined.ts new file mode 100644 index 000000000..45e0a78de --- /dev/null +++ b/imports/lib/array/removeUndefined.ts @@ -0,0 +1,6 @@ +const 
removeUndefined = (array: T) => + array.filter((value: T) => value !== undefined) as { + [K in keyof T]: T[K] extends {} ? T[K] : never; + }; + +export default removeUndefined; diff --git a/imports/lib/async/defer.tests.ts b/imports/lib/async/defer.tests.ts new file mode 100644 index 000000000..03642b6d7 --- /dev/null +++ b/imports/lib/async/defer.tests.ts @@ -0,0 +1,233 @@ +import {assert} from 'chai'; + +import {isNode, isomorphic} from '../../_test/fixtures'; + +import {cancelAll, defer, flushAll} from './defer'; +import sleep from './sleep'; + +isomorphic(__filename, () => { + it('should queue to macrotask queue', async () => { + const x: number[] = []; + + defer(() => x.push(1)); + + assert.deepEqual(x, []); + + await Promise.resolve().then(() => { + assert.deepEqual(x, []); + }); + + await sleep(0); + + assert.deepEqual(x, [1]); + }); + + it('should allow cancellation', async () => { + const x: number[] = []; + + const deferred = defer(() => x.push(1)); + + assert.deepEqual(x, []); + + await Promise.resolve().then(() => { + assert.deepEqual(x, []); + }); + + deferred.cancel(); + + await sleep(0); + + assert.deepEqual(x, []); + }); + + it('should allow flushing before microtask queue', async () => { + const x: number[] = []; + + const deferred = defer(() => x.push(1)); + + assert.deepEqual(x, []); + + deferred.flush(); + + await Promise.resolve().then(() => { + assert.deepEqual(x, [1]); + }); + + await sleep(0); + + assert.deepEqual(x, [1]); + }); + + it('should flush after main loop', async () => { + const x: number[] = []; + + const deferred = defer(() => x.push(1)); + + deferred.flush(); + + assert.deepEqual(x, []); + + await Promise.resolve().then(() => { + assert.deepEqual(x, [1]); + }); + + await sleep(0); + + assert.deepEqual(x, [1]); + }); + + it('should catch errors', async () => { + const x: number[] = []; + + defer(() => { + x.push(1); + throw new Error('test'); + }); + + await sleep(0); + + assert.deepEqual(x, [1]); + }); + + it('should catch 
errors when flushing', async () => { + const x: number[] = []; + + const deferred = defer(() => { + x.push(1); + throw new Error('test'); + }); + + deferred.flush(); + + await sleep(0); + + assert.deepEqual(x, [1]); + }); + + it('should allow cancellation of all deferred computations', async () => { + const x: number[] = []; + + defer(() => x.push(1)); + defer(() => x.push(2)); + + assert.deepEqual(x, []); + + await Promise.resolve().then(() => { + assert.deepEqual(x, []); + }); + + cancelAll(); + + await sleep(0); + + assert.deepEqual(x, []); + }); + + it('should allow flushing all deferred computations before microtask queue', async () => { + const x: number[] = []; + + defer(() => x.push(1)); + defer(() => x.push(2)); + + assert.deepEqual(x, []); + + flushAll(); + + await Promise.resolve().then(() => { + assert.deepEqual(x, [1, 2]); + }); + + await sleep(0); + + assert.deepEqual(x, [1, 2]); + }); + + it('should flush all after main loop', async () => { + const x: number[] = []; + + defer(() => x.push(1)); + defer(() => x.push(2)); + + flushAll(); + + assert.deepEqual(x, []); + + await Promise.resolve().then(() => { + assert.deepEqual(x, [1, 2]); + }); + + await sleep(0); + + assert.deepEqual(x, [1, 2]); + }); + + it('should execute in order', async () => { + const x: number[] = []; + + defer(() => { + x.push(1); + }); + defer(() => { + x.push(2); + }); + + assert.deepEqual(x, []); + + await Promise.resolve().then(() => { + assert.deepEqual(x, []); + }); + + await sleep(0); + + assert.deepEqual(x, [1, 2]); + }); + + it('should respect timeout', async () => { + const x: number[] = []; + + const delay = isNode() ? 
5 : 1; + + defer(() => { + x.push(1); + }, delay); + defer(() => { + x.push(2); + }); + + assert.deepEqual(x, []); + + await Promise.all([ + sleep(delay).then(() => { + assert.deepEqual(x, [2, 1]); + }), + sleep(0).then(() => { + assert.deepEqual(x, [2]); + }), + Promise.resolve().then(() => { + assert.deepEqual(x, []); + }), + ]); + }); + + it('should allow passing arguments', async () => { + const x: number[] = []; + defer( + (a, b) => { + x.push(a, b); + }, + 0, + 1, + 2, + ); + + assert.deepEqual(x, []); + + await Promise.resolve().then(() => { + assert.deepEqual(x, []); + }); + + await sleep(0); + + assert.deepEqual(x, [1, 2]); + }); +}); diff --git a/imports/lib/async/defer.ts b/imports/lib/async/defer.ts new file mode 100644 index 000000000..f12803310 --- /dev/null +++ b/imports/lib/async/defer.ts @@ -0,0 +1,79 @@ +import type Timeout from '../types/Timeout'; + +import createPromise from './createPromise'; + +type Resolve = (value?: any) => void; +type Reject = (reason?: any) => void; + +type Callback = (...args: A) => void; + +const _pending = new Set(); + +export class Deferred { + #timeout: Timeout; + #resolve: Resolve; + #reject: Reject; + + constructor(timeout: Timeout, resolve: Resolve, reject: Reject) { + this.#timeout = timeout; + this.#resolve = resolve; + this.#reject = reject; + } + + cancel() { + if (!_pending.has(this)) return; + _pending.delete(this); + clearTimeout(this.#timeout); + this.#reject(); + } + + flush() { + if (!_pending.has(this)) return; + _pending.delete(this); + clearTimeout(this.#timeout); + this.#resolve(); + } +} + +export const defer = ( + callback: Callback, + timeout?: number, + ...args: A +): Deferred => { + const {promise, resolve, reject} = createPromise(); + promise + .then( + () => { + _pending.delete(deferred); + callback(...args); + }, + + () => { + // NOTE: This handles cancellation. 
+ }, + ) + .catch((error: unknown) => { + console.error({error}); + }); + const deferred = new Deferred(setTimeout(resolve, timeout), resolve, reject); + _pending.add(deferred); + return deferred; +}; + +const _cancelAll = (pending: Iterable) => { + for (const deferred of pending) deferred.cancel(); +}; + +export const cancelAll = () => { + _cancelAll(_pending); + _pending.clear(); +}; + +const _flushAll = (pending: Iterable) => { + for (const deferred of pending) deferred.flush(); +}; + +export const flushAll = () => { + _flushAll(_pending); + _pending.clear(); +}; diff --git a/imports/lib/async/lock.ts b/imports/lib/async/lock.ts new file mode 100644 index 000000000..c695a5205 --- /dev/null +++ b/imports/lib/async/lock.ts @@ -0,0 +1,94 @@ +import createPromise from './createPromise'; + +class LockReleaseHandle { + #internals = createPromise(); + + public async _promise(): Promise { + return this.#internals.promise; + } + + public _resolve() { + this.#internals.resolve(); + } +} + +export class AcquiringLockWouldBlockError extends Error { + constructor() { + super('Acquiring lock would block'); + } +} + +export class AsyncLock { + #lastHandle: LockReleaseHandle | null = null; + + public acquireNonBlocking() { + if (this.#lastHandle !== null) { + throw new AcquiringLockWouldBlockError(); + } + + const handle = new LockReleaseHandle(); + + this.#lastHandle = handle; + + return handle; + } + + public async acquire() { + const release = this.#lastHandle?._promise(); + const handle = new LockReleaseHandle(); + + this.#lastHandle = handle; + + await release; + + return handle; + } + + public release(handle: LockReleaseHandle) { + if (handle === this.#lastHandle) { + // NOTE: Garbage collect. Part of non-blocking acquisition logic. 
+ this.#lastHandle = null; + } + + handle._resolve(); + } +} + +export const withBlocking = async ( + lock: AsyncLock, + callback: () => Promise | T, +) => { + const handle = await lock.acquire(); + + try { + await callback(); + } finally { + lock.release(handle); + } +}; + +export const withNonBlockingAsync = async ( + lock: AsyncLock, + callback: () => Promise | T, +) => { + const handle = lock.acquireNonBlocking(); + + try { + await callback(); + } finally { + lock.release(handle); + } +}; + +export const withNonBlocking = async ( + lock: AsyncLock, + callback: () => T, +) => { + const handle = lock.acquireNonBlocking(); + + try { + callback(); + } finally { + lock.release(handle); + } +}; diff --git a/imports/lib/async/queue.ts b/imports/lib/async/queue.ts new file mode 100644 index 000000000..1579cb2a1 --- /dev/null +++ b/imports/lib/async/queue.ts @@ -0,0 +1,31 @@ +export class AsyncQueue { + #queued = 0; + #queue: Promise | null = null; + + public get length() { + return this.#queued; + } + + // TODO: Enable task cancellation. + public enqueue(task: () => Promise | void) { + ++this.#queued; + const handle = Promise.resolve(this.#queue) + .then(async () => { + --this.#queued; + await task(); + if (this.#queue === handle) { + // NOTE: Garbage collection. 
+ this.#queue = null; + } + }) + .catch((error) => { + console.error({error}); + }); + + this.#queue = handle; + } + + public async drain() { + await this.#queue; + } +} diff --git a/imports/lib/events.ts b/imports/lib/events.ts new file mode 100644 index 000000000..9ff73d6f4 --- /dev/null +++ b/imports/lib/events.ts @@ -0,0 +1,6 @@ +import Emittery from 'emittery'; + +export const EventEmitter = Emittery; +// eslint-disable-next-line @typescript-eslint/no-redeclare +export type EventEmitter = Emittery; +export const eventEmitter = () => new Emittery(); diff --git a/imports/lib/iterable-iterator/duplicates.tests.ts b/imports/lib/iterable-iterator/duplicates.tests.ts new file mode 100644 index 000000000..1056a5f68 --- /dev/null +++ b/imports/lib/iterable-iterator/duplicates.tests.ts @@ -0,0 +1,25 @@ +import {assert} from 'chai'; + +import {isomorphic} from '../../_test/fixtures'; + +import duplicates from './duplicates'; + +isomorphic(__filename, () => { + it('should return nothing on input without duplicates', () => { + const expected = Array.from(''); + const actual = Array.from(duplicates('abcd')); + assert.deepEqual(actual, expected); + }); + + it('should work on empty inputs', () => { + const expected = []; + const actual = Array.from(duplicates([])); + assert.deepEqual(actual, expected); + }); + + it('should work', () => { + const expected = Array.from('aaabra'); + const actual = Array.from(duplicates('abracadabra')); + assert.deepEqual(actual, expected); + }); +}); diff --git a/imports/lib/iterable-iterator/duplicates.ts b/imports/lib/iterable-iterator/duplicates.ts new file mode 100644 index 000000000..6a8052e0d --- /dev/null +++ b/imports/lib/iterable-iterator/duplicates.ts @@ -0,0 +1,9 @@ +const duplicates = function* (items: Iterable): Generator { + const seen = new Set(); + for (const item of items) { + if (seen.has(item)) yield item; + else seen.add(item); + } +}; + +export default duplicates; diff --git a/imports/lib/iterable-iterator/unique.tests.ts 
b/imports/lib/iterable-iterator/unique.tests.ts new file mode 100644 index 000000000..2885de54c --- /dev/null +++ b/imports/lib/iterable-iterator/unique.tests.ts @@ -0,0 +1,25 @@ +import {assert} from 'chai'; + +import {isomorphic} from '../../_test/fixtures'; + +import unique from './unique'; + +isomorphic(__filename, () => { + it('should return nothing on input without duplicates', () => { + const expected = Array.from('abcd'); + const actual = Array.from(unique('abcd')); + assert.deepEqual(actual, expected); + }); + + it('should work on empty inputs', () => { + const expected = []; + const actual = Array.from(unique([])); + assert.deepEqual(actual, expected); + }); + + it('should work', () => { + const expected = Array.from('abrcd'); + const actual = Array.from(unique('abracadabra')); + assert.deepEqual(actual, expected); + }); +}); diff --git a/imports/lib/iterable-iterator/unique.ts b/imports/lib/iterable-iterator/unique.ts new file mode 100644 index 000000000..9eba0e8af --- /dev/null +++ b/imports/lib/iterable-iterator/unique.ts @@ -0,0 +1,3 @@ +const unique = (items: Iterable): Iterable => new Set(items); + +export default unique; diff --git a/imports/lib/lcs/lis.tests.ts b/imports/lib/lcs/lis.tests.ts new file mode 100644 index 000000000..9b74520c6 --- /dev/null +++ b/imports/lib/lcs/lis.tests.ts @@ -0,0 +1,89 @@ +import {assert} from 'chai'; + +import {isomorphic} from '../../_test/fixtures'; + +import {lis} from './lis'; + +isomorphic(__filename, () => { + it('should work on an empty input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(0, p); + assert.deepEqual(subsequence.length, 0); + }); + + it('should work on the identity', () => { + const p = [0, 1, 2, 3, 4, 5]; + assert.deepEqual(lis(p.length, p), [0, 1, 2, 3, 4, 5]); + }); + + it('should work with negative values', () => { + const p = [-5, -4, -3, -2, -1]; + assert.deepEqual(lis(p.length, p), [0, 1, 2, 3, 4]); + }); + + it('should work with 
negative and positive values', () => { + const p = [-5, 2, -3, 4, -1, 6, 7]; + const subsequence = lis(p.length, p); + assert.deepEqual(subsequence.length, 5); + }); + + it('should work on length-1 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(1, p); + assert.deepEqual(subsequence.length, 1); + }); + + it('should work on length-2 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(2, p); + assert.deepEqual(subsequence.length, 2); + }); + + it('should work on length-3 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(3, p); + assert.deepEqual(subsequence.length, 2); + }); + + it('should work on length-4 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(4, p); + assert.deepEqual(subsequence.length, 3); + }); + + it('should work on length-5 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(5, p); + assert.deepEqual(subsequence.length, 3); + }); + + it('should work on length-6 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(6, p); + assert.deepEqual(subsequence.length, 3); + }); + + it('should work on length-7 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(7, p); + assert.deepEqual(subsequence.length, 3); + }); + + it('should work on length-8 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(8, p); + assert.deepEqual(subsequence.length, 4); + }); + + it('should work on length-9 input', () => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(9, p); + assert.deepEqual(subsequence.length, 4); + }); + + it('should work on a Van der Corput sequence', 
() => { + const p = [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]; + const subsequence = lis(p.length, p); + assert.deepEqual(subsequence.length, 6); + }); +}); diff --git a/imports/lib/lcs/lis.ts b/imports/lib/lcs/lis.ts new file mode 100644 index 000000000..772524adb --- /dev/null +++ b/imports/lib/lcs/lis.ts @@ -0,0 +1,72 @@ +import assert from 'assert'; + +type Sequence = { + [i: number]: number; + length: number; +}; + +export const lis = (n: number, sequence: Sequence) => { + assert(n <= sequence.length); + + // NOTE: Early return. + if (n <= 0) return []; + + // NOTE: Longest Increasing Subsequence in time O(N min(|LIS|, N-|LIS|)). + + // NOTE: Length of LIS found so far (>= 1 for a non-empty sequence). + let len = 1; + + // NOTE: Invariant: + // The last element of the revlex-minimum length-j increasing subsequence + // of sequence[:i+1] is sequence[ends[j]], + // for all 0 <= i < n and 0 <= j <= len. + // + // NOTE: + // Corrolary: ends[j] < ends[j+1], for all 0 <= j < len. + const ends = new Int32Array(n + 1); + ends[0] = -1; // NOTE: The empty sequence. + assert(ends[1] === 0); // NOTE: Int32Array's are 0-initialized. + + // NOTE: Invariant: + // The LIS ending with sequence[i] extends the LIS ending with sequence[prev[i]], + // or the empty sequence if prev[i] is -1. + const prev = new Int32Array(n); + prev[0] = -1; + + for (let i = 1; i < n; ++i) { + const k = sequence[i]!; + assert(len >= 1); + let j = len; + + // NOTE: Extend the longest possible increasing subsequence of sequence[:i] + // that can be extended with sequence[i]. The extended subsequence has length j + 1. + while (sequence[ends[j]!]! >= k && --j !== 0); + // NOTE: Could implement binary search on j. + // The whole algorithm would be O(N log N) in this case. + + assert(j >= 0); + assert(j <= len); + assert(j < n); + prev[i] = ends[j]!; + ends[j + 1] = i; // NOTE: prev[ends[j+1]] = ends[j] + + // NOTE: If j = len, then we found the longest increasing subsequence so far. 
+ // Otherwise, j < len, and we have replaced the (existing and unique) increasing + // subsequence such that the new one comes before in revlex order. + // TODO: Go branch-less. + if (j === len) ++len; + } + + // NOTE: Populate output array by walking the LIS backwards, then reversing it. + assert(len >= 1); + let l = ends[len]!; + assert(l >= 0); + const subsequence: number[] = [l]; + l = prev[l]!; + while (l !== -1) { + subsequence.push(l); + l = prev[l]!; + } + + return subsequence.reverse(); +}; diff --git a/imports/lib/pdf/pdf.tests.ts b/imports/lib/pdf/pdf.tests.ts index 805d3c0c9..327e19238 100644 --- a/imports/lib/pdf/pdf.tests.ts +++ b/imports/lib/pdf/pdf.tests.ts @@ -14,5 +14,5 @@ server(__filename, () => { it('should work on the server', async () => { const data = randomPDFUint8Array(); await fetchPDF({data}); - }); + }).timeout(5000); }); diff --git a/imports/lib/types/Timeout.ts b/imports/lib/types/Timeout.ts index 4b6b1ca3f..b693ad729 100644 --- a/imports/lib/types/Timeout.ts +++ b/imports/lib/types/Timeout.ts @@ -1,3 +1,3 @@ -type Timeout = ReturnType; +type Timeout = ReturnType; export default Timeout; diff --git a/imports/ui/App.tests.tsx b/imports/ui/App.tests.tsx index e6c85d437..18b728632 100644 --- a/imports/ui/App.tests.tsx +++ b/imports/ui/App.tests.tsx @@ -1,5 +1,6 @@ import React from 'react'; -import {render, waitForElementToBeRemoved} from '@testing-library/react'; + +import {render, waitForElementToBeRemoved} from '../_test/react'; import {client, randomPassword, randomUserId} from '../_test/fixtures'; diff --git a/imports/ui/App.tsx b/imports/ui/App.tsx index 9517451e8..cb10bdbbe 100644 --- a/imports/ui/App.tsx +++ b/imports/ui/App.tsx @@ -1,6 +1,13 @@ import React from 'react'; -import {BrowserRouter} from 'react-router-dom'; +import { + BrowserRouter, + Routes, + Route, + useNavigate, + type NavigateFunction, + type Path, +} from 'react-router-dom'; import {CacheProvider} from '@emotion/react'; import createCache from 
'@emotion/cache'; @@ -10,6 +17,8 @@ import {SnackbarProvider} from 'notistack'; import CssBaseline from '@mui/material/CssBaseline'; +import isTest from '../app/isTest'; + import DateTimeLocalizationProvider from './i18n/DateTimeLocalizationProvider'; import CustomWholeWindowDropZone from './input/CustomWholeWindowDropZone'; import ModalProvider from './modal/ModelProvider'; @@ -24,11 +33,40 @@ export const muiCache = createCache({ prepend: true, }); +export const _router: {navigate: NavigateFunction} = { + navigate(_to: Partial | string | number) { + console.warn('Using unitialized test-only _navigate function call.'); + }, +}; + +const WithTestRoutes = ({children}) => { + _router.navigate = useNavigate(); + return ( + + test-only unmount route} + path="/_test/unmount" + /> + + + ); +}; + +const Router = isTest() + ? ({children}) => { + return ( + + {children} + + ); + } + : BrowserRouter; + const App = () => { const theme = useUserTheme(); return ( - + @@ -47,7 +85,7 @@ const App = () => { - + ); }; diff --git a/imports/ui/accessibility/addTooltip.tsx b/imports/ui/accessibility/addTooltip.tsx index 0454cfe03..c4ac88b21 100644 --- a/imports/ui/accessibility/addTooltip.tsx +++ b/imports/ui/accessibility/addTooltip.tsx @@ -14,7 +14,7 @@ const addTooltip = ( React.forwardRef(({tooltip, ...rest}: Props, ref) => { const title = transform(rest, tooltip); - return title ? ( + return title !== undefined ? 
( diff --git a/imports/ui/appointments/AppointmentCancellationDialog.tsx b/imports/ui/appointments/AppointmentCancellationDialog.tsx index 44dcc4590..d77433b99 100644 --- a/imports/ui/appointments/AppointmentCancellationDialog.tsx +++ b/imports/ui/appointments/AppointmentCancellationDialog.tsx @@ -1,4 +1,4 @@ -import React, {useState} from 'react'; +import React, {useState, useMemo} from 'react'; import Button from '@mui/material/Button'; import Dialog from '@mui/material/Dialog'; @@ -49,6 +49,15 @@ const AppointmentCancellationDialog = ({open, onClose, appointment}: Props) => { } }; + const renderValue = useMemo( + () => + loading + ? (value) => value || 'Loading options ...' + : (value) => + value || 'Please choose a reason for the cancellation (required)', + [loading], + ); + return (

{loading && } @@ -68,18 +77,9 @@ const AppointmentCancellationDialog = ({open, onClose, appointment}: Props) => { Cancellation reason