diff --git a/examples/dts-inspector/CHANGELOG.md b/examples/dts-inspector/CHANGELOG.md index d982ac1c1..3e67b0d8e 100644 --- a/examples/dts-inspector/CHANGELOG.md +++ b/examples/dts-inspector/CHANGELOG.md @@ -1,5 +1,12 @@ # dts-inspector +## 1.0.16 + +### Patch Changes + +- Updated dependencies [4deb5d4] + - @openfn/describe-package@0.0.19 + ## 1.0.15 ### Patch Changes diff --git a/examples/dts-inspector/package.json b/examples/dts-inspector/package.json index 09d01576c..eeeff65ae 100644 --- a/examples/dts-inspector/package.json +++ b/examples/dts-inspector/package.json @@ -1,6 +1,6 @@ { "name": "dts-inspector", - "version": "1.0.15", + "version": "1.0.16", "description": "", "main": "index.js", "type": "module", diff --git a/integration-tests/cli/test/errors.test.ts b/integration-tests/cli/test/errors.test.ts index 410afbe56..003517ed5 100644 --- a/integration-tests/cli/test/errors.test.ts +++ b/integration-tests/cli/test/errors.test.ts @@ -121,7 +121,7 @@ test.serial('multiple inputs', async (t) => { t.regex(error.message[0].message, /multiple dependencies detected for: c/i); }); -test.serial('invalid start', async (t) => { +test.serial('invalid start on workflow (not found)', async (t) => { const { stdout, err } = await run( `openfn ${jobsPath}/invalid-start.json --log-json` ); @@ -130,9 +130,21 @@ test.serial('invalid start', async (t) => { const stdlogs = extractLogs(stdout); assertLog(t, stdlogs, /Error validating execution plan/i); - assertLog(t, stdlogs, /Workflow failed/i); + assertLog(t, stdlogs, /aborting/i); - // Find the error obejct which is logged out const error = stdlogs.find((l) => l.message[0].name === 'ValidationError'); - t.regex(error.message[0].message, /could not find start job: nope/i); + t.regex(error.message[0].message, /Could not find start job: nope/i); +}); + +test.serial('invalid end (ambiguous)', async (t) => { + // Note that the start should override + const { stdout, err } = await run( + `openfn ${jobsPath}/invalid-start.json --log-json --start x1 --end x` + ); + t.is(err.code, 1); + + const stdlogs = extractLogs(stdout); + + assertLog(t, stdlogs, /Error: end pattern matched multiple steps/i); + assertLog(t, stdlogs, /aborting/i); }); diff --git a/integration-tests/cli/test/execute-workflow.test.ts b/integration-tests/cli/test/execute-workflow.test.ts index 550128c7c..676de1a11 100644 --- a/integration-tests/cli/test/execute-workflow.test.ts +++ b/integration-tests/cli/test/execute-workflow.test.ts @@ -148,7 +148,7 @@ test.serial( } ); -test.serial.only( +test.serial( `openfn ${jobsPath}/wf-errors.json -iS "{ \\"data\\": { \\"number\\": 32 } }"`, async (t) => { const { err } = await run(t.title); @@ -176,3 +176,15 @@ test.serial.only( }); } ); + +// export issues https://github.com/OpenFn/kit/issues/238 +test.serial( + `openfn ${jobsPath}/common-date.json -s ${jobsPath}/common-date-input.json`, + async (t) => { + const { err } = await run(t.title); + t.falsy(err); + + const out = getJSON(); + t.deepEqual(out, { data: '01/01/2024', result: '1/1/2024, 12:00:00 AM' }) + } +); diff --git a/integration-tests/cli/test/fixtures/common-date-input.json b/integration-tests/cli/test/fixtures/common-date-input.json new file mode 100644 index 000000000..5b9b9c4ce --- /dev/null +++ b/integration-tests/cli/test/fixtures/common-date-input.json @@ -0,0 +1,3 @@ +{ + "data": "01/01/2024" +} \ No newline at end of file diff --git a/integration-tests/cli/test/fixtures/common-date.json b/integration-tests/cli/test/fixtures/common-date.json new file mode 100644 index 
000000000..1570410e5 --- /dev/null +++ b/integration-tests/cli/test/fixtures/common-date.json @@ -0,0 +1,10 @@ +{ + "workflow": { + "steps": [ + { + "adaptor": "common", + "expression": "fn((state) => { state.result = dateFns.parse(state.data, 'MM/dd/yyyy', new Date()).toLocaleString(); return state; })" + } + ] + } +} diff --git a/integration-tests/cli/test/fixtures/invalid-start.json b/integration-tests/cli/test/fixtures/invalid-start.json index 13f0f9ee1..4dfe89e66 100644 --- a/integration-tests/cli/test/fixtures/invalid-start.json +++ b/integration-tests/cli/test/fixtures/invalid-start.json @@ -5,7 +5,11 @@ "workflow": { "steps": [ { - "id": "x", + "id": "x1", + "expression": "fn((state) => state)" + }, + { + "id": "x2", "expression": "fn((state) => state)" } ] diff --git a/integration-tests/worker/CHANGELOG.md b/integration-tests/worker/CHANGELOG.md index b7297e499..e306fbc72 100644 --- a/integration-tests/worker/CHANGELOG.md +++ b/integration-tests/worker/CHANGELOG.md @@ -1,5 +1,15 @@ # @openfn/integration-tests-worker +## 1.0.39 + +### Patch Changes + +- Updated dependencies [7ddc5d8] +- Updated dependencies [4deb5d4] + - @openfn/ws-worker@1.1.4 + - @openfn/engine-multi@1.1.4 + - @openfn/lightning-mock@2.0.4 + ## 1.0.38 ### Patch Changes diff --git a/integration-tests/worker/package.json b/integration-tests/worker/package.json index 305b7125c..23829c1e5 100644 --- a/integration-tests/worker/package.json +++ b/integration-tests/worker/package.json @@ -1,7 +1,7 @@ { "name": "@openfn/integration-tests-worker", "private": true, - "version": "1.0.38", + "version": "1.0.39", "description": "Lightning WOrker integration tests", "author": "Open Function Group ", "license": "ISC", diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index da45a6c39..94a03753b 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,23 @@ # @openfn/cli +## 1.2.0 + +### Minor Changes + +- ea248a3: Allow step output to be cached + Accept fuzzy step ids in `--start`, `--end` and `--only` + +### Patch Changes + +- 7ddc5d8: Support expressions in lazy state operators +- 4deb5d4: Recognise import aliases in job code +- Updated dependencies [cecdb60] +- Updated dependencies [4deb5d4] +- Updated dependencies [7ddc5d8] + - @openfn/runtime@1.1.2 + - @openfn/describe-package@0.0.19 + - @openfn/compiler@0.1.2 + ## 1.1.4 ### Patch Changes diff --git a/packages/cli/README.md b/packages/cli/README.md index 2eccdfa4e..225647154 100644 --- a/packages/cli/README.md +++ b/packages/cli/README.md @@ -127,6 +127,46 @@ If no command is specified, execute will run. To get more information about a command, including usage examples, run `openfn help`, ie, `openfn compile help`. +## Caching step output + +The CLI can write the output of every single step to disk (rather than just the final output). To do this, just run a job with the `--cache-steps` flag. + +``` +openfn tmp/job.js --cache-steps +``` + +The cached output is written to `.cli-cache/<workflow name>/<step id>.json`, relative to the input job or workflow file. This folder has a `.gitignore` file and should be ignored by your version control. + +So for a workflow at `./tmp/workflow.json` you'll get a cache path something like `./tmp/.cli-cache/workflow/step-1.json`. + +The cache is cleared when execution starts, so you know all artefacts in the cache folder relate to the last run. + +Step caching is disabled by default, but you can switch it on by setting the `OPENFN_ALWAYS_CACHE_STEPS` env var to `true`.
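For reference, the `getCachePath` helper added to `packages/cli/src/util/cache.ts` later in this diff resolves that cache location roughly as follows (a simplified sketch of the real helper):

```ts
import path from 'node:path';
import type { ExecutionPlan } from '@openfn/lexicon';

// Simplified sketch of getCachePath from packages/cli/src/util/cache.ts (added below):
// step output is cached at <baseDir>/.cli-cache/<workflow name>/<step id>.json
const getCachePath = (plan: ExecutionPlan, baseDir: string, stepId?: string) => {
  const base = `${baseDir}/.cli-cache/${plan.workflow.name}`;
  return path.resolve(stepId ? `${base}/${stepId.replace(/ /, '-')}.json` : base);
};
```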
To disable for a single execution, pass the `--no-cache-steps` flag. + +## Starting from a custom step + +When executing a workflow, the CLI will run from the first step (which is usually the first step in the `steps` array, unless `options.start` is set in the workflow). + +You can run from any starting step by passing `--start <step id>`, like this: + +``` +openfn tmp/job.js --start upload-to-salesforce +``` + +If you previously cached the steps from this workflow, the CLI will automatically load the correct input state from the cache. Otherwise, you can pass in whatever state you need with `-s ./transformed-state.json`. + +You can also pass `--end` to make the workflow end early, or `--only` to only run a single step. + +All of these options support "fuzzy" step inputs. If you pass an exact step id, that step will always be the starting step. But you can also pass part of a step name or id. + +So to match a step with the id `236baf56-e6c7-40f2-80ad-00d5a10b6b64` (such as you might download from Lightning), you can do: + +``` +openfn tmp/job.js --start 236b +``` + +Any unique, continuous sequence of characters in the name or id will match. If there are multiple matches, an error will be thrown. + ## Deploying Workflows > ⚠️ This feature is still in active development. Expect breaking changes. diff --git a/packages/cli/package.json b/packages/cli/package.json index 9d13e68d9..657a5e673 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/cli", - "version": "1.1.4", + "version": "1.2.0", "description": "CLI devtools for the openfn toolchain.", "engines": { "node": ">=18", diff --git a/packages/cli/src/execute/command.ts b/packages/cli/src/execute/command.ts index 41950cca2..a54caae51 100644 --- a/packages/cli/src/execute/command.ts +++ b/packages/cli/src/execute/command.ts @@ -9,9 +9,12 @@ export type ExecuteOptions = Required< Opts, | 'adaptors' | 'autoinstall' + | 'baseDir' + | 'cacheSteps' | 'command' | 'compile' | 'expandAdaptors' + | 'end' | 'immutable' | 'ignoreImports' | 'expressionPath' @@ -19,6 +22,7 @@ | 'logJson' | 'outputPath' | 'outputStdout' + | 'only' | 'path' | 'repoDir' | 'skipAdaptorValidation' @@ -38,6 +42,7 @@ const options = [ o.adaptors, o.autoinstall, + o.cacheSteps, o.compile, o.immutable, o.ignoreImports, diff --git a/packages/cli/src/execute/execute.ts b/packages/cli/src/execute/execute.ts index 9b4b4a576..1f5fe120f 100644 --- a/packages/cli/src/execute/execute.ts +++ b/packages/cli/src/execute/execute.ts @@ -1,9 +1,15 @@ -import run, { getNameAndVersion } from '@openfn/runtime'; +import run, { NOTIFY_JOB_COMPLETE, getNameAndVersion } from '@openfn/runtime'; import type { ExecutionPlan, Job } from '@openfn/lexicon'; -import type { ModuleInfo, ModuleInfoMap } from '@openfn/runtime'; +import type { + ModuleInfo, + ModuleInfoMap, + NotifyJobCompletePayload, +} from '@openfn/runtime'; -import createLogger, { RUNTIME, JOB } from '../util/logger'; -import { ExecuteOptions } from './command'; +import createLogger, { RUNTIME, JOB, Logger } from '../util/logger'; +import { saveToCache } from '../util/cache'; + +import type { ExecuteOptions } from './command'; type ExtendedModuleInfo = ModuleInfo & { name: string; @@ -12,7 +18,8 @@ export default async ( plan: ExecutionPlan, input: any, - opts: ExecuteOptions + opts: ExecuteOptions, + logger: Logger ): Promise<any> => { try { const result = await run(plan, input, { @@ -23,6 +30,14 @@
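The fuzzy step matching described in the CLI README above is handled by the new `fuzzy-match-step` util introduced further down in this diff; its core behaviour, as a condensed sketch:

```ts
// Condensed sketch of packages/cli/src/util/fuzzy-match-step.ts (added below):
// an exact id match always wins; otherwise the pattern must match exactly one
// step by id or name, or the lookup throws AMBIGUOUS_INPUT / NOT_FOUND.
const fuzzyMatchStep = (
  steps: Array<{ id?: string; name?: string }>,
  pattern: string
) => {
  const exact = steps.find((step) => step.id === pattern);
  if (exact) return exact.id;

  const matches = steps.filter(
    (step) => step.id?.includes(pattern) || step.name?.includes(pattern)
  );
  if (matches.length === 1) return matches[0].id;
  throw new Error(matches.length > 1 ? 'AMBIGUOUS_INPUT' : 'NOT_FOUND');
};
```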
repo: opts.repoDir, modules: parseAdaptors(plan), }, + callbacks: { + notify: async (eventName, payload) => { + if (eventName === NOTIFY_JOB_COMPLETE) { + const { state, jobId } = payload as NotifyJobCompletePayload; + await saveToCache(plan, jobId, state, opts, logger); + } + }, + }, }); return result; } catch (e: any) { diff --git a/packages/cli/src/execute/handler.ts b/packages/cli/src/execute/handler.ts index 060a06c22..063b84793 100644 --- a/packages/cli/src/execute/handler.ts +++ b/packages/cli/src/execute/handler.ts @@ -13,6 +13,34 @@ import loadState from '../util/load-state'; import validateAdaptors from '../util/validate-adaptors'; import loadPlan from '../util/load-plan'; import assertPath from '../util/assert-path'; +import { clearCache } from '../util/cache'; +import fuzzyMatchStep from '../util/fuzzy-match-step'; +import abort from '../util/abort'; + +const matchStep = ( + plan: ExecutionPlan, + stepPattern: string, + stepName: string, + logger: Logger +): string => { + try { + return fuzzyMatchStep(plan, stepPattern) ?? stepPattern; + } catch (err: any) { + let message; + let help; + if (err.message === 'AMBIGUOUS_INPUT') { + message = `${stepName} pattern matched multiple steps`; + help = `The ${stepName} option can contain an exact match of a step id, or a partial match if a name or id so long as it is unique.`; + } else if (err.message === 'NOT_FOUND') { + message = `${stepName} step not found`; + help = `The step "${stepPattern}" could not be be found in the workflow`; + } else { + message = `Error parsing ${stepName} option`; + } + abort(logger, `Error: ${message}`, undefined, help); + } + return ''; +}; const executeHandler = async (options: ExecuteOptions, logger: Logger) => { const start = new Date().getTime(); @@ -20,6 +48,11 @@ const executeHandler = async (options: ExecuteOptions, logger: Logger) => { await validateAdaptors(options, logger); let plan = await loadPlan(options, logger); + + if (options.cacheSteps) { + await clearCache(plan, options, logger); + } + const { repoDir, monorepoPath, autoinstall } = options; if (autoinstall) { if (monorepoPath) { @@ -33,7 +66,38 @@ const executeHandler = async (options: ExecuteOptions, logger: Logger) => { } } - const state = await loadState(options, logger); + let customStart; + let customEnd; + + // Handle start, end and only + if (options.only) { + const step = matchStep(plan, options.only, 'only', logger); + + customStart = step; + customEnd = step; + logger.always(`Only running workflow step "${options.start}"`); + } else { + if (options.start) { + customStart = matchStep( + plan, + options.start ?? plan.options.start, + 'start', + logger + ); + logger.info(`Starting workflow from step "${options.start}"`); + } + + if (options.end) { + customEnd = matchStep( + plan, + options.end ?? 
plan.options.end, + 'end', + logger + ); + logger.always(`Ending workflow at step "${options.end}"`); + } + } + const state = await loadState(plan, options, logger, customStart); if (options.compile) { plan = (await compile(plan, options, logger)) as ExecutionPlan; @@ -41,8 +105,25 @@ const executeHandler = async (options: ExecuteOptions, logger: Logger) => { logger.info('Skipping compilation as noCompile is set'); } + const finalPlan = { + ...plan, + options: { + ...plan.options, + start: customStart || plan.options.start, + end: customEnd, + }, + workflow: plan.workflow, + }; + try { - const result = await execute(plan, state, options); + const result = await execute(finalPlan, state, options, logger); + + if (options.cacheSteps) { + logger.success( + 'Cached output written to ./cli-cache (see info logs for details)' + ); + } + await serializeOutput(options, result, logger); const duration = printDuration(new Date().getTime() - start); if (result?.errors) { diff --git a/packages/cli/src/metadata/handler.ts b/packages/cli/src/metadata/handler.ts index 59e949f3b..eb050acf4 100644 --- a/packages/cli/src/metadata/handler.ts +++ b/packages/cli/src/metadata/handler.ts @@ -3,6 +3,7 @@ import { MetadataOpts } from './command'; import loadState from '../util/load-state'; import cache from './cache'; import { getModuleEntryPoint } from '@openfn/runtime'; +import { ExecutionPlan } from '@openfn/lexicon'; // Add extra, uh, metadata to the, uh, metadata object const decorateMetadata = (metadata: any) => { @@ -51,7 +52,7 @@ const metadataHandler = async (options: MetadataOpts, logger: Logger) => { const { repoDir, adaptors } = options; const adaptor = adaptors[0]; - const state = await loadState(options, logger); + const state = await loadState({} as ExecutionPlan, options, logger); logger.success(`Generating metadata`); // Note that the config will be sanitised, so logging it may not be terrible helpful diff --git a/packages/cli/src/options.ts b/packages/cli/src/options.ts index 95b42da93..f6a0d46b8 100644 --- a/packages/cli/src/options.ts +++ b/packages/cli/src/options.ts @@ -20,34 +20,37 @@ export type Opts = { adaptor?: boolean | string; adaptors?: string[]; autoinstall?: boolean; + cacheSteps?: boolean; compile?: boolean; + configPath?: string; confirm?: boolean; describe?: string; - configPath?: string; + end?: string; // workflow end node expandAdaptors?: boolean; // for unit tests really + expressionPath?: string; force?: boolean; - immutable?: boolean; ignoreImports?: boolean | string[]; - expressionPath?: string; + immutable?: boolean; log?: Record; logJson?: boolean; monorepoPath?: string; + only?: string; // only run this workflow node operation?: string; outputPath?: string; outputStdout?: boolean; packages?: string[]; planPath?: string; + projectId?: string; projectPath?: string; repoDir?: string; + sanitize: 'none' | 'remove' | 'summarize' | 'obfuscate'; skipAdaptorValidation?: boolean; specifier?: string; // docgen - start?: string; // workflow start node + start?: string; // workflow start step statePath?: string; stateStdin?: string; - sanitize: 'none' | 'remove' | 'summarize' | 'obfuscate'; timeout?: number; // ms useAdaptorsMonorepo?: boolean; - projectId?: string; // deprecated workflowPath?: string; @@ -119,6 +122,23 @@ export const autoinstall: CLIOption = { }, }; +export const cacheSteps: CLIOption = { + name: 'cache-steps', + yargs: { + boolean: true, + description: + 'Cache the output of steps to ./.cache//.json', + }, + ensure: (opts) => { + if ( + 
process.env.OPENFN_ALWAYS_CACHE_STEPS && + !opts.hasOwnProperty('cacheSteps') + ) { + opts.cacheSteps = process.env.OPENFN_ALWAYS_CACHE_STEPS === 'true'; + } + }, +}; + export const compile: CLIOption = { name: 'no-compile', yargs: { @@ -323,7 +343,23 @@ export const start: CLIOption = { name: 'start', yargs: { string: true, - description: 'Specifiy the start node in a workflow', + description: 'Specifiy the start step in a workflow', + }, +}; + +export const end: CLIOption = { + name: 'end', + yargs: { + string: true, + description: 'Specifiy the end step in a workflow', + }, +}; + +export const only: CLIOption = { + name: 'only', + yargs: { + string: true, + description: 'Specifiy to only run one step in a workflow', }, }; diff --git a/packages/cli/src/test/handler.ts b/packages/cli/src/test/handler.ts index 52cb2cc32..1d148df4a 100644 --- a/packages/cli/src/test/handler.ts +++ b/packages/cli/src/test/handler.ts @@ -59,9 +59,9 @@ const testHandler = async (options: TestOptions, logger: Logger) => { logger.debug('eg: -S "{ "data": { "answer": 33 } }"'); } - const state = await loadState(opts, createNullLogger()); + const state = await loadState(plan, opts, createNullLogger()); const compiledPlan = (await compile(plan, opts, logger)) as ExecutionPlan; - const result = await execute(compiledPlan, state, opts as ExecuteOptions); + const result = await execute(compiledPlan, state, opts as ExecuteOptions, logger); logger.success(`Result: ${result.data.answer}`); return result; }; diff --git a/packages/cli/src/util/abort.ts b/packages/cli/src/util/abort.ts index 4219843ed..1644f110a 100644 --- a/packages/cli/src/util/abort.ts +++ b/packages/cli/src/util/abort.ts @@ -15,6 +15,7 @@ export default ( help?: string ) => { const e = new AbortError(reason); + logger.break(); logger.error(reason); if (error) { logger.error(error.message); @@ -24,5 +25,8 @@ export default ( } logger.break(); logger.error('Critical error: aborting command'); + + process.exitCode = 1; + throw e; }; diff --git a/packages/cli/src/util/cache.ts b/packages/cli/src/util/cache.ts new file mode 100644 index 000000000..b9db05cc6 --- /dev/null +++ b/packages/cli/src/util/cache.ts @@ -0,0 +1,81 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { rmdir } from 'node:fs/promises'; + +import type { ExecutionPlan } from '@openfn/lexicon'; +import type { Opts } from '../options'; +import type { Logger } from './logger'; + +export const getCachePath = async ( + plan: ExecutionPlan, + options: Pick, + stepId?: string +) => { + const { baseDir } = options; + + const { name } = plan.workflow; + + const basePath = `${baseDir}/.cli-cache/${name}`; + + if (stepId) { + return path.resolve(`${basePath}/${stepId.replace(/ /, '-')}.json`); + } + return path.resolve(basePath); +}; + +const ensureGitIgnore = (options: any) => { + if (!options._hasGitIgnore) { + const ignorePath = path.resolve( + options.baseDir, + '.cli-cache', + '.gitignore' + ); + try { + fs.accessSync(ignorePath); + } catch (e) { + // doesn't exist! 
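// A note on how the cache-steps flag is resolved by the `ensure` hook in options.ts
// above (sketch only, not part of cache.ts): an explicit --cache-steps or
// --no-cache-steps always wins, and OPENFN_ALWAYS_CACHE_STEPS only supplies the default:
//
//   const resolveCacheSteps = (cliValue?: boolean): boolean =>
//     typeof cliValue === 'boolean'
//       ? cliValue
//       : process.env.OPENFN_ALWAYS_CACHE_STEPS === 'true';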
+ fs.writeFileSync(ignorePath, '*'); + } + } + options._hasGitIgnore = true; +}; + +export const saveToCache = async ( + plan: ExecutionPlan, + stepId: string, + output: any, + options: Pick, + logger: Logger +) => { + if (options.cacheSteps) { + const cachePath = await getCachePath(plan, options, stepId); + // Note that this is sync because other execution order gets messed up + fs.mkdirSync(path.dirname(cachePath), { recursive: true }); + + ensureGitIgnore(options); + + logger.info(`Writing ${stepId} output to ${cachePath}`); + fs.writeFileSync(cachePath, JSON.stringify(output)); + } +} + +export const clearCache = async ( + plan: ExecutionPlan, + options: Pick, + logger: Logger +) => { + const cacheDir = await getCachePath(plan, options); + + try { + await rmdir(cacheDir, { recursive: true }) + + logger.info(`Cleared cache at ${cacheDir}`); + } catch(e: any) { + if (e.code === 'ENOENT') { + // No cached files exist - this is fine, do nothing + } else { + logger.error(`Error while clearing cache at ${cacheDir}`) + logger.error(e) + } + } +} diff --git a/packages/cli/src/util/fuzzy-match-step.ts b/packages/cli/src/util/fuzzy-match-step.ts new file mode 100644 index 000000000..14ffda27e --- /dev/null +++ b/packages/cli/src/util/fuzzy-match-step.ts @@ -0,0 +1,31 @@ +import { ExecutionPlan } from '@openfn/lexicon'; + +export default (plan: ExecutionPlan, stepPattern?: string) => { + if (stepPattern) { + const { steps } = plan.workflow; + // first, check for an exact id match + const exact = steps.find((step) => step.id === stepPattern); + if (exact) return exact.id; + + // next, build a list of all matching steps by name or id + const matches: Record = {}; + steps.forEach((step) => { + if (step.id?.includes(stepPattern) || step.name?.includes(stepPattern)) { + matches[step.id!] = true; + } + }); + + // if there is only one match, we're good + const results = Object.keys(matches); + if (results.length === 1) { + return results[0]; + } + + // if there are multiple matches, we must abort with error + if (results.length > 1) { + throw new Error('AMBIGUOUS_INPUT'); + } + + throw new Error('NOT_FOUND'); + } +}; diff --git a/packages/cli/src/util/load-state.ts b/packages/cli/src/util/load-state.ts index aebfabdf8..6dfeeec7d 100644 --- a/packages/cli/src/util/load-state.ts +++ b/packages/cli/src/util/load-state.ts @@ -1,10 +1,35 @@ import fs from 'node:fs/promises'; + +import { getCachePath } from './cache'; + +import type { ExecutionPlan } from '@openfn/lexicon'; import type { Logger } from '@openfn/logger'; import type { Opts } from '../options'; +export const getUpstreamStepId = (plan: ExecutionPlan, stepId: string) => { + const upstreamStep = plan.workflow.steps.find((step) => { + if (step.next) { + if (typeof step.next === 'string') { + return step.next === stepId; + } + + return stepId in step.next ?? null; + } + }); + + if (upstreamStep) { + return typeof upstreamStep === 'string' ? upstreamStep : upstreamStep.id!; + } +}; + export default async ( - opts: Pick, - log: Logger + plan: ExecutionPlan, + opts: Pick< + Opts, + 'baseDir' | 'stateStdin' | 'statePath' | 'cacheSteps' | 'start' + >, + log: Logger, + start?: string ) => { const { stateStdin, statePath } = opts; log.debug('Loading state...'); @@ -35,6 +60,46 @@ export default async ( } } + if (start) { + log.info( + 'No state provided to CLI. 
Will attempt to load state from cache instead' + ); + log.always( + `Attempting to load cached input state for starting step "${start}"` + ); + try { + const upstreamStepId = getUpstreamStepId(plan, start); + if (upstreamStepId) { + log.debug(`Input step for "${start}" is "${upstreamStepId}"`); + const cachedStatePath = await getCachePath(plan, opts, upstreamStepId); + log.debug('Loading cached state from', cachedStatePath); + + try { + await fs.access(cachedStatePath); + + const str = await fs.readFile(cachedStatePath, 'utf8'); + const json = JSON.parse(str); + log.success( + `Loaded cached state for step "${start}" from ${cachedStatePath}` + ); + log.info(` To force disable the cache, run again with --no-cache`); + return json; + } catch (e) { + log.warn(`No cached state found for step "${start}"`); + log.warn( + 'Re-run this workflow with --cache to save the output of each step' + ); + log.break(); + } + } else { + log.warn(`Could not find an input step for step "${start}"`); + } + } catch (e) { + log.warn('Error loading cached state'); + log.warn(e); + } + } + log.info( 'No state provided - using default state { data: {}, configuration: {} }' ); diff --git a/packages/cli/test/execute/execute.test.ts b/packages/cli/test/execute/execute.test.ts index a3e648b6b..9c082e2a3 100644 --- a/packages/cli/test/execute/execute.test.ts +++ b/packages/cli/test/execute/execute.test.ts @@ -3,6 +3,7 @@ // I don't want any io or adaptor tests here, really just looking for the actual execute flow import { createMockLogger } from '@openfn/logger'; import test from 'ava'; +import fs from 'node:fs/promises'; import { ExecuteOptions } from '../../src/execute/command'; import handler from '../../src/execute/handler'; import { mockFs, resetMockFs } from '../util'; @@ -128,6 +129,69 @@ test.serial('run a workflow with state', async (t) => { t.is(result.data.count, 4); }); +test.serial('run a workflow with cached steps', async (t) => { + const workflow = { + workflow: { + steps: [ + { + id: 'a', + expression: `${fn}fn((state) => ({ a: true }))`, + next: { b: true }, + }, + { + id: 'b', + expression: `${fn}fn((state) => ({ ...state, b: true }))`, + }, + ], + }, + }; + mockFs({ + '/workflow.json': JSON.stringify(workflow), + '/.cli-cache/workflow/': {}, + }); + + const options = { + ...defaultOptions, + workflowPath: '/workflow.json', + cacheSteps: true, + }; + const result = await handler(options, logger); + t.is(result.a, true); + t.is(result.b, true); + + const cache_a = await fs.readFile('/.cli-cache/workflow/a.json', 'utf8'); + t.deepEqual(JSON.parse(cache_a), { a: true }); + + const cache_b = await fs.readFile('/.cli-cache/workflow/b.json', 'utf8'); + t.deepEqual(JSON.parse(cache_b), { a: true, b: true, data: {} }); +}); + +test.serial('.cli-cache has a gitignore', async (t) => { + const workflow = { + workflow: { + steps: [ + { + expression: `${fn}fn((state) => ({ a: true }))`, + }, + ], + }, + }; + mockFs({ + '/workflow.json': JSON.stringify(workflow), + '/.cli-cache/workflow/': {}, + }); + + const options = { + ...defaultOptions, + workflowPath: '/workflow.json', + cacheSteps: true, + }; + await handler(options, logger); + + const gitignore = await fs.readFile('/.cli-cache/.gitignore', 'utf8'); + t.is(gitignore, '*'); +}); + test.serial('run a workflow with initial state from stdin', async (t) => { const workflow = { workflow: { @@ -204,7 +268,7 @@ test.serial('run a workflow with config as a path', async (t) => { t.is(result.cfg.id, 'x'); }); -test.serial('run a workflow from a start node', async (t) => 
{ +test.serial('run a workflow from --start', async (t) => { const workflow = { workflow: { steps: [ @@ -232,6 +296,75 @@ test.serial('run a workflow from a start node', async (t) => { t.is(result.data.result, 'b'); }); +test.serial('run a workflow from --start and cached state', async (t) => { + const workflow = { + workflow: { + steps: [ + { + id: 'a', + expression: `${fn}fn((state) => state)`, + next: { b: true }, + }, + { + id: 'b', + expression: `${fn}fn((state) => state)`, + }, + ], + }, + }; + mockFs({ + '/workflow.json': JSON.stringify(workflow), + '/.cli-cache/workflow/a.json': JSON.stringify({ x: 22 }), + }); + + const options = { + ...defaultOptions, + workflowPath: '/workflow.json', + start: 'b', + }; + const result = await handler(options, logger); + t.is(result.x, 22); +}); + +test.serial('run a workflow from --only and cached state', async (t) => { + const workflow = { + workflow: { + steps: [ + { + id: 'a', + expression: `${fn}fn((state) => ({ ...state, a: true }))`, + next: { b: true }, + }, + { + id: 'b', + expression: `${fn}fn((state) => ({ ...state, b: true }))`, + next: { c: true }, + }, + { + id: 'c', + expression: `${fn}fn((state) => ({ ...state, c: true }))`, + }, + ], + }, + }; + mockFs({ + '/workflow.json': JSON.stringify(workflow), + '/.cli-cache/workflow/a.json': JSON.stringify({ x: 22 }), + }); + + const options = { + ...defaultOptions, + workflowPath: '/workflow.json', + only: 'b', + }; + const result = await handler(options, logger); + t.deepEqual(result, { + b: true, + x: 22, + data: {}, + }); +}); + test.serial('run a workflow with an adaptor (longform)', async (t) => { const workflow = { workflow: { diff --git a/packages/cli/test/execute/options.test.ts b/packages/cli/test/execute/options.test.ts index 4b9d0e7cd..f6d3bc56b 100644 --- a/packages/cli/test/execute/options.test.ts +++ b/packages/cli/test/execute/options.test.ts @@ -153,3 +153,25 @@ test('disable some imports', (t) => { t.is(a, 'jam'); t.is(b, 'jar'); }); + +test('cache: off by default', (t) => { + const options = parse('execute job.js'); + t.falsy(options.cacheSteps); +}); + +test('cache: enable with --cache-steps', (t) => { + const options = parse('execute job.js --cache-steps'); + t.true(options.cacheSteps); +}); + +test('cache: enable with env OPENFN_ALWAYS_CACHE_STEPS', (t) => { + process.env.OPENFN_ALWAYS_CACHE_STEPS = 'true'; + const options = parse('execute job.js'); + t.true(options.cacheSteps); +}); + +test('cache: override env OPENFN_ALWAYS_CACHE_STEPS with --no-cache-steps', (t) => { + process.env.OPENFN_ALWAYS_CACHE_STEPS = 'true'; + const options = parse('execute job.js --no-cache-steps'); + t.falsy(options.cacheSteps); +}); diff --git a/packages/cli/test/util.ts b/packages/cli/test/util.ts index 550720736..1b2e3dbf3 100644 --- a/packages/cli/test/util.ts +++ b/packages/cli/test/util.ts @@ -4,6 +4,8 @@ import mock from 'mock-fs'; import path from 'node:path'; +import type { ExecutionPlan, Job, StepEdge } from '@openfn/lexicon'; + export const mockFs = (files: Record) => { const pnpm = path.resolve('../../node_modules/.pnpm'); mock({ @@ -16,3 +18,42 @@ export const mockFs = (files: Record) => { export const resetMockFs = () => { mock.restore(); }; + +type CreateWorkflowOptions = { + id?: string; +}; + +export const createWorkflow = ( + steps: Job[], + options: CreateWorkflowOptions = {} +) => { + const { id = 'wf' } = options; + + return { + id, + workflow: { steps }, + } as ExecutionPlan; +}; + +type CreateStepOptions = { + id?: string; + name?: string; + expression?: string; + 
adaptor?: string; + next?: StepEdge; +}; + +export const createStep = ({ + id, + expression, + name, + adaptor, + next, +}: CreateStepOptions = {}) => + ({ + id: id || 'a', + name, + expression: expression || '.', + adaptor, + next, + } as Job); diff --git a/packages/cli/test/util/fuzzy-match-step.test.ts b/packages/cli/test/util/fuzzy-match-step.test.ts new file mode 100644 index 000000000..e39ff39ec --- /dev/null +++ b/packages/cli/test/util/fuzzy-match-step.test.ts @@ -0,0 +1,58 @@ +import test from 'ava'; +import { createStep, createWorkflow } from '../util'; +import fuzzyMatchStep from '../../src/util/fuzzy-match-step'; + +const workflow = createWorkflow([ + createStep({ id: 'sf', name: 'get from salesforce' }), + createStep({ id: 'pri', name: 'get from primero' }), + createStep({ id: 'transform-salesforce', name: 'transform salesforce data' }), + createStep({ id: 'err', name: 'report salesforce error' }), +]); + +test('do nothing if no step provided', (t) => { + const result = fuzzyMatchStep(workflow); + + t.falsy(result); +}); + +test('match an exact id', (t) => { + const result = fuzzyMatchStep(workflow, 'transform-salesforce'); + + t.is(result, 'transform-salesforce'); +}); + +test('fuzzy match an id', (t) => { + const result = fuzzyMatchStep(workflow, 'orm-sal'); + + t.is(result, 'transform-salesforce'); +}); + +test('fuzzy match a name', (t) => { + const result = fuzzyMatchStep(workflow, 'from salesforce'); + + t.is(result, 'sf'); +}); + +test('exact match a name', (t) => { + const result = fuzzyMatchStep(workflow, 'transform salesforce data'); + + t.is(result, 'transform-salesforce'); +}); + +test('throw if results are ambiguous (name and id)', (t) => { + t.throws(() => fuzzyMatchStep(workflow, 'salesforce'), { + message: 'AMBIGUOUS_INPUT', + }); +}); + +test('throw if results are ambiguous (name)', (t) => { + t.throws(() => fuzzyMatchStep(workflow, 'from'), { + message: 'AMBIGUOUS_INPUT', + }); +}); + +test('throw if the step is not found', (t) => { + t.throws(() => fuzzyMatchStep(workflow, 'magneto'), { + message: 'NOT_FOUND', + }); +}); diff --git a/packages/cli/test/util/load-state.test.ts b/packages/cli/test/util/load-state.test.ts new file mode 100644 index 000000000..d562f3cad --- /dev/null +++ b/packages/cli/test/util/load-state.test.ts @@ -0,0 +1,51 @@ +import test from 'ava'; +import { getUpstreamStepId } from '../../src/util/load-state'; +import { createWorkflow, createStep } from '../util'; + +// Low value test - we can just check order/priotity of args +// otherwise its mostly logging +test.todo('load state from stdin'); + +// Another low value test +// well covered in other places +test.todo('load state from path'); + +// I will write a couple of tests around this +test.todo('load cached state'); + +test('getUpstreamStepId: basic usage', (t) => { + const workflow = createWorkflow([ + createStep({ id: 'a', next: { b: true } }), + createStep({ id: 'b', next: { c: true } }), + createStep({ id: 'c' }), + ]); + + t.is(getUpstreamStepId(workflow, 'b'), 'a'); + t.is(getUpstreamStepId(workflow, 'c'), 'b'); +}); + +test("getUpstreamStepId: don't blow up if now next", (t) => { + const workflow = createWorkflow([ + createStep({ id: 'c' }), + createStep({ id: 'a', next: { b: true } }), + createStep({ id: 'b', next: { c: true } }), + ]); + + t.is(getUpstreamStepId(workflow, 'b'), 'a'); + t.is(getUpstreamStepId(workflow, 'c'), 'b'); +}); + +// TODO unsure at the moment how smart we need to be with this stuff +test.todo('getUpstreamStepId: ignore falsy values'); 
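The todos above mark edge cases still to cover; for reference, the cached-input flow that `getUpstreamStepId` enables looks roughly like this worked example (hypothetical two-step workflow, mirroring the `--start` tests in execute.test.ts above):

```ts
import type { ExecutionPlan } from '@openfn/lexicon';

// Hypothetical two-step workflow: a -> b
const plan = {
  workflow: {
    name: 'workflow',
    steps: [
      { id: 'a', expression: 'fn((state) => state)', next: { b: true } },
      { id: 'b', expression: 'fn((state) => state)' },
    ],
  },
  options: {},
} as ExecutionPlan;

// Running `openfn workflow.json --start b` with no -s/-S state makes load-state call
// getUpstreamStepId(plan, 'b') -> 'a', then try to read the cached output of "a"
// from .cli-cache/workflow/a.json and use it as the input state for step "b".
```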
+test.todo('getUpstreamStepId: ignore disabled edges'); + +test('getUpstreamStepId: hande string nexts', (t) => { + const workflow = createWorkflow([ + createStep({ id: 'a', next: 'b' }), + createStep({ id: 'b', next: 'c' }), + createStep({ id: 'c' }), + ]); + + t.is(getUpstreamStepId(workflow, 'b'), 'a'); + t.is(getUpstreamStepId(workflow, 'c'), 'b'); +}); diff --git a/packages/compiler/CHANGELOG.md b/packages/compiler/CHANGELOG.md index 491075ef8..aa102a6b5 100644 --- a/packages/compiler/CHANGELOG.md +++ b/packages/compiler/CHANGELOG.md @@ -1,5 +1,13 @@ # @openfn/compiler +## 0.1.2 + +### Patch Changes + +- 7ddc5d8: Allow lazy state functions to be hoisted further up the tree (lazy state expressions) +- Updated dependencies [4deb5d4] + - @openfn/describe-package@0.0.19 + ## 0.1.1 ### Patch Changes diff --git a/packages/compiler/package.json b/packages/compiler/package.json index e86662f67..c8acded16 100644 --- a/packages/compiler/package.json +++ b/packages/compiler/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/compiler", - "version": "0.1.1", + "version": "0.1.2", "description": "Compiler and language tooling for openfn jobs.", "author": "Open Function Group ", "license": "ISC", diff --git a/packages/compiler/src/transform.ts b/packages/compiler/src/transform.ts index 954fcddde..06f90195b 100644 --- a/packages/compiler/src/transform.ts +++ b/packages/compiler/src/transform.ts @@ -27,10 +27,9 @@ export type Transformer = { id: TransformerName; types: string[]; visitor: TransformFunction; + order?: number; }; -type TransformerIndex = Partial>; - export type TransformOptions = { logger?: Logger; // TODO maybe in the wrong place? @@ -49,61 +48,44 @@ export default function transform( options: TransformOptions = {} ) { if (!transformers) { - transformers = [lazyState, ensureExports, topLevelOps, addImports] as Transformer[]; + transformers = [ + lazyState, + ensureExports, + topLevelOps, + addImports, + ] as Transformer[]; } const logger = options.logger || defaultLogger; - const transformerIndex = indexTransformers(transformers, options); - - const v = buildVisitors(transformerIndex, logger, options); - // @ts-ignore generic disagree on Visitor, so disabling type checking for now - visit(ast, v); - return ast; -} - -// Build a map of AST node types against an array of transform functions -export function indexTransformers( - transformers: Transformer[], - options: TransformOptions = {} -): TransformerIndex { - const index: TransformerIndex = {}; - for (const t of transformers) { - const { types, id } = t; - if (options[id] !== false) { + transformers + // Ignore transformers which are explicitly disabled + .filter(({ id }) => options[id] ?? true) + // Set default orders + .map((t) => ({ ...t, order: t.order ?? 
1 })) + // Sort by order + .sort((a, b) => { + if (a.order > b.order) return 1; + if (a.order < b.order) return -1; + return 0; + }) + // Run each transformer + .forEach(({ id, types, visitor }) => { + const astTypes: Visitor = {}; for (const type of types) { const name = `visit${type}` as keyof Visitor; - if (!index[name]) { - index[name] = []; - } - index[name]!.push(t); + astTypes[name] = function (path: NodePath) { + const opts = options[id] || {}; + const abort = visitor!(path, logger, opts); + if (abort) { + return false; + } + this.traverse(path); + }; } - } - } - return index; -} -// Build an index of AST visitors, where each node type is mapped to a visitor function which -// calls out to the correct transformer, passing a logger and options -export function buildVisitors( - transformerIndex: TransformerIndex, - logger: Logger, - options: TransformOptions = {} -) { - const visitors: Visitor = {}; + // @ts-ignore + visit(ast, astTypes); + }); - for (const k in transformerIndex) { - const astType = k as keyof Visitor; - visitors[astType] = function (path: NodePath) { - const transformers = transformerIndex[astType]!; - for (const { id, visitor } of transformers) { - const opts = options[id] || {}; - const abort = visitor!(path, logger, opts); - if (abort) { - return false; - } - } - this.traverse(path); - }; - } - return visitors; + return ast; } diff --git a/packages/compiler/src/transforms/add-imports.ts b/packages/compiler/src/transforms/add-imports.ts index ca9d8ac80..5a4a268db 100644 --- a/packages/compiler/src/transforms/add-imports.ts +++ b/packages/compiler/src/transforms/add-imports.ts @@ -15,8 +15,6 @@ import type { Transformer } from '../transform'; import type { Logger } from '@openfn/logger'; const globals = [ - '\\$', // TMP hack to fix a problem with lazy-state (needs double escaping to work) - 'AggregateError', 'Array', 'ArrayBuffer', diff --git a/packages/compiler/src/transforms/lazy-state.ts b/packages/compiler/src/transforms/lazy-state.ts index 5147f0051..ec1154b3e 100644 --- a/packages/compiler/src/transforms/lazy-state.ts +++ b/packages/compiler/src/transforms/lazy-state.ts @@ -1,27 +1,80 @@ /* * Convert $.a.b.c references into (state) => state.a.b.c - * + * * Converts all $.a.b chains unless: - * - $ was assigned previously in that scope + * - $ was assigned previously in that scope * - * TODO (maybe): - * - only convert $-expressions which are arguments to operations (needs type defs) - * - warn if converting a non-top-level $-expression - * - if not top level, convert to state.a.b.c (ie don't wrap the function) + * */ -import { builders as b, namedTypes } from 'ast-types'; +import { builders as b, namedTypes as n} from 'ast-types'; import type { NodePath } from 'ast-types/lib/node-path'; import type { Transformer } from '../transform'; -function visitor(path: NodePath) { +// Walk up the AST and work out where the parent arrow function should go +const ensureParentArrow = (path: NodePath) => { + let root = path; + let last; + + // find the parenting call expression + // Ie, the operation we're passing this arrow into + while(root && !n.CallExpression.check(root.node)) { + last = root; + root = root.parent; + + // if this is any kind of statement, we should throw + // TODO we may relax this, see https://github.com/OpenFn/kit/issues/660 + if (n.Statement.check(root.node) || n.Declaration.check(root.node)) { + throw new Error(`invalid state operator: must be inside an expression`) + } + } + + if (root && n.CallExpression.check(root.node)) { + const arg = last as 
NodePath; + + if (!isOpenFunction(arg)) { + const params = b.identifier('state'); + const arrow = b.arrowFunctionExpression([params], arg.node); + arg.replace(arrow); + } + } else { + // Actually I don't think we'll ever get here + throw new Error(`invalid state operator: must be be passed as an argument to an operator`) + } +} + +// Checks whether the passed node is an open function, ie, (state) => {...} +const isOpenFunction = (path: NodePath) => { + // is it a function? + if (n.ArrowFunctionExpression.check(path.node)) { + const arrow = path.node as n.ArrowFunctionExpression; + // does it have one param? + if(arrow.params.length == 1) { + const name = (arrow.params[0] as n.Identifier).name + // is the param called state? + if (name === "state") { + // We already have a valid open function here + return true; + } + throw new Error(`invalid state operator: parameter "${name}" should be called "state"`) + } + throw new Error('invalid state operator: parent has wrong arity') + } + + // if we get here, then path is: + // a) a Javascript Expression (and not an arrow) + // b) appropriate for being wrapped in an arrow + return false; +}; + +function visitor(path: NodePath) { let first = path.node.object; - while(first.hasOwnProperty('object')) { - first = (first as namedTypes.MemberExpression).object; + while (first.hasOwnProperty('object')) { + first = (first as n.MemberExpression).object; } - let firstIdentifer = first as namedTypes.Identifier; - - if (first && firstIdentifer.name === "$") { + let firstIdentifer = first as n.Identifier; + + if (first && firstIdentifer.name === '$') { // But if a $ declared a parent scope, ignore it let scope = path.scope; while (scope) { @@ -32,15 +85,10 @@ function visitor(path: NodePath) { } // rename $ to state - firstIdentifer.name = "state"; - - // Now nest the whole thing in an arrow - const params = b.identifier('state') - const arrow = b.arrowFunctionExpression( - [params], - path.node - ) - path.replace(arrow) + firstIdentifer.name = 'state'; + + // from the parenting member expression, ensure the parent arrow is nicely wrapped + ensureParentArrow(path); } // Stop parsing this member expression @@ -51,4 +99,6 @@ export default { id: 'lazy-state', types: ['MemberExpression'], visitor, + // It's important that $ symbols are escaped before any other transformations can run + order: 0, } as Transformer; diff --git a/packages/compiler/test/transform.test.ts b/packages/compiler/test/transform.test.ts index 8965ea5f6..b42da02bd 100644 --- a/packages/compiler/test/transform.test.ts +++ b/packages/compiler/test/transform.test.ts @@ -1,70 +1,99 @@ import test from 'ava'; import { builders as b } from 'ast-types'; -import { visit } from 'recast'; -import { createMockLogger } from '@openfn/logger'; -import transform, { - indexTransformers, - buildVisitors, - TransformerName, -} from '../src/transform'; - -const logger = createMockLogger(); - -const noop = () => false; +import transform, { TransformerName } from '../src/transform'; const TEST = 'test' as TransformerName; const ENSURE_EXPORTS = 'ensure-exports' as TransformerName; -test('build a visitor map with one visitor', (t) => { - const transformers = [{ id: TEST, types: ['CallExpression'], visitor: noop }]; +test('transform will visit nodes once', (t) => { + let visitCount = 0; + const visitor = () => { + visitCount++; + }; + const transformers = [{ id: TEST, types: ['CallExpression'], visitor }]; - const map = indexTransformers(transformers); + const program = b.program([ + 
b.expressionStatement(b.callExpression(b.identifier('jam'), [])), + ]); - t.truthy(map.visitCallExpression); - t.assert(map.visitCallExpression!.length === 1); + transform(program, transformers); + t.assert(visitCount === 1); }); -test('build a visitor map with multiple visitors', (t) => { +test('visit with mutiple transformes', (t) => { + let callCount = 0; + let idCount = 0; + const transformers = [ - { id: TEST, types: ['CallExpression'], visitor: noop }, - { id: TEST, types: ['VariableDeclaration'], visitor: noop }, + { + id: '1' as TransformerName, + types: ['CallExpression'], + visitor: () => { + callCount++; + }, + }, + { + id: '2' as TransformerName, + types: ['Identifier'], + visitor: () => { + idCount++; + }, + }, ]; - const map = indexTransformers(transformers); - - t.truthy(map.visitCallExpression); - t.assert(map.visitCallExpression!.length === 1); + const program = b.program([ + b.expressionStatement(b.callExpression(b.identifier('jam'), [])), + ]); - t.truthy(map.visitVariableDeclaration); - t.assert(map.visitVariableDeclaration!.length === 1); + transform(program, transformers); + t.is(callCount, 1); + t.is(idCount, 1); }); -test('build a visitor map with multiple visitors of the same type', (t) => { +test('run transformers in order', (t) => { + const results: number[] = []; + const transformers = [ - { id: TEST, types: ['CallExpression'], visitor: noop }, - { id: TEST, types: ['CallExpression'], visitor: noop }, + { + id: '1' as TransformerName, + types: ['Identifier'], + visitor: () => { + results.push(1); + }, + order: 2, + }, + { + id: '2' as TransformerName, + types: ['Identifier'], + visitor: () => { + results.push(2); + }, + order: 1, + }, + { + id: '3' as TransformerName, + types: ['Identifier'], + visitor: () => { + results.push(3); + }, + // order defaults to 1, so we shouldn't need to set this + //order: 1, + }, + { + id: '4' as TransformerName, + types: ['Identifier'], + visitor: () => { + results.push(4); + }, + order: 0, + }, ]; - const map = indexTransformers(transformers); - - t.truthy(map.visitCallExpression); - t.assert(map.visitCallExpression!.length === 2); -}); - -test('transform will visit nodes once', (t) => { - let visitCount = 0; - const visitor = () => { - visitCount++; - }; - const transformers = [{ id: TEST, types: ['CallExpression'], visitor }]; - - const program = b.program([ - b.expressionStatement(b.callExpression(b.identifier('jam'), [])), - ]); + const program = b.program([b.expressionStatement(b.identifier('jam'))]); transform(program, transformers); - t.assert(visitCount === 1); + t.deepEqual(results, [4, 2, 3, 1]); }); test('transform will visit nested nodes', (t) => { @@ -97,32 +126,77 @@ test('transform will stop if a visitor returns truthy', (t) => { t.assert(visitCount === 1); }); -test('ignore visitors disabled in options', (t) => { - const transformers = [{ id: TEST, types: ['Program'], visitor: noop }]; +test('one transform stopping does not affect another', (t) => { + let callCount = 0; + let idCount = 0; + + const transformers = [ + { + id: '1' as TransformerName, + types: ['CallExpression'], + visitor: () => { + callCount++; + return true; + }, + }, + { + id: '2' as TransformerName, + types: ['Identifier'], + visitor: () => { + idCount++; + }, + }, + ]; + + const program = b.program([ + b.expressionStatement( + b.callExpression(b.callExpression(b.identifier('jam'), []), []) + ), + ]); + transform(program, transformers); + t.assert(callCount === 1); + t.assert(idCount === 1); +}); + +test('ignore transformers disabled in 
options', (t) => { + let visitCount = 0; + const transformers = [ + { + id: TEST, + types: ['Identifier'], + visitor: () => { + ++visitCount; + }, + }, + ]; + + const program = b.program([ + b.expressionStatement(b.callExpression(b.identifier('jam'), [])), + ]); - const map = indexTransformers(transformers, { test: false }); + transform(program, transformers, { [TEST]: false }); - // Should add no visitors - t.assert(Object.keys(map).length === 0); + t.is(visitCount, 0); }); -test('passes options to a visitor', (t) => { +test('passes options to a transformer', (t) => { let result; const visitor = (_node: unknown, _logger: unknown, options: any) => { result = options.value; }; const transformers = [{ id: TEST, types: ['Program'], visitor }]; - const map = indexTransformers(transformers); const options = { [TEST]: { value: 42 } }; + const program = b.program([]); + // Visit an AST and ensure the visitor is called with the right options - visit(b.program([]), buildVisitors(map, logger, options)); + transform(program, transformers, options); - t.assert(result === 42); + t.is(result, 42); }); -test('passes options to several visitors', (t) => { +test('passes options to several transformers', (t) => { let total = 0; const visitor = (_node: unknown, _logger: unknown, options: any) => { total += options.value; @@ -133,13 +207,13 @@ test('passes options to several visitors', (t) => { ]; // Build a visitor map which should trap the options - const map = indexTransformers(transformers); const options = { [TEST]: { value: 2 } }; + const program = b.program([]); // Visit an AST and ensure the visitor is called with the right options - visit(b.program([]), buildVisitors(map, logger, options)); + transform(program, transformers, options); - t.assert(total === 4); + t.is(total, 4); }); test('passes options to the correct visitor', (t) => { @@ -152,6 +226,7 @@ test('passes options to the correct visitor', (t) => { const visitor_b = (_node: unknown, _logger: unknown, options: any) => { y = options.value; }; + const transformers = [ { id: ENSURE_EXPORTS, types: ['Program'], visitor: visitor_a }, { id: TEST, types: ['Program'], visitor: visitor_b }, @@ -162,11 +237,12 @@ test('passes options to the correct visitor', (t) => { [ENSURE_EXPORTS]: { value: 99 }, // x [TEST]: { value: 42 }, // y }; - const map = indexTransformers(transformers); + + const program = b.program([]); // Visit an AST and ensure the visitor is called with the right options - visit(b.program([]), buildVisitors(map, logger, options)); + transform(program, transformers, options); - t.assert(x === 99); - t.assert(y === 42); + t.is(x, 99); + t.is(y, 42); }); diff --git a/packages/compiler/test/transforms/lazy-state.test.ts b/packages/compiler/test/transforms/lazy-state.test.ts index 52621bca8..aa509bef9 100644 --- a/packages/compiler/test/transforms/lazy-state.test.ts +++ b/packages/compiler/test/transforms/lazy-state.test.ts @@ -9,7 +9,6 @@ import visitors from '../../src/transforms/lazy-state'; test('convert a simple dollar reference', (t) => { const ast = parse('get($.data)'); - const transformed = transform(ast, [visitors]); const { code } = print(transformed) @@ -34,7 +33,122 @@ test('ignore a regular chain reference', (t) => { t.is(code, 'get(a.b.c.d)') }) -test('ignore a string', (t) => { +test('convert a template literal', (t) => { + const src = 'get(`hello ${$.data}`)' + t.log(src) + const ast = parse(src); + const transformed = transform(ast, [visitors]); + const { code } = print(transformed) + t.log(code) + + t.is(code, 'get(state 
=> `hello ${state.data}`)') +}) + +test('convert a template literal with two refs', (t) => { + const src = 'get(`hello ${$.firstname} ${$.lastname}`)' + t.log(src) + const ast = parse(src); + const transformed = transform(ast, [visitors]); + const { code } = print(transformed) + t.log(code) + + t.is(code, 'get(state => `hello ${state.firstname} ${state.lastname}`)') +}) + +test('convert a template literal with a pre-existing parent arrow', (t) => { + const src = 'get(state => `hello ${$.data}`)' + t.log(src) + const ast = parse(src); + const transformed = transform(ast, [visitors]); + const { code } = print(transformed) + t.log(code) + + t.is(code, 'get(state => `hello ${state.data}`)') +}) + +test('throw if a $ is already inside a non-compatible arrow (state name)', (t) => { + const src = 'get((s) => `hello ${$.data}`)' // throw!! + t.log(src) + const ast = parse(src); + + t.throws(() => transform(ast, [visitors]), { + message: `invalid state operator: parameter "s" should be called "state"` + }); +}) + +test('throw if a $ is already inside a non-compatible arrow (arity)', (t) => { + const src = 'get((state, b) => `hello ${$.data}`)' // throw!! + t.log(src) + const ast = parse(src); + + t.throws(() => transform(ast, [visitors]), { + message: 'invalid state operator: parent has wrong arity' + }); +}) + +test('throw if $ is not inside an operation', (t) => { + const src = 'const x = $.data;' // throw!! + t.log(src) + const ast = parse(src); + + t.throws(() => transform(ast, [visitors]), { + message: 'invalid state operator: must be inside an expression' + }); +}) + +test('throw if $ is on the left hand side of an assignment', (t) => { + const src = '$.data = 20;' // throw!! + t.log(src) + const ast = parse(src); + + t.throws(() => transform(ast, [visitors]), { + message: 'invalid state operator: must be inside an expression' + }); +}) + +test('throw if $ is on the left hand side of a nested assignment', (t) => { + const src = 'fn(() => { $.data = 20; })' // throw!! + t.log(src) + const ast = parse(src); + + t.throws(() => transform(ast, [visitors]), { + message: 'invalid state operator: must be inside an expression' + }); +}) + +test('throw if $ is on the left hand side of a multi assignment', (t) => { + const src = 'const z = $.data = 20;' // throw!! 
+ t.log(src) + const ast = parse(src); + + t.throws(() => transform(ast, [visitors]), { + message: 'invalid state operator: must be inside an expression' + }); +}) + +test('wrap a concatenation', (t) => { + const src = 'get($.firstname + " " + $.lastname)' + t.log(src) + const ast = parse(src); + const transformed = transform(ast, [visitors]); + const { code } = print(transformed) + t.log(code) + + t.is(code, 'get(state => state.firstname + " " + state.lastname)') +}) + +test('wrap a dynamic property reference', (t) => { + const src = 'get(city[$.location])' + t.log(src) + const ast = parse(src); + const transformed = transform(ast, [visitors]); + const { code } = print(transformed) + t.log(code) + + t.is(code, 'get(state => city[state.location])') +}) + +test('ignore a dollar ref in a string', (t) => { const ast = parse('get("$.a.b")'); const transformed = transform(ast, [visitors]); diff --git a/packages/describe-package/CHANGELOG.md b/packages/describe-package/CHANGELOG.md index 800c41eae..068dc7f75 100644 --- a/packages/describe-package/CHANGELOG.md +++ b/packages/describe-package/CHANGELOG.md @@ -1,5 +1,11 @@ # @openfn/describe-package +## 0.0.19 + +### Patch Changes + +- 4deb5d4: Recognise import aliases in job code + ## 0.0.18 ### Patch Changes diff --git a/packages/describe-package/package.json b/packages/describe-package/package.json index 8c486a4f7..8ac190cd1 100644 --- a/packages/describe-package/package.json +++ b/packages/describe-package/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/describe-package", - "version": "0.0.18", + "version": "0.0.19", "description": "Utilities to inspect an npm package.", "author": "Open Function Group ", "license": "ISC", diff --git a/packages/describe-package/src/api.ts b/packages/describe-package/src/api.ts index 45bca215c..a49a006f9 100644 --- a/packages/describe-package/src/api.ts +++ b/packages/describe-package/src/api.ts @@ -26,10 +26,12 @@ export type PackageDescription = { name: string; version: string; functions: FunctionDescription[]; + namespaces: NamespaceDescription[]; }; export type FunctionDescription = { name: string; + type: 'function'; magic: boolean; // keep you-know-who happy isOperation: boolean; // Is this an Operation? 
parameters: ParameterDescription[]; @@ -38,6 +40,11 @@ export type FunctionDescription = { parent?: string; }; +export type NamespaceDescription = { + name: string; + type: 'namespace'; +}; + type ExampleDescription = { code: string; caption?: string; @@ -96,12 +103,20 @@ export const describePackage = async ( const files = await fetchDTSListing(specifier); const functions: FunctionDescription[] = []; + const namespaces: NamespaceDescription[] = []; for await (const fileName of files) { // Exclude the beta file if (!/beta\.d\.ts$/.test(fileName)) { const f = await fetchFile(`${specifier}${fileName}`); project.createFile(f, fileName); - functions.push(...describeProject(project, fileName)); + + describeProject(project, fileName).forEach((member) => { + if (member.type === 'function') { + functions.push(member); + } else if (member.type === 'namespace') { + namespaces.push(member); + } + }); } } @@ -109,6 +124,7 @@ export const describePackage = async ( name, version, functions, + namespaces, }; }; diff --git a/packages/describe-package/src/describe-project.ts b/packages/describe-package/src/describe-project.ts index 3b9c69af2..18f9f7593 100644 --- a/packages/describe-package/src/describe-project.ts +++ b/packages/describe-package/src/describe-project.ts @@ -1,7 +1,11 @@ import { WrappedSymbol } from './typescript/wrapped-symbol'; import { NO_SYMBOLS_FOUND } from './typescript/project'; import type { Project } from './typescript/project'; -import type { FunctionDescription, ParameterDescription } from './api'; +import type { + FunctionDescription, + ParameterDescription, + NamespaceDescription, +} from './api'; type DescribeOptions = { // Should we describe privately declared exports? @@ -22,6 +26,16 @@ const describeParameter = ( }; }; +const describeNamespace = ( + _project: Project, + symbol: WrappedSymbol +): NamespaceDescription => { + return { + name: symbol.name, + type: 'namespace', + }; +}; + const describeFunction = ( project: Project, symbol: WrappedSymbol, @@ -35,6 +49,7 @@ const describeFunction = ( } return { + type: 'function', name: moduleName ? 
`${moduleName}.${symbol.name}` : symbol.name, description: symbol.comment, parameters: symbol.parameters.map((p) => describeParameter(project, p)), @@ -87,18 +102,18 @@ const describeProject = ( .reduce((symbols, symbol) => { if (symbol.isFunctionDeclaration) { symbols.push(describeFunction(project, symbol)); - } - - if (symbol.isModuleDeclaration) { + } else if (symbol.isModuleDeclaration) { symbol.exports.map((modSymbol) => { if (modSymbol.isFunctionDeclaration) { symbols.push(describeFunction(project, modSymbol, symbol.name)); } }); + } else if (symbol.isExportAlias) { + symbols.push(describeNamespace(project, symbol)); } return symbols; - }, [] as FunctionDescription[]); + }, [] as Array<FunctionDescription | NamespaceDescription>); }; export default describeProject; diff --git a/packages/describe-package/src/typescript/wrapped-symbol.ts b/packages/describe-package/src/typescript/wrapped-symbol.ts index 33c232eed..09d30453b 100644 --- a/packages/describe-package/src/typescript/wrapped-symbol.ts +++ b/packages/describe-package/src/typescript/wrapped-symbol.ts @@ -43,7 +43,9 @@ export class WrappedSymbol { const newSymbol: ts.Symbol = this.typeChecker.getAliasedSymbol( this.symbol ); - return new WrappedSymbol(this.typeChecker, newSymbol); + if (newSymbol.escapedName !== 'unknown') { + return new WrappedSymbol(this.typeChecker, newSymbol); + } } return this; } @@ -95,7 +97,8 @@ export class WrappedSymbol { // @ts-ignore symbol.parent const parentSymbol = this.symbol.parent; return ( - parentSymbol && parentSymbol.escapedName.match(/^\"\/node_modules\//) + this.symbol.flags === ts.SymbolFlags.AliasExcludes || + parentSymbol?.escapedName.match(/^\"\/node_modules\//) ); } diff --git a/packages/describe-package/test/describe-project.test.ts b/packages/describe-package/test/describe-project.test.ts index d7ae04e8a..0a95bbb83 100644 --- a/packages/describe-package/test/describe-project.test.ts +++ b/packages/describe-package/test/describe-project.test.ts @@ -3,22 +3,23 @@ import { setupProject } from './helpers'; import describeProject from '../src/describe-project'; -let fns; +let members; // Load the fixture once and then run a bunch of tests against it test.before(async () => { const project = await setupProject('stroopwafel'); - fns = await describeProject(project); + members = await describeProject(project); }); -const get = (name) => fns.find((fn) => fn.name === name); +const get = (name) => members.find((fn) => fn.name === name); -test('List all the exported functions', async (t) => { - t.assert(fns.length === 4); +test('List all the exported members', async (t) => { + t.assert(members.length === 5); t.truthy(get('traditional')); t.truthy(get('oneFlavour')); t.truthy(get('manyFlavours')); t.truthy(get('fn')); + t.truthy(get('flavours')); }); test('Does not include private functions', async (t) => { @@ -70,3 +71,10 @@ test('Parse an empty file', async (t) => { const fns = await describeProject(project); t.is(fns.length, 0); }); + +test('Recognise a namespace', async (t) => { + const ns = get('flavours'); + t.is(ns.type, 'namespace') + + // Note that we don't do a lot with the namespace right now - we just acknowledge that it's there +}); \ No newline at end of file diff --git a/packages/describe-package/test/fixtures/stroopwafel-flavours.d.ts b/packages/describe-package/test/fixtures/stroopwafel-flavours.d.ts new file mode 100644 index 000000000..b21991a9b --- /dev/null +++ b/packages/describe-package/test/fixtures/stroopwafel-flavours.d.ts @@ -0,0 +1,3 @@ +export const strawberry = 'strawberry'; + +export const chocolate =
'chocolate'; diff --git a/packages/describe-package/test/fixtures/stroopwafel.d.ts b/packages/describe-package/test/fixtures/stroopwafel.d.ts index 4167c0313..0a1fc1327 100644 --- a/packages/describe-package/test/fixtures/stroopwafel.d.ts +++ b/packages/describe-package/test/fixtures/stroopwafel.d.ts @@ -30,3 +30,5 @@ export declare function somethingPrivate(): void; // Note that this is mocked by the helper project setup export { fn } from '@openfn/language-common'; + +export * as flavours from './stroopwafel-flavours'; diff --git a/packages/engine-multi/CHANGELOG.md b/packages/engine-multi/CHANGELOG.md index e61cd4896..2702919c0 100644 --- a/packages/engine-multi/CHANGELOG.md +++ b/packages/engine-multi/CHANGELOG.md @@ -1,5 +1,14 @@ # engine-multi +## 1.1.4 + +### Patch Changes + +- Updated dependencies [cecdb60] +- Updated dependencies [7ddc5d8] + - @openfn/runtime@1.1.2 + - @openfn/compiler@0.1.2 + ## 1.1.3 ### Patch Changes diff --git a/packages/engine-multi/package.json b/packages/engine-multi/package.json index 27a6200c4..8eeebf2b6 100644 --- a/packages/engine-multi/package.json +++ b/packages/engine-multi/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/engine-multi", - "version": "1.1.3", + "version": "1.1.4", "description": "Multi-process runtime engine", "main": "dist/index.js", "type": "module", diff --git a/packages/lexicon/core.d.ts b/packages/lexicon/core.d.ts index 08fae4f6a..fe2f3f50e 100644 --- a/packages/lexicon/core.d.ts +++ b/packages/lexicon/core.d.ts @@ -89,6 +89,7 @@ export type WorkflowOptions = { timeout?: number; stepTimeout?: number; start?: StepId; + end?: StepId; // TODO not supported yet I don't think? sanitize?: SanitizePolicies; diff --git a/packages/lightning-mock/CHANGELOG.md b/packages/lightning-mock/CHANGELOG.md index 4f65eee23..605ea2805 100644 --- a/packages/lightning-mock/CHANGELOG.md +++ b/packages/lightning-mock/CHANGELOG.md @@ -1,5 +1,13 @@ # @openfn/lightning-mock +## 2.0.4 + +### Patch Changes + +- Updated dependencies [cecdb60] + - @openfn/runtime@1.1.2 + - @openfn/engine-multi@1.1.4 + ## 2.0.3 ### Patch Changes diff --git a/packages/lightning-mock/package.json b/packages/lightning-mock/package.json index 10a0cf2e3..99079beba 100644 --- a/packages/lightning-mock/package.json +++ b/packages/lightning-mock/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/lightning-mock", - "version": "2.0.3", + "version": "2.0.4", "private": true, "description": "A mock Lightning server", "main": "dist/index.js", diff --git a/packages/runtime/CHANGELOG.md b/packages/runtime/CHANGELOG.md index 2af1d4e1d..0837c88f6 100644 --- a/packages/runtime/CHANGELOG.md +++ b/packages/runtime/CHANGELOG.md @@ -1,11 +1,18 @@ # @openfn/runtime +## 1.1.2 + +### Patch Changes + +- cecdb60: Support an end step option + ## 1.1.1 ### Patch Changes - Updated dependencies [2fde0ad] - @openfn/logger@1.0.1 + ## 1.1.0 ### Minor Changes diff --git a/packages/runtime/package.json b/packages/runtime/package.json index 506668af1..0cdce0dde 100644 --- a/packages/runtime/package.json +++ b/packages/runtime/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/runtime", - "version": "1.1.1", + "version": "1.1.2", "description": "Job processing runtime.", "type": "module", "exports": { diff --git a/packages/runtime/src/execute/plan.ts b/packages/runtime/src/execute/plan.ts index ee32431fa..d87940714 100644 --- a/packages/runtime/src/execute/plan.ts +++ b/packages/runtime/src/execute/plan.ts @@ -56,7 +56,6 @@ const executePlan = async ( opts.callbacks?.notify?.(NOTIFY_STATE_LOAD, { duration, jobId: id }); 
logger.success(`loaded state for ${id} in ${duration}ms`); } - // Right now this executes in series, even if jobs are parallelised while (queue.length) { const next = queue.shift()!; @@ -67,10 +66,17 @@ const executePlan = async ( const result = await executeStep(ctx, job, prevState); stateHistory[next] = result.state; - if (!result.next.length) { + const exitEarly = options.end === next; + if (exitEarly || !result.next.length) { leaves[next] = stateHistory[next]; } + if (exitEarly) { + // If this is designated an end point, we should abort + // (even if there are more steps queued up) + break; + } + if (result.next) { queue.push(...result.next); } diff --git a/packages/runtime/src/util/validate-plan.ts b/packages/runtime/src/util/validate-plan.ts index 2dd86628d..737424d7b 100644 --- a/packages/runtime/src/util/validate-plan.ts +++ b/packages/runtime/src/util/validate-plan.ts @@ -73,7 +73,7 @@ export const buildModel = ({ workflow }: ExecutionPlan) => { const assertStart = (plan: ExecutionPlan) => { const { start } = plan.options; if (typeof start === 'string') { - if (!plan.workflow.steps.find(({ id }) => id == start)) { + if (!plan.workflow.steps.find(({ id }) => id === start)) { throw new ValidationError(`Could not find start job: ${start}`); } } @@ -105,6 +105,10 @@ const assertNoCircularReferences = (model: Model) => { } }; +// This ensures that each step only has a single upstream edge, +// ie, each step only has a single input +// This is important for the `--cache` functionality in the CLI, +// which assumes this rule when working out the input to a custom start node const assertSingletonDependencies = (model: Model) => { for (const id in model) { const node = model[id]; diff --git a/packages/runtime/test/runtime.test.ts b/packages/runtime/test/runtime.test.ts index bfafc694a..13157f6e5 100644 --- a/packages/runtime/test/runtime.test.ts +++ b/packages/runtime/test/runtime.test.ts @@ -285,7 +285,6 @@ test('run a workflow with initial state (data key) and optional start', async (t workflow: { steps: [ { - // won't run id: 'a', expression: 'export default [(s) => { s.data.count +=1 ; return s}]', next: { b: true }, @@ -310,6 +309,65 @@ test('run a workflow with initial state (data key) and optional start', async (t t.is(result.data.count, 12); }); +test('run a workflow with an end', async (t) => { + const plan: ExecutionPlan = { + workflow: { + steps: [ + { + id: 'a', + expression: 'export default [(s) => { s.data.a = 1 ; return s}]', + next: { b: true }, + }, + { + id: 'b', + expression: 'export default [(s) => { s.data.b = 1; return s}]', + next: { c: true }, + }, + { + id: 'c', + expression: 'export default [(s) => { s.data.c = 1 ; return s}]', + }, + ], + }, + options: { + end: 'b', + }, + }; + + const result: any = await run(plan, {}); + t.deepEqual(result, { data: { a: 1, b: 1 } }); +}); + +test('run a workflow with a start and end', async (t) => { + const plan: ExecutionPlan = { + workflow: { + steps: [ + { + id: 'a', + expression: 'export default [(s) => { s.data.a = 1 ; return s}]', + next: { b: true }, + }, + { + id: 'b', + expression: 'export default [(s) => { s.data.b = 1; return s}]', + next: { c: true }, + }, + { + id: 'c', + expression: 'export default [(s) => { s.data.c = 1 ; return s}]', + }, + ], + }, + options: { + start: 'b', + end: 'b', + }, + }; + + const result: any = await run(plan, {}); + t.deepEqual(result, { data: { b: 1 } }); +}); + test('run a workflow with a trigger node', async (t) => { const plan: ExecutionPlanNoOptions = { workflow: { diff --git
a/packages/ws-worker/CHANGELOG.md b/packages/ws-worker/CHANGELOG.md index c91c6fba9..10f2a900f 100644 --- a/packages/ws-worker/CHANGELOG.md +++ b/packages/ws-worker/CHANGELOG.md @@ -1,5 +1,15 @@ # ws-worker +## 1.1.4 + +### Patch Changes + +- 7ddc5d8: Support expressions in lazy state operators +- 4deb5d4: Recognise import aliases in job code +- Updated dependencies [cecdb60] + - @openfn/runtime@1.1.2 + - @openfn/engine-multi@1.1.4 + ## 1.1.3 ### Patch Changes diff --git a/packages/ws-worker/package.json b/packages/ws-worker/package.json index 144a8c078..f9efd0fd5 100644 --- a/packages/ws-worker/package.json +++ b/packages/ws-worker/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/ws-worker", - "version": "1.1.3", + "version": "1.1.4", "description": "A Websocket Worker to connect Lightning to a Runtime Engine", "main": "dist/index.js", "type": "module", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7e6cb90f8..af24d48c4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -108,7 +108,7 @@ importers: dependencies: '@openfn/language-common_1.12.0': specifier: npm:@openfn/language-common@^1.12.0 - version: /@openfn/language-common@1.12.0 + version: /@openfn/language-common@1.13.1 integration-tests/worker: dependencies: @@ -1356,8 +1356,8 @@ packages: heap: 0.2.7 dev: false - /@fastify/busboy@2.1.0: - resolution: {integrity: sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==} + /@fastify/busboy@2.1.1: + resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} engines: {node: '>=14'} dev: false @@ -1617,18 +1617,18 @@ packages: semver: 7.5.4 dev: true - /@openfn/language-common@1.12.0: - resolution: {integrity: sha512-JQjJpRNdwG5LMmAIO7P7HLgtHYS0UssoibAhMJOpoHk5/kFLDpH3tywpp40Pai33NMzgofxb5gb0MZTgoEk3fw==} + /@openfn/language-common@1.13.1: + resolution: {integrity: sha512-aEcXy1KB3Z2ODuL0eI9jgDkXOIMUANR9U8NHost1WB+r66KhJUn50T4ezZr6KQ3W9pji/IFcu6ukW2PDccJUNw==} dependencies: ajv: 8.12.0 axios: 1.1.3 - csv-parse: 5.5.3 + csv-parse: 5.5.5 csvtojson: 2.0.10 date-fns: 2.30.0 http-status-codes: 2.3.0 jsonpath-plus: 4.0.0 lodash: 4.17.21 - undici: 5.28.3 + undici: 5.28.4 transitivePeerDependencies: - debug dev: false @@ -3016,8 +3016,8 @@ packages: resolution: {integrity: sha512-cO1I/zmz4w2dcKHVvpCr7JVRu8/FymG5OEpmvsZYlccYolPBLoVGKUHgNoc4ZGkFeFlWGEDmMyBM+TTqRdW/wg==} dev: true - /csv-parse@5.5.3: - resolution: {integrity: sha512-v0KW6C0qlZzoGjk6u5tLmVfyZxNgPGXZsWTXshpAgKVGmGXzaVWGdlCFxNx5iuzcXT/oJN1HHM9DZKwtAtYa+A==} + /csv-parse@5.5.5: + resolution: {integrity: sha512-erCk7tyU3yLWAhk6wvKxnyPtftuy/6Ak622gOO7BCJ05+TYffnPCJF905wmOQm+BpkX54OdAl8pveJwUdpnCXQ==} dev: false /csv-stringify@5.6.5: @@ -7718,11 +7718,11 @@ packages: resolution: {integrity: sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==} dev: true - /undici@5.28.3: - resolution: {integrity: sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==} + /undici@5.28.4: + resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} engines: {node: '>=14.0'} dependencies: - '@fastify/busboy': 2.1.0 + '@fastify/busboy': 2.1.1 dev: false /union-value@1.0.1:
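A few of the changes in this patch benefit from a worked illustration. First, the new compiler tests show that lazy state references (`$.foo`) are now wrapped even when they sit inside a larger expression, not just as a bare property path. Below is a minimal sketch of the before/after, using hypothetical `get` and `city` stand-ins so the compiled form can actually run; neither is defined in this changeset.

```ts
// Hypothetical stand-ins so the compiled output can be executed directly;
// `get` and `city` are placeholders, not part of this patch.
const city: Record<string, string> = { nl: 'Amsterdam' };
const get = (fn: (state: any) => unknown) => (state: any) => {
  console.log(fn(state));
  return state;
};

// In a job you would write the lazy form, e.g.
//   get($.firstname + " " + $.lastname)
//   get(city[$.location])
// The new tests assert that the compiler rewrites these into explicit arrow functions:
const fullName = get((state) => state.firstname + ' ' + state.lastname);
const cityName = get((state) => city[state.location]);

fullName({ firstname: 'Ada', lastname: 'Lovelace' }); // logs "Ada Lovelace"
cityName({ location: 'nl' }); // logs "Amsterdam"

// Dollar refs inside string literals are left alone: get("$.a.b") stays a plain string.
```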
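Second, the describe-package changes add a `namespaces` array to `PackageDescription` and a `type` discriminant (`'function'` or `'namespace'`) on each described member. Here is a sketch of consuming that result, assuming `describePackage` is re-exported from the package root and that a single string specifier is an acceptable call shape; only `src/api.ts` is visible in this diff, so both are assumptions.

```ts
import { describePackage } from '@openfn/describe-package';

// Namespace re-exports (export * as ns from '...') are now reported
// alongside plain function exports.
const pkg = await describePackage('@openfn/language-common@1.13.1');

console.log(pkg.functions.map((fn) => fn.name)); // function/operation names
console.log(pkg.namespaces.map((ns) => ns.name)); // namespace export names

// The `type` discriminant makes mixed lists easy to split back apart.
for (const member of [...pkg.functions, ...pkg.namespaces]) {
  if (member.type === 'namespace') {
    // Only the name is captured for a namespace right now, per the new test.
    console.log(`namespace: ${member.name}`);
  }
}
```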
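Finally, the comment added to `assertSingletonDependencies` spells out the rule it enforces: each step may have only one upstream edge, so every step has a single unambiguous input, which the CLI's caching relies on when working out the input to a custom start step. Below is a hedged sketch of a plan shape that breaks the rule; the exact error surfaced by validation isn't shown in this diff, so treat the failure mode as an assumption.

```ts
// Hypothetical plan: both 'a' and 'b' declare next: { c: true }, so step 'c'
// has two upstream edges and its input can no longer be determined uniquely.
const ambiguousPlan = {
  workflow: {
    steps: [
      { id: 'a', expression: 'export default [(s) => s]', next: { c: true } },
      { id: 'b', expression: 'export default [(s) => s]', next: { c: true } },
      { id: 'c', expression: 'export default [(s) => s]' },
    ],
  },
  options: {},
};
// Plan validation is expected to reject this shape; giving 'c' a single parent
// (say only 'a' -> 'c') restores the one-step-one-input invariant.
```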