deploy: improve error messages (#697)
* deploy: add a user-friendly path to all error objects

* deploy: update validation tests

* add path to error output

* deploy: improve language of duplicate key errors

* update test
josephjclark authored May 28, 2024
1 parent 015055c commit f89c03c
Showing 3 changed files with 136 additions and 101 deletions.
2 changes: 1 addition & 1 deletion packages/deploy/src/index.ts
@@ -111,7 +111,7 @@ export async function deploy(config: DeployConfig, logger: Logger) {

logger.debug('spec', spec);
if (spec.errors.length > 0) {
spec.errors.forEach((e: any) => logger.warn(e.message));
spec.errors.forEach((e: any) => logger.warn(`${e.path} :: ${e.message}`));
throw new DeployError(`${config.specPath} has errors`, 'VALIDATION_ERROR');
}
const nextState = mergeSpecIntoState(state, spec.doc);
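For context, here is a minimal sketch (not part of the diff) of how the updated warning line renders. The error object below is hypothetical, shaped like the new Error interface in validator.ts, and the logger is a stand-in for the deploy logger:

// Hypothetical error object following the new Error interface (illustrative values)
const e = {
  path: 'workflow-1/job-2',
  message: 'duplicate id: job-2',
  context: undefined as any,
};

// Minimal stand-in for the deploy logger, for illustration only
const logger = { warn: (msg: string) => console.warn(msg) };

// Mirrors the updated line in deploy()
logger.warn(`${e.path} :: ${e.message}`);
// -> "workflow-1/job-2 :: duplicate id: job-2"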
57 changes: 40 additions & 17 deletions packages/deploy/src/validator.ts
@@ -4,7 +4,7 @@ import { ProjectSpec } from './types';
export interface Error {
context: any;
message: string;
path?: string[];
path: string; // human readable path to the error (ie, workflow-1/job-2)
range?: [number, number, number];
}

@@ -15,30 +15,40 @@ export function parseAndValidate(input: string): {
let errors: Error[] = [];
const doc = YAML.parseDocument(input);

function pushUniqueKey(context: YAML.Pair, key: string, arr: string[]) {
function ensureUniqueId(key: string, arr: string[]) {
if (arr.includes(key)) {
errors.push({
context,
message: `duplicate key: ${key}`,
});
throw `duplicate id: ${key}`;
} else {
arr.push(key);
}
}

function validateJobs(workflow: YAMLMap, jobKeys: string[]) {
function validateJobs(
workflowName: string,
workflow: YAMLMap,
jobKeys: string[]
) {
const jobs = workflow.getIn(['jobs']);

if (jobs) {
if (isMap(jobs)) {
for (const job of jobs.items) {
if (isPair(job)) {
const jobName = (job as any).key.value;
pushUniqueKey(job, jobName, jobKeys);
try {
ensureUniqueId(jobName, jobKeys);
} catch (err: any) {
errors.push({
path: `${workflowName}/${jobName}`,
context: job,
message: err,
});
}
}
}
} else {
errors.push({
path: 'workflow',
context: jobs,
message: 'jobs: must be a map',
});
@@ -57,24 +67,32 @@ export function parseAndValidate(input: string): {
if (isPair(workflow)) {
const workflowName = (workflow as any).key.value;
const jobKeys: string[] = [];
pushUniqueKey(workflow, workflowName, workflowKeys);
try {
ensureUniqueId(workflowName, workflowKeys);
} catch (err: any) {
errors.push({
path: `${workflowName}`,
context: workflow,
message: err,
});
}
const workflowValue = (workflow as any).value;
if (isMap(workflowValue)) {
validateJobs(workflowValue, jobKeys);
validateJobs(workflowName, workflowValue, jobKeys);
} else {
errors.push({
context: workflowValue,
message: `workflow '${workflowName}': must be a map`,
path: ['workflows', workflowName],
message: `${workflowName}: must be a map`,
path: 'workflowName',
});
}
}
}
} else {
errors.push({
context: workflows,
message: 'workflows: must be a map',
path: ['workflows'],
message: 'must be a map',
path: 'workflows',
});
}
}
@@ -86,7 +104,7 @@ export function parseAndValidate(input: string): {
errors.push({
context: pair,
message: 'project: must provide at least one workflow',
path: ['workflows'],
path: 'workflows',
});

return doc.createPair('workflows', {});
@@ -96,6 +114,7 @@ export function parseAndValidate(input: string): {
if (pair.key && pair.key.value === 'jobs') {
if (pair.value.value === null) {
errors.push({
path: 'workflows',
context: pair,
message: 'jobs: must be a map',
range: pair.value.range,
@@ -114,7 +133,11 @@
});

if (!doc.has('name')) {
errors.push({ context: doc, message: 'Project must have a name' });
errors.push({
context: doc,
message: 'Project must have a name',
path: 'project',
});
}

const workflows = doc.getIn(['workflows']);
@@ -124,4 +147,4 @@ export function parseAndValidate(input: string): {
// or put our own errors in the yamlDoc

return { errors, doc: doc.toJSON() as ProjectSpec };
}
}
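To illustrate the new behaviour end to end, a sketch of calling parseAndValidate against a spec with a duplicate job id (the import path is an assumption; adjust it to wherever the validator module lives):

import { parseAndValidate } from './validator'; // assumed relative path

// Two jobs share the id "foo" inside workflow-two, mirroring the test below
const spec = `
name: project-name
workflows:
  workflow-two:
    name: workflow two
    jobs:
      foo:
      foo:
      bar:
`;

const { errors } = parseAndValidate(spec);
// Each error now carries a human-readable path alongside its message, e.g.
//   { path: 'workflow-two/foo', message: 'duplicate id: foo', ... }
errors.forEach((e) => console.warn(`${e.path} :: ${e.message}`));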
178 changes: 95 additions & 83 deletions packages/deploy/test/validator.test.ts
@@ -5,90 +5,103 @@ function findError(errors: any[], message: string) {
return errors.find((e) => e.message === message);
}

test('validator', (t) => {
let doc = `
name: project-name
workflows:
- name: workflow-one
- name: workflow-two
`;
test('Workflows must be a map', (t) => {
const doc = `
name: project-name
workflows:
- name: workflow-one
- name: workflow-two
`;

let results = parseAndValidate(doc);
const results = parseAndValidate(doc);

t.truthy(
results.errors.find((e) => e.message === 'workflows: must be a map')
);
const err = findError(results.errors, 'must be a map');

// disallow two workflows with same name
doc = `
name: project-name
workflows:
workflow-one:
name: workflow one
workflow-one:
name: workflow two
jobs:
foo:
workflow-three:
name: workflow three
jobs:
foo:
workflow-four:
jobs:
- 1
- 2
`;
t.truthy(err);
t.is(err.path, 'workflows');
});

results = parseAndValidate(doc);

t.truthy(findError(results.errors, 'duplicate key: workflow-one'));

t.truthy(findError(results.errors, 'jobs: must be a map'));

// disallow two jobs of the same name in the same workflow
doc = `
name: project-name
workflows:
workflow-one:
name: workflow one
workflow-two:
name: workflow two
jobs:
foo:
foo:
workflow-three:
name: workflow three
jobs:
bar:
test('Workflows must have unique ids', (t) => {
const doc = `
name: project-name
workflows:
workflow-one:
name: workflow one
workflow-one:
name: workflow two
workflow-three:
name: workflow three
`;

results = parseAndValidate(doc);

t.truthy(findError(results.errors, 'duplicate key: foo'));

doc = `
name: project-name
workflows:
workflow-one:
name: workflow one
jobs:
Transform-data-to-FHIR-standard:
name: Transform data to FHIR standard
adaptor: '@openfn/language-http@latest'
body: |
fn(state => state);
triggers:
webhook:
type: webhook
enabled: true
edges:
webhook->Transform-data-to-FHIR-standard:
condition_type: js_expression
condition_expression: true
const results = parseAndValidate(doc);

const err = findError(results.errors, 'duplicate id: workflow-one');
t.truthy(err);
t.is(err.path, 'workflow-one');
});

test('Jobs must have unique ids within a workflow', (t) => {
const doc = `
name: project-name
workflows:
workflow-two:
name: workflow two
jobs:
foo:
foo:
bar:
`;

const results = parseAndValidate(doc);

const err = findError(results.errors, 'duplicate id: foo');
t.is(err.path, 'workflow-two/foo');
t.truthy(err);
});

test('Job ids can duplicate across workflows', (t) => {
const doc = `
name: project-name
workflows:
workflow-one:
name: workflow one
jobs:
foo:
workflow-two:
name: workflow two
jobs:
foo:
`;

const results = parseAndValidate(doc);

t.is(results.errors.length, 0);
});

test('Workflow edges are parsed correctly', (t) => {
const doc = `
name: project-name
workflows:
workflow-one:
name: workflow one
jobs:
Transform-data-to-FHIR-standard:
name: Transform data to FHIR standard
adaptor: '@openfn/language-http@latest'
body: |
fn(state => state);
triggers:
webhook:
type: webhook
enabled: true
edges:
webhook->Transform-data-to-FHIR-standard:
condition_type: js_expression
condition_expression: true
`;

results = parseAndValidate(doc);
const results = parseAndValidate(doc);

t.assert(
results.doc.workflows['workflow-one'].edges![
@@ -97,18 +110,17 @@ workflows:
);
});


test('allow empty workflows', (t) => {
let doc = `
name: project-name
name: project-name
`;

let result = parseAndValidate(doc);
const result = parseAndValidate(doc);

t.is(result.errors.length, 0)
t.is(result.errors.length, 0);

t.deepEqual(result.doc, {
name: 'project-name',
workflows: {}
})
});
workflows: {},
});
});
