diff --git a/packages/data-models/flowTypes.ts b/packages/data-models/flowTypes.ts index ebb695c7d..5a08d1440 100644 --- a/packages/data-models/flowTypes.ts +++ b/packages/data-models/flowTypes.ts @@ -82,6 +82,8 @@ export namespace FlowTypes { export interface DataPipeFlow extends FlowTypeWithData { flow_type: "data_pipe"; rows: IDataPipeOperation[]; + /** Generated list of output flows created by data pipe */ + _output_flows?: FlowTypeBase[]; } export interface GeneratorFlow extends FlowTypeWithData { flow_type: "generator"; @@ -91,6 +93,8 @@ export namespace FlowTypes { output_flow_subtype?: string; output_flow_type?: FlowType; }; + /** Generated list of output flows created by generator */ + _output_flows?: FlowTypeBase[]; } export interface Translation_strings { [sourceText: string]: string; diff --git a/packages/scripts/src/commands/app-data/convert/convert.spec.ts b/packages/scripts/src/commands/app-data/convert/convert.spec.ts index fd4c9e8e0..230989803 100644 --- a/packages/scripts/src/commands/app-data/convert/convert.spec.ts +++ b/packages/scripts/src/commands/app-data/convert/convert.spec.ts @@ -65,7 +65,7 @@ describe("App Data Converter", () => { it("Populates output to folder by data type", async () => { await converter.run(); const outputFolders = readdirSync(paths.outputFolder); - expect(outputFolders).toEqual(["data_list", "template"]); + expect(outputFolders).toEqual(["data_list", "data_pipe", "template"]); }); it("Supports input from multiple source folders", async () => { const multipleSourceConverter = new AppDataConverter({ diff --git a/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.spec.ts b/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.spec.ts index e7e77ea1b..5db542669 100644 --- a/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.spec.ts +++ b/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.spec.ts @@ -81,7 +81,7 
@@ describe("FlowParser Processor", () => { it("Outputs flows by type", async () => { const output = await processor.process(testInputs); // NOTE - data_pipe and generator flows will not populate self but instead generated outputs - expect(Object.keys(output)).toEqual(["data_list", "template"]); + expect(Object.keys(output)).toEqual(["data_list", "template", "data_pipe"]); const errorLogs = getLogs("error"); if (errorLogs.length > 0) { console.log("Unexpected Errors:\n", errorLogs); diff --git a/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.ts b/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.ts index 7577ee156..2daa9d392 100644 --- a/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.ts +++ b/packages/scripts/src/commands/app-data/convert/processors/flowParser/flowParser.ts @@ -4,7 +4,7 @@ import { IConverterPaths, IFlowHashmapByType, IParsedWorkbookData } from "../../ import { arrayToHashmap, groupJsonByKey, IContentsEntry } from "../../utils"; import BaseProcessor from "../base"; -const cacheVersion = 20240924.4; +const cacheVersion = 20241001.2; export class FlowParserProcessor extends BaseProcessor { public parsers: { [flowType in FlowTypes.FlowType]: Parsers.DefaultParser } = { diff --git a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.spec.ts b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.spec.ts index 11a79cb12..c33115109 100644 --- a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.spec.ts +++ b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.spec.ts @@ -18,7 +18,7 @@ describe("data_pipe Parser", () => { parser.flowProcessor.processedFlowHashmap = getTestData(); }); it("Populates generated data lists", async () => { - parser.run({ + const res = parser.run({ flow_name: 
"test_pipe_parse", flow_type: "data_pipe", rows: [ @@ -45,6 +45,20 @@ describe("data_pipe Parser", () => { test_output_1: [{ id: 2 }, { id: 3 }], test_output_2: [{ id: 3 }], }); + + // Also check output references stored + expect(res._output_flows).toEqual([ + { + flow_type: "data_list", + flow_subtype: "generated", + flow_name: "test_output_1", + }, + { + flow_type: "data_list", + flow_subtype: "generated", + flow_name: "test_output_2", + }, + ]); }); it("Allows outputs from one pipe to be used in another", async () => { diff --git a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.ts b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.ts index 52c86d1ba..423e5a565 100644 --- a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.ts +++ b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/data_pipe.parser.ts @@ -7,7 +7,6 @@ export class DataPipeParser extends DefaultParser { private outputHashmap: { [flow_name: string]: { [output_name: string]: any } } = {}; /** If extending the class add additional postprocess pipeline here */ - public postProcessFlow(flow: FlowTypes.DataPipeFlow): FlowTypes.DataPipeFlow { const inputSources = this.loadInputSources(); const pipe = new DataPipe(flow.rows, inputSources); @@ -24,30 +23,31 @@ export class DataPipeParser extends DefaultParser { // HACK - populate to output hashmap for use in tests. 
Clone output due to deep nest issues this.outputHashmap[flow.flow_name] = JSON.parse(JSON.stringify(outputs)); - this.populateGeneratedFlows(outputs); - // As the populated flows will be passed directly to the processor queue - // can just return undefined so that the data pipe will not be stored in outputs - return undefined; + const generated = this.generateFlows(outputs); + + // Pass all generated flows to the back of the current processing queue so that they can be + // populated to processed hashmap and referenced from other processes as required + for (const generatedFlow of generated) { + const deferId = `${generatedFlow.flow_type}.${generatedFlow.flow_subtype}.${generatedFlow.flow_name}`; + this.flowProcessor.deferInputProcess(generatedFlow, deferId); + } + + // Return the parsed flow along with a summary of output flows to store within outputs + flow._output_flows = generated.map(({ rows, ...keptFields }) => keptFields); + return flow; } catch (error) { console.trace(error); throw error; } } - private populateGeneratedFlows(outputs: { [output_name: string]: any[] }) { - for (const [flow_name, rows] of Object.entries(outputs)) { - const flow: FlowTypes.FlowTypeWithData = { - flow_name, - flow_subtype: "generated", - flow_type: "data_list", - rows, - }; - const deferId = `${flow.flow_type}.${flow.flow_subtype}.${flow.flow_name}`; - - // Pass all generated flows to the back of the current processing queue so that they can be - // populated to processed hashmap and referenced from other processes as required - this.flowProcessor.deferInputProcess(flow, deferId); - } + private generateFlows(outputs: { [output_name: string]: any[] }) { + const generatedFlows: FlowTypes.Data_list[] = Object.entries(outputs).map( + ([flow_name, rows]) => { + return { flow_name, flow_subtype: "generated", flow_type: "data_list", rows }; + } + ); + return generatedFlows; } private loadInputSources() { diff --git 
a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.spec.ts b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.spec.ts index d892a33b5..ac171ce59 100644 --- a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.spec.ts +++ b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.spec.ts @@ -99,6 +99,21 @@ describe("generator Parser", () => { }, ]); }); + it("populates list of outputs", async () => { + const res = parser.run(generatorInput()) as FlowTypes.GeneratorFlow; + expect(res._output_flows).toEqual([ + { + flow_type: "template", + flow_subtype: "generated_type_1", + flow_name: "generated_template_1", + }, + { + flow_type: "template", + flow_subtype: "generated_type_2", + flow_name: "generated_template_2", + }, + ]); + }); it("parses generated flows using type parser", async () => { parser.run(generatorInput()); await parser.flowProcessor.queue.onIdle(); diff --git a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.ts b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.ts index 9a5d971ab..fe90af867 100644 --- a/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.ts +++ b/packages/scripts/src/commands/app-data/convert/processors/flowParser/parsers/generator.parser.ts @@ -30,9 +30,9 @@ export class GeneratorParser extends DefaultParser { const deferId = `${generatedFlow.flow_type}.${generatedFlow.flow_subtype}.${generatedFlow.flow_name}`; this.flowProcessor.deferInputProcess(generatedFlow, deferId); } - // As the populated flows will be passed directly to the processor queue - // can just return undefined so that the data pipe will not be stored in outputs - return undefined; + // Return the parsed generator along with a summary of output flows to store within 
outputs + flow._output_flows = generated.map(({ rows, ...keptFields }) => keptFields); + return flow; } catch (error) { console.trace(error); throw error;