feat: add support for chained generator flows
chrismclarke committed Sep 25, 2024
1 parent a7a57e7 commit 4b73f78
Showing 6 changed files with 27 additions and 53 deletions.
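At a glance, this commit drops the intermediate `_generated` field (and the FlowParserProcessor step that extracted it) in favour of having the data pipe and generator parsers push each generated flow back onto the processing queue via `deferInputProcess`, so generated flows are parsed like any authored flow and become available in `processedFlowHashmap` for later input sources. Below is a minimal sketch of the shared pattern, not part of the commit; the types and helper name are simplified stand-ins for `FlowTypes.FlowTypeWithData` and the parsers' `this.flowProcessor`.

// Sketch only: simplified stand-ins for the FlowTypes/processor interfaces used in the diff below
type GeneratedFlow = { flow_name: string; flow_subtype: string; flow_type: string; rows: any[] };

interface IFlowProcessor {
  // existing processor method used by both parsers in this commit
  deferInputProcess(flow: GeneratedFlow, deferId: string): void;
}

function deferGeneratedFlows(flowProcessor: IFlowProcessor, generated: GeneratedFlow[]) {
  for (const flow of generated) {
    // defer id convention used in both parsers: "<flow_type>.<flow_subtype>.<flow_name>"
    const deferId = `${flow.flow_type}.${flow.flow_subtype}.${flow.flow_name}`;
    // push to the back of the current processing queue so the generated flow is fully
    // processed and can be referenced from other processes as required
    flowProcessor.deferInputProcess(flow, deferId);
  }
}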
3 changes: 2 additions & 1 deletion .idems_app/deployments/local/.gitignore
@@ -3,4 +3,5 @@ cache
tasks
config.json
sheets/
!sheets/demo.xlsx
reports
2 changes: 0 additions & 2 deletions packages/data-models/flowTypes.ts
@@ -66,8 +66,6 @@ export namespace FlowTypes {
rows: any[];
/** Datalists populate rows as a hashmap instead to allow easier access to nested structures */
rowsHashmap?: { [id: string]: any };
/** Additional flows generated during parsing, such as data pipe or generator flow outputs */
_generated?: { [flow_type in FlowType]?: { [flow_name: string]: FlowTypeWithData } };
}

/*********************************************************************************************
@@ -4,7 +4,7 @@ import { IConverterPaths, IFlowHashmapByType, IParsedWorkbookData } from "../../
import { arrayToHashmap, groupJsonByKey, IContentsEntry } from "../../utils";
import BaseProcessor from "../base";

const cacheVersion = 20240502.0;
const cacheVersion = 20240924.4;

export class FlowParserProcessor extends BaseProcessor<FlowTypes.FlowTypeWithData> {
public parsers: { [flowType in FlowTypes.FlowType]: Parsers.DefaultParser } = {
@@ -93,49 +93,15 @@ export class FlowParserProcessor extends BaseProcessor<FlowTypes.FlowTypeWithDat
return k;
});
}
// populate any generated flows to main list
const flowTypesWithGenerated = this.populateGeneratedFlows(flowHashmapByType);

// convert back from hashmap to hashArrays for final output
const outputData: IParsedWorkbookData = {};
for (const [type, typeHashmap] of Object.entries(flowTypesWithGenerated)) {
for (const [type, typeHashmap] of Object.entries(flowHashmapByType)) {
outputData[type] = Object.values(typeHashmap);
}
return outputData;
}

/**
* Iterate over all flows to check for any that populate additional _generated flows
* that should be extracted to top-level
*/
private populateGeneratedFlows(flowsByType: IFlowHashmapByType) {
// handle any additional methods that operate on full list of processed flows,
// e.g. populating additional generated flows
for (const typeFlows of Object.values(flowsByType)) {
for (const { _generated, ...flow } of Object.values(typeFlows)) {
if (_generated) {
// remove _generated field from flow
flowsByType[flow.flow_type][flow.flow_name] = flow;
// populate generated to main list, ensure generated flows are also fully processed
for (const generatedFlows of Object.values(_generated)) {
for (const generatedFlow of Object.values(generatedFlows)) {
flowsByType[generatedFlow.flow_type] ??= {};
if (flowsByType[generatedFlow.flow_type][generatedFlow.flow_name]) {
this.logger.error({
message: "Generated flow will override existing flow",
details: [generatedFlow.flow_type, generatedFlow.flow_name],
});
}
const processed = this.processInput(JSON.parse(JSON.stringify(generatedFlow)));
flowsByType[generatedFlow.flow_type][generatedFlow.flow_name] = processed;
}
}
}
}
}
return flowsByType;
}

public shouldUseCachedEntry(
input: FlowTypes.FlowTypeWithData,
cachedEntry: IContentsEntry
@@ -19,28 +19,31 @@ export class DataPipeParser extends DefaultParser<FlowTypes.DataPipeFlow> {
try {
const outputs = pipe.run();
this.populateGeneratedFlows(outputs);
// As the populated flows will be passed directly to the processor queue
// can just return undefined so that the data pipe will not be stored in outputs
return undefined;
} catch (error) {
console.trace(error);
throw error;
}
return this.flow;
}

private populateGeneratedFlows(outputs: { [output_name: string]: any[] }) {
const generated: FlowTypes.DataPipeFlow["_generated"] = { data_list: {} };
this.flowProcessor.processedFlowHashmap.data_list ??= {};

for (const [flow_name, rows] of Object.entries(outputs)) {
generated.data_list[flow_name] = {
const flow: FlowTypes.FlowTypeWithData = {
flow_name,
flow_subtype: "generated",
flow_type: "data_list",
rows,
};
// also populate generated outputs to be available for future input sources
this.flowProcessor.processedFlowHashmap.data_list[flow_name] = rows;
const deferId = `${flow.flow_type}.${flow.flow_subtype}.${flow.flow_name}`;

// Pass all generated flows to the back of the current processing queue so that they can be
// populated to processed hashmap and referenced from other processes as required
this.flowProcessor.deferInputProcess(flow, deferId);
}
this.flow._generated = generated;
}

private loadInputSources() {
@@ -9,7 +9,7 @@ export class GeneratorParser extends DefaultParser<FlowTypes.GeneratorFlow> {
*/
public static populateProcessedFlows() {}

public postProcessFlow(flow: FlowTypes.GeneratorFlow): FlowTypes.GeneratorFlow {
public override postProcessFlow(flow: FlowTypes.GeneratorFlow): FlowTypes.GeneratorFlow {
flow.parameter_list = this.validateParameterList(flow);

const inputSources = this.loadInputSources();
@@ -23,13 +23,20 @@ export class GeneratorParser extends DefaultParser<FlowTypes.GeneratorFlow> {
return;
}
try {
this.flow._generated = this.generateFlows(flow, dataListRows);
// this.handleOutputs(generated);
const generated = this.generateFlows(flow, dataListRows);

// Pass all generated flows to the back of the current processing queue so that they can be
// populated to processed hashmap and referenced from other processes as required
for (const generatedFlow of generated) {
const deferId = `${generatedFlow.flow_type}.${generatedFlow.flow_subtype}.${generatedFlow.flow_name}`;
this.flowProcessor.deferInputProcess(generatedFlow, deferId);
}
} catch (error) {
console.trace(error);
throw error;
}
return this.flow;
// As the generator has been fully used it no longer needs to be stored, so just return undefined
return undefined;
}

private validateParameterList(
@@ -61,7 +68,7 @@ export class GeneratorParser extends DefaultParser<FlowTypes.GeneratorFlow> {
generator: FlowTypes.GeneratorFlow,
dataListRows: FlowTypes.Data_listRow[]
) {
const generated: FlowTypes.FlowTypeWithData["_generated"] = {};
const generated: FlowTypes.FlowTypeWithData[] = [];

for (const listRow of dataListRows) {
const parser = new TemplatedData({
@@ -82,8 +89,7 @@ export class GeneratorParser extends DefaultParser<FlowTypes.GeneratorFlow> {
flow_type: output_flow_type,
rows: parsedRows,
};
generated[output_flow_type] ??= {};
generated[output_flow_type][output_flow_name] = generatedFlow;
generated.push(generatedFlow);
}
return generated;
}
@@ -95,7 +95,7 @@ class SheetsPostProcessor {

private extractContentsData(flow: FlowTypes.FlowTypeWithData): FlowTypes.FlowTypeBase {
// remove rows property (if exists)
const { rows, status, _generated, ...keptFields } = flow;
const { rows, status, ...keptFields } = flow;
return keptFields as FlowTypes.FlowTypeBase;
}
private sheetsWriteContents(baseFolder: string, contents: ISheetContents) {
