[KOGITO-9614] Async api and reload
fjtirado committed Aug 8, 2023
1 parent 8699087 commit 9240a8a
Showing 13 changed files with 125 additions and 81 deletions.
@@ -19,9 +19,9 @@

public abstract class ValidationDecorator {

protected final Map<String, Exception> exceptions;
protected final Map<String, Throwable> exceptions;

protected ValidationDecorator(Map<String, Exception> exceptions) {
protected ValidationDecorator(Map<String, Throwable> exceptions) {
this.exceptions = exceptions;
}

@@ -24,7 +24,7 @@ public class ValidationLogDecorator extends ValidationDecorator {

private static final Logger LOGGER = LoggerFactory.getLogger(ValidationLogDecorator.class);

public ValidationLogDecorator(Map<String, Exception> exceptions) {
public ValidationLogDecorator(Map<String, Throwable> exceptions) {
super(exceptions);
}

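The exception map is widened from Map<String, Exception> to Map<String, Throwable> so callers can store a bare cause rather than a wrapping exception (the ProcessCodegen change below records e.getCause() for parsing failures). A minimal sketch of feeding the decorator under the new signature; the file name, the error, and the import package are illustrative assumptions:

import java.util.HashMap;
import java.util.Map;

import org.kie.kogito.codegen.core.ValidationLogDecorator; // package assumed for illustration

class ValidationReportSketch {
    static void report() {
        // Keyed by source path; values may now be any Throwable, not only Exception.
        Map<String, Throwable> errors = new HashMap<>();
        errors.put("orders.sw.json", new IllegalStateException("workflow has no start state"));

        // decorate() is the existing reporting entry point used further down.
        new ValidationLogDecorator(errors).decorate();
    }
}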
2 changes: 1 addition & 1 deletion kogito-build/kogito-dependencies-bom/pom.xml
@@ -30,7 +30,7 @@
<version.net.thisptr.jackson-jq>1.0.0-preview.20220705</version.net.thisptr.jackson-jq>
<version.io.quarkiverse.jackson-jq>1.1.0</version.io.quarkiverse.jackson-jq>
<version.io.quarkiverse.openapi.generator>1.3.8</version.io.quarkiverse.openapi.generator>
<version.io.quarkiverse.asyncapi>0.0.3</version.io.quarkiverse.asyncapi>
<version.io.quarkiverse.asyncapi>0.0.6</version.io.quarkiverse.asyncapi>
<version.io.quarkiverse.reactivemessaging.http>1.1.5</version.io.quarkiverse.reactivemessaging.http>
<version.io.quarkiverse.embedded.postgresql>0.0.8</version.io.quarkiverse.embedded.postgresql>
<version.com.github.haifengl.smile>1.5.2</version.com.github.haifengl.smile>
@@ -106,7 +106,7 @@ public class ProcessCodegen extends AbstractGenerator {

public static ProcessCodegen ofCollectedResources(KogitoBuildContext context, Collection<CollectedResource> resources) {
Map<String, byte[]> processSVGMap = new HashMap<>();
Map<String, Exception> processesErrors = new HashMap<>();
Map<String, Throwable> processesErrors = new HashMap<>();
boolean useSvgAddon = context.getAddonsConfig().useProcessSVG();
final List<GeneratedInfo<KogitoWorkflowProcess>> processes = resources.stream()
.map(CollectedResource::resource)
@@ -123,13 +123,13 @@ public static ProcessCodegen ofCollectedResources(KogitoBuildContext context, Co
GeneratedInfo<KogitoWorkflowProcess> generatedInfo = parseWorkflowFile(resource, WorkflowFormat.fromFileName(resource.getSourcePath()), context);
notifySourceFileCodegenBindListeners(context, resource, Collections.singletonList(generatedInfo.info()));
return Stream.of(addResource(generatedInfo, resource));
} else {
return Stream.empty();
}
} catch (ValidationException | ProcessParsingException e) {
} catch (ValidationException e) {
processesErrors.put(resource.getSourcePath(), e);
return Stream.empty();
} catch (ProcessParsingException e) {
processesErrors.put(resource.getSourcePath(), e.getCause());
}
return Stream.empty();
})
//Validate parsed processes
.map(processInfo -> validate(processInfo, processesErrors))
@@ -154,7 +154,7 @@ private static void notifySourceFileCodegenBindListeners(KogitoBuildContext cont
.ifPresent(notifier -> processes.forEach(p -> notifier.notify(new SourceFileCodegenBindEvent(p.getId(), resource.getSourcePath()))));
}

private static void handleValidation(KogitoBuildContext context, Map<String, Exception> processesErrors) {
private static void handleValidation(KogitoBuildContext context, Map<String, Throwable> processesErrors) {
if (!processesErrors.isEmpty()) {
ValidationLogDecorator decorator = new ValidationLogDecorator(processesErrors);
decorator.decorate();
@@ -165,7 +165,7 @@ private static void handleValidation(KogitoBuildContext context, Map<String, Exc
}
}

private static GeneratedInfo<KogitoWorkflowProcess> validate(GeneratedInfo<KogitoWorkflowProcess> processInfo, Map<String, Exception> processesErrors) {
private static GeneratedInfo<KogitoWorkflowProcess> validate(GeneratedInfo<KogitoWorkflowProcess> processInfo, Map<String, Throwable> processesErrors) {
Process process = processInfo.info();
try {
ProcessValidatorRegistry.getInstance().getValidator(process, process.getResource()).validate(process);
@@ -215,8 +215,8 @@ private static ProcessCodegen ofProcesses(KogitoBuildContext context, List<Gener
protected static GeneratedInfo<KogitoWorkflowProcess> parseWorkflowFile(Resource r, WorkflowFormat format, KogitoBuildContext context) {
try (Reader reader = r.getReader()) {
return ServerlessWorkflowParser.of(reader, format, context).getProcessInfo();
} catch (IOException | RuntimeException e) {
throw new ProcessParsingException("Could not parse file " + r.getSourcePath(), e);
} catch (Exception e) {
throw new ProcessParsingException(e);
}
}

@@ -227,7 +227,7 @@ protected static Collection<Process> parseProcessFile(Resource r) {
Thread.currentThread().getContextClassLoader());
return xmlReader.read(reader);
} catch (SAXException | IOException e) {
throw new ProcessParsingException("Could not parse file " + r.getSourcePath(), e);
throw new ProcessParsingException(e);
}
}

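Taken together, the changes above mean every failed resource lands in a single Map<String, Throwable>: validation errors are stored as-is, while ProcessParsingException (defined in the next file) is treated purely as a wrapper and only its cause is recorded before ValidationLogDecorator reports the whole map. A condensed sketch of that collection pattern; the class is illustrative and the Kogito imports are assumed:

import java.util.HashMap;
import java.util.Map;

// ValidationException, ProcessParsingException and ValidationLogDecorator are the
// types used in the diff; their packages are assumed here.
class ProcessErrorCollectionSketch {

    private final Map<String, Throwable> processesErrors = new HashMap<>();

    void collect(String sourcePath, Runnable parseAndValidate) {
        try {
            parseAndValidate.run();
        } catch (ValidationException e) {
            processesErrors.put(sourcePath, e);            // validation error: keep as-is
        } catch (ProcessParsingException e) {
            processesErrors.put(sourcePath, e.getCause()); // wrapper: keep only the real cause
        }
    }

    void report() {
        if (!processesErrors.isEmpty()) {
            new ValidationLogDecorator(processesErrors).decorate();
        }
    }
}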
@@ -17,11 +17,9 @@

public class ProcessParsingException extends RuntimeException {

private static final long serialVersionUID = 1L;

public ProcessParsingException(Throwable cause) {
super(cause);
}

public ProcessParsingException(String s, Throwable e) {
super(s, e);
}
}
@@ -50,17 +50,16 @@ private ChannelMappingStrategy() {
private static final String OVERFLOW_STRATEGY_PROP = "overflow-strategy";
private static final String BUFFER_SIZE_PROP = "buffer-size";

private static Config config = ConfigProvider.getConfig();

public static Collection<ChannelInfo> getChannelMapping() {
Config config = ConfigProvider.getConfig();
Map<String, Collection<String>> inTriggers = new HashMap<>();
Map<String, Collection<String>> outTriggers = new HashMap<>();

for (String property : config.getPropertyNames()) {
if (property.startsWith(INCOMING_TRIGGER)) {
addTrigger(INCOMING_TRIGGER, property, inTriggers);
addTrigger(config, INCOMING_TRIGGER, property, inTriggers);
} else if (property.startsWith(OUTGOING_TRIGGER)) {
addTrigger(OUTGOING_TRIGGER, property, outTriggers);
addTrigger(config, OUTGOING_TRIGGER, property, outTriggers);
}
}

@@ -69,15 +68,15 @@ public static Collection<ChannelInfo> getChannelMapping() {
final String defaultOutgoingChannel = config.getOptionalValue(OUTGOING_DEFAULT_CHANNEL, String.class).orElse(KogitoEventStreams.OUTGOING);
for (String property : config.getPropertyNames()) {
if (property.startsWith(INCOMING_PREFIX) && property.endsWith(".connector")) {
result.add(getChannelInfo(property, INCOMING_PREFIX, true, defaultIncomingChannel, inTriggers));
result.add(getChannelInfo(config, property, INCOMING_PREFIX, true, defaultIncomingChannel, inTriggers));
} else if (property.startsWith(OUTGOING_PREFIX) && property.endsWith(".connector")) {
result.add(getChannelInfo(property, OUTGOING_PREFIX, false, defaultOutgoingChannel, outTriggers));
result.add(getChannelInfo(config, property, OUTGOING_PREFIX, false, defaultOutgoingChannel, outTriggers));
}
}
return result;
}

private static void addTrigger(String prefix, String property, Map<String, Collection<String>> triggers) {
private static void addTrigger(Config config, String prefix, String property, Map<String, Collection<String>> triggers) {
String channelName = config.getValue(property, String.class);
String triggerName = property.substring(prefix.length());
triggers.computeIfAbsent(channelName, ChannelMappingStrategy::initTriggers).add(triggerName);
@@ -89,15 +88,15 @@ private static Collection<String> initTriggers(String channelName) {
return result;
}

private static ChannelInfo getChannelInfo(String property, String prefix, boolean isInput, String defaultChannelName, Map<String, Collection<String>> triggers) {
private static ChannelInfo getChannelInfo(Config config, String property, String prefix, boolean isInput, String defaultChannelName, Map<String, Collection<String>> triggers) {
String name = property.substring(prefix.length(), property.lastIndexOf('.'));
return new ChannelInfo(name, triggers.getOrDefault(name, Collections.singleton(name)),
getClassName(config.getOptionalValue(getPropertyName(prefix, name, "value." + (isInput ? "deserializer" : "serializer")), String.class)), isInput,
name.equals(defaultChannelName), config.getOptionalValue((isInput ? UNMARSHALLLER_PREFIX : MARSHALLER_PREFIX) + name, String.class),
isInput ? Optional.empty() : onOverflowInfo(name));
isInput ? Optional.empty() : onOverflowInfo(config, name));
}

private static Optional<OnOverflowInfo> onOverflowInfo(String name) {
private static Optional<OnOverflowInfo> onOverflowInfo(Config config, String name) {
final String namePrefix = KOGITO_EMITTER_PREFIX + name + ".";
Optional<Strategy> strategy = config.getOptionalValue(namePrefix + OVERFLOW_STRATEGY_PROP, String.class).map(Strategy::valueOf);
Optional<Long> bufferSize = config.getOptionalValue(namePrefix + BUFFER_SIZE_PROP, Long.class);
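ChannelMappingStrategy no longer caches a Config in a static field; getChannelMapping() resolves it on each call and passes it to every helper, which keeps the channel mapping in step with configuration that may have changed between dev-mode reloads. A minimal sketch of the per-call lookup using only the MicroProfile Config API; the property name and default value are invented:

import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;

class ConfigLookupSketch {

    // The pattern removed by this commit: a Config captured once in static state
    // can go stale across live reloads.
    // private static final Config CONFIG = ConfigProvider.getConfig();

    static String channelFor(String triggerProperty) {
        Config config = ConfigProvider.getConfig();       // resolved per call
        return config.getOptionalValue(triggerProperty, String.class)
                .orElse("default-channel");               // invented default
    }
}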
@@ -15,19 +15,31 @@
*/
package org.kie.kogito.quarkus.common.deployment;

import java.util.Optional;

import org.jboss.jandex.IndexView;

import io.quarkus.builder.item.SimpleBuildItem;

public final class LiveReloadExecutionBuildItem extends SimpleBuildItem {

private final IndexView indexView;
private final ClassLoader classLoader;

public LiveReloadExecutionBuildItem(IndexView indexView) {
this(indexView, null);
}

public LiveReloadExecutionBuildItem(IndexView indexView, ClassLoader classLoader) {
this.indexView = indexView;
this.classLoader = classLoader;
}

public IndexView getIndexView() {
return indexView;
}

public Optional<ClassLoader> getClassLoader() {
return Optional.ofNullable(classLoader);
}
}
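The build item can now carry the class loader of the current live-reload cycle; consumers ask for it and fall back to the thread context class loader, as the AsyncAPIProcessor further down does. A minimal usage sketch against the API shown above:

import org.kie.kogito.quarkus.common.deployment.LiveReloadExecutionBuildItem;

class ReloadClassLoaderSketch {

    // Prefer the loader recorded for this reload cycle; otherwise use the thread
    // context class loader, mirroring the orElse() call in the processor below.
    static ClassLoader resolve(LiveReloadExecutionBuildItem item) {
        return item.getClassLoader().orElse(Thread.currentThread().getContextClassLoader());
    }
}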
@@ -17,7 +17,6 @@

import java.io.IOException;
import java.io.Reader;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
@@ -28,7 +27,6 @@

import org.drools.codegen.common.GeneratedFile;
import org.drools.codegen.common.GeneratedFileType;
import org.eclipse.microprofile.config.ConfigProvider;
import org.kie.kogito.codegen.api.context.KogitoBuildContext;
import org.kie.kogito.internal.SupportedExtensions;
import org.kie.kogito.serverless.workflow.io.URIContentLoaderFactory;
@@ -46,8 +44,6 @@
import io.serverlessworkflow.api.Workflow;
import io.serverlessworkflow.api.functions.FunctionDefinition;

import static org.kie.kogito.serverless.workflow.utils.ServerlessWorkflowUtils.FAIL_ON_ERROR_PROPERTY;

public class WorkflowCodeGenUtils {

private static final Logger logger = LoggerFactory.getLogger(WorkflowCodeGenUtils.class);
@@ -96,20 +92,16 @@ private static WorkflowOperationResource getResource(Workflow workflow, Function
}

private static Optional<Workflow> getWorkflow(Path path) {
return workflowCache.computeIfAbsent(path, p -> SupportedExtensions.getSWFExtensions()
.stream()
.filter(e -> p.getFileName().toString().endsWith(e))
.map(e -> {
try (Reader r = Files.newBufferedReader(p)) {
return Optional.of(ServerlessWorkflowUtils.getWorkflow(r, WorkflowFormat.fromFileName(p.getFileName())));
} catch (IOException ex) {
if (ConfigProvider.getConfig().getOptionalValue(FAIL_ON_ERROR_PROPERTY, Boolean.class).orElse(true)) {
throw new UncheckedIOException(ex);
} else {
logger.error("Error reading workflow file {}", p, ex);
return Optional.<Workflow> empty();
}
}
}).flatMap(Optional::stream).findFirst());
if (SupportedExtensions.getSWFExtensions().stream().anyMatch(ext -> path.toString().endsWith(ext))) {
return workflowCache.computeIfAbsent(path, p -> {
try (Reader r = Files.newBufferedReader(p)) {
return Optional.of(ServerlessWorkflowUtils.getWorkflow(r, WorkflowFormat.fromFileName(p.getFileName())));
} catch (IOException ex) {
logger.info("Error reading workflow file {}. Ignoring exception {}", p, ex);
return Optional.<Workflow> empty();
}
});
}
return Optional.empty();
}
}
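getWorkflow() now checks the file extension before touching the cache, so non-workflow resources never create cache entries, and an unreadable workflow is logged and skipped rather than failing the build through the removed FAIL_ON_ERROR_PROPERTY. A stand-alone sketch of the same filter-then-cache shape using only JDK types; the extension list and the use of raw file content instead of a parsed Workflow are simplifications:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

class FilterThenCacheSketch {

    static final List<String> EXTENSIONS = List.of(".sw.json", ".sw.yaml");   // invented list
    static final Map<Path, Optional<String>> CACHE = new ConcurrentHashMap<>();

    static Optional<String> load(Path path) {
        // Filter first: files that cannot be workflows never enter the cache.
        if (EXTENSIONS.stream().noneMatch(ext -> path.toString().endsWith(ext))) {
            return Optional.empty();
        }
        return CACHE.computeIfAbsent(path, p -> {
            try {
                return Optional.of(Files.readString(p)); // the real code parses a Workflow here
            } catch (IOException ex) {
                return Optional.empty();                 // the real code logs and ignores
            }
        });
    }
}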
@@ -15,21 +15,23 @@
*/
package org.kie.kogito.quarkus.serverless.workflow.asyncapi;

import java.util.List;
import java.util.stream.Collectors;
import java.util.ServiceLoader;

import org.kie.kogito.quarkus.common.deployment.KogitoBuildContextAttributeBuildItem;
import org.kie.kogito.quarkus.common.deployment.KogitoAddonsPreGeneratedSourcesBuildItem;
import org.kie.kogito.quarkus.common.deployment.KogitoBuildContextBuildItem;
import org.kie.kogito.quarkus.common.deployment.LiveReloadExecutionBuildItem;
import org.kie.kogito.serverless.workflow.parser.ParserContext;

import io.quarkiverse.asyncapi.config.AsyncAPISupplier;
import io.quarkiverse.asyncapi.config.MapAsyncAPIRegistry;
import io.quarkiverse.asyncapi.generator.AsyncAPIBuildItem;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;

public class AsyncAPIProcessor {

@BuildStep
KogitoBuildContextAttributeBuildItem asyncAPIContext(List<AsyncAPIBuildItem> asyncAPIBuildItem) {
return new KogitoBuildContextAttributeBuildItem(ParserContext.ASYNC_CONVERTER_KEY, new AsyncAPIInfoConverter(
new MapAsyncAPIRegistry(asyncAPIBuildItem.stream().map(AsyncAPIBuildItem::getAsyncAPI).collect(Collectors.toList()))));
void asyncAPIContext(LiveReloadExecutionBuildItem reload, KogitoBuildContextBuildItem context, BuildProducer<KogitoAddonsPreGeneratedSourcesBuildItem> sources) throws ClassNotFoundException {
context.getKogitoBuildContext().addContextAttribute(ParserContext.ASYNC_CONVERTER_KEY, new AsyncAPIInfoConverter(
new MapAsyncAPIRegistry(ServiceLoader.load(AsyncAPISupplier.class, reload.getClassLoader().orElse(Thread.currentThread().getContextClassLoader())))));
}
}
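The build step no longer consumes AsyncAPIBuildItem instances; it discovers AsyncAPISupplier implementations itself through ServiceLoader, using the class loader captured by LiveReloadExecutionBuildItem so suppliers generated in the latest reload cycle are visible. A minimal sketch of that lookup in isolation; ExampleService is a hypothetical stand-in for AsyncAPISupplier:

import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;

class ServiceLoaderReloadSketch {

    interface ExampleService { }   // hypothetical service interface

    // Load providers through the preferred loader (e.g. the live-reload class
    // loader); fall back to the thread context class loader when none is given.
    static List<ExampleService> discover(ClassLoader preferred) {
        ClassLoader loader = preferred != null ? preferred : Thread.currentThread().getContextClassLoader();
        List<ExampleService> found = new ArrayList<>();
        ServiceLoader.load(ExampleService.class, loader).forEach(found::add);
        return found;
    }
}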